Merge pull request #17933 from rjernst/camelcase4

Remove camelCase support
Ryan Ernst 2016-04-22 13:46:43 -07:00
commit d12a4bb51d
51 changed files with 148 additions and 380 deletions
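To make the user-visible effect concrete: after this change, field and parameter names are matched in their snake_case form only. A minimal sketch, not taken from the commit (the field name "title" is invented), of a mapping written the way the updated tests below expect:

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class SnakeCaseMappingExample {
    public static void main(String[] args) throws Exception {
        XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("properties")
                    .startObject("title")
                        .field("type", "text")
                        // camelCase spellings such as "termVector" are no longer accepted
                        .field("term_vector", "with_positions_offsets")
                    .endObject()
                .endObject()
            .endObject();
        System.out.println(mapping.string());
    }
}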

View File

@ -413,7 +413,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
if (simpleName.startsWith("Elasticsearch")) {
simpleName = simpleName.substring("Elasticsearch".length());
}
return Strings.toUnderscoreCase(simpleName);
// TODO: do we really need to make the exception name in underscore casing?
return toUnderscoreCase(simpleName);
}
@Override
@ -845,4 +846,39 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
interface FunctionThatThrowsIOException<T, R> {
R apply(T t) throws IOException;
}
// lower cases and adds underscores to transitions in a name
private static String toUnderscoreCase(String value) {
StringBuilder sb = new StringBuilder();
boolean changed = false;
for (int i = 0; i < value.length(); i++) {
char c = value.charAt(i);
if (Character.isUpperCase(c)) {
if (!changed) {
// copy it over here
for (int j = 0; j < i; j++) {
sb.append(value.charAt(j));
}
changed = true;
if (i == 0) {
sb.append(Character.toLowerCase(c));
} else {
sb.append('_');
sb.append(Character.toLowerCase(c));
}
} else {
sb.append('_');
sb.append(Character.toLowerCase(c));
}
} else {
if (changed) {
sb.append(c);
}
}
}
if (!changed) {
return value;
}
return sb.toString();
}
}
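For reference, the private helper copied in above turns a CamelCase class name into snake_case, so getExceptionName() reports, for example, IndexNotFoundException as "index_not_found_exception". A rough stand-alone regex equivalent is sketched below purely to make that output concrete; it is an approximation, not part of the commit:

import java.util.Locale;

public class UnderscoreCaseSketch {
    // Approximation of the private helper above: prefix every interior
    // upper-case letter with '_' and lower-case the result.
    static String toUnderscoreCase(String value) {
        return value.replaceAll("(?<=.)(\\p{Upper})", "_$1").toLowerCase(Locale.ROOT);
    }

    public static void main(String[] args) {
        System.out.println(toUnderscoreCase("IndexNotFoundException")); // index_not_found_exception
        System.out.println(toUnderscoreCase("parse_exception"));        // parse_exception (unchanged)
    }
}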

View File

@ -224,7 +224,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
boolean required = false;
Map<String, Object> routingNode = (Map<String, Object>) withoutType.get("_routing");
for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("required")) {
required = lenientNodeBooleanValue(fieldNode);
@ -241,7 +241,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
Boolean ignoreMissing = null;
Map<String, Object> timestampNode = (Map<String, Object>) withoutType.get("_timestamp");
for (Map.Entry<String, Object> entry : timestampNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
enabled = lenientNodeBooleanValue(fieldNode);

View File

@ -32,34 +32,28 @@ public class ParseField {
private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ParseField.class));
private final String camelCaseName;
private final String underscoreName;
private final String name;
private final String[] deprecatedNames;
private String allReplacedWith = null;
private final String[] allNames;
public ParseField(String value, String... deprecatedNames) {
camelCaseName = Strings.toCamelCase(value);
underscoreName = Strings.toUnderscoreCase(value);
public ParseField(String name, String... deprecatedNames) {
this.name = name;
if (deprecatedNames == null || deprecatedNames.length == 0) {
this.deprecatedNames = Strings.EMPTY_ARRAY;
} else {
final HashSet<String> set = new HashSet<>();
for (String depName : deprecatedNames) {
set.add(Strings.toCamelCase(depName));
set.add(Strings.toUnderscoreCase(depName));
}
Collections.addAll(set, deprecatedNames);
this.deprecatedNames = set.toArray(new String[set.size()]);
}
Set<String> allNames = new HashSet<>();
allNames.add(camelCaseName);
allNames.add(underscoreName);
allNames.add(name);
Collections.addAll(allNames, this.deprecatedNames);
this.allNames = allNames.toArray(new String[allNames.size()]);
}
public String getPreferredName(){
return underscoreName;
return name;
}
public String[] getAllNamesIncludedDeprecated() {
@ -67,7 +61,7 @@ public class ParseField {
}
public ParseField withDeprecation(String... deprecatedNames) {
return new ParseField(this.underscoreName, deprecatedNames);
return new ParseField(this.name, deprecatedNames);
}
/**
@ -80,13 +74,13 @@ public class ParseField {
}
boolean match(String currentFieldName, boolean strict) {
if (allReplacedWith == null && (currentFieldName.equals(camelCaseName) || currentFieldName.equals(underscoreName))) {
if (allReplacedWith == null && currentFieldName.equals(name)) {
return true;
}
String msg;
for (String depName : deprecatedNames) {
if (currentFieldName.equals(depName)) {
msg = "Deprecated field [" + currentFieldName + "] used, expected [" + underscoreName + "] instead";
msg = "Deprecated field [" + currentFieldName + "] used, expected [" + name + "] instead";
if (allReplacedWith != null) {
msg = "Deprecated field [" + currentFieldName + "] used, replaced by [" + allReplacedWith + "]";
}
@ -110,10 +104,6 @@ public class ParseField {
return allReplacedWith;
}
public String getCamelCaseName() {
return camelCaseName;
}
public String[] getDeprecatedNames() {
return deprecatedNames;
}
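A usage sketch of the simplified ParseField, mirroring the updated ParseFieldTests further down. The field name and its deprecated alias are hypothetical, and the class is placed in the same package only because match() is package-private:

package org.elasticsearch.common;

public class ParseFieldExample {
    public static void main(String[] args) {
        ParseField scoreMode = new ParseField("score_mode", "scoreMode");
        System.out.println(scoreMode.getPreferredName());          // score_mode
        System.out.println(scoreMode.match("score_mode", false));  // true -- only the exact name matches now
        System.out.println(scoreMode.match("scoreMode", false));   // true, but only because it was declared deprecated
        // scoreMode.match("scoreMode", true) throws IllegalArgumentException in strict mode
    }
}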

View File

@ -930,85 +930,6 @@ public class Strings {
}
}
public static String toCamelCase(String value) {
return toCamelCase(value, null);
}
public static String toCamelCase(String value, StringBuilder sb) {
boolean changed = false;
for (int i = 0; i < value.length(); i++) {
char c = value.charAt(i);
//e.g. _name stays as-is, _first_name becomes _firstName
if (c == '_' && i > 0) {
if (!changed) {
if (sb != null) {
sb.setLength(0);
} else {
sb = new StringBuilder();
}
// copy it over here
for (int j = 0; j < i; j++) {
sb.append(value.charAt(j));
}
changed = true;
}
if (i < value.length() - 1) {
sb.append(Character.toUpperCase(value.charAt(++i)));
}
} else {
if (changed) {
sb.append(c);
}
}
}
if (!changed) {
return value;
}
return sb.toString();
}
public static String toUnderscoreCase(String value) {
return toUnderscoreCase(value, null);
}
public static String toUnderscoreCase(String value, StringBuilder sb) {
boolean changed = false;
for (int i = 0; i < value.length(); i++) {
char c = value.charAt(i);
if (Character.isUpperCase(c)) {
if (!changed) {
if (sb != null) {
sb.setLength(0);
} else {
sb = new StringBuilder();
}
// copy it over here
for (int j = 0; j < i; j++) {
sb.append(value.charAt(j));
}
changed = true;
if (i == 0) {
sb.append(Character.toLowerCase(c));
} else {
sb.append('_');
sb.append(Character.toLowerCase(c));
}
} else {
sb.append('_');
sb.append(Character.toLowerCase(c));
}
} else {
if (changed) {
sb.append(c);
}
}
}
if (!changed) {
return value;
}
return sb.toString();
}
/**
* Determine whether the given array is empty:
* i.e. <code>null</code> or of zero length.

View File

@ -64,7 +64,6 @@ import java.util.regex.Pattern;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.Strings.toCamelCase;
import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue;
import static org.elasticsearch.common.unit.SizeValue.parseSizeValue;
import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
@ -77,23 +76,11 @@ public final class Settings implements ToXContent {
public static final Settings EMPTY = new Builder().build();
private static final Pattern ARRAY_PATTERN = Pattern.compile("(.*)\\.\\d+$");
private final Map<String, String> forcedUnderscoreSettings;
private SortedMap<String, String> settings;
Settings(Map<String, String> settings) {
// we use a sorted map for consistent serialization when using getAsMap()
this.settings = Collections.unmodifiableSortedMap(new TreeMap<>(settings));
Map<String, String> forcedUnderscoreSettings = null;
for (Map.Entry<String, String> entry : settings.entrySet()) {
String toUnderscoreCase = Strings.toUnderscoreCase(entry.getKey());
if (!toUnderscoreCase.equals(entry.getKey())) {
if (forcedUnderscoreSettings == null) {
forcedUnderscoreSettings = new HashMap<>();
}
forcedUnderscoreSettings.put(toUnderscoreCase, entry.getValue());
}
}
this.forcedUnderscoreSettings = forcedUnderscoreSettings == null ? emptyMap() : unmodifiableMap(forcedUnderscoreSettings);
}
/**
@ -240,11 +227,7 @@ public final class Settings implements ToXContent {
* @return The setting value, <tt>null</tt> if it does not exists.
*/
public String get(String setting) {
String retVal = settings.get(setting);
if (retVal != null) {
return retVal;
}
return forcedUnderscoreSettings.get(setting);
return settings.get(setting);
}
/**
@ -637,12 +620,7 @@ public final class Settings implements ToXContent {
* Returns a setting value based on the setting key.
*/
public String get(String key) {
String retVal = map.get(key);
if (retVal != null) {
return retVal;
}
// try camel case version
return map.get(toCamelCase(key));
return map.get(key);
}
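A small sketch of the resulting Settings behaviour, mirroring the removed SettingsTests.testCamelCaseSupport further down (the setting key is made up): lookups now match keys exactly, with no underscore fallback:

import org.elasticsearch.common.settings.Settings;

public class SettingsLookupExample {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
                .put("test.camelCase", "bar")
                .build();
        System.out.println(settings.get("test.camelCase"));  // bar
        System.out.println(settings.get("test.camel_case")); // null -- the underscore fallback is gone
    }
}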
/**

View File

@ -366,24 +366,16 @@ public final class AnalysisRegistry implements Closeable {
instance = defaultProvider.get(settings, environment, name, defaultSettings);
}
factories.put(name, instance);
String camelCase = Strings.toCamelCase(name);
if (providerMap.containsKey(camelCase) == false && factories.containsKey(camelCase) == false) {
factories.put(camelCase, instance);
}
}
for (Map.Entry<String, AnalysisModule.AnalysisProvider<T>> entry : defaultInstance.entrySet()) {
final String name = entry.getKey();
final AnalysisModule.AnalysisProvider<T> provider = entry.getValue();
final String camelCase = Strings.toCamelCase(name);
if (factories.containsKey(name) == false || (defaultInstance.containsKey(camelCase) == false && factories.containsKey(camelCase) == false)) {
if (factories.containsKey(name) == false) {
final T instance = provider.get(settings, environment, name, defaultSettings);
if (factories.containsKey(name) == false) {
factories.put(name, instance);
}
if ((defaultInstance.containsKey(camelCase) == false && factories.containsKey(camelCase) == false)) {
factories.put(camelCase, instance);
}
}
}
return factories;
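A hedged sketch of what this means when configuring analysis (the analyzer name is invented): built-in components are now looked up under their canonical snake_case names only, so the camelCase aliases the removed testCameCaseOverride further down relied on (e.g. "porterStem") are presumably no longer registered:

import org.elasticsearch.common.settings.Settings;

public class AnalysisNamingSketch {
    public static void main(String[] args) {
        Settings indexSettings = Settings.builder()
                .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
                // reference built-in filters by their snake_case names
                .putArray("index.analysis.analyzer.my_analyzer.filter", "lowercase", "porter_stem")
                .build();
        System.out.println(indexSettings.getAsMap());
    }
}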

View File

@ -113,7 +113,7 @@ public class DocumentMapperParser {
// parse DocumentMapper
while(iterator.hasNext()) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
MetadataFieldMapper.TypeParser typeParser = rootTypeParsers.get(fieldName);

View File

@ -130,7 +130,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
}
public TypeParser typeParser(String type) {
return typeParsers.apply(Strings.toUnderscoreCase(type));
return typeParsers.apply(type);
}
public Version indexVersionCreated() {

View File

@ -105,7 +105,7 @@ public class BooleanFieldMapper extends FieldMapper {
parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -120,7 +120,7 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -100,7 +100,7 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
parseNumberField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -154,7 +154,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
boolean configuredFormat = false;
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -101,7 +101,7 @@ public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper {
parseNumberField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -107,7 +107,7 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
parseNumberField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -107,7 +107,7 @@ public class LegacyLongFieldMapper extends LegacyNumberFieldMapper {
parseNumberField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -103,7 +103,7 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
parseNumberField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -98,7 +98,7 @@ public class LegacyTokenCountFieldMapper extends LegacyIntegerFieldMapper {
LegacyTokenCountFieldMapper.Builder builder = new LegacyTokenCountFieldMapper.Builder(name);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeIntegerValue(propNode));

View File

@ -269,7 +269,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
// the index property and still accepts no/not_analyzed/analyzed
final Object index = node.remove("index");
if (index != null) {
final String normalizedIndex = Strings.toUnderscoreCase(index.toString());
final String normalizedIndex = index.toString();
switch (normalizedIndex) {
case "analyzed":
builder.tokenized(true);
@ -304,7 +304,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
parseTextField(builder, fieldName, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -146,7 +146,7 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
parseTextField(builder, fieldName, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("position_increment_gap")) {
int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);

View File

@ -89,7 +89,7 @@ public class TokenCountFieldMapper extends FieldMapper {
TokenCountFieldMapper.Builder builder = new TokenCountFieldMapper.Builder(name);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeIntegerValue(propNode));
@ -113,7 +113,7 @@ public class TokenCountFieldMapper extends FieldMapper {
private NamedAnalyzer analyzer;
protected TokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
protected TokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
this.analyzer = analyzer;

View File

@ -86,7 +86,7 @@ public class TypeParsers {
parseField(builder, name, numberNode, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("precision_step")) {
builder.precisionStep(nodeIntegerValue(propNode));
@ -114,7 +114,7 @@ public class TypeParsers {
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());
final String propName = entry.getKey();
final Object propNode = entry.getValue();
if (propName.equals("term_vector")) {
parseTermVector(name, propNode.toString(), builder);
@ -188,7 +188,7 @@ public class TypeParsers {
final Map<String, Object> properties = nodeMapValue(propNode, "norms");
for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) {
Entry<String, Object> entry2 = propsIterator.next();
final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
final String propName2 = entry2.getKey();
final Object propNode2 = entry2.getValue();
if (propName2.equals("enabled")) {
builder.omitNorms(!lenientNodeBooleanValue(propNode2));
@ -222,7 +222,7 @@ public class TypeParsers {
parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());
final String propName = entry.getKey();
final Object propNode = entry.getValue();
if (parseNorms(builder, propName, propNode, parserContext)) {
iterator.remove();
@ -237,7 +237,7 @@ public class TypeParsers {
Version indexVersionCreated = parserContext.indexVersionCreated();
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());
final String propName = entry.getKey();
final Object propNode = entry.getValue();
if (propName.equals("store")) {
builder.store(parseStore(name, propNode.toString(), parserContext));
@ -362,7 +362,6 @@ public class TypeParsers {
}
public static void parseTermVector(String fieldName, String termVector, FieldMapper.Builder builder) throws MapperParsingException {
termVector = Strings.toUnderscoreCase(termVector);
if ("no".equals(termVector)) {
builder.storeTermVectors(false);
} else if ("yes".equals(termVector)) {

View File

@ -200,7 +200,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("lat_lon")) {
deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed "

View File

@ -128,7 +128,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals(Names.COERCE)) {
builder.coerce = XContentMapValues.lenientNodeBooleanValue(propNode);

View File

@ -163,7 +163,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
Builder builder = new Builder(name);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (Names.TREE.equals(fieldName)) {
builder.fieldType().setTree(fieldNode.toString());

View File

@ -161,7 +161,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
parseTextField(builder, builder.name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED :

View File

@ -57,7 +57,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
public static class Defaults {
public static final String NAME = FieldNamesFieldMapper.NAME;
public static final boolean ENABLED = true;
public static final MappedFieldType FIELD_TYPE = new FieldNamesFieldType();
@ -87,7 +87,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
enabled(index);
return super.index(index);
}
public Builder enabled(boolean enabled) {
this.enabled = enabled;
return this;
@ -110,7 +110,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(lenientNodeBooleanValue(fieldNode));
@ -286,12 +286,12 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
if (includeDefaults == false && fieldType().isEnabled() == Defaults.ENABLED) {
return builder;
}
builder.startObject(NAME);
if (includeDefaults || fieldType().isEnabled() != Defaults.ENABLED) {
builder.field("enabled", fieldType().isEnabled());
}
builder.endObject();
return builder;
}

View File

@ -120,7 +120,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
Builder builder = new Builder(parserContext.type());
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());

View File

@ -91,7 +91,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("required")) {
builder.required(lenientNodeBooleanValue(fieldNode));

View File

@ -115,7 +115,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(lenientNodeBooleanValue(fieldNode));

View File

@ -104,7 +104,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
Builder builder = new Builder();
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;

View File

@ -131,7 +131,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
Boolean ignoreMissing = null;
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;

View File

@ -144,7 +144,7 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
parseNumberField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {

View File

@ -85,7 +85,7 @@ public class DynamicTemplate implements ToXContent {
String matchPattern = MatchType.SIMPLE.toString();
for (Map.Entry<String, Object> entry : conf.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
String propName = entry.getKey();
if ("match".equals(propName)) {
match = entry.getValue().toString();
} else if ("path_match".equals(propName)) {

View File

@ -175,7 +175,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
parseNested(name, node, builder);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder)) {
iterator.remove();

View File

@ -138,7 +138,7 @@ public class RootObjectMapper extends ObjectMapper {
Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder)
|| processField(builder, fieldName, fieldNode, parserContext.indexVersionCreated())) {

View File

@ -28,56 +28,60 @@ import static org.hamcrest.collection.IsArrayContainingInAnyOrder.arrayContainin
public class ParseFieldTests extends ESTestCase {
public void testParse() {
String[] values = new String[]{"foo_bar", "fooBar"};
ParseField field = new ParseField(randomFrom(values));
String[] deprecated = new String[]{"barFoo", "bar_foo"};
ParseField withDeprecations = field.withDeprecation("Foobar", randomFrom(deprecated));
String name = "foo_bar";
ParseField field = new ParseField(name);
String[] deprecated = new String[]{"barFoo", "bar_foo", "Foobar"};
ParseField withDeprecations = field.withDeprecation(deprecated);
assertThat(field, not(sameInstance(withDeprecations)));
assertThat(field.match(randomFrom(values), false), is(true));
assertThat(field.match(name, false), is(true));
assertThat(field.match("foo bar", false), is(false));
assertThat(field.match(randomFrom(deprecated), false), is(false));
assertThat(field.match("barFoo", false), is(false));
for (String deprecatedName : deprecated) {
assertThat(field.match(deprecatedName, false), is(false));
}
assertThat(withDeprecations.match(randomFrom(values), false), is(true));
assertThat(withDeprecations.match(name, false), is(true));
assertThat(withDeprecations.match("foo bar", false), is(false));
assertThat(withDeprecations.match(randomFrom(deprecated), false), is(true));
assertThat(withDeprecations.match("barFoo", false), is(true));
for (String deprecatedName : deprecated) {
assertThat(withDeprecations.match(deprecatedName, false), is(true));
}
// now with strict mode
assertThat(field.match(randomFrom(values), true), is(true));
assertThat(field.match(name, true), is(true));
assertThat(field.match("foo bar", true), is(false));
assertThat(field.match(randomFrom(deprecated), true), is(false));
assertThat(field.match("barFoo", true), is(false));
for (String deprecatedName : deprecated) {
assertThat(field.match(deprecatedName, true), is(false));
}
assertThat(withDeprecations.match(randomFrom(values), true), is(true));
assertThat(withDeprecations.match(name, true), is(true));
assertThat(withDeprecations.match("foo bar", true), is(false));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> withDeprecations.match(randomFrom(deprecated), true));
assertThat(e.getMessage(), containsString("used, expected [foo_bar] instead"));
e = expectThrows(IllegalArgumentException.class, () -> withDeprecations.match("barFoo", true));
assertThat(e.getMessage(), containsString("Deprecated field [barFoo] used, expected [foo_bar] instead"));
for (String deprecatedName : deprecated) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
withDeprecations.match(deprecatedName, true);
});
assertThat(e.getMessage(), containsString("used, expected [foo_bar] instead"));
}
}
public void testAllDeprecated() {
String[] values = new String[]{"like_text", "likeText"};
String name = "like_text";
boolean withDeprecatedNames = randomBoolean();
String[] deprecated = new String[]{"text", "same_as_text"};
String[] allValues;
if (withDeprecatedNames) {
String[] newArray = new String[values.length + deprecated.length];
System.arraycopy(values, 0, newArray, 0, values.length);
System.arraycopy(deprecated, 0, newArray, values.length, deprecated.length);
String[] newArray = new String[1 + deprecated.length];
newArray[0] = name;
System.arraycopy(deprecated, 0, newArray, 1, deprecated.length);
allValues = newArray;
} else {
allValues = values;
allValues = new String[] {name};
}
ParseField field;
if (withDeprecatedNames) {
field = new ParseField(randomFrom(values)).withDeprecation(deprecated).withAllDeprecated("like");
field = new ParseField(name).withDeprecation(deprecated).withAllDeprecated("like");
} else {
field = new ParseField(randomFrom(values)).withAllDeprecated("like");
field = new ParseField(name).withAllDeprecated("like");
}
// strict mode off
@ -94,6 +98,6 @@ public class ParseFieldTests extends ESTestCase {
assertThat(parseField.getAllNamesIncludedDeprecated(), arrayContainingInAnyOrder("terms", "in"));
parseField = new ParseField("more_like_this", "mlt");
assertThat(parseField.getAllNamesIncludedDeprecated(), arrayContainingInAnyOrder("more_like_this", "moreLikeThis", "mlt"));
assertThat(parseField.getAllNamesIncludedDeprecated(), arrayContainingInAnyOrder("more_like_this", "mlt"));
}
}

View File

@ -28,16 +28,6 @@ import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
public class StringsTests extends ESTestCase {
public void testToCamelCase() {
assertEquals("foo", Strings.toCamelCase("foo"));
assertEquals("fooBar", Strings.toCamelCase("fooBar"));
assertEquals("FooBar", Strings.toCamelCase("FooBar"));
assertEquals("fooBar", Strings.toCamelCase("foo_bar"));
assertEquals("fooBarFooBar", Strings.toCamelCase("foo_bar_foo_bar"));
assertEquals("fooBar", Strings.toCamelCase("foo_bar_"));
assertEquals("_foo", Strings.toCamelCase("_foo"));
assertEquals("_fooBar", Strings.toCamelCase("_foo_bar_"));
}
public void testSubstring() {
assertEquals(null, Strings.substring(null, 0, 1000));

View File

@ -36,16 +36,7 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
*/
public class SettingsTests extends ESTestCase {
public void testCamelCaseSupport() {
Settings settings = Settings.builder()
.put("test.camelCase", "bar")
.build();
assertThat(settings.get("test.camelCase"), equalTo("bar"));
assertThat(settings.get("test.camel_case"), equalTo("bar"));
}
public void testLoadFromDelimitedString() {
Settings settings = Settings.builder()

View File

@ -42,7 +42,7 @@ public class ObjectParserTests extends ESTestCase {
"{\n"
+ " \"test\" : \"foo\",\n"
+ " \"test_number\" : 2,\n"
+ " \"testArray\": [1,2,3,4]\n"
+ " \"test_array\": [1,2,3,4]\n"
+ "}");
class TestStruct {
public String test;
@ -68,8 +68,6 @@ public class ObjectParserTests extends ESTestCase {
assertEquals(s.ints, Arrays.asList(1, 2, 3, 4));
assertEquals(objectParser.toString(), "ObjectParser{name='foo', fields=["
+ "FieldParser{preferred_name=test, supportedTokens=[VALUE_STRING], type=STRING}, "
+ "FieldParser{preferred_name=test_number, supportedTokens=[VALUE_STRING, VALUE_NUMBER], type=INT}, "
+ "FieldParser{preferred_name=test_array, supportedTokens=[START_ARRAY, VALUE_STRING, VALUE_NUMBER], type=INT_ARRAY}, "
+ "FieldParser{preferred_name=test_array, supportedTokens=[START_ARRAY, VALUE_STRING, VALUE_NUMBER], type=INT_ARRAY}, "
+ "FieldParser{preferred_name=test_number, supportedTokens=[VALUE_STRING, VALUE_NUMBER], type=INT}]}");
}

View File

@ -170,33 +170,6 @@ public class AnalysisServiceTests extends ESTestCase {
}
}
public void testCameCaseOverride() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.wordDelimiter.type", "word_delimiter")
.put("index.analysis.filter.wordDelimiter.split_on_numerics", false)
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter")
.put("index.analysis.analyzer.custom_analyzer_1.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
TokenFilterFactory word_delimiter = analysisService.tokenFilter("word_delimiter");
TokenFilterFactory override = analysisService.tokenFilter("wordDelimiter");
assertNotEquals(word_delimiter.name(), override.name());
assertNotSame(analysisService.tokenFilter("wordDelimiter"), analysisService.tokenFilter("word_delimiter"));
assertSame(analysisService.tokenFilter("porterStem"), analysisService.tokenFilter("porter_stem"));
//unconfigured
IndexSettings idxSettings1 = IndexSettingsModule.newIndexSettings("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
AnalysisService analysisService1 = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings1);
assertSame(analysisService1.tokenFilter("wordDelimiter"), analysisService1.tokenFilter("word_delimiter"));
assertSame(analysisService1.tokenFilter("porterStem"), analysisService1.tokenFilter("porter_stem"));
}
public void testBuiltInAnalyzersAreCached() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder()

View File

@ -158,27 +158,6 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
HasParentQueryBuilder queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY);
assertEquals("foo", queryBuilder.type());
boolean score = randomBoolean();
String key = RandomPicks.randomFrom(random(), Arrays.asList("score_mode", "scoreMode"));
builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
builder.startObject("has_parent");
builder.field("query");
new TermQueryBuilder("a", "a").toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.field(key, score ? "score": "none");
builder.field("parent_type", "foo");
builder.endObject();
builder.endObject();
try {
parseQuery(builder.string());
fail(key + " is deprecated");
} catch (IllegalArgumentException ex) {
assertEquals("Deprecated field [" + key + "] used, replaced by [score]", ex.getMessage());
}
queryBuilder = (HasParentQueryBuilder) parseQuery(builder.string(), ParseFieldMatcher.EMPTY);
assertEquals(score, queryBuilder.score());
}
public void testToQueryInnerQueryType() throws IOException {

View File

@ -387,9 +387,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
public void testLegacyFuzzyMatchQuery() throws IOException {
MatchQueryBuilder expectedQB = new MatchQueryBuilder("message", "to be or not to be");
String type = randomFrom("fuzzy_match", "match_fuzzy");
if (randomBoolean()) {
type = Strings.toCamelCase(type);
}
String json = "{\n" +
" \"" + type + "\" : {\n" +
" \"message\" : {\n" +

View File

@ -74,13 +74,12 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
}
public void testBlendedRewriteMethod() throws IOException {
for (String rewrite : Arrays.asList("top_terms_blended_freqs_10", "topTermsBlendedFreqs10")) {
Query parsedQuery = parseQuery(prefixQuery("field", "val").rewrite(rewrite).buildAsBytes()).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(PrefixQuery.class));
PrefixQuery prefixQuery = (PrefixQuery) parsedQuery;
assertThat(prefixQuery.getPrefix(), equalTo(new Term("field", "val")));
assertThat(prefixQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class));
}
String rewrite = "top_terms_blended_freqs_10";
Query parsedQuery = parseQuery(prefixQuery("field", "val").rewrite(rewrite).buildAsBytes()).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(PrefixQuery.class));
PrefixQuery prefixQuery = (PrefixQuery) parsedQuery;
assertThat(prefixQuery.getPrefix(), equalTo(new Term("field", "val")));
assertThat(prefixQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class));
}
public void testFromJson() throws IOException {
@ -94,4 +93,4 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
assertEquals(json, 2.0, parsed.boost(), 0.00001);
assertEquals(json, "user", parsed.fieldName());
}
}
}

View File

@ -76,16 +76,6 @@ public class ScriptParameterParserTests extends ESTestCase {
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
parser = XContentHelper.createParser(new BytesArray("{ \"scriptFile\" : \"scriptValue\" }"));
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
}
public void testTokenDefaultIndexed() throws IOException {
@ -98,16 +88,6 @@ public class ScriptParameterParserTests extends ESTestCase {
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED);
assertThat(paramParser.lang(), nullValue());
parser = XContentHelper.createParser(new BytesArray("{ \"scriptId\" : \"scriptValue\" }"));
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED);
assertThat(paramParser.lang(), nullValue());
}
public void testTokenDefaultNotFound() throws IOException {
@ -562,14 +542,6 @@ public class ScriptParameterParserTests extends ESTestCase {
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
config = new HashMap<>();
config.put("scriptFile", "scriptValue");
paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
}
public void testConfigDefaultIndexed() throws IOException {
@ -580,14 +552,6 @@ public class ScriptParameterParserTests extends ESTestCase {
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
config = new HashMap<>();
config.put("scriptId", "scriptValue");
paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
}
public void testConfigDefaultIndexedNoRemove() throws IOException {
@ -599,15 +563,6 @@ public class ScriptParameterParserTests extends ESTestCase {
assertThat(paramParser.lang(), nullValue());
assertThat(config.size(), equalTo(1));
assertThat((String) config.get("script_id"), equalTo("scriptValue"));
config = new HashMap<>();
config.put("scriptId", "scriptValue");
paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, false, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.size(), equalTo(1));
assertThat((String) config.get("scriptId"), equalTo("scriptValue"));
}
public void testConfigDefaultNotFound() throws IOException {

View File

@ -43,8 +43,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;
/**
*/
public class SearchModuleTests extends ModuleTestCase {
public void testDoubleRegister() {
@ -120,67 +118,38 @@ public class SearchModuleTests extends ModuleTestCase {
"bool",
"boosting",
"common",
"constantScore",
"constant_score",
"disMax",
"dis_max",
"exists",
"fieldMaskingSpan",
"field_masking_span",
"functionScore",
"function_score",
"fuzzy",
"geoBoundingBox",
"geoDistance",
"geoDistanceRange",
"geoPolygon",
"geoShape",
"geo_bounding_box",
"geo_distance",
"geo_distance_range",
"geo_polygon",
"geo_shape",
"geohashCell",
"geohash_cell",
"hasChild",
"hasParent",
"has_child",
"has_parent",
"ids",
"indices",
"match",
"matchAll",
"matchNone",
"matchPhrase",
"matchPhrasePrefix",
"match_all",
"match_none",
"match_phrase",
"match_phrase_prefix",
"moreLikeThis",
"more_like_this",
"multiMatch",
"multi_match",
"nested",
"parentId",
"parent_id",
"percolate",
"prefix",
"queryString",
"query_string",
"range",
"regexp",
"script",
"simpleQueryString",
"simple_query_string",
"spanContaining",
"spanFirst",
"spanMulti",
"spanNear",
"spanNot",
"spanOr",
"spanTerm",
"spanWithin",
"span_containing",
"span_first",
"span_multi",
@ -198,12 +167,9 @@ public class SearchModuleTests extends ModuleTestCase {
};
private static final String[] DEPRECATED_QUERIES = new String[] {
"fuzzyMatch",
"fuzzy_match",
"geoBbox",
"geo_bbox",
"in",
"matchFuzzy",
"match_fuzzy",
"mlt"
};

View File

@ -794,26 +794,26 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.startObject("properties")
.startObject("foo")
.field("type", "text")
.field("termVector", "with_positions_offsets")
.field("term_vector", "with_positions_offsets")
.field("store", true)
.field("analyzer", "english")
.startObject("fields")
.startObject("plain")
.field("type", "text")
.field("termVector", "with_positions_offsets")
.field("term_vector", "with_positions_offsets")
.field("analyzer", "standard")
.endObject()
.endObject()
.endObject()
.startObject("bar")
.field("type", "text")
.field("termVector", "with_positions_offsets")
.field("term_vector", "with_positions_offsets")
.field("store", true)
.field("analyzer", "english")
.startObject("fields")
.startObject("plain")
.field("type", "text")
.field("termVector", "with_positions_offsets")
.field("term_vector", "with_positions_offsets")
.field("analyzer", "standard")
.endObject()
.endObject()
@ -981,10 +981,10 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public XContentBuilder type1TermVectorMapping() throws IOException {
return XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject()
.startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
.startObject("properties")
.startObject("field1").field("type", "text").field("termVector", "with_positions_offsets").endObject()
.startObject("field2").field("type", "text").field("termVector", "with_positions_offsets").endObject()
.startObject("field1").field("type", "text").field("term_vector", "with_positions_offsets").endObject()
.startObject("field2").field("type", "text").field("term_vector", "with_positions_offsets").endObject()
.endObject()
.endObject().endObject();
}
@ -1365,7 +1365,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.putArray("index.analysis.filter.synonym.synonyms", "quick => fast");
assertAcked(prepareCreate("test").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())
.addMapping("type2", "_all", "store=true,termVector=with_positions_offsets",
.addMapping("type2", "_all", "store=true,term_vector=with_positions_offsets",
"field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym",
"field3", "type=text,analyzer=synonym"));
ensureGreen();

View File

@ -1615,7 +1615,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.put("index.analysis.analyzer.index.filter", "lowercase")
.put("index.analysis.analyzer.search.type", "custom")
.put("index.analysis.analyzer.search.tokenizer", "standard")
.putArray("index.analysis.analyzer.search.filter", "lowercase", "keyword_repeat", "porterStem", "unique_stem")
.putArray("index.analysis.analyzer.search.filter", "lowercase", "keyword_repeat", "porter_stem", "unique_stem")
.put("index.analysis.filter.unique_stem.type", "unique")
.put("index.analysis.filter.unique_stem.only_on_same_position", true));
assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));

View File

@ -465,7 +465,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
.put("index.analysis.filter.my_shingle.max_shingle_size", 2));
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject()
.startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
.startObject("properties")
.startObject("body").field("type", "text").field("analyzer", "body").endObject()
.startObject("body_reverse").field("type", "text").field("analyzer", "reverse").endObject()
@ -510,7 +510,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("_all")
.field("store", true)
.field("termVector", "with_positions_offsets")
.field("term_vector", "with_positions_offsets")
.endObject()
.startObject("properties")
.startObject("body").
@ -625,7 +625,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
return Files.readAllLines(PathUtils.get(Suggest.class.getResource("/config/names.txt").toURI()), StandardCharsets.UTF_8);
}
public void testSizePararm() throws IOException {
public void testSizeParam() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put("index.analysis.analyzer.reverse.tokenizer", "standard")
@ -644,7 +644,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
.startObject("type1")
.startObject("_all")
.field("store", true)
.field("termVector", "with_positions_offsets")
.field("term_vector", "with_positions_offsets")
.endObject()
.startObject("properties")
.startObject("body")
@ -712,7 +712,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject().startObject("type1")
.startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject()
.startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
.startObject("properties")
.startObject("body").field("type", "text").field("analyzer", "body").endObject()
.startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject()
@ -911,7 +911,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
.startObject("type1")
.startObject("_all")
.field("store", true)
.field("termVector", "with_positions_offsets")
.field("term_vector", "with_positions_offsets")
.endObject()
.startObject("properties")
.startObject("body")

View File

@ -86,9 +86,9 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
destParser.declareString(IndexRequest::index, new ParseField("index"));
destParser.declareString(IndexRequest::type, new ParseField("type"));
destParser.declareString(IndexRequest::routing, new ParseField("routing"));
destParser.declareString(IndexRequest::opType, new ParseField("opType"));
destParser.declareString(IndexRequest::opType, new ParseField("op_type"));
destParser.declareString(IndexRequest::setPipeline, new ParseField("pipeline"));
destParser.declareString((s, i) -> s.versionType(VersionType.fromString(i)), new ParseField("versionType"));
destParser.declareString((s, i) -> s.versionType(VersionType.fromString(i)), new ParseField("version_type"));
// These exist just so the user can get a nice validation error:
destParser.declareString(IndexRequest::timestamp, new ParseField("timestamp"));
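A sketch of a reindex request body against this branch (the index names are invented; only the "op_type" and "version_type" keys come from the parser declarations above):

public class ReindexBodyExample {
    public static void main(String[] args) {
        String body =
            "{\n" +
            "  \"source\": { \"index\": \"old_index\" },\n" +
            "  \"dest\": {\n" +
            "    \"index\": \"new_index\",\n" +
            "    \"op_type\": \"create\",\n" +
            "    \"version_type\": \"external\"\n" +
            "  }\n" +
            "}";
        System.out.println(body);
    }
}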

View File

@ -99,7 +99,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.indexVersionCreated());
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);

View File

@ -65,7 +65,7 @@ import static org.hamcrest.Matchers.greaterThan;
transportClientRatio = 0.0)
public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCase {
private String getRepositoryPath() {
String testName = "it-".concat(Strings.toUnderscoreCase(getTestName()).replaceAll("_", "-"));
String testName = "it-" + getTestName();
return testName.contains(" ") ? Strings.split(testName, " ")[0] : testName;
}