Remove mapping backward compatibility with pre-2.0.
This removes the backward compatibility layer with pre-2.0 indices, notably the extraction of _id, _routing or _timestamp from the source document when a path is defined.
This commit is contained in:
parent
e2fbdcfb4f
commit
af122f4151
|
@ -20,7 +20,6 @@
|
|||
package org.elasticsearch.action.index;
|
||||
|
||||
import org.elasticsearch.ElasticsearchGenerationException;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
|
@ -43,11 +42,9 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -605,41 +602,7 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
|
|||
mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER,
|
||||
getVersion(metaData, concreteIndex));
|
||||
}
|
||||
// extract values if needed
|
||||
if (mappingMd != null) {
|
||||
MappingMetaData.ParseContext parseContext = mappingMd.createParseContext(id, routing, timestamp);
|
||||
|
||||
if (parseContext.shouldParse()) {
|
||||
XContentParser parser = null;
|
||||
try {
|
||||
parser = XContentHelper.createParser(source);
|
||||
mappingMd.parse(parser, parseContext);
|
||||
if (parseContext.shouldParseId()) {
|
||||
id = parseContext.id();
|
||||
}
|
||||
if (parseContext.shouldParseRouting()) {
|
||||
if (routing != null && !routing.equals(parseContext.routing())) {
|
||||
throw new MapperParsingException("The provided routing value [" + routing + "] doesn't match the routing key stored in the document: [" + parseContext.routing() + "]");
|
||||
}
|
||||
routing = parseContext.routing();
|
||||
}
|
||||
if (parseContext.shouldParseTimestamp()) {
|
||||
timestamp = parseContext.timestamp();
|
||||
if (timestamp != null) {
|
||||
timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter(), getVersion(metaData, concreteIndex));
|
||||
}
|
||||
}
|
||||
} catch (MapperParsingException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
throw new ElasticsearchParseException("failed to parse doc to extract routing/timestamp/id", e);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// might as well check for routing here
|
||||
if (mappingMd.routing().required() && routing == null) {
|
||||
throw new RoutingMissingException(concreteIndex, type, id);
|
||||
|
|
|
@ -50,92 +50,20 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
|
||||
public static final MappingMetaData PROTO = new MappingMetaData();
|
||||
|
||||
public static class Id {
|
||||
|
||||
public static final Id EMPTY = new Id(null);
|
||||
|
||||
private final String path;
|
||||
|
||||
private final String[] pathElements;
|
||||
|
||||
public Id(String path) {
|
||||
this.path = path;
|
||||
if (path == null) {
|
||||
pathElements = Strings.EMPTY_ARRAY;
|
||||
} else {
|
||||
pathElements = Strings.delimitedListToStringArray(path, ".");
|
||||
}
|
||||
}
|
||||
|
||||
public boolean hasPath() {
|
||||
return path != null;
|
||||
}
|
||||
|
||||
public String path() {
|
||||
return this.path;
|
||||
}
|
||||
|
||||
public String[] pathElements() {
|
||||
return this.pathElements;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
Id id = (Id) o;
|
||||
|
||||
if (path != null ? !path.equals(id.path) : id.path != null) return false;
|
||||
if (!Arrays.equals(pathElements, id.pathElements)) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = path != null ? path.hashCode() : 0;
|
||||
result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
public static class Routing {
|
||||
|
||||
public static final Routing EMPTY = new Routing(false, null);
|
||||
public static final Routing EMPTY = new Routing(false);
|
||||
|
||||
private final boolean required;
|
||||
|
||||
private final String path;
|
||||
|
||||
private final String[] pathElements;
|
||||
|
||||
public Routing(boolean required, String path) {
|
||||
public Routing(boolean required) {
|
||||
this.required = required;
|
||||
this.path = path;
|
||||
if (path == null) {
|
||||
pathElements = Strings.EMPTY_ARRAY;
|
||||
} else {
|
||||
pathElements = Strings.delimitedListToStringArray(path, ".");
|
||||
}
|
||||
}
|
||||
|
||||
public boolean required() {
|
||||
return required;
|
||||
}
|
||||
|
||||
public boolean hasPath() {
|
||||
return path != null;
|
||||
}
|
||||
|
||||
public String path() {
|
||||
return this.path;
|
||||
}
|
||||
|
||||
public String[] pathElements() {
|
||||
return this.pathElements;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
|
@ -143,19 +71,12 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
|
||||
Routing routing = (Routing) o;
|
||||
|
||||
if (required != routing.required) return false;
|
||||
if (path != null ? !path.equals(routing.path) : routing.path != null) return false;
|
||||
if (!Arrays.equals(pathElements, routing.pathElements)) return false;
|
||||
|
||||
return true;
|
||||
return required == routing.required;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = (required ? 1 : 0);
|
||||
result = 31 * result + (path != null ? path.hashCode() : 0);
|
||||
result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
|
||||
return result;
|
||||
return getClass().hashCode() + (required ? 1 : 0);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -182,31 +103,21 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
}
|
||||
|
||||
|
||||
public static final Timestamp EMPTY = new Timestamp(false, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT,
|
||||
public static final Timestamp EMPTY = new Timestamp(false, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT,
|
||||
TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null);
|
||||
|
||||
private final boolean enabled;
|
||||
|
||||
private final String path;
|
||||
|
||||
private final String format;
|
||||
|
||||
private final String[] pathElements;
|
||||
|
||||
private final FormatDateTimeFormatter dateTimeFormatter;
|
||||
|
||||
private final String defaultTimestamp;
|
||||
|
||||
private final Boolean ignoreMissing;
|
||||
|
||||
public Timestamp(boolean enabled, String path, String format, String defaultTimestamp, Boolean ignoreMissing) {
|
||||
public Timestamp(boolean enabled, String format, String defaultTimestamp, Boolean ignoreMissing) {
|
||||
this.enabled = enabled;
|
||||
this.path = path;
|
||||
if (path == null) {
|
||||
pathElements = Strings.EMPTY_ARRAY;
|
||||
} else {
|
||||
pathElements = Strings.delimitedListToStringArray(path, ".");
|
||||
}
|
||||
this.format = format;
|
||||
this.dateTimeFormatter = Joda.forPattern(format);
|
||||
this.defaultTimestamp = defaultTimestamp;
|
||||
|
@ -217,18 +128,6 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
return enabled;
|
||||
}
|
||||
|
||||
public boolean hasPath() {
|
||||
return path != null;
|
||||
}
|
||||
|
||||
public String path() {
|
||||
return this.path;
|
||||
}
|
||||
|
||||
public String[] pathElements() {
|
||||
return this.pathElements;
|
||||
}
|
||||
|
||||
public String format() {
|
||||
return this.format;
|
||||
}
|
||||
|
@ -258,10 +157,8 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
|
||||
if (enabled != timestamp.enabled) return false;
|
||||
if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false;
|
||||
if (path != null ? !path.equals(timestamp.path) : timestamp.path != null) return false;
|
||||
if (defaultTimestamp != null ? !defaultTimestamp.equals(timestamp.defaultTimestamp) : timestamp.defaultTimestamp != null) return false;
|
||||
if (ignoreMissing != null ? !ignoreMissing.equals(timestamp.ignoreMissing) : timestamp.ignoreMissing != null) return false;
|
||||
if (!Arrays.equals(pathElements, timestamp.pathElements)) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -269,9 +166,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
@Override
|
||||
public int hashCode() {
|
||||
int result = (enabled ? 1 : 0);
|
||||
result = 31 * result + (path != null ? path.hashCode() : 0);
|
||||
result = 31 * result + (format != null ? format.hashCode() : 0);
|
||||
result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
|
||||
result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0);
|
||||
result = 31 * result + (defaultTimestamp != null ? defaultTimestamp.hashCode() : 0);
|
||||
result = 31 * result + (ignoreMissing != null ? ignoreMissing.hashCode() : 0);
|
||||
|
@ -283,7 +178,6 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
|
||||
private final CompressedXContent source;
|
||||
|
||||
private Id id;
|
||||
private Routing routing;
|
||||
private Timestamp timestamp;
|
||||
private boolean hasParentField;
|
||||
|
@ -291,9 +185,8 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
public MappingMetaData(DocumentMapper docMapper) {
|
||||
this.type = docMapper.type();
|
||||
this.source = docMapper.mappingSource();
|
||||
this.id = new Id(docMapper.idFieldMapper().path());
|
||||
this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path());
|
||||
this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(),
|
||||
this.routing = new Routing(docMapper.routingFieldMapper().required());
|
||||
this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(),
|
||||
docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(),
|
||||
docMapper.timestampFieldMapper().ignoreMissing());
|
||||
this.hasParentField = docMapper.parentFieldMapper().active();
|
||||
|
@ -337,40 +230,22 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
}
|
||||
|
||||
private void initMappers(Map<String, Object> withoutType) {
|
||||
if (withoutType.containsKey("_id")) {
|
||||
String path = null;
|
||||
Map<String, Object> routingNode = (Map<String, Object>) withoutType.get("_id");
|
||||
for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
|
||||
String fieldName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object fieldNode = entry.getValue();
|
||||
if (fieldName.equals("path")) {
|
||||
path = fieldNode.toString();
|
||||
}
|
||||
}
|
||||
this.id = new Id(path);
|
||||
} else {
|
||||
this.id = Id.EMPTY;
|
||||
}
|
||||
if (withoutType.containsKey("_routing")) {
|
||||
boolean required = false;
|
||||
String path = null;
|
||||
Map<String, Object> routingNode = (Map<String, Object>) withoutType.get("_routing");
|
||||
for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
|
||||
String fieldName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object fieldNode = entry.getValue();
|
||||
if (fieldName.equals("required")) {
|
||||
required = nodeBooleanValue(fieldNode);
|
||||
} else if (fieldName.equals("path")) {
|
||||
path = fieldNode.toString();
|
||||
}
|
||||
}
|
||||
this.routing = new Routing(required, path);
|
||||
this.routing = new Routing(required);
|
||||
} else {
|
||||
this.routing = Routing.EMPTY;
|
||||
}
|
||||
if (withoutType.containsKey("_timestamp")) {
|
||||
boolean enabled = false;
|
||||
String path = null;
|
||||
String format = TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT;
|
||||
String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP;
|
||||
Boolean ignoreMissing = null;
|
||||
|
@ -380,8 +255,6 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
Object fieldNode = entry.getValue();
|
||||
if (fieldName.equals("enabled")) {
|
||||
enabled = nodeBooleanValue(fieldNode);
|
||||
} else if (fieldName.equals("path")) {
|
||||
path = fieldNode.toString();
|
||||
} else if (fieldName.equals("format")) {
|
||||
format = fieldNode.toString();
|
||||
} else if (fieldName.equals("default") && fieldNode != null) {
|
||||
|
@ -390,7 +263,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
ignoreMissing = nodeBooleanValue(fieldNode);
|
||||
}
|
||||
}
|
||||
this.timestamp = new Timestamp(enabled, path, format, defaultTimestamp, ignoreMissing);
|
||||
this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing);
|
||||
} else {
|
||||
this.timestamp = Timestamp.EMPTY;
|
||||
}
|
||||
|
@ -401,19 +274,15 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
}
|
||||
}
|
||||
|
||||
public MappingMetaData(String type, CompressedXContent source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
|
||||
public MappingMetaData(String type, CompressedXContent source, Routing routing, Timestamp timestamp, boolean hasParentField) {
|
||||
this.type = type;
|
||||
this.source = source;
|
||||
this.id = id;
|
||||
this.routing = routing;
|
||||
this.timestamp = timestamp;
|
||||
this.hasParentField = hasParentField;
|
||||
}
|
||||
|
||||
void updateDefaultMapping(MappingMetaData defaultMapping) {
|
||||
if (id == Id.EMPTY) {
|
||||
id = defaultMapping.id();
|
||||
}
|
||||
if (routing == Routing.EMPTY) {
|
||||
routing = defaultMapping.routing();
|
||||
}
|
||||
|
@ -453,10 +322,6 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
return sourceAsMap();
|
||||
}
|
||||
|
||||
public Id id() {
|
||||
return this.id;
|
||||
}
|
||||
|
||||
public Routing routing() {
|
||||
return this.routing;
|
||||
}
|
||||
|
@ -465,114 +330,14 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
return this.timestamp;
|
||||
}
|
||||
|
||||
public ParseContext createParseContext(@Nullable String id, @Nullable String routing, @Nullable String timestamp) {
|
||||
// We parse the routing even if there is already a routing key in the request in order to make sure that
|
||||
// they are the same
|
||||
return new ParseContext(
|
||||
id == null && id().hasPath(),
|
||||
routing().hasPath(),
|
||||
timestamp == null && timestamp().hasPath()
|
||||
);
|
||||
}
|
||||
|
||||
public void parse(XContentParser parser, ParseContext parseContext) throws IOException {
|
||||
innerParse(parser, parseContext);
|
||||
}
|
||||
|
||||
private void innerParse(XContentParser parser, ParseContext context) throws IOException {
|
||||
if (!context.parsingStillNeeded()) {
|
||||
return;
|
||||
}
|
||||
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
if (token == null) {
|
||||
token = parser.nextToken();
|
||||
}
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
token = parser.nextToken();
|
||||
}
|
||||
String idPart = context.idParsingStillNeeded() ? id().pathElements()[context.locationId] : null;
|
||||
String routingPart = context.routingParsingStillNeeded() ? routing().pathElements()[context.locationRouting] : null;
|
||||
String timestampPart = context.timestampParsingStillNeeded() ? timestamp().pathElements()[context.locationTimestamp] : null;
|
||||
|
||||
for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) {
|
||||
// Must point to field name
|
||||
String fieldName = parser.currentName();
|
||||
// And then the value...
|
||||
token = parser.nextToken();
|
||||
boolean incLocationId = false;
|
||||
boolean incLocationRouting = false;
|
||||
boolean incLocationTimestamp = false;
|
||||
if (context.idParsingStillNeeded() && fieldName.equals(idPart)) {
|
||||
if (context.locationId + 1 == id.pathElements().length) {
|
||||
if (!token.isValue()) {
|
||||
throw new MapperParsingException("id field must be a value but was either an object or an array");
|
||||
}
|
||||
context.id = parser.textOrNull();
|
||||
context.idResolved = true;
|
||||
} else {
|
||||
incLocationId = true;
|
||||
}
|
||||
}
|
||||
if (context.routingParsingStillNeeded() && fieldName.equals(routingPart)) {
|
||||
if (context.locationRouting + 1 == routing.pathElements().length) {
|
||||
context.routing = parser.textOrNull();
|
||||
context.routingResolved = true;
|
||||
} else {
|
||||
incLocationRouting = true;
|
||||
}
|
||||
}
|
||||
if (context.timestampParsingStillNeeded() && fieldName.equals(timestampPart)) {
|
||||
if (context.locationTimestamp + 1 == timestamp.pathElements().length) {
|
||||
context.timestamp = parser.textOrNull();
|
||||
context.timestampResolved = true;
|
||||
} else {
|
||||
incLocationTimestamp = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (incLocationId || incLocationRouting || incLocationTimestamp) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
context.locationId += incLocationId ? 1 : 0;
|
||||
context.locationRouting += incLocationRouting ? 1 : 0;
|
||||
context.locationTimestamp += incLocationTimestamp ? 1 : 0;
|
||||
innerParse(parser, context);
|
||||
context.locationId -= incLocationId ? 1 : 0;
|
||||
context.locationRouting -= incLocationRouting ? 1 : 0;
|
||||
context.locationTimestamp -= incLocationTimestamp ? 1 : 0;
|
||||
}
|
||||
} else {
|
||||
parser.skipChildren();
|
||||
}
|
||||
|
||||
if (!context.parsingStillNeeded()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(type());
|
||||
source().writeTo(out);
|
||||
// id
|
||||
if (id().hasPath()) {
|
||||
out.writeBoolean(true);
|
||||
out.writeString(id().path());
|
||||
} else {
|
||||
out.writeBoolean(false);
|
||||
}
|
||||
// routing
|
||||
out.writeBoolean(routing().required());
|
||||
if (routing().hasPath()) {
|
||||
out.writeBoolean(true);
|
||||
out.writeString(routing().path());
|
||||
} else {
|
||||
out.writeBoolean(false);
|
||||
}
|
||||
// timestamp
|
||||
out.writeBoolean(timestamp().enabled());
|
||||
out.writeOptionalString(timestamp().path());
|
||||
out.writeString(timestamp().format());
|
||||
out.writeOptionalString(timestamp().defaultTimestamp());
|
||||
out.writeOptionalBoolean(timestamp().ignoreMissing());
|
||||
|
@ -586,7 +351,6 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
|
||||
MappingMetaData that = (MappingMetaData) o;
|
||||
|
||||
if (!id.equals(that.id)) return false;
|
||||
if (!routing.equals(that.routing)) return false;
|
||||
if (!source.equals(that.source)) return false;
|
||||
if (!timestamp.equals(that.timestamp)) return false;
|
||||
|
@ -599,7 +363,6 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
public int hashCode() {
|
||||
int result = type.hashCode();
|
||||
result = 31 * result + source.hashCode();
|
||||
result = 31 * result + id.hashCode();
|
||||
result = 31 * result + routing.hashCode();
|
||||
result = 31 * result + timestamp.hashCode();
|
||||
return result;
|
||||
|
@ -608,142 +371,20 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
public MappingMetaData readFrom(StreamInput in) throws IOException {
|
||||
String type = in.readString();
|
||||
CompressedXContent source = CompressedXContent.readCompressedString(in);
|
||||
// id
|
||||
Id id = new Id(in.readBoolean() ? in.readString() : null);
|
||||
// routing
|
||||
Routing routing = new Routing(in.readBoolean(), in.readBoolean() ? in.readString() : null);
|
||||
Routing routing = new Routing(in.readBoolean());
|
||||
// timestamp
|
||||
|
||||
boolean enabled = in.readBoolean();
|
||||
String path = in.readOptionalString();
|
||||
String format = in.readString();
|
||||
String defaultTimestamp = in.readOptionalString();
|
||||
Boolean ignoreMissing = null;
|
||||
|
||||
ignoreMissing = in.readOptionalBoolean();
|
||||
|
||||
final Timestamp timestamp = new Timestamp(enabled, path, format, defaultTimestamp, ignoreMissing);
|
||||
final Timestamp timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing);
|
||||
final boolean hasParentField = in.readBoolean();
|
||||
return new MappingMetaData(type, source, id, routing, timestamp, hasParentField);
|
||||
return new MappingMetaData(type, source, routing, timestamp, hasParentField);
|
||||
}
|
||||
|
||||
public static class ParseContext {
|
||||
final boolean shouldParseId;
|
||||
final boolean shouldParseRouting;
|
||||
final boolean shouldParseTimestamp;
|
||||
|
||||
int locationId = 0;
|
||||
int locationRouting = 0;
|
||||
int locationTimestamp = 0;
|
||||
boolean idResolved;
|
||||
boolean routingResolved;
|
||||
boolean timestampResolved;
|
||||
String id;
|
||||
String routing;
|
||||
String timestamp;
|
||||
|
||||
public ParseContext(boolean shouldParseId, boolean shouldParseRouting, boolean shouldParseTimestamp) {
|
||||
this.shouldParseId = shouldParseId;
|
||||
this.shouldParseRouting = shouldParseRouting;
|
||||
this.shouldParseTimestamp = shouldParseTimestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* The id value parsed, <tt>null</tt> if does not require parsing, or not resolved.
|
||||
*/
|
||||
public String id() {
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does id parsing really needed at all?
|
||||
*/
|
||||
public boolean shouldParseId() {
|
||||
return shouldParseId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Has id been resolved during the parsing phase.
|
||||
*/
|
||||
public boolean idResolved() {
|
||||
return idResolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is id parsing still needed?
|
||||
*/
|
||||
public boolean idParsingStillNeeded() {
|
||||
return shouldParseId && !idResolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* The routing value parsed, <tt>null</tt> if does not require parsing, or not resolved.
|
||||
*/
|
||||
public String routing() {
|
||||
return routing;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does routing parsing really needed at all?
|
||||
*/
|
||||
public boolean shouldParseRouting() {
|
||||
return shouldParseRouting;
|
||||
}
|
||||
|
||||
/**
|
||||
* Has routing been resolved during the parsing phase.
|
||||
*/
|
||||
public boolean routingResolved() {
|
||||
return routingResolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is routing parsing still needed?
|
||||
*/
|
||||
public boolean routingParsingStillNeeded() {
|
||||
return shouldParseRouting && !routingResolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* The timestamp value parsed, <tt>null</tt> if does not require parsing, or not resolved.
|
||||
*/
|
||||
public String timestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does timestamp parsing really needed at all?
|
||||
*/
|
||||
public boolean shouldParseTimestamp() {
|
||||
return shouldParseTimestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Has timestamp been resolved during the parsing phase.
|
||||
*/
|
||||
public boolean timestampResolved() {
|
||||
return timestampResolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is timestamp parsing still needed?
|
||||
*/
|
||||
public boolean timestampParsingStillNeeded() {
|
||||
return shouldParseTimestamp && !timestampResolved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Do we really need parsing?
|
||||
*/
|
||||
public boolean shouldParse() {
|
||||
return shouldParseId || shouldParseRouting || shouldParseTimestamp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is parsing still needed?
|
||||
*/
|
||||
public boolean parsingStillNeeded() {
|
||||
return idParsingStillNeeded() || routingParsingStillNeeded() || timestampParsingStillNeeded();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -78,7 +78,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
|
|||
* and 100 afterwards so we override the positionIncrementGap if it
|
||||
* doesn't match here.
|
||||
*/
|
||||
int overridePositionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(indexSettings.getIndexVersionCreated());
|
||||
int overridePositionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP;
|
||||
if (analyzerFactory instanceof CustomAnalyzerProvider) {
|
||||
((CustomAnalyzerProvider) analyzerFactory).build(this);
|
||||
/*
|
||||
|
|
|
@ -74,7 +74,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
|
|||
tokenFilters.add(tokenFilter);
|
||||
}
|
||||
|
||||
int positionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(indexSettings.getIndexVersionCreated());
|
||||
int positionIncrementGap = StringFieldMapper.Defaults.POSITION_INCREMENT_GAP;
|
||||
|
||||
if (analyzerSettings.getAsMap().containsKey("position_offset_gap")){
|
||||
if (indexSettings.getIndexVersionCreated().before(Version.V_2_0_0)){
|
||||
|
|
|
@ -143,11 +143,7 @@ public class DocumentMapperParser {
|
|||
|
||||
public static void checkNoRemainingFields(Map<String, Object> fieldNodeMap, Version indexVersionCreated, String message) {
|
||||
if (!fieldNodeMap.isEmpty()) {
|
||||
if (indexVersionCreated.onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
throw new MapperParsingException(message + getRemainingFields(fieldNodeMap));
|
||||
} else {
|
||||
logger.debug(message + "{}", getRemainingFields(fieldNodeMap));
|
||||
}
|
||||
throw new MapperParsingException(message + getRemainingFields(fieldNodeMap));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,15 +23,12 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.util.CloseableThreadLocal;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
|
||||
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
|
||||
|
@ -123,8 +120,7 @@ class DocumentParser implements Closeable {
|
|||
|
||||
// try to parse the next token, this should be null if the object is ended properly
|
||||
// but will throw a JSON exception if the extra tokens is not valid JSON (this will be handled by the catch)
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)
|
||||
&& source.parser() == null && parser != null) {
|
||||
if (source.parser() == null && parser != null) {
|
||||
// only check for end of tokens if we created the parser here
|
||||
token = parser.nextToken();
|
||||
if (token != null) {
|
||||
|
@ -191,8 +187,7 @@ class DocumentParser implements Closeable {
|
|||
XContentParser parser = context.parser();
|
||||
|
||||
String currentFieldName = parser.currentName();
|
||||
if (atRoot && MapperService.isMetadataField(currentFieldName) &&
|
||||
Version.indexCreated(context.indexSettings()).onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
if (atRoot && MapperService.isMetadataField(currentFieldName)) {
|
||||
throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside a document. Use the index API request parameters.");
|
||||
}
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
|
|
|
@ -24,7 +24,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
|
@ -231,10 +230,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
|
||||
fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings));
|
||||
}
|
||||
boolean defaultDocValues = false; // pre 2.0
|
||||
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
|
||||
}
|
||||
boolean defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
|
||||
// backcompat for "fielddata: format: docvalues" for now...
|
||||
boolean fieldDataDocValues = fieldType.fieldDataType() != null
|
||||
&& FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldType.fieldDataType().getFormat(context.indexSettings()));
|
||||
|
@ -253,12 +249,10 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
protected final MappedFieldType defaultFieldType;
|
||||
protected MultiFields multiFields;
|
||||
protected CopyTo copyTo;
|
||||
protected final boolean indexCreatedBefore2x;
|
||||
|
||||
protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(simpleName);
|
||||
assert indexSettings != null;
|
||||
this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
|
||||
fieldType.freeze();
|
||||
this.fieldType = fieldType;
|
||||
defaultFieldType.freeze();
|
||||
|
|
|
@ -229,7 +229,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
if (mapper.type().length() == 0) {
|
||||
throw new InvalidTypeNameException("mapping type name is empty");
|
||||
}
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1) && mapper.type().length() > 255) {
|
||||
if (mapper.type().length() > 255) {
|
||||
throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]");
|
||||
}
|
||||
if (mapper.type().charAt(0) == '_') {
|
||||
|
@ -245,11 +245,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
throw new IllegalArgumentException("The [_parent.type] option can't point to the same type");
|
||||
}
|
||||
if (typeNameStartsWithIllegalDot(mapper)) {
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'");
|
||||
} else {
|
||||
logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type());
|
||||
}
|
||||
throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'");
|
||||
}
|
||||
|
||||
// 1. compute the merged DocumentMapper
|
||||
|
@ -362,16 +358,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
}
|
||||
}
|
||||
|
||||
if (indexSettings.getIndexVersionCreated().before(Version.V_3_0_0)) {
|
||||
// Before 3.0 some metadata mappers are also registered under the root object mapper
|
||||
// So we avoid false positives by deduplicating mappers
|
||||
// given that we check exact equality, this would still catch the case that a mapper
|
||||
// is defined under the root object
|
||||
Collection<FieldMapper> uniqueFieldMappers = Collections.newSetFromMap(new IdentityHashMap<>());
|
||||
uniqueFieldMappers.addAll(fieldMappers);
|
||||
fieldMappers = uniqueFieldMappers;
|
||||
}
|
||||
|
||||
final Set<String> fieldNames = new HashSet<>();
|
||||
for (FieldMapper fieldMapper : fieldMappers) {
|
||||
final String name = fieldMapper.name();
|
||||
|
|
|
@ -27,12 +27,9 @@ import org.elasticsearch.index.mapper.object.RootObjectMapper;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.unmodifiableMap;
|
||||
|
@ -43,10 +40,6 @@ import static java.util.Collections.unmodifiableMap;
|
|||
*/
|
||||
public final class Mapping implements ToXContent {
|
||||
|
||||
// Set of fields that were included into the root object mapper before 2.0
|
||||
public static final Set<String> LEGACY_INCLUDE_IN_OBJECT = Collections.unmodifiableSet(new HashSet<>(
|
||||
Arrays.asList("_all", "_id", "_parent", "_routing", "_timestamp", "_ttl")));
|
||||
|
||||
final Version indexCreated;
|
||||
final RootObjectMapper root;
|
||||
final MetadataFieldMapper[] metadataMappers;
|
||||
|
@ -58,9 +51,6 @@ public final class Mapping implements ToXContent {
|
|||
this.metadataMappers = metadataMappers;
|
||||
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
|
||||
for (MetadataFieldMapper metadataMapper : metadataMappers) {
|
||||
if (indexCreated.before(Version.V_2_0_0_beta1) && LEGACY_INCLUDE_IN_OBJECT.contains(metadataMapper.name())) {
|
||||
rootObjectMapper = rootObjectMapper.copyAndPutMapper(metadataMapper);
|
||||
}
|
||||
metadataMappersMap.put(metadataMapper.getClass(), metadataMapper);
|
||||
}
|
||||
this.root = rootObjectMapper;
|
||||
|
|
|
@ -26,9 +26,7 @@ import org.apache.lucene.store.ByteArrayDataOutput;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Base64;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -41,7 +39,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
|
|||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
|
@ -54,9 +51,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
|||
public class BinaryFieldMapper extends FieldMapper {
|
||||
|
||||
public static final String CONTENT_TYPE = "binary";
|
||||
private static final ParseField COMPRESS = new ParseField("compress").withAllDeprecated("no replacement, implemented at the codec level");
|
||||
private static final ParseField COMPRESS_THRESHOLD = new ParseField("compress_threshold").withAllDeprecated("no replacement");
|
||||
|
||||
|
||||
public static class Defaults {
|
||||
public static final MappedFieldType FIELD_TYPE = new BinaryFieldType();
|
||||
|
@ -87,14 +81,6 @@ public class BinaryFieldMapper extends FieldMapper {
|
|||
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
BinaryFieldMapper.Builder builder = binaryField(name);
|
||||
parseField(builder, name, node, parserContext);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String fieldName = entry.getKey();
|
||||
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1) &&
|
||||
(parserContext.parseFieldMatcher().match(fieldName, COMPRESS) || parserContext.parseFieldMatcher().match(fieldName, COMPRESS_THRESHOLD))) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -73,7 +73,6 @@ public class DateFieldMapper extends NumberFieldMapper {
|
|||
|
||||
public static class Defaults extends NumberFieldMapper.Defaults {
|
||||
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis", Locale.ROOT);
|
||||
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("date_optional_time", Locale.ROOT);
|
||||
public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS;
|
||||
public static final DateFieldType FIELD_TYPE = new DateFieldType();
|
||||
|
||||
|
@ -128,12 +127,6 @@ public class DateFieldMapper extends NumberFieldMapper {
|
|||
|
||||
@Override
|
||||
protected void setupFieldType(BuilderContext context) {
|
||||
if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0_beta1) &&
|
||||
!fieldType().dateTimeFormatter().format().contains("epoch_")) {
|
||||
String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ? "epoch_second" : "epoch_millis";
|
||||
fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + fieldType().dateTimeFormatter().format()));
|
||||
}
|
||||
|
||||
FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
|
||||
if (!locale.equals(dateTimeFormatter.locale())) {
|
||||
fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale));
|
||||
|
@ -186,11 +179,7 @@ public class DateFieldMapper extends NumberFieldMapper {
|
|||
}
|
||||
}
|
||||
if (!configuredFormat) {
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER);
|
||||
} else {
|
||||
builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER_BEFORE_2_0);
|
||||
}
|
||||
builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -69,19 +69,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
* values.
|
||||
*/
|
||||
public static final int POSITION_INCREMENT_GAP = 100;
|
||||
public static final int POSITION_INCREMENT_GAP_PRE_2_0 = 0;
|
||||
|
||||
public static final int IGNORE_ABOVE = -1;
|
||||
|
||||
/**
|
||||
* The default position_increment_gap for a particular version of Elasticsearch.
|
||||
*/
|
||||
public static int positionIncrementGap(Version version) {
|
||||
if (version.before(Version.V_2_0_0_beta1)) {
|
||||
return POSITION_INCREMENT_GAP_PRE_2_0;
|
||||
}
|
||||
return POSITION_INCREMENT_GAP;
|
||||
}
|
||||
}
|
||||
|
||||
public static class Builder extends FieldMapper.Builder<Builder, StringFieldMapper> {
|
||||
|
@ -175,8 +164,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
}
|
||||
builder.searchQuotedAnalyzer(analyzer);
|
||||
iterator.remove();
|
||||
} else if (propName.equals("position_increment_gap") ||
|
||||
parserContext.indexVersionCreated().before(Version.V_2_0_0) && propName.equals("position_offset_gap")) {
|
||||
} else if (propName.equals("position_increment_gap")) {
|
||||
int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
|
||||
if (newPositionIncrementGap < 0) {
|
||||
throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed.");
|
||||
|
|
|
@ -193,9 +193,7 @@ public class TypeParsers {
|
|||
} else if (propName.equals("store_term_vector_payloads")) {
|
||||
builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
|
||||
propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
|
||||
} else if (propName.equals("analyzer")) {
|
||||
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
|
||||
if (analyzer == null) {
|
||||
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
|
||||
|
@ -272,26 +270,12 @@ public class TypeParsers {
|
|||
}
|
||||
DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated());
|
||||
iterator.remove();
|
||||
} else if (propName.equals("omit_term_freq_and_positions")) {
|
||||
final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
|
||||
if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) {
|
||||
throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead");
|
||||
}
|
||||
// deprecated option for BW compat
|
||||
builder.indexOptions(op);
|
||||
iterator.remove();
|
||||
} else if (propName.equals("index_options")) {
|
||||
builder.indexOptions(nodeIndexOptionValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("include_in_all")) {
|
||||
builder.includeInAll(nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("postings_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
|
||||
// ignore for old indexes
|
||||
iterator.remove();
|
||||
} else if (propName.equals("doc_values_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
|
||||
// ignore for old indexes
|
||||
iterator.remove();
|
||||
} else if (propName.equals("similarity")) {
|
||||
builder.similarity(parserContext.getSimilarity(propNode.toString()));
|
||||
iterator.remove();
|
||||
|
|
|
@ -127,30 +127,11 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
|
|||
}
|
||||
|
||||
public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
|
||||
final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object propNode = entry.getValue();
|
||||
if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
|
||||
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
|
||||
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
|
||||
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (propName.equals(Names.COERCE)) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
|
||||
if (propName.equals(Names.COERCE)) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
}
|
||||
|
|
|
@ -30,7 +30,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
|
|||
import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree;
|
||||
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
|
||||
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
|
@ -96,8 +95,8 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
public static final boolean POINTS_ONLY = false;
|
||||
public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m");
|
||||
public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m");
|
||||
public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d;
|
||||
public static final Orientation ORIENTATION = Orientation.RIGHT;
|
||||
public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d;
|
||||
public static final Explicit<Boolean> COERCE = new Explicit<>(false, false);
|
||||
|
||||
public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType();
|
||||
|
@ -147,12 +146,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
public GeoShapeFieldMapper build(BuilderContext context) {
|
||||
GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType;
|
||||
|
||||
if (geoShapeFieldType.tree.equals(Names.TREE_QUADTREE) && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
|
||||
geoShapeFieldType.setTree("legacyquadtree");
|
||||
}
|
||||
|
||||
if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1) ||
|
||||
(geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0)) {
|
||||
if (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0) {
|
||||
geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT);
|
||||
}
|
||||
setupFieldType(context);
|
||||
|
|
|
@ -154,9 +154,6 @@ public class AllFieldMapper extends MetadataFieldMapper {
|
|||
if (fieldName.equals("enabled")) {
|
||||
builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("auto_boost") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
// Old 1.x setting which is now ignored
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -42,7 +41,6 @@ import java.util.Map;
|
|||
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
|
||||
/**
|
||||
* A mapper that indexes the field names of a document under <code>_field_names</code>. This mapper is typically useful in order
|
||||
|
@ -107,14 +105,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
|
|||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
if (parserContext.indexVersionCreated().before(Version.V_1_3_0)) {
|
||||
throw new IllegalArgumentException("type="+CONTENT_TYPE+" is not supported on indices created before version 1.3.0. Is your cluster running multiple datanode versions?");
|
||||
}
|
||||
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
|
||||
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
parseField(builder, builder.name, node, parserContext);
|
||||
}
|
||||
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
|
@ -203,21 +194,12 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled
|
||||
|
||||
private FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {
|
||||
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), indexSettings);
|
||||
}
|
||||
|
||||
private FieldNamesFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
|
||||
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
|
||||
this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
|
||||
if (this.pre13Index) {
|
||||
FieldNamesFieldType newFieldType = fieldType().clone();
|
||||
newFieldType.setEnabled(false);
|
||||
newFieldType.freeze();
|
||||
this.fieldType = newFieldType;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -304,9 +286,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (pre13Index) {
|
||||
return builder;
|
||||
}
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
if (includeDefaults == false && fieldType().isEnabled() == Defaults.ENABLED) {
|
||||
|
@ -317,9 +296,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
|
|||
if (includeDefaults || fieldType().isEnabled() != Defaults.ENABLED) {
|
||||
builder.field("enabled", fieldType().isEnabled());
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || fieldType().equals(Defaults.FIELD_TYPE) == false)) {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
}
|
||||
|
||||
builder.endObject();
|
||||
return builder;
|
||||
|
|
|
@ -31,9 +31,7 @@ import org.apache.lucene.search.PrefixQuery;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.RegexpQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -51,12 +49,9 @@ import org.elasticsearch.index.query.QueryShardContext;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -80,23 +75,15 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
|||
FIELD_TYPE.setName(NAME);
|
||||
FIELD_TYPE.freeze();
|
||||
}
|
||||
|
||||
public static final String PATH = null;
|
||||
}
|
||||
|
||||
public static class Builder extends MetadataFieldMapper.Builder<Builder, IdFieldMapper> {
|
||||
|
||||
private String path = Defaults.PATH;
|
||||
|
||||
public Builder(MappedFieldType existing) {
|
||||
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
|
||||
indexName = Defaults.NAME;
|
||||
}
|
||||
|
||||
public Builder path(String path) {
|
||||
this.path = path;
|
||||
return builder;
|
||||
}
|
||||
// if we are indexed we use DOCS
|
||||
@Override
|
||||
protected IndexOptions getDefaultIndexOption() {
|
||||
|
@ -106,28 +93,14 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
|||
@Override
|
||||
public IdFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
return new IdFieldMapper(fieldType, path, context.indexSettings());
|
||||
return new IdFieldMapper(fieldType, context.indexSettings());
|
||||
}
|
||||
}
|
||||
|
||||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
throw new MapperParsingException(NAME + " is not configurable");
|
||||
}
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
|
||||
parseField(builder, builder.name, node, parserContext);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String fieldName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object fieldNode = entry.getValue();
|
||||
if (fieldName.equals("path")) {
|
||||
builder.path(fieldNode.toString());
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
throw new MapperParsingException(NAME + " is not configurable");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -229,31 +202,12 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
private final String path;
|
||||
|
||||
private IdFieldMapper(Settings indexSettings, MappedFieldType existing) {
|
||||
this(idFieldType(indexSettings, existing), Defaults.PATH, indexSettings);
|
||||
this(existing != null ? existing : Defaults.FIELD_TYPE, indexSettings);
|
||||
}
|
||||
|
||||
private IdFieldMapper(MappedFieldType fieldType, String path, Settings indexSettings) {
|
||||
private IdFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
|
||||
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) {
|
||||
if (existing != null) {
|
||||
return existing.clone();
|
||||
}
|
||||
MappedFieldType fieldType = Defaults.FIELD_TYPE.clone();
|
||||
boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
|
||||
if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) {
|
||||
fieldType.setTokenized(false);
|
||||
}
|
||||
return fieldType;
|
||||
}
|
||||
|
||||
public String path() {
|
||||
return this.path;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -299,33 +253,6 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (indexCreatedBefore2x == false) {
|
||||
return builder;
|
||||
}
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
// if all are defaults, no sense to write it at all
|
||||
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored()
|
||||
&& fieldType().indexOptions() == Defaults.FIELD_TYPE.indexOptions()
|
||||
&& path == Defaults.PATH
|
||||
&& hasCustomFieldDataSettings() == false) {
|
||||
return builder;
|
||||
}
|
||||
builder.startObject(CONTENT_TYPE);
|
||||
if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
|
||||
builder.field("store", fieldType().stored());
|
||||
}
|
||||
if (includeDefaults || fieldType().indexOptions() != Defaults.FIELD_TYPE.indexOptions()) {
|
||||
builder.field("index", indexTokenizeOptionToString(fieldType().indexOptions() != IndexOptions.NONE, fieldType().tokenized()));
|
||||
}
|
||||
if (includeDefaults || path != Defaults.PATH) {
|
||||
builder.field("path", path);
|
||||
}
|
||||
|
||||
if (includeDefaults || hasCustomFieldDataSettings()) {
|
||||
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
|
|
@ -24,9 +24,7 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -39,12 +37,9 @@ import org.elasticsearch.index.mapper.ParseContext;
|
|||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -99,23 +94,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
|
|||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
return builder;
|
||||
}
|
||||
|
||||
parseField(builder, builder.name, node, parserContext);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String fieldName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object fieldNode = entry.getValue();
|
||||
if (fieldName.equals("enabled")) {
|
||||
EnabledAttributeMapper mapper = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
|
||||
builder.enabled(mapper);
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
return new Builder(parserContext.mapperService().fullName(NAME));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -260,19 +239,13 @@ public class IndexFieldMapper extends MetadataFieldMapper {
|
|||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
// if all defaults, no need to write it at all
|
||||
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE && hasCustomFieldDataSettings() == false) {
|
||||
if (includeDefaults == false && enabledState == Defaults.ENABLED_STATE) {
|
||||
return builder;
|
||||
}
|
||||
builder.startObject(CONTENT_TYPE);
|
||||
if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) {
|
||||
builder.field("store", fieldType().stored());
|
||||
}
|
||||
if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
|
||||
builder.field("enabled", enabledState.enabled);
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) {
|
||||
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -138,9 +138,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
if (fieldName.equals("type")) {
|
||||
builder.type(fieldNode.toString());
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
// ignore before 2.0, reject on and after 2.0
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("fielddata")) {
|
||||
// Only take over `loading`, since that is the only option now that is configurable:
|
||||
Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata"));
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal;
|
|||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -40,7 +39,6 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -67,15 +65,12 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
public static final boolean REQUIRED = false;
|
||||
public static final String PATH = null;
|
||||
}
|
||||
|
||||
public static class Builder extends MetadataFieldMapper.Builder<Builder, RoutingFieldMapper> {
|
||||
|
||||
private boolean required = Defaults.REQUIRED;
|
||||
|
||||
private String path = Defaults.PATH;
|
||||
|
||||
public Builder(MappedFieldType existing) {
|
||||
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
|
||||
}
|
||||
|
@ -85,14 +80,9 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
return builder;
|
||||
}
|
||||
|
||||
public Builder path(String path) {
|
||||
this.path = path;
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RoutingFieldMapper build(BuilderContext context) {
|
||||
return new RoutingFieldMapper(fieldType, required, path, context.indexSettings());
|
||||
return new RoutingFieldMapper(fieldType, required, context.indexSettings());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -100,9 +90,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
@Override
|
||||
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
|
||||
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
parseField(builder, builder.name, node, parserContext);
|
||||
}
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String fieldName = Strings.toUnderscoreCase(entry.getKey());
|
||||
|
@ -110,9 +97,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
if (fieldName.equals("required")) {
|
||||
builder.required(nodeBooleanValue(fieldNode));
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
builder.path(fieldNode.toString());
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
|
@ -154,16 +138,14 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
private boolean required;
|
||||
private final String path;
|
||||
|
||||
private RoutingFieldMapper(Settings indexSettings, MappedFieldType existing) {
|
||||
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, Defaults.PATH, indexSettings);
|
||||
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, indexSettings);
|
||||
}
|
||||
|
||||
private RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, Settings indexSettings) {
|
||||
private RoutingFieldMapper(MappedFieldType fieldType, boolean required, Settings indexSettings) {
|
||||
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
|
||||
this.required = required;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public void markAsRequired() {
|
||||
|
@ -174,10 +156,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
return this.required;
|
||||
}
|
||||
|
||||
public String path() {
|
||||
return this.path;
|
||||
}
|
||||
|
||||
public String value(Document document) {
|
||||
Field field = (Field) document.getField(fieldType().name());
|
||||
return field == null ? null : (String)fieldType().value(field);
|
||||
|
@ -224,25 +202,13 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
// if all are defaults, no sense to write it at all
|
||||
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
|
||||
boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
|
||||
if (!includeDefaults && indexed == indexedDefault &&
|
||||
fieldType().stored() == Defaults.FIELD_TYPE.stored() && required == Defaults.REQUIRED && path == Defaults.PATH) {
|
||||
if (!includeDefaults && required == Defaults.REQUIRED) {
|
||||
return builder;
|
||||
}
|
||||
builder.startObject(CONTENT_TYPE);
|
||||
if (indexCreatedBefore2x && (includeDefaults || indexed != indexedDefault)) {
|
||||
builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) {
|
||||
builder.field("store", fieldType().stored());
|
||||
}
|
||||
if (includeDefaults || required != Defaults.REQUIRED) {
|
||||
builder.field("required", required);
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) {
|
||||
builder.field("path", path);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.NumericDocValuesField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.TimestampParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
|
@ -46,7 +45,6 @@ import java.util.Map;
|
|||
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
|
||||
public class TimestampFieldMapper extends MetadataFieldMapper {
|
||||
|
||||
|
@ -58,10 +56,8 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
public static final String NAME = "_timestamp";
|
||||
|
||||
// TODO: this should be removed
|
||||
public static final TimestampFieldType PRE_20_FIELD_TYPE;
|
||||
public static final TimestampFieldType FIELD_TYPE = new TimestampFieldType();
|
||||
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT);
|
||||
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER_BEFORE_2_0 = Joda.forPattern("epoch_millis||dateOptionalTime");
|
||||
|
||||
static {
|
||||
FIELD_TYPE.setStored(true);
|
||||
|
@ -73,34 +69,20 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE));
|
||||
FIELD_TYPE.setHasDocValues(true);
|
||||
FIELD_TYPE.freeze();
|
||||
PRE_20_FIELD_TYPE = FIELD_TYPE.clone();
|
||||
PRE_20_FIELD_TYPE.setStored(false);
|
||||
PRE_20_FIELD_TYPE.setHasDocValues(false);
|
||||
PRE_20_FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER_BEFORE_2_0);
|
||||
PRE_20_FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Defaults.PRECISION_STEP_64_BIT));
|
||||
PRE_20_FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER_BEFORE_2_0, Integer.MAX_VALUE));
|
||||
PRE_20_FIELD_TYPE.freeze();
|
||||
}
|
||||
|
||||
public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_DISABLED;
|
||||
public static final String PATH = null;
|
||||
public static final String DEFAULT_TIMESTAMP = "now";
|
||||
}
|
||||
|
||||
public static class Builder extends MetadataFieldMapper.Builder<Builder, TimestampFieldMapper> {
|
||||
|
||||
private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
|
||||
private String path = Defaults.PATH;
|
||||
private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP;
|
||||
private boolean explicitStore = false;
|
||||
private Boolean ignoreMissing = null;
|
||||
|
||||
public Builder(MappedFieldType existing, Settings settings) {
|
||||
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, chooseFieldType(settings, null));
|
||||
if (existing != null) {
|
||||
// if there is an existing type, always use that store value (only matters for < 2.0)
|
||||
explicitStore = true;
|
||||
}
|
||||
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -113,11 +95,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
return builder;
|
||||
}
|
||||
|
||||
public Builder path(String path) {
|
||||
this.path = path;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
|
||||
fieldType().setDateTimeFormatter(dateTimeFormatter);
|
||||
return this;
|
||||
|
@ -135,42 +112,21 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
@Override
|
||||
public Builder store(boolean store) {
|
||||
explicitStore = true;
|
||||
return super.store(store);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimestampFieldMapper build(BuilderContext context) {
|
||||
if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
|
||||
fieldType.setStored(false);
|
||||
}
|
||||
|
||||
if (fieldType().dateTimeFormatter().equals(Defaults.DATE_TIME_FORMATTER)) {
|
||||
fieldType().setDateTimeFormatter(getDateTimeFormatter(context.indexSettings()));
|
||||
}
|
||||
|
||||
setupFieldType(context);
|
||||
return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, path, defaultTimestamp,
|
||||
return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, defaultTimestamp,
|
||||
ignoreMissing, context.indexSettings());
|
||||
}
|
||||
}
|
||||
|
||||
private static FormatDateTimeFormatter getDateTimeFormatter(Settings indexSettings) {
|
||||
Version indexCreated = Version.indexCreated(indexSettings);
|
||||
if (indexCreated.onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
return Defaults.DATE_TIME_FORMATTER;
|
||||
} else {
|
||||
return Defaults.DATE_TIME_FORMATTER_BEFORE_2_0;
|
||||
}
|
||||
}
|
||||
|
||||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings());
|
||||
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
parseField(builder, builder.name, node, parserContext);
|
||||
}
|
||||
boolean defaultSet = false;
|
||||
Boolean ignoreMissing = null;
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
|
@ -181,23 +137,12 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
|
||||
builder.enabled(enabledState);
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
builder.path(fieldNode.toString());
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("format")) {
|
||||
builder.dateTimeFormatter(parseDateTimeFormatter(fieldNode.toString()));
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("default")) {
|
||||
if (fieldNode == null) {
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_4_0_Beta1) &&
|
||||
parserContext.indexVersionCreated().before(Version.V_1_5_0)) {
|
||||
// We are reading an index created in 1.4 with feature #7036
|
||||
// `default: null` was explicitly set. We need to change this index to
|
||||
// `ignore_missing: false`
|
||||
builder.ignoreMissing(false);
|
||||
} else {
|
||||
throw new TimestampParsingException("default timestamp can not be set to null");
|
||||
}
|
||||
throw new TimestampParsingException("default timestamp can not be set to null");
|
||||
} else {
|
||||
builder.defaultTimestamp(fieldNode.toString());
|
||||
defaultSet = true;
|
||||
|
@ -246,28 +191,19 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
private static MappedFieldType chooseFieldType(Settings settings, MappedFieldType existing) {
|
||||
if (existing != null) {
|
||||
return existing;
|
||||
}
|
||||
return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0_beta1) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE;
|
||||
}
|
||||
|
||||
private EnabledAttributeMapper enabledState;
|
||||
|
||||
private final String path;
|
||||
private final String defaultTimestamp;
|
||||
private final Boolean ignoreMissing;
|
||||
|
||||
private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) {
|
||||
this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null).clone(), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings);
|
||||
this(existing != null ? existing : Defaults.FIELD_TYPE, Defaults.FIELD_TYPE, Defaults.ENABLED, Defaults.DEFAULT_TIMESTAMP, null, indexSettings);
|
||||
}
|
||||
|
||||
private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String path,
|
||||
private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState,
|
||||
String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) {
|
||||
super(NAME, fieldType, defaultFieldType, indexSettings);
|
||||
this.enabledState = enabledState;
|
||||
this.path = path;
|
||||
this.defaultTimestamp = defaultTimestamp;
|
||||
this.ignoreMissing = ignoreMissing;
|
||||
}
|
||||
|
@ -281,10 +217,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
return this.enabledState.enabled;
|
||||
}
|
||||
|
||||
public String path() {
|
||||
return this.path;
|
||||
}
|
||||
|
||||
public String defaultTimestamp() {
|
||||
return this.defaultTimestamp;
|
||||
}
|
||||
|
@ -332,35 +264,19 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
|
||||
boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
|
||||
|
||||
// if all are defaults, no sense to write it at all
|
||||
if (!includeDefaults && indexed == indexedDefault && hasCustomFieldDataSettings() == false &&
|
||||
fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH
|
||||
if (!includeDefaults && enabledState == Defaults.ENABLED
|
||||
&& fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())
|
||||
&& Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)
|
||||
&& defaultFieldType.hasDocValues() == fieldType().hasDocValues()) {
|
||||
&& Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) {
|
||||
return builder;
|
||||
}
|
||||
builder.startObject(CONTENT_TYPE);
|
||||
if (includeDefaults || enabledState != Defaults.ENABLED) {
|
||||
builder.field("enabled", enabledState.enabled);
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || (indexed != indexedDefault) || (fieldType().tokenized() != Defaults.FIELD_TYPE.tokenized()))) {
|
||||
builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.PRE_20_FIELD_TYPE.stored())) {
|
||||
builder.field("store", fieldType().stored());
|
||||
}
|
||||
if (indexCreatedBefore2x) {
|
||||
doXContentDocValues(builder, includeDefaults);
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || path != Defaults.PATH)) {
|
||||
builder.field("path", path);
|
||||
}
|
||||
// different format handling depending on index version
|
||||
String defaultDateFormat = indexCreatedBefore2x ? Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format() : Defaults.DATE_TIME_FORMATTER.format();
|
||||
String defaultDateFormat = Defaults.DATE_TIME_FORMATTER.format();
|
||||
if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(defaultDateFormat)) {
|
||||
builder.field("format", fieldType().dateTimeFormatter().format());
|
||||
}
|
||||
|
@ -370,9 +286,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
if (includeDefaults || ignoreMissing != null) {
|
||||
builder.field("ignore_missing", ignoreMissing);
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || hasCustomFieldDataSettings())) {
|
||||
builder.field("fielddata", fieldType().fieldDataType().getSettings().getAsMap());
|
||||
}
|
||||
|
||||
builder.endObject();
|
||||
return builder;
|
||||
|
@ -396,13 +309,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
} else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) {
|
||||
conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp());
|
||||
}
|
||||
if (this.path != null) {
|
||||
if (path.equals(timestampFieldMapperMergeWith.path()) == false) {
|
||||
conflicts.add("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is " + (timestampFieldMapperMergeWith.path() == null ? "missing" : timestampFieldMapperMergeWith.path()));
|
||||
}
|
||||
} else if (timestampFieldMapperMergeWith.path() != null) {
|
||||
conflicts.add("Cannot update path in _timestamp value. Value is " + path + " path in merged mapping is missing");
|
||||
}
|
||||
if (conflicts.isEmpty() == false) {
|
||||
throw new IllegalArgumentException("Conflicts: " + conflicts);
|
||||
}
|
||||
|
|
|
@ -47,8 +47,6 @@ import java.io.IOException;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -92,12 +90,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
|
|||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
throw new MapperParsingException(NAME + " is not configurable");
|
||||
}
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
|
||||
parseField(builder, builder.name, node, parserContext);
|
||||
return builder;
|
||||
throw new MapperParsingException(NAME + " is not configurable");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -199,25 +192,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (indexCreatedBefore2x == false) {
|
||||
return builder;
|
||||
}
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
// if all are defaults, no sense to write it at all
|
||||
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
|
||||
boolean defaultIndexed = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
|
||||
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && indexed == defaultIndexed) {
|
||||
return builder;
|
||||
}
|
||||
builder.startObject(CONTENT_TYPE);
|
||||
if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
|
||||
builder.field("store", fieldType().stored());
|
||||
}
|
||||
if (includeDefaults || indexed != defaultIndexed) {
|
||||
builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
|
|
@ -85,7 +85,6 @@ public class UidFieldMapper extends MetadataFieldMapper {
|
|||
@Override
|
||||
public UidFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
fieldType.setHasDocValues(context.indexCreatedVersion().before(Version.V_2_0_0_beta1));
|
||||
return new UidFieldMapper(fieldType, defaultFieldType, context.indexSettings());
|
||||
}
|
||||
}
|
||||
|
@ -93,12 +92,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
|
|||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
|
||||
throw new MapperParsingException(NAME + " is not configurable");
|
||||
}
|
||||
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
|
||||
parseField(builder, builder.name, node, parserContext);
|
||||
return builder;
|
||||
throw new MapperParsingException(NAME + " is not configurable");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -203,23 +197,6 @@ public class UidFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (indexCreatedBefore2x == false) {
|
||||
return builder;
|
||||
}
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
// if defaults, don't output
|
||||
if (!includeDefaults && hasCustomFieldDataSettings() == false) {
|
||||
return builder;
|
||||
}
|
||||
|
||||
builder.startObject(CONTENT_TYPE);
|
||||
|
||||
if (includeDefaults || hasCustomFieldDataSettings()) {
|
||||
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
|
||||
}
|
||||
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
|
|
@ -22,8 +22,6 @@ package org.elasticsearch.index.mapper.internal;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.NumericDocValuesField;
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.fielddata.FieldDataType;
|
||||
|
@ -35,7 +33,6 @@ import org.elasticsearch.index.mapper.ParseContext;
|
|||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
|
@ -73,16 +70,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
|
|||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder = new Builder();
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String fieldName = Strings.toUnderscoreCase(entry.getKey());
|
||||
if (fieldName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
// ignore in 1.x, reject in 2.x
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
throw new MapperParsingException(NAME + " is not configurable");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -215,14 +215,6 @@ public class RootObjectMapper extends ObjectMapper {
|
|||
this.numericDetection = numericDetection;
|
||||
}
|
||||
|
||||
/** Return a copy of this mapper that has the given {@code mapper} as a
|
||||
* sub mapper. */
|
||||
public RootObjectMapper copyAndPutMapper(Mapper mapper) {
|
||||
RootObjectMapper clone = (RootObjectMapper) clone();
|
||||
clone.putMapper(mapper);
|
||||
return clone;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ObjectMapper mappingUpdate(Mapper mapper) {
|
||||
RootObjectMapper update = (RootObjectMapper) super.mappingUpdate(mapper);
|
||||
|
|
|
@ -1,340 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.cluster.metadata;
|
||||
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class MappingMetaDataParserTests extends ESTestCase {
|
||||
public void testParseIdAlone() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1");
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("id"));
|
||||
assertThat(parseContext.idResolved(), equalTo(true));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.routingResolved(), equalTo(true));
|
||||
assertThat(parseContext.timestamp(), nullValue());
|
||||
assertThat(parseContext.timestampResolved(), equalTo(false));
|
||||
}
|
||||
|
||||
public void testFailIfIdIsNoValue() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startArray("id").value("id").endArray().field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1");
|
||||
try {
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
fail();
|
||||
} catch (MapperParsingException ex) {
|
||||
// bogus its an array
|
||||
}
|
||||
|
||||
bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("id").field("x", "id").endObject().field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
parseContext = md.createParseContext(null, "routing_value", "1");
|
||||
try {
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
fail();
|
||||
} catch (MapperParsingException ex) {
|
||||
// bogus its an object
|
||||
}
|
||||
}
|
||||
|
||||
public void testParseRoutingAlone() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext("id", null, "1");
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), nullValue());
|
||||
assertThat(parseContext.idResolved(), equalTo(false));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.routingResolved(), equalTo(true));
|
||||
assertThat(parseContext.timestamp(), nullValue());
|
||||
assertThat(parseContext.timestampResolved(), equalTo(false));
|
||||
}
|
||||
|
||||
public void testParseTimestampAlone() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext("id", "routing_value1", null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), nullValue());
|
||||
assertThat(parseContext.idResolved(), equalTo(false));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.routingResolved(), equalTo(true));
|
||||
assertThat(parseContext.timestamp(), equalTo("1"));
|
||||
assertThat(parseContext.timestampResolved(), equalTo(true));
|
||||
}
|
||||
|
||||
public void testParseTimestampEquals() throws Exception {
|
||||
MappingMetaData md1 = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
MappingMetaData md2 = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
assertThat(md1, equalTo(md2));
|
||||
}
|
||||
|
||||
public void testParseIdAndRoutingAndTimestamp() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("id"));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.timestamp(), equalTo("1"));
|
||||
}
|
||||
|
||||
public void testParseIdAndRoutingAndTimestampWithPath() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").endObject()
|
||||
.startObject("obj2").field("timestamp", "1").endObject()
|
||||
.endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("id"));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.timestamp(), equalTo("1"));
|
||||
}
|
||||
|
||||
public void testParseIdWithPath() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").endObject()
|
||||
.startObject("obj2").field("timestamp", "1").endObject()
|
||||
.endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "2");
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("id"));
|
||||
assertThat(parseContext.idResolved(), equalTo(true));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.routingResolved(), equalTo(true));
|
||||
assertThat(parseContext.timestamp(), nullValue());
|
||||
assertThat(parseContext.timestampResolved(), equalTo(false));
|
||||
}
|
||||
|
||||
public void testParseRoutingWithPath() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").endObject()
|
||||
.startObject("obj2").field("timestamp", "1").endObject()
|
||||
.endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext("id", null, "2");
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), nullValue());
|
||||
assertThat(parseContext.idResolved(), equalTo(false));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.routingResolved(), equalTo(true));
|
||||
assertThat(parseContext.timestamp(), nullValue());
|
||||
assertThat(parseContext.timestampResolved(), equalTo(false));
|
||||
}
|
||||
|
||||
public void testParseTimestampWithPath() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("routing", "routing_value").endObject()
|
||||
.startObject("obj2").field("timestamp", "1").endObject()
|
||||
.endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value1", null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), nullValue());
|
||||
assertThat(parseContext.idResolved(), equalTo(false));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.routingResolved(), equalTo(true));
|
||||
assertThat(parseContext.timestamp(), equalTo("1"));
|
||||
assertThat(parseContext.timestampResolved(), equalTo(true));
|
||||
}
|
||||
|
||||
public void testParseIdAndRoutingAndTimestampWithinSamePath() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject()
|
||||
.startObject("obj2").field("field1", "value1").endObject()
|
||||
.endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("id"));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.timestamp(), equalTo("1"));
|
||||
}
|
||||
|
||||
public void testParseIdAndRoutingAndTimestampWithinSamePathAndMoreLevels() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("obj1.obj0.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.obj2.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1")
|
||||
.startObject("obj0")
|
||||
.field("id", "id")
|
||||
.endObject()
|
||||
.startObject("obj2")
|
||||
.field("routing", "routing_value")
|
||||
.endObject()
|
||||
.startObject("obj3")
|
||||
.field("timestamp", "1")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.startObject("obj2").field("field1", "value1").endObject()
|
||||
.endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("id"));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.timestamp(), equalTo("1"));
|
||||
}
|
||||
|
||||
public void testParseIdAndRoutingAndTimestampWithSameRepeatedObject() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").endObject()
|
||||
.startObject("obj1").field("routing", "routing_value").endObject()
|
||||
.startObject("obj1").field("timestamp", "1").endObject()
|
||||
.endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("id"));
|
||||
assertThat(parseContext.routing(), equalTo("routing_value"));
|
||||
assertThat(parseContext.timestamp(), equalTo("1"));
|
||||
}
|
||||
|
||||
public void testParseIdRoutingTimestampWithRepeatedField() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("field1"),
|
||||
new MappingMetaData.Routing(true, "field1.field1"),
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
|
||||
byte[] bytes = jsonBuilder().startObject()
|
||||
.field("aaa", "wr")
|
||||
.array("arr1", "1", "2", "3")
|
||||
.field("field1", "foo")
|
||||
.field("field1", "bar")
|
||||
.field("test", "value")
|
||||
.field("zzz", "wr")
|
||||
.endObject().bytes().toBytes();
|
||||
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), equalTo("foo"));
|
||||
assertThat(parseContext.routing(), nullValue());
|
||||
assertThat(parseContext.timestamp(), equalTo("foo"));
|
||||
}
|
||||
|
||||
public void testParseNoIdRoutingWithRepeatedFieldAndObject() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "field1.field1.field2"),
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
|
||||
byte[] bytes = jsonBuilder().startObject()
|
||||
.field("aaa", "wr")
|
||||
.array("arr1", "1", "2", "3")
|
||||
.field("field1", "foo")
|
||||
.startObject("field1").field("field2", "bar").endObject()
|
||||
.field("test", "value")
|
||||
.field("zzz", "wr")
|
||||
.endObject().bytes().toBytes();
|
||||
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), nullValue());
|
||||
assertThat(parseContext.routing(), nullValue());
|
||||
assertThat(parseContext.timestamp(), equalTo("foo"));
|
||||
}
|
||||
|
||||
public void testParseRoutingWithRepeatedFieldAndValidRouting() throws Exception {
|
||||
MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
|
||||
new MappingMetaData.Id(null),
|
||||
new MappingMetaData.Routing(true, "field1.field2"),
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
|
||||
|
||||
byte[] bytes = jsonBuilder().startObject()
|
||||
.field("aaa", "wr")
|
||||
.array("arr1", "1", "2", "3")
|
||||
.field("field1", "foo")
|
||||
.startObject("field1").field("field2", "bar").endObject()
|
||||
.field("test", "value")
|
||||
.field("zzz", "wr")
|
||||
.endObject().bytes().toBytes();
|
||||
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
md.parse(XContentFactory.xContent(bytes).createParser(bytes), parseContext);
|
||||
assertThat(parseContext.id(), nullValue());
|
||||
assertThat(parseContext.routing(), equalTo("bar"));
|
||||
assertThat(parseContext.timestamp(), equalTo("foo"));
|
||||
}
|
||||
}
|
|
@ -242,25 +242,6 @@ public class GetActionIT extends ESIntegTestCase {
|
|||
assertThat(response.getResponses()[0].getResponse().getField("field").getValues().get(0).toString(), equalTo("value1"));
|
||||
}
|
||||
|
||||
public void testRealtimeGetWithCompressBackcompat() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1).put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id))
|
||||
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("_source").field("compress", true).endObject().endObject().endObject()));
|
||||
ensureGreen();
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for (int i = 0; i < 10000; i++) {
|
||||
sb.append((char) i);
|
||||
}
|
||||
String fieldValue = sb.toString();
|
||||
client().prepareIndex("test", "type", "1").setSource("field", fieldValue).get();
|
||||
|
||||
// realtime get
|
||||
GetResponse getResponse = client().prepareGet("test", "type", "1").get();
|
||||
assertThat(getResponse.isExists(), equalTo(true));
|
||||
assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo(fieldValue));
|
||||
}
|
||||
|
||||
public void testGetDocWithMultivaluedFields() throws Exception {
|
||||
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("properties")
|
||||
|
@ -948,63 +929,6 @@ public class GetActionIT extends ESIntegTestCase {
|
|||
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
|
||||
}
|
||||
|
||||
public void testUngeneratedFieldsPartOfSourceUnstoredSourceDisabledBackcompat() throws IOException {
|
||||
indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(false, false);
|
||||
String[] fieldsList = {};
|
||||
// before refresh - document is only in translog
|
||||
assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
|
||||
refresh();
|
||||
//after refresh - document is in translog and also indexed
|
||||
assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
|
||||
flush();
|
||||
//after flush - document is in not anymore translog - only indexed
|
||||
assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
|
||||
}
|
||||
|
||||
public void testUngeneratedFieldsPartOfSourceEitherStoredOrSourceEnabledBackcompat() throws IOException {
|
||||
boolean stored = randomBoolean();
|
||||
boolean sourceEnabled = true;
|
||||
if (stored) {
|
||||
sourceEnabled = randomBoolean();
|
||||
}
|
||||
indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(stored, sourceEnabled);
|
||||
String[] fieldsList = {};
|
||||
// before refresh - document is only in translog
|
||||
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
|
||||
refresh();
|
||||
//after refresh - document is in translog and also indexed
|
||||
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
|
||||
flush();
|
||||
//after flush - document is in not anymore translog - only indexed
|
||||
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
|
||||
}
|
||||
|
||||
void indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(boolean stored, boolean sourceEnabled) {
|
||||
String storedString = stored ? "yes" : "no";
|
||||
String createIndexSource = "{\n" +
|
||||
" \"settings\": {\n" +
|
||||
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
|
||||
" \"refresh_interval\": \"-1\",\n" +
|
||||
" \"" + IndexMetaData.SETTING_VERSION_CREATED + "\": " + Version.V_1_4_2.id + "\n" +
|
||||
" },\n" +
|
||||
" \"mappings\": {\n" +
|
||||
" \"doc\": {\n" +
|
||||
" \"_source\": {\n" +
|
||||
" \"enabled\": " + sourceEnabled + "\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
|
||||
ensureGreen();
|
||||
String doc = "{\n" +
|
||||
" \"my_boost\": 5.0,\n" +
|
||||
" \"_ttl\": \"1h\"\n" +
|
||||
"}\n";
|
||||
|
||||
client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get();
|
||||
}
|
||||
|
||||
public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException {
|
||||
String createIndexSource = "{\n" +
|
||||
" \"settings\": {\n" +
|
||||
|
|
|
@ -20,8 +20,6 @@
|
|||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
@ -33,9 +31,6 @@ import java.util.Collections;
|
|||
import java.util.HashSet;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
import static org.elasticsearch.test.VersionUtils.getFirstVersion;
|
||||
import static org.elasticsearch.test.VersionUtils.getPreviousVersion;
|
||||
import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.Matchers.hasToString;
|
||||
|
||||
|
@ -58,23 +53,6 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
|
|||
.actionGet();
|
||||
}
|
||||
|
||||
public void testThatLongTypeNameIsNotRejectedOnPreElasticsearchVersionTwo() {
|
||||
String index = "text-index";
|
||||
String field = "field";
|
||||
String type = new String(new char[256]).replace("\0", "a");
|
||||
|
||||
CreateIndexResponse response =
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareCreate(index)
|
||||
.setSettings(settings(randomVersionBetween(random(), getFirstVersion(), getPreviousVersion(Version.V_2_0_0_beta1))))
|
||||
.addMapping(type, field, "type=string")
|
||||
.execute()
|
||||
.actionGet();
|
||||
assertNotNull(response);
|
||||
}
|
||||
|
||||
public void testTypeNameTooLong() {
|
||||
String index = "text-index";
|
||||
String field = "field";
|
||||
|
|
|
@ -456,19 +456,6 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testIncludeInObjectBackcompat() throws Exception {
|
||||
String mapping = jsonBuilder().startObject().startObject("type").endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject().field("_all", "foo").endObject().bytes());
|
||||
|
||||
assertNull(doc.rootDoc().get("_all"));
|
||||
AllField field = (AllField) doc.rootDoc().getField("_all");
|
||||
// the backcompat behavior is actually ignoring directly specifying _all
|
||||
assertFalse(field.getAllEntries().fields().iterator().hasNext());
|
||||
}
|
||||
|
||||
public void testIncludeInObjectNotAllowed() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
|
|
@ -1,207 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.date;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
/**
|
||||
* Test class to check for all the conditions defined in
|
||||
* https://github.com/elastic/elasticsearch/issues/10971
|
||||
*/
|
||||
public class DateBackwardsCompatibilityTests extends ESSingleNodeTestCase {
|
||||
|
||||
private String index = "testindex";
|
||||
private String type = "testtype";
|
||||
private Version randomVersionBelow2x;
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception {
|
||||
randomVersionBelow2x = randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1);
|
||||
}
|
||||
|
||||
public void testThatPre2xIndicesNumbersAreTreatedAsEpochs() throws Exception {
|
||||
index = createPre2xIndexAndMapping();
|
||||
long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015
|
||||
XContentBuilder document = jsonBuilder().startObject().field("date_field", dateInMillis).endObject();
|
||||
index(document);
|
||||
|
||||
// search for date in time range
|
||||
QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24");
|
||||
SearchResponse response = client().prepareSearch(index).setQuery(query).get();
|
||||
assertHitCount(response, 1);
|
||||
}
|
||||
|
||||
public void testThatPre2xFailedStringParsingLeadsToEpochParsing() throws Exception {
|
||||
index = createPre2xIndexAndMapping();
|
||||
long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015
|
||||
String date = String.valueOf(dateInMillis);
|
||||
XContentBuilder document = jsonBuilder().startObject().field("date_field", date).endObject();
|
||||
index(document);
|
||||
|
||||
// search for date in time range
|
||||
QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24");
|
||||
SearchResponse response = client().prepareSearch(index).setQuery(query).get();
|
||||
assertHitCount(response, 1);
|
||||
}
|
||||
|
||||
public void testThatPre2xSupportsUnixTimestampsInAnyDateFormat() throws Exception {
|
||||
long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015
|
||||
List<String> dateFormats = Arrays.asList("dateOptionalTime", "weekDate", "tTime", "ordinalDate", "hourMinuteSecond", "hourMinute");
|
||||
|
||||
for (String format : dateFormats) {
|
||||
XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
|
||||
.startObject("date_field").field("type", "date").field("format", format).endObject()
|
||||
.endObject().endObject();
|
||||
|
||||
index = createIndex(randomVersionBelow2x, mapping);
|
||||
|
||||
XContentBuilder document = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", String.valueOf(dateInMillis))
|
||||
.endObject();
|
||||
index(document);
|
||||
|
||||
// indexing as regular timestamp should work as well
|
||||
document = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", dateInMillis)
|
||||
.endObject();
|
||||
index(document);
|
||||
|
||||
client().admin().indices().prepareDelete(index).get();
|
||||
}
|
||||
}
|
||||
|
||||
public void testThatPre2xIndicesNumbersAreTreatedAsTimestamps() throws Exception {
|
||||
// looks like a unix time stamp but is meant as 2016-06-23T01:00:00.000 - see the specified date format
|
||||
long date = 2015062301000l;
|
||||
|
||||
XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
|
||||
.startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject()
|
||||
.endObject().endObject();
|
||||
index = createIndex(randomVersionBelow2x, mapping);
|
||||
|
||||
XContentBuilder document = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", randomBoolean() ? String.valueOf(date) : date)
|
||||
.endObject();
|
||||
index(document);
|
||||
|
||||
// no results in expected time range
|
||||
QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime");
|
||||
SearchResponse response = client().prepareSearch(index).setQuery(query).get();
|
||||
assertNoSearchHits(response);
|
||||
|
||||
// result in unix timestamp range
|
||||
QueryBuilder timestampQuery = QueryBuilders.rangeQuery("date_field").from(2015062300000L).to(2015062302000L);
|
||||
assertHitCount(client().prepareSearch(index).setQuery(timestampQuery).get(), 1);
|
||||
|
||||
// result should also work with regular specified dates
|
||||
QueryBuilder regularTimeQuery = QueryBuilders.rangeQuery("date_field").from("2033-11-08").to("2033-11-09").format("dateOptionalTime");
|
||||
assertHitCount(client().prepareSearch(index).setQuery(regularTimeQuery).get(), 1);
|
||||
}
|
||||
|
||||
public void testThatPost2xIndicesNumbersAreTreatedAsStrings() throws Exception {
|
||||
// looks like a unix time stamp but is meant as 2016-06-23T01:00:00.000 - see the specified date format
|
||||
long date = 2015062301000l;
|
||||
|
||||
XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
|
||||
.startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject()
|
||||
.endObject().endObject();
|
||||
index = createIndex(Version.CURRENT, mapping);
|
||||
|
||||
XContentBuilder document = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", String.valueOf(date))
|
||||
.endObject();
|
||||
index(document);
|
||||
|
||||
document = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", date)
|
||||
.endObject();
|
||||
index(document);
|
||||
|
||||
// search for date in time range
|
||||
QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime");
|
||||
SearchResponse response = client().prepareSearch(index).setQuery(query).get();
|
||||
assertHitCount(response, 2);
|
||||
}
|
||||
|
||||
public void testDynamicDateDetectionIn2xDoesNotSupportEpochs() throws Exception {
|
||||
try {
|
||||
XContentBuilder mapping = jsonBuilder().startObject()
|
||||
.startArray("dynamic_date_formats").value("dateOptionalTime").value("epoch_seconds").endArray()
|
||||
.endObject();
|
||||
createIndex(Version.CURRENT, mapping);
|
||||
fail("Expected a MapperParsingException, but did not happen");
|
||||
} catch (MapperParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("Failed to parse mapping [" + type + "]"));
|
||||
assertThat(e.getMessage(), containsString("Epoch [epoch_seconds] is not supported as dynamic date format"));
|
||||
}
|
||||
}
|
||||
|
||||
private String createPre2xIndexAndMapping() throws Exception {
|
||||
return createIndexAndMapping(randomVersionBelow2x);
|
||||
}
|
||||
|
||||
private String createIndexAndMapping(Version version) throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder().startObject().startObject("properties")
|
||||
.startObject("date_field").field("type", "date").field("format", "dateOptionalTime").endObject()
|
||||
.endObject().endObject();
|
||||
|
||||
return createIndex(version, mapping);
|
||||
}
|
||||
|
||||
private String createIndex(Version version, XContentBuilder mapping) {
|
||||
Settings settings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
|
||||
createIndex(index, settings, type, mapping);
|
||||
|
||||
ensureGreen(index);
|
||||
return index;
|
||||
}
|
||||
|
||||
private void index(XContentBuilder document) {
|
||||
IndexResponse indexResponse = client().prepareIndex(index, type).setSource(document).setRefresh(true).get();
|
||||
assertThat(indexResponse.isCreated(), is(true));
|
||||
}
|
||||
}
|
|
@ -433,51 +433,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
|
|||
throw new AssertionError("missing");
|
||||
}
|
||||
|
||||
public void testNumericResolutionBackwardsCompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("date_field").field("type", "date").field("format", "date_time").field("numeric_resolution", "seconds").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper defaultMapper = mapper("test1", "type", mapping, Version.V_0_90_0);
|
||||
|
||||
// provided as an int
|
||||
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", 42)
|
||||
.endObject()
|
||||
.bytes());
|
||||
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(42000L));
|
||||
|
||||
// provided as a string
|
||||
doc = defaultMapper.parse("test", "type", "2", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", "43")
|
||||
.endObject()
|
||||
.bytes());
|
||||
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(43000L));
|
||||
|
||||
// but formatted dates still parse as milliseconds
|
||||
doc = defaultMapper.parse("test", "type", "2", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", "1970-01-01T00:00:44.000Z")
|
||||
.endObject()
|
||||
.bytes());
|
||||
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(44000L));
|
||||
|
||||
// expected to fail due to field epoch date formatters not being set
|
||||
DocumentMapper currentMapper = mapper("test2", "type", mapping);
|
||||
try {
|
||||
currentMapper.parse("test", "type", "2", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", randomBoolean() ? "43" : 43)
|
||||
.endObject()
|
||||
.bytes());
|
||||
fail("expected parse failure");
|
||||
} catch (MapperParsingException e) {
|
||||
assertTrue(e.getMessage(), e.getMessage().contains("failed to parse [date_field]"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject()
|
||||
|
@ -505,31 +460,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(indexResponse.isCreated(), is(true));
|
||||
}
|
||||
|
||||
public void testThatOlderIndicesAllowNonStrictDates() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("date_field").field("type", "date").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
Version randomVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1);
|
||||
IndexService index = createIndex("test", settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion).build());
|
||||
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
|
||||
assertDateFormat("epoch_millis||date_optional_time");
|
||||
DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
|
||||
|
||||
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", "1-1-1T00:00:44.000Z")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
// also test normal date
|
||||
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", "2015-06-06T00:00:44.000Z")
|
||||
.endObject()
|
||||
.bytes());
|
||||
}
|
||||
|
||||
public void testThatNewIndicesOnlyAllowStrictDates() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("date_field").field("type", "date").endObject().endObject()
|
||||
|
@ -559,34 +489,6 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testThatUpgradingAnOlderIndexToStrictDateWorks() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("date_field").field("type", "date").field("format", "date_optional_time").endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
Version randomVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1);
|
||||
createIndex("test", settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion).build());
|
||||
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
|
||||
assertDateFormat("epoch_millis||date_optional_time");
|
||||
|
||||
// index doc
|
||||
client().prepareIndex("test", "type", "1").setSource(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("date_field", "2015-06-06T00:00:44.000Z")
|
||||
.endObject()).get();
|
||||
|
||||
// update mapping
|
||||
String newMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("date_field")
|
||||
.field("type", "date")
|
||||
.field("format", "strict_date_optional_time||epoch_millis")
|
||||
.endObject().endObject().endObject().endObject().string();
|
||||
PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(newMapping).get();
|
||||
assertThat(putMappingResponse.isAcknowledged(), is(true));
|
||||
|
||||
assertDateFormat("strict_date_optional_time||epoch_millis");
|
||||
}
|
||||
|
||||
private void assertDateFormat(String expectedFormat) throws IOException {
|
||||
GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").setTypes("type").get();
|
||||
Map<String, Object> mappingMap = response.getMappings().get("test").get("type").getSourceAsMap();
|
||||
|
|
|
@ -653,60 +653,6 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test backward compatibility
|
||||
*/
|
||||
public void testBackwardCompatibleOptions() throws Exception {
|
||||
// backward compatibility testing
|
||||
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
|
||||
Version.V_1_7_1)).build();
|
||||
|
||||
// validate
|
||||
DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
|
||||
.field("validate", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
parser.parse("type", new CompressedXContent(mapping));
|
||||
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
|
||||
.field("validate_lat", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
parser.parse("type", new CompressedXContent(mapping));
|
||||
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
|
||||
.field("validate_lon", false).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
parser.parse("type", new CompressedXContent(mapping));
|
||||
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"ignore_malformed\":true"));
|
||||
|
||||
// normalize
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
|
||||
.field("normalize", true).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
parser.parse("type", new CompressedXContent(mapping));
|
||||
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
|
||||
.field("normalize_lat", true).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
parser.parse("type", new CompressedXContent(mapping));
|
||||
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
|
||||
.field("normalize_lon", true).endObject().endObject()
|
||||
.endObject().endObject().string();
|
||||
parser.parse("type", new CompressedXContent(mapping));
|
||||
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"coerce\":true"));
|
||||
}
|
||||
|
||||
public void testGeoPointMapperMerge() throws Exception {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
|
||||
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
|
||||
|
|
|
@ -53,7 +53,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
|
|||
GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
|
||||
PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
|
||||
|
||||
assertThat(strategy.getDistErrPct(), equalTo(GeoShapeFieldMapper.Defaults.LEGACY_DISTANCE_ERROR_PCT));
|
||||
assertThat(strategy.getDistErrPct(), equalTo(0.025d));
|
||||
assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
|
||||
assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoShapeFieldMapper.Defaults.GEOHASH_LEVELS));
|
||||
assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION));
|
||||
|
|
|
@ -64,57 +64,6 @@ public class IdMappingTests extends ESSingleNodeTestCase {
|
|||
assertTrue(e.getMessage().contains("No id found"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testIdIndexedBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_id").field("index", "not_analyzed").endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get(UidFieldMapper.NAME), notNullValue());
|
||||
assertThat(doc.rootDoc().get(IdFieldMapper.NAME), notNullValue());
|
||||
}
|
||||
|
||||
public void testIdPathBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_id").field("path", "my_path").endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
// serialize the id mapping
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
|
||||
builder = docMapper.idFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
String serialized_id_mapping = builder.string();
|
||||
|
||||
String expected_id_mapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("_id").field("path", "my_path").endObject()
|
||||
.endObject().string();
|
||||
|
||||
assertThat(serialized_id_mapping, equalTo(expected_id_mapping));
|
||||
}
|
||||
|
||||
public void testIncludeInObjectBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("_id", "1")
|
||||
.endObject()
|
||||
.bytes()).type("type"));
|
||||
|
||||
// _id is not indexed so we need to check _uid
|
||||
assertEquals(Uid.createUid("type", "1"), doc.rootDoc().get(UidFieldMapper.NAME));
|
||||
}
|
||||
|
||||
public void testIncludeInObjectNotAllowed() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
|
||||
|
|
|
@ -19,59 +19,17 @@
|
|||
|
||||
package org.elasticsearch.index.mapper.index;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class IndexTypeMapperTests extends ESSingleNodeTestCase {
|
||||
private Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
|
||||
public void testSimpleIndexMapperEnabledBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_index").field("enabled", true).endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
IndexFieldMapper indexMapper = docMapper.indexMapper();
|
||||
assertThat(indexMapper.enabled(), equalTo(true));
|
||||
|
||||
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "value")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("_index"), equalTo("test"));
|
||||
assertThat(doc.rootDoc().get("field"), equalTo("value"));
|
||||
}
|
||||
|
||||
public void testExplicitDisabledIndexMapperBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_index").field("enabled", false).endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class);
|
||||
assertThat(indexMapper.enabled(), equalTo(false));
|
||||
|
||||
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "value")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("_index"), nullValue());
|
||||
assertThat(doc.rootDoc().get("field"), equalTo("value"));
|
||||
}
|
||||
|
||||
public void testDefaultDisabledIndexMapper() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
|
@ -89,42 +47,4 @@ public class IndexTypeMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(doc.rootDoc().get("_index"), nullValue());
|
||||
assertThat(doc.rootDoc().get("field"), equalTo("value"));
|
||||
}
|
||||
|
||||
public void testThatMergingFieldMappingAllowsDisablingBackcompat() throws Exception {
|
||||
String mappingWithIndexEnabled = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_index").field("enabled", true).endObject()
|
||||
.endObject().endObject().string();
|
||||
MapperService mapperService = createIndex("test", bwcSettings).mapperService();
|
||||
DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(mappingWithIndexEnabled), true, false);
|
||||
assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(true));
|
||||
|
||||
String mappingWithIndexDisabled = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_index").field("enabled", false).endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mappingWithIndexDisabled), false, false);
|
||||
|
||||
assertThat(merged.IndexFieldMapper().enabled(), is(false));
|
||||
}
|
||||
|
||||
public void testCustomSettingsBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_index")
|
||||
.field("enabled", true)
|
||||
.field("store", "yes").endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
IndexFieldMapper indexMapper = docMapper.metadataMapper(IndexFieldMapper.class);
|
||||
assertThat(indexMapper.enabled(), equalTo(true));
|
||||
assertThat(indexMapper.fieldType().stored(), equalTo(true));
|
||||
|
||||
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "value")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("_index"), equalTo("test"));
|
||||
assertThat(doc.rootDoc().get("field"), equalTo("value"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,8 +22,6 @@ package org.elasticsearch.index.mapper.internal;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -141,45 +139,6 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertNull(doc.rootDoc().get("_field_names"));
|
||||
}
|
||||
|
||||
public void testPre13Disabled() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_2_4.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
|
||||
assertFalse(fieldNamesMapper.fieldType().isEnabled());
|
||||
}
|
||||
|
||||
public void testDisablingBackcompat() throws Exception {
|
||||
// before 1.5, disabling happened by setting index:no
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_field_names").field("index", "no").endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
|
||||
assertFalse(fieldNamesMapper.fieldType().isEnabled());
|
||||
|
||||
ParsedDocument doc = docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", "value")
|
||||
.endObject()
|
||||
.bytes());
|
||||
|
||||
assertNull(doc.rootDoc().get("_field_names"));
|
||||
}
|
||||
|
||||
public void testFieldTypeSettingsBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_field_names").field("store", "yes").endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
|
||||
assertTrue(fieldNamesMapper.fieldType().stored());
|
||||
}
|
||||
|
||||
public void testMergingMappings() throws Exception {
|
||||
String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_field_names").field("enabled", true).endObject()
|
||||
|
|
|
@ -18,10 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper.parent;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
|
@ -46,24 +43,6 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testParentSetInDocBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_parent").field("type", "p_type").endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
ParsedDocument doc = docMapper.parse(SourceToParse.source(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("_parent", "1122")
|
||||
.field("x_field", "x_value")
|
||||
.endObject()
|
||||
.bytes()).type("type").id("1"));
|
||||
|
||||
assertEquals("1122", doc.parent());
|
||||
assertEquals(Uid.createUid("p_type", "1122"), doc.rootDoc().get("_parent"));
|
||||
}
|
||||
|
||||
public void testParentSet() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_parent").field("type", "p_type").endObject()
|
||||
|
|
|
@ -19,31 +19,15 @@
|
|||
|
||||
package org.elasticsearch.index.mapper.routing;
|
||||
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.metadata.MappingMetaData;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.mapper.SourceToParse;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasKey;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
|
@ -62,72 +46,6 @@ public class RoutingTypeMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(doc.rootDoc().get("field"), equalTo("value"));
|
||||
}
|
||||
|
||||
public void testFieldTypeSettingsBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_routing")
|
||||
.field("store", "no")
|
||||
.field("index", "no")
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
assertThat(docMapper.routingFieldMapper().fieldType().stored(), equalTo(false));
|
||||
assertEquals(IndexOptions.NONE, docMapper.routingFieldMapper().fieldType().indexOptions());
|
||||
}
|
||||
|
||||
public void testFieldTypeSettingsSerializationBackcompat() throws Exception {
|
||||
String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_routing").field("store", "no").field("index", "no").endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper enabledMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(enabledMapping));
|
||||
|
||||
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
|
||||
enabledMapper.routingFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
|
||||
builder.close();
|
||||
Map<String, Object> serializedMap;
|
||||
try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) {
|
||||
serializedMap = parser.map();
|
||||
}
|
||||
assertThat(serializedMap, hasKey("_routing"));
|
||||
assertThat(serializedMap.get("_routing"), instanceOf(Map.class));
|
||||
Map<String, Object> routingConfiguration = (Map<String, Object>) serializedMap.get("_routing");
|
||||
assertThat(routingConfiguration, hasKey("store"));
|
||||
assertThat(routingConfiguration.get("store").toString(), is("false"));
|
||||
assertThat(routingConfiguration, hasKey("index"));
|
||||
assertThat(routingConfiguration.get("index").toString(), is("no"));
|
||||
}
|
||||
|
||||
public void testPathBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_routing").field("path", "custom_routing").endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_routing", "routing_value").endObject();
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
request.process(MetaData.builder().build(), mappingMetaData, true, "test");
|
||||
|
||||
assertEquals(request.routing(), "routing_value");
|
||||
}
|
||||
|
||||
public void testIncludeInObjectBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject();
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
request.process(MetaData.builder().build(), mappingMetaData, true, "test");
|
||||
|
||||
// _routing in a document never worked, so backcompat is ignoring the field
|
||||
assertNull(request.routing());
|
||||
assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_routing"));
|
||||
}
|
||||
|
||||
public void testIncludeInObjectNotAllowed() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
|
||||
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
|
|
@ -540,24 +540,4 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test backward compatibility
|
||||
*/
|
||||
public void testBackwardCompatible() throws Exception {
|
||||
|
||||
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
|
||||
Version.V_1_7_1)).build();
|
||||
|
||||
DocumentMapperParser parser = createIndex("backward_compatible_index", settings).mapperService().documentMapperParser();
|
||||
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("field1")
|
||||
.field("type", "string")
|
||||
.field("position_offset_gap", 10)
|
||||
.endObject().endObject().endObject().endObject().string();
|
||||
parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertThat(parser.parse("type", new CompressedXContent(mapping)).mapping().toString(), containsString("\"position_increment_gap\":10"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,11 +32,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
@ -49,20 +46,13 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.Version.V_1_5_0;
|
||||
import static org.elasticsearch.Version.V_2_0_0_beta1;
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.test.VersionUtils.randomVersion;
|
||||
import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasKey;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.lessThanOrEqualTo;
|
||||
|
@ -71,7 +61,6 @@ import static org.hamcrest.Matchers.notNullValue;
|
|||
/**
|
||||
*/
|
||||
public class TimestampMappingTests extends ESSingleNodeTestCase {
|
||||
Settings BWC_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
|
||||
public void testSimpleDisabled() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string();
|
||||
|
@ -104,41 +93,23 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
|
||||
public void testDefaultValues() throws Exception {
|
||||
for (Version version : Arrays.asList(V_1_5_0, V_2_0_0_beta1, randomVersion(random()))) {
|
||||
for (String mapping : Arrays.asList(
|
||||
XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(),
|
||||
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) {
|
||||
DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1)));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions()));
|
||||
assertThat(docMapper.timestampFieldMapper().path(), equalTo(TimestampFieldMapper.Defaults.PATH));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1)));
|
||||
String expectedFormat = version.onOrAfter(Version.V_2_0_0_beta1) ? TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT :
|
||||
TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format();
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(expectedFormat));
|
||||
assertAcked(client().admin().indices().prepareDelete("test").execute().get());
|
||||
}
|
||||
Version version;
|
||||
do {
|
||||
version = randomVersion(random());
|
||||
} while (version.before(Version.V_2_0_0_beta1));
|
||||
for (String mapping : Arrays.asList(
|
||||
XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(),
|
||||
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) {
|
||||
DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1)));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions()));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1)));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT));
|
||||
assertAcked(client().admin().indices().prepareDelete("test").execute().get());
|
||||
}
|
||||
}
|
||||
|
||||
public void testBackcompatSetValues() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", "yes").field("store", "no").field("index", "no")
|
||||
.field("path", "timestamp").field("format", "year")
|
||||
.field("doc_values", true)
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(true));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(false));
|
||||
assertEquals(IndexOptions.NONE, docMapper.timestampFieldMapper().fieldType().indexOptions());
|
||||
assertThat(docMapper.timestampFieldMapper().path(), equalTo("timestamp"));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo("year"));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(true));
|
||||
}
|
||||
|
||||
public void testThatDisablingDuringMergeIsWorking() throws Exception {
|
||||
String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true).endObject()
|
||||
|
@ -155,55 +126,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false));
|
||||
}
|
||||
|
||||
// issue 3174
|
||||
public void testThatSerializationWorksCorrectlyForIndexField() throws Exception {
|
||||
String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true).field("store", "yes").field("index", "no").endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper enabledMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(enabledMapping));
|
||||
|
||||
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
|
||||
enabledMapper.timestampFieldMapper().toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
|
||||
builder.close();
|
||||
Map<String, Object> serializedMap;
|
||||
try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) {
|
||||
serializedMap = parser.map();
|
||||
}
|
||||
assertThat(serializedMap, hasKey("_timestamp"));
|
||||
assertThat(serializedMap.get("_timestamp"), instanceOf(Map.class));
|
||||
Map<String, Object> timestampConfiguration = (Map<String, Object>) serializedMap.get("_timestamp");
|
||||
assertThat(timestampConfiguration, hasKey("index"));
|
||||
assertThat(timestampConfiguration.get("index").toString(), is("no"));
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testBackcompatPathMissingDefaultValue() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", "yes")
|
||||
.field("path", "timestamp")
|
||||
.field("ignore_missing", false)
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("foo", "bar")
|
||||
.endObject();
|
||||
|
||||
MetaData metaData = MetaData.builder().build();
|
||||
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
|
||||
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
try {
|
||||
request.process(metaData, mappingMetaData, true, "test");
|
||||
fail();
|
||||
} catch (TimestampParsingException e) {
|
||||
assertThat(e.getDetailedMessage(), containsString("timestamp is required by mapping"));
|
||||
}
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testTimestampDefaultValue() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
|
@ -230,32 +152,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(delay, lessThanOrEqualTo(60000L));
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testBackcompatPathMissingDefaultToEpochValue() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", "yes")
|
||||
.field("path", "timestamp")
|
||||
.field("default", "1970-01-01")
|
||||
.field("format", "YYYY-MM-dd")
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("foo", "bar")
|
||||
.endObject();
|
||||
|
||||
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
|
||||
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
|
||||
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
request.process(metaData, mappingMetaData, true, "test");
|
||||
assertThat(request.timestamp(), notNullValue());
|
||||
assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT)));
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testTimestampMissingDefaultToEpochValue() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
|
@ -281,35 +177,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT)));
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testBackcompatPathMissingNowDefaultValue() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", "yes")
|
||||
.field("path", "timestamp")
|
||||
.field("default", "now")
|
||||
.field("format", "YYYY-MM-dd")
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("foo", "bar")
|
||||
.endObject();
|
||||
|
||||
MetaData metaData = MetaData.builder().build();
|
||||
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
|
||||
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
request.process(metaData, mappingMetaData, true, "test");
|
||||
assertThat(request.timestamp(), notNullValue());
|
||||
|
||||
// We should have less than one minute (probably some ms)
|
||||
long delay = System.currentTimeMillis() - Long.parseLong(request.timestamp());
|
||||
assertThat(delay, lessThanOrEqualTo(60000L));
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testTimestampMissingNowDefaultValue() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
|
@ -355,34 +222,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testBackcompatPathMissingShouldFail() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", "yes")
|
||||
.field("path", "timestamp")
|
||||
.field("ignore_missing", false)
|
||||
.endObject()
|
||||
.endObject().endObject();
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("foo", "bar")
|
||||
.endObject();
|
||||
|
||||
MetaData metaData = MetaData.builder().build();
|
||||
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
|
||||
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
try {
|
||||
request.process(metaData, mappingMetaData, true, "test");
|
||||
fail("we should reject the mapping with a TimestampParsingException: timestamp is required by mapping");
|
||||
} catch (TimestampParsingException e) {
|
||||
assertThat(e.getDetailedMessage(), containsString("timestamp is required by mapping"));
|
||||
}
|
||||
}
|
||||
|
||||
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
|
||||
public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
|
@ -448,10 +287,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
public void testDefaultTimestampStream() throws IOException {
|
||||
// Testing null value for default timestamp
|
||||
{
|
||||
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null,
|
||||
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true,
|
||||
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null);
|
||||
MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)),
|
||||
new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false);
|
||||
new MappingMetaData.Routing(false), timestamp, false);
|
||||
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
expected.writeTo(out);
|
||||
|
@ -465,10 +304,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
|
||||
// Testing "now" value for default timestamp
|
||||
{
|
||||
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null,
|
||||
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true,
|
||||
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null);
|
||||
MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)),
|
||||
new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false);
|
||||
new MappingMetaData.Routing(false), timestamp, false);
|
||||
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
expected.writeTo(out);
|
||||
|
@ -482,10 +321,10 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
|
||||
// Testing "ignore_missing" value for default timestamp
|
||||
{
|
||||
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null,
|
||||
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true,
|
||||
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false);
|
||||
MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)),
|
||||
new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false);
|
||||
new MappingMetaData.Routing(false), timestamp, false);
|
||||
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
expected.writeTo(out);
|
||||
|
@ -498,25 +337,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testMergingFielddataLoadingWorks() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "lazy").field("format", "doc_values").endObject().field("store", "yes").endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
MapperService mapperService = createIndex("test", indexSettings).mapperService();
|
||||
|
||||
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false);
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values"));
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
docMapper = mapperService.merge("type", new CompressedXContent(mapping), false, false);
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER));
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array"));
|
||||
}
|
||||
|
||||
public void testParsingNotDefaultTwiceDoesNotChangeMapping() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
|
@ -530,126 +350,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(docMapper.mappingSource().string(), equalTo(mapping));
|
||||
}
|
||||
|
||||
public void testBackcompatParsingTwiceDoesNotChangeTokenizeValue() throws Exception {
|
||||
String[] index_options = {"no", "analyzed", "not_analyzed"};
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true)
|
||||
.field("index", index_options[randomInt(2)])
|
||||
.field("store", true)
|
||||
.field("path", "foo")
|
||||
.field("default", "1970-01-01")
|
||||
.startObject("fielddata").field("format", "doc_values").endObject()
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapperParser parser = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser();
|
||||
|
||||
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
boolean tokenized = docMapper.timestampFieldMapper().fieldType().tokenized();
|
||||
docMapper = parser.parse("type", docMapper.mappingSource());
|
||||
assertThat(tokenized, equalTo(docMapper.timestampFieldMapper().fieldType().tokenized()));
|
||||
}
|
||||
|
||||
public void testMergingConflicts() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true)
|
||||
.field("store", "yes")
|
||||
.field("index", "analyzed")
|
||||
.field("path", "foo")
|
||||
.field("default", "1970-01-01")
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
MapperService mapperService = createIndex("test", indexSettings).mapperService();
|
||||
|
||||
DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false);
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", false)
|
||||
.startObject("fielddata").field("format", "array").endObject()
|
||||
.field("store", "no")
|
||||
.field("index", "no")
|
||||
.field("path", "foo")
|
||||
.field("default", "1970-01-01")
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
try {
|
||||
mapperService.merge("type", new CompressedXContent(mapping), false, false);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values"));
|
||||
assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [store] values"));
|
||||
}
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", false)
|
||||
.startObject("fielddata").field("format", "array").endObject()
|
||||
.field("store", "yes")
|
||||
.field("index", "analyzed")
|
||||
.field("path", "bar")
|
||||
.field("default", "1970-01-02")
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
try {
|
||||
mapperService.merge("type", new CompressedXContent(mapping), false, false);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value"));
|
||||
assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value"));
|
||||
}
|
||||
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY));
|
||||
assertTrue(docMapper.timestampFieldMapper().enabled());
|
||||
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true)
|
||||
.field("store", "yes")
|
||||
.field("index", "analyzed")
|
||||
.field("path", "bar")
|
||||
.field("default", "1970-01-02")
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
try {
|
||||
mapperService.merge("type", new CompressedXContent(mapping), false, false);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02"));
|
||||
assertThat(e.getMessage(), containsString("Cannot update path in _timestamp value. Value is foo path in merged mapping is bar"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testBackcompatMergingConflictsForIndexValues() throws Exception {
|
||||
List<String> indexValues = new ArrayList<>();
|
||||
indexValues.add("analyzed");
|
||||
indexValues.add("no");
|
||||
indexValues.add("not_analyzed");
|
||||
String mapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("index", indexValues.remove(randomInt(2)))
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService();
|
||||
|
||||
mapperService.merge("type", new CompressedXContent(mapping), true, false);
|
||||
mapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("index", indexValues.remove(randomInt(1)))
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
|
||||
try {
|
||||
mapperService.merge("type", new CompressedXContent(mapping), false, false);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("mapper [_timestamp] has different [index] values"));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test for issue #9223
|
||||
*/
|
||||
|
@ -665,31 +365,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
new MappingMetaData(new CompressedXContent(mapping));
|
||||
}
|
||||
|
||||
public void testBackcompatMergePaths() throws Exception {
|
||||
String[] possiblePathValues = {"some_path", "anotherPath", null};
|
||||
MapperService mapperService = createIndex("test", BWC_SETTINGS).mapperService();
|
||||
XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.startObject("_timestamp");
|
||||
String path1 = possiblePathValues[randomInt(2)];
|
||||
if (path1!=null) {
|
||||
mapping1.field("path", path1);
|
||||
}
|
||||
mapping1.endObject()
|
||||
.endObject().endObject();
|
||||
XContentBuilder mapping2 = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.startObject("_timestamp");
|
||||
String path2 = possiblePathValues[randomInt(2)];
|
||||
if (path2!=null) {
|
||||
mapping2.field("path", path2);
|
||||
}
|
||||
mapping2.endObject()
|
||||
.endObject().endObject();
|
||||
|
||||
assertConflict(mapperService, "type", mapping1.string(), mapping2.string(), (path1 == path2 ? null : "Cannot update path in _timestamp value"));
|
||||
}
|
||||
|
||||
void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException {
|
||||
mapperService.merge("type", new CompressedXContent(mapping1), true, false);
|
||||
try {
|
||||
|
@ -701,93 +376,6 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testBackcompatDocValuesSerialization() throws Exception {
|
||||
// default
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.endObject().endObject().endObject().string();
|
||||
assertDocValuesSerialization(mapping);
|
||||
|
||||
// just format specified
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.startObject("fielddata").field("format", "doc_values").endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
assertDocValuesSerialization(mapping);
|
||||
|
||||
// explicitly enabled
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("doc_values", true)
|
||||
.endObject().endObject().endObject().string();
|
||||
assertDocValuesSerialization(mapping);
|
||||
|
||||
// explicitly disabled
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("doc_values", false)
|
||||
.endObject().endObject().endObject().string();
|
||||
assertDocValuesSerialization(mapping);
|
||||
|
||||
// explicitly enabled, with format
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("doc_values", true)
|
||||
.startObject("fielddata").field("format", "doc_values").endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
assertDocValuesSerialization(mapping);
|
||||
|
||||
// explicitly disabled, with format
|
||||
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("doc_values", false)
|
||||
.startObject("fielddata").field("format", "doc_values").endObject()
|
||||
.endObject().endObject().endObject().string();
|
||||
assertDocValuesSerialization(mapping);
|
||||
}
|
||||
|
||||
void assertDocValuesSerialization(String mapping) throws Exception {
|
||||
DocumentMapperParser parser = createIndex("test_doc_values", BWC_SETTINGS).mapperService().documentMapperParser();
|
||||
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
boolean docValues = docMapper.timestampFieldMapper().fieldType().hasDocValues();
|
||||
docMapper = parser.parse("type", docMapper.mappingSource());
|
||||
assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(docValues));
|
||||
assertAcked(client().admin().indices().prepareDelete("test_doc_values"));
|
||||
}
|
||||
|
||||
public void testBackcompatPath() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true).field("path", "custom_timestamp").endObject()
|
||||
.endObject().endObject().string();
|
||||
DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
|
||||
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 1).endObject();
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
request.process(metaData, mappingMetaData, true, "test");
|
||||
|
||||
assertThat(request.timestamp(), is("1"));
|
||||
}
|
||||
|
||||
public void testIncludeInObjectBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
|
||||
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_timestamp", 2000000).endObject();
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
request.process(metaData, mappingMetaData, true, "test");
|
||||
|
||||
// _timestamp in a document never worked, so backcompat is ignoring the field
|
||||
assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY"), Version.V_1_4_2), request.timestamp());
|
||||
assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_timestamp"));
|
||||
}
|
||||
|
||||
public void testIncludeInObjectNotAllowed() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject()
|
||||
|
|
|
@ -86,19 +86,6 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions()));
|
||||
}
|
||||
|
||||
public void testSetValuesBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_ttl")
|
||||
.field("enabled", "yes").field("store", "no")
|
||||
.endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(true));
|
||||
assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(true)); // store was never serialized, so it was always lost
|
||||
|
||||
}
|
||||
|
||||
public void testThatEnablingTTLFieldOnMergeWorks() throws Exception {
|
||||
String mappingWithoutTtl = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").field("field").startObject().field("type", "string").endObject().endObject()
|
||||
|
@ -216,23 +203,6 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
|
||||
}
|
||||
|
||||
public void testIncludeInObjectBackcompat() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_ttl").field("enabled", true).endObject()
|
||||
.endObject().endObject().string();
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_ttl", "2d").endObject();
|
||||
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
|
||||
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
|
||||
request.process(MetaData.builder().build(), mappingMetaData, true, "test");
|
||||
|
||||
// _ttl in a document never worked, so backcompat is ignoring the field
|
||||
assertNull(request.ttl());
|
||||
assertNull(docMapper.parse("test", "type", "1", doc.bytes()).rootDoc().get("_ttl"));
|
||||
}
|
||||
|
||||
public void testIncludeInObjectNotAllowed() throws Exception {
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_ttl").field("enabled", true).endObject()
|
||||
|
|
|
@ -247,23 +247,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testIndexFieldParsingBackcompat() throws IOException {
|
||||
IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build());
|
||||
XContentBuilder indexMapping = XContentFactory.jsonBuilder();
|
||||
boolean enabled = randomBoolean();
|
||||
indexMapping.startObject()
|
||||
.startObject("type")
|
||||
.startObject("_index")
|
||||
.field("enabled", enabled)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
|
||||
assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled));
|
||||
documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
|
||||
assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled));
|
||||
}
|
||||
|
||||
public void testTimestampParsing() throws IOException {
|
||||
IndexService indexService = createIndex("test", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build());
|
||||
XContentBuilder indexMapping = XContentFactory.jsonBuilder();
|
||||
|
@ -272,10 +255,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
|
|||
.startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", enabled)
|
||||
.field("store", true)
|
||||
.startObject("fielddata")
|
||||
.field("format", "doc_values")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
|
|
@ -217,76 +217,6 @@ public class SimpleRoutingIT extends ESIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testRequiredRoutingWithPathMapping() throws Exception {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.addAlias(new Alias("alias"))
|
||||
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_routing").field("required", true).field("path", "routing_field").endObject().startObject("properties")
|
||||
.startObject("routing_field").field("type", "string").field("index", randomBoolean() ? "no" : "not_analyzed").field("doc_values", randomBoolean() ? "yes" : "no").endObject().endObject()
|
||||
.endObject().endObject())
|
||||
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)
|
||||
.execute().actionGet();
|
||||
ensureGreen();
|
||||
|
||||
logger.info("--> indexing with id [1], and routing [0]");
|
||||
client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", "0").setRefresh(true).execute().actionGet();
|
||||
|
||||
logger.info("--> check failure with different routing");
|
||||
try {
|
||||
client().prepareIndex(indexOrAlias(), "type1", "1").setRouting("1").setSource("field", "value1", "routing_field", "0").setRefresh(true).execute().actionGet();
|
||||
fail();
|
||||
} catch (ElasticsearchException e) {
|
||||
assertThat(e.unwrapCause(), instanceOf(MapperParsingException.class));
|
||||
}
|
||||
|
||||
|
||||
logger.info("--> verifying get with no routing, should fail");
|
||||
for (int i = 0; i < 5; i++) {
|
||||
try {
|
||||
client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists();
|
||||
fail();
|
||||
} catch (RoutingMissingException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]"));
|
||||
}
|
||||
}
|
||||
logger.info("--> verifying get with routing, should find");
|
||||
for (int i = 0; i < 5; i++) {
|
||||
assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
|
||||
}
|
||||
}
|
||||
|
||||
public void testRequiredRoutingWithPathMappingBulk() throws Exception {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.addAlias(new Alias("alias"))
|
||||
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_routing").field("required", true).field("path", "routing_field").endObject()
|
||||
.endObject().endObject())
|
||||
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)
|
||||
.execute().actionGet();
|
||||
ensureGreen();
|
||||
|
||||
logger.info("--> indexing with id [1], and routing [0]");
|
||||
client().prepareBulk().add(
|
||||
client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", "0")).execute().actionGet();
|
||||
client().admin().indices().prepareRefresh().execute().actionGet();
|
||||
|
||||
logger.info("--> verifying get with no routing, should fail");
|
||||
for (int i = 0; i < 5; i++) {
|
||||
try {
|
||||
client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists();
|
||||
fail();
|
||||
} catch (RoutingMissingException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]"));
|
||||
}
|
||||
}
|
||||
logger.info("--> verifying get with routing, should find");
|
||||
for (int i = 0; i < 5; i++) {
|
||||
assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
|
||||
}
|
||||
}
|
||||
|
||||
public void testRequiredRoutingBulk() throws Exception {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.addAlias(new Alias("alias"))
|
||||
|
@ -317,37 +247,6 @@ public class SimpleRoutingIT extends ESIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testRequiredRoutingWithPathNumericType() throws Exception {
|
||||
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.addAlias(new Alias("alias"))
|
||||
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_routing").field("required", true).field("path", "routing_field").endObject()
|
||||
.endObject().endObject())
|
||||
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID)
|
||||
.execute().actionGet();
|
||||
ensureGreen();
|
||||
|
||||
logger.info("--> indexing with id [1], and routing [0]");
|
||||
client().prepareIndex(indexOrAlias(), "type1", "1").setSource("field", "value1", "routing_field", 0).execute().actionGet();
|
||||
client().admin().indices().prepareRefresh().execute().actionGet();
|
||||
|
||||
logger.info("--> verifying get with no routing, should fail");
|
||||
for (int i = 0; i < 5; i++) {
|
||||
try {
|
||||
client().prepareGet(indexOrAlias(), "type1", "1").execute().actionGet().isExists();
|
||||
fail();
|
||||
} catch (RoutingMissingException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]"));
|
||||
}
|
||||
}
|
||||
logger.info("--> verifying get with routing, should find");
|
||||
for (int i = 0; i < 5; i++) {
|
||||
assertThat(client().prepareGet(indexOrAlias(), "type1", "1").setRouting("0").execute().actionGet().isExists(), equalTo(true));
|
||||
}
|
||||
}
|
||||
|
||||
public void testRequiredRoutingMapping_variousAPIs() throws Exception {
|
||||
client().admin().indices().prepareCreate("test").addAlias(new Alias("alias"))
|
||||
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject())
|
||||
|
|
|
@ -476,39 +476,6 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertThirdHit(searchResponse, hasId("2"));
|
||||
}
|
||||
|
||||
public void testOmitTermFreqsAndPositions() throws Exception {
|
||||
cluster().wipeTemplates(); // no randomized template for this test -- we are testing bwc compat and set version explicitly this might cause failures if an unsupported feature
|
||||
// is added randomly via an index template.
|
||||
Version version = Version.CURRENT;
|
||||
int iters = scaledRandomIntBetween(10, 20);
|
||||
for (int i = 0; i < iters; i++) {
|
||||
try {
|
||||
// backwards compat test!
|
||||
assertAcked(client().admin().indices().prepareCreate("test")
|
||||
.addMapping("type1", "field1", "type=string,omit_term_freq_and_positions=true")
|
||||
.setSettings(settings(version).put(SETTING_NUMBER_OF_SHARDS, 1)));
|
||||
assertThat(version.onOrAfter(Version.V_1_0_0_RC2), equalTo(false));
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"),
|
||||
client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox"));
|
||||
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get();
|
||||
assertHitCount(searchResponse, 1l);
|
||||
try {
|
||||
client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)).get();
|
||||
fail("SearchPhaseExecutionException should have been thrown");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertTrue(e.toString().contains("IllegalStateException[field \"field1\" was indexed without position data; cannot run PhraseQuery"));
|
||||
}
|
||||
cluster().wipeIndices("test");
|
||||
} catch (MapperParsingException ex) {
|
||||
assertThat(version.toString(), version.onOrAfter(Version.V_1_0_0_RC2), equalTo(true));
|
||||
assertThat(ex.getCause().getMessage(), equalTo("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead"));
|
||||
}
|
||||
version = randomVersion(random());
|
||||
}
|
||||
}
|
||||
|
||||
public void testQueryStringAnalyzedWildcard() throws Exception {
|
||||
createIndex("test");
|
||||
|
||||
|
@ -635,24 +602,8 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertHitCount(searchResponse, 0l);
|
||||
}
|
||||
|
||||
public void testTypeFilterTypeIndexedTests() throws Exception {
|
||||
typeFilterTests("not_analyzed");
|
||||
}
|
||||
|
||||
public void testTypeFilterTypeNotIndexedTests() throws Exception {
|
||||
typeFilterTests("no");
|
||||
}
|
||||
|
||||
private void typeFilterTests(String index) throws Exception {
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
assertAcked(prepareCreate("test").setSettings(indexSettings)
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_type").field("index", index).endObject()
|
||||
.endObject().endObject())
|
||||
.addMapping("type2", jsonBuilder().startObject().startObject("type2")
|
||||
.startObject("_type").field("index", index).endObject()
|
||||
.endObject().endObject())
|
||||
.setUpdateAllTypes(true));
|
||||
public void testTypeFilter() throws Exception {
|
||||
assertAcked(prepareCreate("test"));
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"),
|
||||
client().prepareIndex("test", "type2", "1").setSource("field1", "value1"),
|
||||
client().prepareIndex("test", "type1", "2").setSource("field1", "value1"),
|
||||
|
@ -669,19 +620,7 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testIdsQueryTestsIdIndexed() throws Exception {
|
||||
idsQueryTests("not_analyzed");
|
||||
}
|
||||
|
||||
public void testIdsQueryTestsIdNotIndexed() throws Exception {
|
||||
idsQueryTests("no");
|
||||
}
|
||||
|
||||
private void idsQueryTests(String index) throws Exception {
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
assertAcked(client().admin().indices().prepareCreate("test").setSettings(indexSettings)
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_id").field("index", index).endObject()
|
||||
.endObject().endObject()));
|
||||
assertAcked(client().admin().indices().prepareCreate("test"));
|
||||
|
||||
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"),
|
||||
client().prepareIndex("test", "type1", "2").setSource("field1", "value2"),
|
||||
|
@ -714,27 +653,13 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertSearchHits(searchResponse, "1", "3");
|
||||
}
|
||||
|
||||
public void testTermIndexQueryIndexed() throws Exception {
|
||||
termIndexQueryTests("not_analyzed");
|
||||
}
|
||||
|
||||
public void testTermIndexQueryNotIndexed() throws Exception {
|
||||
termIndexQueryTests("no");
|
||||
}
|
||||
|
||||
private void termIndexQueryTests(String index) throws Exception {
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
|
||||
public void testTermIndexQuery() throws Exception {
|
||||
String[] indexNames = { "test1", "test2" };
|
||||
for (String indexName : indexNames) {
|
||||
assertAcked(client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareCreate(indexName)
|
||||
.setSettings(indexSettings)
|
||||
.addMapping(
|
||||
"type1",
|
||||
jsonBuilder().startObject().startObject("type1").startObject("_index").field("index", index).endObject()
|
||||
.endObject().endObject()));
|
||||
.prepareCreate(indexName));
|
||||
|
||||
indexRandom(true, client().prepareIndex(indexName, "type1", indexName + "1").setSource("field1", "value1"));
|
||||
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.messy.tests;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.alias.Alias;
|
||||
import org.elasticsearch.action.bulk.BulkItemResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
|
@ -33,10 +32,8 @@ import org.elasticsearch.action.search.SearchResponse;
|
|||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequestBuilder;
|
||||
import org.elasticsearch.action.update.UpdateResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
@ -45,7 +42,6 @@ import org.elasticsearch.script.ScriptService;
|
|||
import org.elasticsearch.script.groovy.GroovyPlugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
@ -53,7 +49,6 @@ import java.util.concurrent.CyclicBarrier;
|
|||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
|
||||
|
@ -554,86 +549,6 @@ public class BulkTests extends ESIntegTestCase {
|
|||
assertThat(successes, equalTo(1));
|
||||
}
|
||||
|
||||
// issue 4745
|
||||
public void testPreParsingSourceDueToMappingShouldNotBreakCompleteBulkRequest() throws Exception {
|
||||
XContentBuilder builder = jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", true)
|
||||
.field("path", "last_modified")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
assertAcked(prepareCreate("test").addMapping("type", builder)
|
||||
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID));
|
||||
|
||||
String brokenBuildRequestData = "{\"index\": {\"_id\": \"1\"}}\n" +
|
||||
"{\"name\": \"Malformed}\n" +
|
||||
"{\"index\": {\"_id\": \"2\"}}\n" +
|
||||
"{\"name\": \"Good\", \"last_modified\" : \"2013-04-05\"}\n";
|
||||
|
||||
BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
|
||||
assertThat(bulkResponse.getItems().length, is(2));
|
||||
assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
|
||||
assertThat(bulkResponse.getItems()[1].isFailed(), is(false));
|
||||
|
||||
assertExists(get("test", "type", "2"));
|
||||
}
|
||||
|
||||
// issue 4745
|
||||
public void testPreParsingSourceDueToRoutingShouldNotBreakCompleteBulkRequest() throws Exception {
|
||||
XContentBuilder builder = jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.startObject("_routing")
|
||||
.field("required", true)
|
||||
.field("path", "my_routing")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
assertAcked(prepareCreate("test").addMapping("type", builder)
|
||||
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID));
|
||||
ensureYellow("test");
|
||||
|
||||
String brokenBuildRequestData = "{\"index\": {} }\n" +
|
||||
"{\"name\": \"Malformed}\n" +
|
||||
"{\"index\": { \"_id\" : \"24000\" } }\n" +
|
||||
"{\"name\": \"Good\", \"my_routing\" : \"48000\"}\n";
|
||||
|
||||
BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
|
||||
assertThat(bulkResponse.getItems().length, is(2));
|
||||
assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
|
||||
assertThat(bulkResponse.getItems()[1].isFailed(), is(false));
|
||||
|
||||
assertExists(client().prepareGet("test", "type", "24000").setRouting("48000").get());
|
||||
}
|
||||
|
||||
|
||||
// issue 4745
|
||||
public void testPreParsingSourceDueToIdShouldNotBreakCompleteBulkRequest() throws Exception {
|
||||
XContentBuilder builder = jsonBuilder().startObject()
|
||||
.startObject("type")
|
||||
.startObject("_id")
|
||||
.field("path", "my_id")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
assertAcked(prepareCreate("test").addMapping("type", builder)
|
||||
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2_ID));
|
||||
ensureYellow("test");
|
||||
|
||||
String brokenBuildRequestData = "{\"index\": {} }\n" +
|
||||
"{\"name\": \"Malformed}\n" +
|
||||
"{\"index\": {} }\n" +
|
||||
"{\"name\": \"Good\", \"my_id\" : \"48\"}\n";
|
||||
|
||||
BulkResponse bulkResponse = client().prepareBulk().add(brokenBuildRequestData.getBytes(StandardCharsets.UTF_8), 0, brokenBuildRequestData.length(), "test", "type").setRefresh(true).get();
|
||||
assertThat(bulkResponse.getItems().length, is(2));
|
||||
assertThat(bulkResponse.getItems()[0].isFailed(), is(true));
|
||||
assertThat(bulkResponse.getItems()[1].isFailed(), is(false));
|
||||
|
||||
assertExists(get("test", "type", "48"));
|
||||
}
|
||||
|
||||
// issue 4987
|
||||
public void testThatInvalidIndexNamesShouldNotBreakCompleteBulkRequest() {
|
||||
int bulkEntryCount = randomIntBetween(10, 50);
|
||||
|
|
|
@ -161,16 +161,13 @@ public class SizeFieldMapper extends MetadataFieldMapper {
|
|||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
// all are defaults, no need to write it at all
|
||||
if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && (indexCreatedBefore2x == false || fieldType().stored() == false)) {
|
||||
if (!includeDefaults && enabledState == Defaults.ENABLED_STATE) {
|
||||
return builder;
|
||||
}
|
||||
builder.startObject(contentType());
|
||||
if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
|
||||
builder.field("enabled", enabledState.enabled);
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() == true)) {
|
||||
builder.field("store", fieldType().stored());
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue