Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-03-09 14:34:43 +00:00)

commit 31afc8a9a5
Merge remote-tracking branch 'upstream/master' into feature/seq_no

@@ -29,7 +29,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

@@ -48,7 +47,13 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.index.shard.*;
+import org.elasticsearch.index.shard.IndexEventListener;
+import org.elasticsearch.index.shard.IndexSearcherWrapper;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.ShadowIndexShard;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.index.shard.ShardNotFoundException;
+import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store;

@@ -93,7 +98,6 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
    private final AtomicBoolean deleted = new AtomicBoolean(false);
    private final IndexSettings indexSettings;

    @Inject
    public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv,
                        SimilarityService similarityService,
                        ShardStoreDeleter shardStoreDeleter,

@@ -160,13 +164,17 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
        return indexShard;
    }

-   public Set<Integer> shardIds() { return shards.keySet(); }
+   public Set<Integer> shardIds() {
+       return shards.keySet();
+   }

    public IndexCache cache() {
        return indexCache;
    }

-   public IndexFieldDataService fieldData() { return indexFieldData; }
+   public IndexFieldDataService fieldData() {
+       return indexFieldData;
+   }

    public AnalysisService analysisService() {
        return this.analysisService;

@@ -462,6 +470,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
            }
        }
    }

    /**
     * Returns the filter associated with listed filtering aliases.
     * <p>

@@ -19,16 +19,9 @@

package org.elasticsearch.index.mapper;

-public class ContentPath {
+public final class ContentPath {

-   public enum Type {
-       JUST_NAME,
-       FULL,
-   }

-   private Type pathType;

-   private final char delimiter;
+   private static final char DELIMITER = '.';

    private final StringBuilder sb;

@@ -47,7 +40,6 @@ public class ContentPath {
     * number of path elements to not be included in {@link #pathAsText(String)}.
     */
    public ContentPath(int offset) {
-       this.delimiter = '.';
        this.sb = new StringBuilder();
        this.offset = offset;
        reset();

@@ -71,26 +63,11 @@ public class ContentPath {
    }

    public String pathAsText(String name) {
-       if (pathType == Type.JUST_NAME) {
-           return name;
-       }
-       return fullPathAsText(name);
-   }

-   public String fullPathAsText(String name) {
        sb.setLength(0);
        for (int i = offset; i < index; i++) {
-           sb.append(path[i]).append(delimiter);
+           sb.append(path[i]).append(DELIMITER);
        }
        sb.append(name);
        return sb.toString();
    }

-   public Type pathType() {
-       return pathType;
-   }

-   public void pathType(Type type) {
-       this.pathType = type;
-   }
}

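The net effect of the ContentPath change is easiest to see in isolation: with the `JUST_NAME` mode gone, `pathAsText` always builds the fully qualified, dot-delimited name. Below is a minimal standalone sketch of the surviving behavior, not part of the commit; the `add`/`remove`/array-growth logic is assumed from context, and the `main` driver is purely illustrative.

// Illustrative sketch only. The real class is
// org.elasticsearch.index.mapper.ContentPath; this copy keeps just the
// path-building logic visible in the diff above.
final class ContentPathSketch {
    private static final char DELIMITER = '.';
    private final StringBuilder sb = new StringBuilder();
    private final int offset;      // leading path elements to skip
    private String[] path = new String[10];
    private int index;             // number of elements currently on the path

    ContentPathSketch(int offset) {
        this.offset = offset;
    }

    void add(String name) {
        if (index == path.length) {
            path = java.util.Arrays.copyOf(path, path.length * 2);
        }
        path[index++] = name;
    }

    void remove() {
        path[--index] = null;
    }

    // Formerly fullPathAsText(String); pathAsText no longer has a
    // JUST_NAME short-circuit, so the full dotted path is always built.
    String pathAsText(String name) {
        sb.setLength(0);
        for (int i = offset; i < index; i++) {
            sb.append(path[i]).append(DELIMITER);
        }
        sb.append(name);
        return sb.toString();
    }

    public static void main(String[] args) {
        ContentPathSketch p = new ContentPathSketch(0);
        p.add("user");
        p.add("address");
        System.out.println(p.pathAsText("city")); // prints: user.address.city
    }
}
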
@@ -234,9 +234,6 @@ class DocumentParser implements Closeable {
        nestedDoc.add(new Field(TypeFieldMapper.NAME, mapper.nestedTypePathAsString(), TypeFieldMapper.Defaults.FIELD_TYPE));
    }

-   ContentPath.Type origPathType = context.path().pathType();
-   context.path().pathType(mapper.pathType());

    // if we are at the end of the previous object, advance
    if (token == XContentParser.Token.END_OBJECT) {
        token = parser.nextToken();

@@ -272,7 +269,6 @@ class DocumentParser implements Closeable {
        }
    }
    // restore the enable path flag
-   context.path().pathType(origPathType);
    if (nested.isNested()) {
        ParseContext.Document nestedDoc = context.doc();
        ParseContext.Document parentDoc = nestedDoc.getParent();

@@ -341,7 +337,7 @@ class DocumentParser implements Closeable {
        context.path().remove();
        Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object");
        if (builder == null) {
-           builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(mapper.pathType());
+           builder = MapperBuilders.object(currentFieldName).enabled(true);
            // if this is a non root object, then explicitly set the dynamic behavior if set
            if (!(mapper instanceof RootObjectMapper) && mapper.dynamic() != ObjectMapper.Defaults.DYNAMIC) {
                ((ObjectMapper.Builder) builder).dynamic(mapper.dynamic());

@@ -610,7 +606,7 @@ class DocumentParser implements Closeable {
        return null;
    }
    final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
-   final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().fullPathAsText(currentFieldName));
+   final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().pathAsText(currentFieldName));
    Mapper.Builder builder = null;
    if (existingFieldType != null) {
        // create a builder of the same type

@@ -695,7 +691,7 @@ class DocumentParser implements Closeable {
    if (paths.length > 1) {
        ObjectMapper parent = context.root();
        for (int i = 0; i < paths.length - 1; i++) {
-           mapper = context.docMapper().objectMappers().get(context.path().fullPathAsText(paths[i]));
+           mapper = context.docMapper().objectMappers().get(context.path().pathAsText(paths[i]));
            if (mapper == null) {
                // One mapping is missing, check if we are allowed to create a dynamic one.
                ObjectMapper.Dynamic dynamic = parent.dynamic();

@@ -713,12 +709,12 @@ class DocumentParser implements Closeable {
            if (!(parent instanceof RootObjectMapper) && parent.dynamic() != ObjectMapper.Defaults.DYNAMIC) {
                ((ObjectMapper.Builder) builder).dynamic(parent.dynamic());
            }
-           builder = MapperBuilders.object(paths[i]).enabled(true).pathType(parent.pathType());
+           builder = MapperBuilders.object(paths[i]).enabled(true);
        }
        Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
        mapper = (ObjectMapper) builder.build(builderContext);
        if (mapper.nested() != ObjectMapper.Nested.NO) {
-           throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().fullPathAsText(paths[i]) + "]) through `copy_to`");
+           throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`");
        }
        break;
    case FALSE:

@@ -207,11 +207,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
        return this;
    }

-   public T multiFieldPathType(ContentPath.Type pathType) {
-       multiFieldsBuilder.pathType(pathType);
-       return builder;
-   }

    public T addMultiField(Mapper.Builder mapperBuilder) {
        multiFieldsBuilder.add(mapperBuilder);
        return builder;

@@ -242,7 +237,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
    }

    protected String buildFullName(BuilderContext context) {
-       return context.path().fullPathAsText(name);
+       return context.path().pathAsText(name);
    }

    protected void setupFieldType(BuilderContext context) {

@@ -540,18 +535,12 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
    public static class MultiFields {

        public static MultiFields empty() {
-           return new MultiFields(ContentPath.Type.FULL, ImmutableOpenMap.<String, FieldMapper>of());
+           return new MultiFields(ImmutableOpenMap.<String, FieldMapper>of());
        }

        public static class Builder {

            private final ImmutableOpenMap.Builder<String, Mapper.Builder> mapperBuilders = ImmutableOpenMap.builder();
-           private ContentPath.Type pathType = ContentPath.Type.FULL;

-           public Builder pathType(ContentPath.Type pathType) {
-               this.pathType = pathType;
-               return this;
-           }

            public Builder add(Mapper.Builder builder) {
                mapperBuilders.put(builder.name(), builder);

@@ -560,13 +549,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable {

            @SuppressWarnings("unchecked")
            public MultiFields build(FieldMapper.Builder mainFieldBuilder, BuilderContext context) {
-               if (pathType == ContentPath.Type.FULL && mapperBuilders.isEmpty()) {
+               if (mapperBuilders.isEmpty()) {
                    return empty();
-               } else if (mapperBuilders.isEmpty()) {
-                   return new MultiFields(pathType, ImmutableOpenMap.<String, FieldMapper>of());
                } else {
-                   ContentPath.Type origPathType = context.path().pathType();
-                   context.path().pathType(pathType);
                    context.path().add(mainFieldBuilder.name());
                    ImmutableOpenMap.Builder mapperBuilders = this.mapperBuilders;
                    for (ObjectObjectCursor<String, Mapper.Builder> cursor : this.mapperBuilders) {

@@ -577,18 +562,15 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
                        mapperBuilders.put(key, mapper);
                    }
                    context.path().remove();
-                   context.path().pathType(origPathType);
                    ImmutableOpenMap.Builder<String, FieldMapper> mappers = mapperBuilders.cast();
-                   return new MultiFields(pathType, mappers.build());
+                   return new MultiFields(mappers.build());
                }
            }
        }

-       private final ContentPath.Type pathType;
        private final ImmutableOpenMap<String, FieldMapper> mappers;

-       private MultiFields(ContentPath.Type pathType, ImmutableOpenMap<String, FieldMapper> mappers) {
-           this.pathType = pathType;
+       private MultiFields(ImmutableOpenMap<String, FieldMapper> mappers) {
            ImmutableOpenMap.Builder<String, FieldMapper> builder = new ImmutableOpenMap.Builder<>();
            // we disable the all in multi-field mappers
            for (ObjectObjectCursor<String, FieldMapper> cursor : mappers) {

@@ -609,21 +591,14 @@ public abstract class FieldMapper extends Mapper implements Cloneable {

            context = context.createMultiFieldContext();

-           ContentPath.Type origPathType = context.path().pathType();
-           context.path().pathType(pathType);

            context.path().add(mainField.simpleName());
            for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
                cursor.value.parse(context);
            }
            context.path().remove();
-           context.path().pathType(origPathType);
        }

        public MultiFields merge(MultiFields mergeWith) {
-           if (pathType != mergeWith.pathType) {
-               throw new IllegalArgumentException("Can't change path type from [" + pathType + "] to [" + mergeWith.pathType + "]");
-           }
            ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = ImmutableOpenMap.builder(mappers);

            for (ObjectCursor<FieldMapper> cursor : mergeWith.mappers.values()) {

@@ -642,7 +617,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
            }

            ImmutableOpenMap<String, FieldMapper> mappers = newMappersBuilder.build();
-           return new MultiFields(pathType, mappers);
+           return new MultiFields(mappers);
        }

        public Iterator<Mapper> iterator() {

@@ -650,9 +625,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
        }

        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-           if (pathType != ContentPath.Type.FULL) {
-               builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
-           }
            if (!mappers.isEmpty()) {
                // sort the mappers so we get consistent serialization format
                Mapper[] sortedMappers = mappers.values().toArray(Mapper.class);

@@ -338,7 +338,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {

    for (FieldMapper fieldMapper : fieldMappers) {
        if (fullPathObjectMappers.containsKey(fieldMapper.name())) {
-           throw new IllegalArgumentException("Field [{}] is defined as a field in mapping [" + fieldMapper.name() + "] but this name is already used for an object in other types");
+           throw new IllegalArgumentException("Field [" + fieldMapper.name() + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types");
        }
    }

@@ -61,7 +61,6 @@ public class TypeParsers {

    @Override
    public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-       ContentPath.Type pathType = null;
        FieldMapper.Builder mainFieldBuilder = null;
        List<FieldMapper.Builder> fields = null;
        String firstType = null;

@@ -70,10 +69,7 @@ public class TypeParsers {
        Map.Entry<String, Object> entry = iterator.next();
        String fieldName = Strings.toUnderscoreCase(entry.getKey());
        Object fieldNode = entry.getValue();
-       if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-           pathType = parsePathType(name, fieldNode.toString());
-           iterator.remove();
-       } else if (fieldName.equals("fields")) {
+       if (fieldName.equals("fields")) {
            Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
            for (Iterator<Map.Entry<String, Object>> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) {
                Map.Entry<String, Object> entry1 = fieldsIterator.next();

@@ -132,17 +128,10 @@ public class TypeParsers {
            }
        }

-       if (fields != null && pathType != null) {
+       if (fields != null) {
            for (Mapper.Builder field : fields) {
                mainFieldBuilder.addMultiField(field);
            }
-           mainFieldBuilder.multiFieldPathType(pathType);
-       } else if (fields != null) {
-           for (Mapper.Builder field : fields) {
-               mainFieldBuilder.addMultiField(field);
-           }
-       } else if (pathType != null) {
-           mainFieldBuilder.multiFieldPathType(pathType);
        }
        return mainFieldBuilder;
    }

@@ -337,10 +326,7 @@ public class TypeParsers {

    public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
        parserContext = parserContext.createMultiFieldContext(parserContext);
-       if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-           builder.multiFieldPathType(parsePathType(name, propNode.toString()));
-           return true;
-       } else if (propName.equals("fields")) {
+       if (propName.equals("fields")) {

            final Map<String, Object> multiFieldsPropNodes;

@@ -457,17 +443,6 @@ public class TypeParsers {
        }
    }

-   public static ContentPath.Type parsePathType(String name, String path) throws MapperParsingException {
-       path = Strings.toUnderscoreCase(path);
-       if ("just_name".equals(path)) {
-           return ContentPath.Type.JUST_NAME;
-       } else if ("full".equals(path)) {
-           return ContentPath.Type.FULL;
-       } else {
-           throw new MapperParsingException("wrong value for pathType [" + path + "] for object [" + name + "]");
-       }
-   }

    @SuppressWarnings("unchecked")
    public static void parseCopyFields(Object propNode, FieldMapper.Builder builder) {
        FieldMapper.CopyTo.Builder copyToBuilder = new FieldMapper.CopyTo.Builder();

@@ -33,7 +33,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;

@@ -73,7 +72,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
    }

    public static class Defaults {
-       public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
        public static final boolean ENABLE_LATLON = false;
        public static final boolean ENABLE_GEOHASH = false;
        public static final boolean ENABLE_GEOHASH_PREFIX = false;

@@ -82,7 +80,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
    }

    public abstract static class Builder<T extends Builder, Y extends BaseGeoPointFieldMapper> extends FieldMapper.Builder<T, Y> {
-       protected ContentPath.Type pathType = Defaults.PATH_TYPE;

        protected boolean enableLatLon = Defaults.ENABLE_LATLON;

@@ -105,12 +102,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
            return (GeoPointFieldType) fieldType;
        }

-       @Override
-       public T multiFieldPathType(ContentPath.Type pathType) {
-           this.pathType = pathType;
-           return builder;
-       }

        @Override
        public T fieldDataSettings(Settings settings) {
            this.fieldDataSettings = settings;

@@ -158,13 +149,10 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
        }

        public abstract Y build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
-               Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
+               Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
                StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo);

        public Y build(Mapper.BuilderContext context) {
-           ContentPath.Type origPathType = context.path().pathType();
-           context.path().pathType(pathType);

            GeoPointFieldType geoPointFieldType = (GeoPointFieldType) fieldType;

            DoubleFieldMapper latMapper = null;

@@ -190,9 +178,8 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
                geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix);
            }
            context.path().remove();
-           context.path().pathType(origPathType);

-           return build(context, name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
+           return build(context, name, fieldType, defaultFieldType, context.indexSettings(),
                    latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo);
        }
    }

@@ -364,17 +351,14 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr

    protected final DoubleFieldMapper lonMapper;

-   protected final ContentPath.Type pathType;

    protected final StringFieldMapper geoHashMapper;

    protected Explicit<Boolean> ignoreMalformed;

    protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
-           ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper,
+           DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper,
            MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
-       this.pathType = pathType;
        this.latMapper = latMapper;
        this.lonMapper = lonMapper;
        this.geoHashMapper = geoHashMapper;

@@ -434,8 +418,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr

    @Override
    public Mapper parse(ParseContext context) throws IOException {
-       ContentPath.Type origPathType = context.path().pathType();
-       context.path().pathType(pathType);
        context.path().add(simpleName());

        GeoPoint sparse = context.parseExternalValue(GeoPoint.class);

@@ -480,7 +462,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
        }

        context.path().remove();
-       context.path().pathType(origPathType);
        return null;
    }

@@ -505,9 +486,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
-       if (includeDefaults || pathType != Defaults.PATH_TYPE) {
-           builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
-       }
        if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) {
            builder.field("lat_lon", fieldType().isLatLonEnabled());
        }

@@ -27,7 +27,6 @@ import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;

@@ -81,12 +80,12 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {

    @Override
    public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType,
-           MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper,
+           MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper,
            DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
            CopyTo copyTo) {
        fieldType.setTokenized(false);
        setupFieldType(context);
-       return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper,
+       return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper,
                geoHashMapper, multiFields, ignoreMalformed, copyTo);
    }

@@ -104,9 +103,9 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
    }

    public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
-           ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
+           DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
            StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
-       super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields,
+       super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields,
                ignoreMalformed, copyTo);
    }

@@ -35,7 +35,6 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;

@@ -110,14 +109,14 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement

    @Override
    public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType,
-           MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper,
+           MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper,
            DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
            CopyTo copyTo) {
        fieldType.setTokenized(false);
        setupFieldType(context);
        fieldType.setHasDocValues(false);
        defaultFieldType.setHasDocValues(false);
-       return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper,
+       return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper,
                geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo);
    }

@@ -287,10 +286,10 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
    protected Explicit<Boolean> coerce;

    public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
-           ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
+           DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
            StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
            Explicit<Boolean> coerce, CopyTo copyTo) {
-       super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields,
+       super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields,
                ignoreMalformed, copyTo);
        this.coerce = coerce;
    }

@@ -125,13 +125,13 @@ public class DynamicTemplate {
    }

    public boolean match(ContentPath path, String name, String dynamicType) {
-       if (pathMatch != null && !patternMatch(pathMatch, path.fullPathAsText(name))) {
+       if (pathMatch != null && !patternMatch(pathMatch, path.pathAsText(name))) {
            return false;
        }
        if (match != null && !patternMatch(match, name)) {
            return false;
        }
-       if (pathUnmatch != null && patternMatch(pathUnmatch, path.fullPathAsText(name))) {
+       if (pathUnmatch != null && patternMatch(pathUnmatch, path.pathAsText(name))) {
            return false;
        }
        if (unmatch != null && patternMatch(unmatch, name)) {

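Since `pathAsText` now always yields the full dotted path, a dynamic template's `path_match`/`path_unmatch` patterns are tested against the fully qualified field name, while `match`/`unmatch` see only the leaf name, in the order shown above. The toy program below is not from the commit: its regex-based `patternMatch` is a stand-in for the real helper (whose implementation is outside this diff), and the simplified `match` signature omits the dynamic-type argument.

// Toy illustration of the DynamicTemplate.match(...) checking order.
final class DynamicTemplateMatchDemo {

    // Stand-in wildcard matcher: '*' matches any run of characters.
    static boolean patternMatch(String pattern, String value) {
        return value.matches(pattern.replace(".", "\\.").replace("*", ".*"));
    }

    static boolean match(String pathMatch, String pathUnmatch, String match, String unmatch,
                         String fullPath, String name) {
        if (pathMatch != null && !patternMatch(pathMatch, fullPath)) return false;
        if (match != null && !patternMatch(match, name)) return false;
        if (pathUnmatch != null && patternMatch(pathUnmatch, fullPath)) return false;
        if (unmatch != null && patternMatch(unmatch, name)) return false;
        return true;
    }

    public static void main(String[] args) {
        // Path pattern fits and the leaf name is not excluded -> true.
        System.out.println(match("user.*", null, null, "*_id", "user.address.city", "city"));
        // Leaf name hits the unmatch pattern -> false.
        System.out.println(match("user.*", null, null, "*_id", "user.address.city", "city_id"));
    }
}
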
@@ -24,7 +24,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;

@@ -40,7 +39,6 @@ import java.util.*;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.object;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;

/**
 *

@@ -54,7 +52,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
    public static final boolean ENABLED = true;
    public static final Nested NESTED = Nested.NO;
    public static final Dynamic DYNAMIC = null; // not set, inherited from root
-   public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
}

public static enum Dynamic {

@@ -104,8 +101,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,

    protected Dynamic dynamic = Defaults.DYNAMIC;

-   protected ContentPath.Type pathType = Defaults.PATH_TYPE;

    protected Boolean includeInAll;

    protected final List<Mapper.Builder> mappersBuilders = new ArrayList<>();

@@ -130,11 +125,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
        return builder;
    }

-   public T pathType(ContentPath.Type pathType) {
-       this.pathType = pathType;
-       return builder;
-   }

    public T includeInAll(boolean includeInAll) {
        this.includeInAll = includeInAll;
        return builder;

@@ -147,8 +137,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,

    @Override
    public Y build(BuilderContext context) {
-       ContentPath.Type origPathType = context.path().pathType();
-       context.path().pathType(pathType);
        context.path().add(name);

        Map<String, Mapper> mappers = new HashMap<>();

@@ -156,17 +144,16 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
            Mapper mapper = builder.build(context);
            mappers.put(mapper.simpleName(), mapper);
        }
-       context.path().pathType(origPathType);
        context.path().remove();

-       ObjectMapper objectMapper = createMapper(name, context.path().fullPathAsText(name), enabled, nested, dynamic, pathType, mappers, context.indexSettings());
+       ObjectMapper objectMapper = createMapper(name, context.path().pathAsText(name), enabled, nested, dynamic, mappers, context.indexSettings());
        objectMapper = objectMapper.includeInAllIfNotSet(includeInAll);

        return (Y) objectMapper;
    }

-   protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers, @Nullable Settings settings) {
-       return new ObjectMapper(name, fullPath, enabled, nested, dynamic, pathType, mappers);
+   protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map<String, Mapper> mappers, @Nullable Settings settings) {
+       return new ObjectMapper(name, fullPath, enabled, nested, dynamic, mappers);
    }
}

@@ -179,7 +166,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
        Map.Entry<String, Object> entry = iterator.next();
        String fieldName = Strings.toUnderscoreCase(entry.getKey());
        Object fieldNode = entry.getValue();
-       if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder) || parseObjectProperties(name, fieldName, fieldNode, parserContext, builder)) {
+       if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder)) {
            iterator.remove();
        }
    }

@@ -214,14 +201,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
        return false;
    }

-   protected static boolean parseObjectProperties(String name, String fieldName, Object fieldNode, ParserContext parserContext, ObjectMapper.Builder builder) {
-       if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-           builder.pathType(parsePathType(name, fieldNode.toString()));
-           return true;
-       }
-       return false;
-   }

    protected static void parseNested(String name, Map<String, Object> node, ObjectMapper.Builder builder) {
        boolean nested = false;
        boolean nestedIncludeInParent = false;

@@ -326,19 +305,16 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,

    private volatile Dynamic dynamic;

-   private final ContentPath.Type pathType;

    private Boolean includeInAll;

    private volatile CopyOnWriteHashMap<String, Mapper> mappers;

-   ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
+   ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map<String, Mapper> mappers) {
        super(name);
        this.fullPath = fullPath;
        this.enabled = enabled;
        this.nested = nested;
        this.dynamic = dynamic;
-       this.pathType = pathType;
        if (mappers == null) {
            this.mappers = new CopyOnWriteHashMap<>();
        } else {

@@ -380,10 +356,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
        return this.enabled;
    }

-   public ContentPath.Type pathType() {
-       return pathType;
-   }

    public Mapper getMapper(String field) {
        return mappers.get(field);
    }

@@ -535,9 +507,6 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
    if (enabled != Defaults.ENABLED) {
        builder.field("enabled", enabled);
    }
-   if (pathType != Defaults.PATH_TYPE) {
-       builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
-   }
    if (includeInAll != null) {
        builder.field("include_in_all", includeInAll);
    }

@@ -95,7 +95,7 @@ public class RootObjectMapper extends ObjectMapper {

    @Override
-   protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers, @Nullable Settings settings) {
+   protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, Map<String, Mapper> mappers, @Nullable Settings settings) {
        assert !nested.isNested();
        FormatDateTimeFormatter[] dates = null;
        if (dynamicDateTimeFormatters == null) {

@@ -106,7 +106,7 @@ public class RootObjectMapper extends ObjectMapper {
        } else {
            dates = dynamicDateTimeFormatters.toArray(new FormatDateTimeFormatter[dynamicDateTimeFormatters.size()]);
        }
-       return new RootObjectMapper(name, enabled, dynamic, pathType, mappers,
+       return new RootObjectMapper(name, enabled, dynamic, mappers,
                dates,
                dynamicTemplates.toArray(new DynamicTemplate[dynamicTemplates.size()]),
                dateDetection, numericDetection);

@@ -196,9 +196,9 @@ public class RootObjectMapper extends ObjectMapper {

    private volatile DynamicTemplate dynamicTemplates[];

-   RootObjectMapper(String name, boolean enabled, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers,
+   RootObjectMapper(String name, boolean enabled, Dynamic dynamic, Map<String, Mapper> mappers,
            FormatDateTimeFormatter[] dynamicDateTimeFormatters, DynamicTemplate dynamicTemplates[], boolean dateDetection, boolean numericDetection) {
-       super(name, name, enabled, Nested.NO, dynamic, pathType, mappers);
+       super(name, name, enabled, Nested.NO, dynamic, mappers);
        this.dynamicTemplates = dynamicTemplates;
        this.dynamicDateTimeFormatters = dynamicDateTimeFormatters;
        this.dateDetection = dateDetection;

@@ -22,7 +22,6 @@ import org.apache.lucene.store.StoreRateLimiting;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.node.settings.NodeSettingsService;

@@ -0,0 +1,34 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.monitor.os;

public class DummyOsInfo extends OsInfo {

    DummyOsInfo() {
        refreshInterval = 0;
        availableProcessors = 0;
        allocatedProcessors = 0;
        name = "dummy_name";
        arch = "dummy_arch";
        version = "dummy_version";
    }

    public static final DummyOsInfo INSTANCE = new DummyOsInfo();
}

@@ -108,6 +108,9 @@ public class OsInfo implements Streamable, ToXContent {
        refreshInterval = in.readLong();
        availableProcessors = in.readInt();
        allocatedProcessors = in.readInt();
+       name = in.readOptionalString();
+       arch = in.readOptionalString();
+       version = in.readOptionalString();
    }

    @Override

@@ -115,5 +118,8 @@ public class OsInfo implements Streamable, ToXContent {
        out.writeLong(refreshInterval);
        out.writeInt(availableProcessors);
        out.writeInt(allocatedProcessors);
+       out.writeOptionalString(name);
+       out.writeOptionalString(arch);
+       out.writeOptionalString(version);
    }
}

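The OsInfo change follows the usual Streamable rule: the three new optional fields are written in `writeTo` in exactly the order the read path consumes them. A self-contained illustration of that symmetry follows; plain `java.io` streams stand in for Elasticsearch's StreamOutput/StreamInput, and the presence-flag encoding is an assumption used to model optional strings, not the actual wire format.

import java.io.*;

// Illustrative only: every field written must be read back in the same
// order, with matching optional-aware calls on both sides.
final class StreamSymmetryDemo {

    static void writeOptionalString(DataOutputStream out, String s) throws IOException {
        out.writeBoolean(s != null);              // presence flag
        if (s != null) out.writeUTF(s);
    }

    static String readOptionalString(DataInputStream in) throws IOException {
        return in.readBoolean() ? in.readUTF() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.writeLong(0L);                        // refreshInterval
        out.writeInt(0);                          // availableProcessors
        out.writeInt(0);                          // allocatedProcessors
        writeOptionalString(out, "dummy_name");
        writeOptionalString(out, "dummy_arch");
        writeOptionalString(out, "dummy_version");

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        long refreshInterval = in.readLong();
        int availableProcessors = in.readInt();
        int allocatedProcessors = in.readInt();
        String name = readOptionalString(in);
        String arch = readOptionalString(in);
        String version = readOptionalString(in);
        System.out.println(name + " " + arch + " " + version);
    }
}
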
@@ -0,0 +1,28 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.plugins;

public class DummyPluginInfo extends PluginInfo {

    private DummyPluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) {
        super(name, description, site, version, jvm, classname, isolated);
    }

    public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", true, "dummy_plugin_version", true, "DummyPluginName", true);
}

@@ -18,6 +18,7 @@
 */
package org.elasticsearch.search.aggregations.bucket.children;

+import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.search.*;

@@ -64,9 +65,6 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
    private final LongObjectPagedHashMap<long[]> parentOrdToOtherBuckets;
    private boolean multipleBucketsPerParentOrd = false;

-   // This needs to be a Set to avoid duplicate reader context entries via (#setNextReader(...), it can get invoked multiple times with the same reader context)
-   private Set<LeafReaderContext> replay = new LinkedHashSet<>();

    public ParentToChildrenAggregator(String name, AggregatorFactories factories, AggregationContext aggregationContext,
            Aggregator parent, String parentType, Query childFilter, Query parentFilter,
            ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource,

@@ -99,17 +97,11 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
-       if (replay == null) {
-           throw new IllegalStateException();
-       }

        final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);
        assert globalOrdinals != null;
        Scorer parentScorer = parentFilter.scorer(ctx);
        final Bits parentDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), parentScorer);
-       if (childFilter.scorer(ctx) != null) {
-           replay.add(ctx);
-       }
        return new LeafBucketCollector() {

            @Override

@@ -138,10 +130,8 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {

    @Override
    protected void doPostCollection() throws IOException {
-       final Set<LeafReaderContext> replay = this.replay;
-       this.replay = null;

-       for (LeafReaderContext ctx : replay) {
+       IndexReader indexReader = context().searchContext().searcher().getIndexReader();
+       for (LeafReaderContext ctx : indexReader.leaves()) {
            DocIdSetIterator childDocsIter = childFilter.scorer(ctx);
            if (childDocsIter == null) {
                continue;

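The aggregator fix swaps the removed `replay` set for an unconditional walk over every leaf reader in `doPostCollection`, so segments holding only child documents can no longer be skipped. The toy program below is not from the commit; `Segment` and `Doc` are hypothetical stand-ins rather than Lucene types, and it only shows the shape of the visit-every-leaf pattern.

import java.util.*;

// Instead of remembering which segments produced parent matches during
// collection (the removed replay set), post-collection now walks every
// segment, so child-only segments are never skipped.
final class PostCollectAllLeavesDemo {

    static final class Doc {
        final String type;
        Doc(String type) { this.type = type; }
    }

    static final class Segment {
        final List<Doc> docs;
        Segment(Doc... docs) { this.docs = Arrays.asList(docs); }
    }

    public static void main(String[] args) {
        // Segment 0 holds only a parent doc; segment 1 holds only child docs.
        List<Segment> leaves = Arrays.asList(
                new Segment(new Doc("parentType")),
                new Segment(new Doc("childType"), new Doc("childType")));

        // Fixed behavior: visit every leaf unconditionally, analogous to
        // iterating indexReader.leaves() in doPostCollection().
        long childCount = 0;
        for (Segment leaf : leaves) {
            for (Doc doc : leaf.docs) {           // ~ childFilter.scorer(ctx)
                if ("childType".equals(doc.type)) {
                    childCount++;
                }
            }
        }
        System.out.println("children counted: " + childCount); // 2
    }
}
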
@@ -95,9 +95,6 @@ public class ExternalMapper extends FieldMapper {

    @Override
    public ExternalMapper build(BuilderContext context) {
-       ContentPath.Type origPathType = context.path().pathType();
-       context.path().pathType(ContentPath.Type.FULL);

        context.path().add(name);
        BinaryFieldMapper binMapper = binBuilder.build(context);
        BooleanFieldMapper boolMapper = boolBuilder.build(context);

@@ -107,7 +104,6 @@ public class ExternalMapper extends FieldMapper {
        FieldMapper stringMapper = (FieldMapper) stringBuilder.build(context);
        context.path().remove();

-       context.path().pathType(origPathType);
        setupFieldType(context);

        return new ExternalMapper(name, fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper,

@@ -0,0 +1,140 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.nodesinfo;

import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.os.DummyOsInfo;
import org.elasticsearch.monitor.os.OsInfo;
import org.elasticsearch.monitor.process.ProcessInfo;
import org.elasticsearch.plugins.DummyPluginInfo;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolInfo;
import org.elasticsearch.transport.TransportInfo;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.core.IsEqual.equalTo;

/**
 *
 */
public class NodeInfoStreamingTests extends ESTestCase {

    public void testNodeInfoStreaming() throws IOException {
        NodeInfo nodeInfo = createNodeInfo();
        Version version = Version.CURRENT;
        BytesStreamOutput out = new BytesStreamOutput();
        out.setVersion(version);
        nodeInfo.writeTo(out);
        out.close();
        StreamInput in = StreamInput.wrap(out.bytes());
        in.setVersion(version);
        NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in);
        assertExpectedUnchanged(nodeInfo, readNodeInfo);
    }

    // Checks all properties that are expected to be unchanged.
    // Once we start changing them between versions, this method has to be changed as well.
    private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException {
        assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString()));
        assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname()));
        assertThat(nodeInfo.getVersion(), equalTo(readNodeInfo.getVersion()));
        assertThat(nodeInfo.getServiceAttributes().size(), equalTo(readNodeInfo.getServiceAttributes().size()));
        for (Map.Entry<String, String> entry : nodeInfo.getServiceAttributes().entrySet()) {
            assertNotNull(readNodeInfo.getServiceAttributes().get(entry.getKey()));
            assertThat(readNodeInfo.getServiceAttributes().get(entry.getKey()), equalTo(entry.getValue()));
        }
        compareJsonOutput(nodeInfo.getHttp(), readNodeInfo.getHttp());
        compareJsonOutput(nodeInfo.getJvm(), readNodeInfo.getJvm());
        compareJsonOutput(nodeInfo.getProcess(), readNodeInfo.getProcess());
        compareJsonOutput(nodeInfo.getSettings(), readNodeInfo.getSettings());
        compareJsonOutput(nodeInfo.getThreadPool(), readNodeInfo.getThreadPool());
        compareJsonOutput(nodeInfo.getTransport(), readNodeInfo.getTransport());
        compareJsonOutput(nodeInfo.getNode(), readNodeInfo.getNode());
        compareJsonOutput(nodeInfo.getOs(), readNodeInfo.getOs());
        comparePluginsAndModules(nodeInfo, readNodeInfo);
    }

    private void comparePluginsAndModules(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException {
        ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        XContentBuilder pluginsAndModules = jsonBuilder();
        pluginsAndModules.startObject();
        nodeInfo.getPlugins().toXContent(pluginsAndModules, params);
        pluginsAndModules.endObject();
        XContentBuilder readPluginsAndModules = jsonBuilder();
        readPluginsAndModules.startObject();
        readNodeInfo.getPlugins().toXContent(readPluginsAndModules, params);
        readPluginsAndModules.endObject();
        assertThat(pluginsAndModules.string(), equalTo(readPluginsAndModules.string()));
    }

    private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOException {
        ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        XContentBuilder param1Builder = jsonBuilder();
        XContentBuilder param2Builder = jsonBuilder();
        param1.toXContent(param1Builder, params);
        param2.toXContent(param2Builder, params);
        assertThat(param1Builder.string(), equalTo(param2Builder.string()));
    }

    private NodeInfo createNodeInfo() {
        Build build = Build.CURRENT;
        DiscoveryNode node = new DiscoveryNode("test_node", DummyTransportAddress.INSTANCE, VersionUtils.randomVersion(random()));
        Map<String, String> serviceAttributes = new HashMap<>();
        serviceAttributes.put("test", "attribute");
        Settings settings = Settings.builder().put("test", "setting").build();
        OsInfo osInfo = DummyOsInfo.INSTANCE;
        ProcessInfo process = new ProcessInfo(randomInt(), randomBoolean());
        JvmInfo jvm = JvmInfo.jvmInfo();
        List<ThreadPool.Info> threadPoolInfos = new ArrayList<>();
        threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5));
        ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos);
        Map<String, BoundTransportAddress> profileAddresses = new HashMap<>();
        BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE}, DummyTransportAddress.INSTANCE);
        profileAddresses.put("test_address", dummyBoundTransportAddress);
        TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses);
        HttpInfo httpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong());
        PluginsAndModules plugins = new PluginsAndModules();
        plugins.addModule(DummyPluginInfo.INSTANCE);
        plugins.addPlugin(DummyPluginInfo.INSTANCE);
        return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, httpInfo, plugins);
    }
}

@@ -18,12 +18,15 @@
 */
package org.elasticsearch.search.aggregations.bucket;

import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.children.Children;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;

@@ -392,6 +395,65 @@ public class ChildrenIT extends ESIntegTestCase {
        assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1l));
    }

+   public void testPostCollectAllLeafReaders() throws Exception {
+       // The 'towns' and 'parent_names' aggs operate on parent docs, and if child docs end up in different
+       // segments we need to ensure that the segments containing those child docs are also evaluated in the
+       // post collect phase.

+       // Previously we only evaluated segments that yielded matches in the 'towns' and 'parent_names' aggs,
+       // which caused us to miss child docs living in segments without any parent matches.

+       assertAcked(
+           prepareCreate("index")
+               .addMapping("parentType", "name", "type=string,index=not_analyzed", "town", "type=string,index=not_analyzed")
+               .addMapping("childType", "_parent", "type=parentType", "name", "type=string,index=not_analyzed", "age", "type=integer")
+       );
+       List<IndexRequestBuilder> requests = new ArrayList<>();
+       requests.add(client().prepareIndex("index", "parentType", "1").setSource("name", "Bob", "town", "Memphis"));
+       requests.add(client().prepareIndex("index", "parentType", "2").setSource("name", "Alice", "town", "Chicago"));
+       requests.add(client().prepareIndex("index", "parentType", "3").setSource("name", "Bill", "town", "Chicago"));
+       requests.add(client().prepareIndex("index", "childType", "1").setSource("name", "Jill", "age", 5).setParent("1"));
+       requests.add(client().prepareIndex("index", "childType", "2").setSource("name", "Joey", "age", 3).setParent("1"));
+       requests.add(client().prepareIndex("index", "childType", "3").setSource("name", "John", "age", 2).setParent("2"));
+       requests.add(client().prepareIndex("index", "childType", "4").setSource("name", "Betty", "age", 6).setParent("3"));
+       requests.add(client().prepareIndex("index", "childType", "5").setSource("name", "Dan", "age", 1).setParent("3"));
+       indexRandom(true, requests);

+       SearchResponse response = client().prepareSearch("index")
+           .setSize(0)
+           .addAggregation(AggregationBuilders.terms("towns").field("town")
+               .subAggregation(AggregationBuilders.terms("parent_names").field("name")
+                   .subAggregation(AggregationBuilders.children("child_docs").childType("childType"))
+               )
+           )
+           .get();

+       Terms towns = response.getAggregations().get("towns");
+       assertThat(towns.getBuckets().size(), equalTo(2));
+       assertThat(towns.getBuckets().get(0).getKeyAsString(), equalTo("Chicago"));
+       assertThat(towns.getBuckets().get(0).getDocCount(), equalTo(2L));

+       Terms parents = towns.getBuckets().get(0).getAggregations().get("parent_names");
+       assertThat(parents.getBuckets().size(), equalTo(2));
+       assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Alice"));
+       assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L));
+       Children children = parents.getBuckets().get(0).getAggregations().get("child_docs");
+       assertThat(children.getDocCount(), equalTo(1L));

+       assertThat(parents.getBuckets().get(1).getKeyAsString(), equalTo("Bill"));
+       assertThat(parents.getBuckets().get(1).getDocCount(), equalTo(1L));
+       children = parents.getBuckets().get(1).getAggregations().get("child_docs");
+       assertThat(children.getDocCount(), equalTo(2L));

+       assertThat(towns.getBuckets().get(1).getKeyAsString(), equalTo("Memphis"));
+       assertThat(towns.getBuckets().get(1).getDocCount(), equalTo(1L));
+       parents = towns.getBuckets().get(1).getAggregations().get("parent_names");
+       assertThat(parents.getBuckets().size(), equalTo(1));
+       assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Bob"));
+       assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L));
+       children = parents.getBuckets().get(0).getAggregations().get("child_docs");
+       assertThat(children.getDocCount(), equalTo(2L));
+   }

    private static final class Control {

        final String category;

@@ -61,7 +61,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase {
    final int numShards = between(1, 20);
    client().admin().indices().prepareCreate("test")
        .setSettings(settingsBuilder().put("index.number_of_shards", numShards).put("index.number_of_replicas", numberOfReplicas))
-       .addMapping("type1", "loc", "type=geo_point", "test", "type=string").execute().actionGet();
+       .addMapping("type", "loc", "type=geo_point", "test", "type=string").execute().actionGet();
    ensureGreen();
    List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
    final int numDocs = between(10, 20);

@ -88,24 +88,26 @@ by raising an issue. Thank you!

Once installed, define the configuration for the `hdfs` repository through `elasticsearch.yml` or the
{ref}/modules-snapshots.html[REST API]:

-[source]
+[source,yaml]
----
repositories:
    hdfs:
-        uri: "hdfs://<host>:<port>/"    # optional - Hadoop file-system URI
-        path: "some/path"               # required - path within the file-system where data is stored/loaded
-        load_defaults: "true"           # optional - whether to load the default Hadoop configuration (default) or not
-        conf_location: "extra-cfg.xml"  # optional - Hadoop configuration XML to be loaded (use commas for multi values)
-        conf.<key>: "<value>"           # optional - 'inlined' key=value added to the Hadoop configuration
-        concurrent_streams: 5           # optional - the number of concurrent streams (defaults to 5)
-        compress: "false"               # optional - whether to compress the metadata or not (default)
-        chunk_size: "10mb"              # optional - chunk size (disabled by default)
+        uri: "hdfs://<host>:<port>/"    \# optional - Hadoop file-system URI
+        path: "some/path"               \# required - path within the file-system where data is stored/loaded
+        load_defaults: "true"           \# optional - whether to load the default Hadoop configuration (default) or not
+        conf_location: "extra-cfg.xml"  \# optional - Hadoop configuration XML to be loaded (use commas for multi values)
+        conf.<key>: "<value>"           \# optional - 'inlined' key=value added to the Hadoop configuration
+        concurrent_streams: 5           \# optional - the number of concurrent streams (defaults to 5)
+        compress: "false"               \# optional - whether to compress the metadata or not (default)
+        chunk_size: "10mb"              \# optional - chunk size (disabled by default)

----

NOTE: Be careful when including a path within the `uri` setting; some implementations ignore it completely while
others honor it. In general, we recommend keeping the `uri` to a minimum and using the `path` element instead.

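Alternatively, the repository can be registered at runtime through the snapshot REST API. A minimal sketch, assuming
a repository named `my_hdfs_repository` and a reachable namenode (both names are placeholders, not part of this diff):

[source,js]
----
PUT /_snapshot/my_hdfs_repository
{
  "type": "hdfs",
  "settings": {
    "uri": "hdfs://namenode:8020/",
    "path": "elasticsearch/repositories"
  }
}
----
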
-===== Plugging other file-systems
+[[repository-hdfs-other-fs]]
+==== Plugging other file-systems

Any HDFS-compatible file-system (like Amazon `s3://` or Google `gs://`) can be used, as long as the proper Hadoop
configuration is passed to the Elasticsearch plugin. In practice, this means making sure the correct Hadoop configuration
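As a minimal sketch of what such a setup could look like, using the `conf.<key>` mechanism documented above (the
`s3a://` scheme, bucket name, and credential keys below are illustrative assumptions, not part of this diff; the exact
keys depend on the Hadoop file-system implementation on the classpath):

[source,yaml]
----
repositories:
    hdfs:
        uri: "s3a://my-bucket/"                  # assumption - any HDFS-compatible scheme
        path: "backups"
        conf.fs.s3a.access.key: "<access-key>"   # assumption - keys specific to the chosen file-system
        conf.fs.s3a.secret.key: "<secret-key>"
----
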
@ -110,7 +110,7 @@ GET my_index/_search
      "bool": {
        "must": [
          { "match": { "user.first": "Alice" }},
-          { "match": { "user.last": "White" }} <2>
+          { "match": { "user.last": "Smith" }} <2>
        ]
      }
    }
@ -127,7 +127,7 @@ GET my_index/_search
      "bool": {
        "must": [
          { "match": { "user.first": "Alice" }},
-          { "match": { "user.last": "Smith" }} <3>
+          { "match": { "user.last": "White" }} <3>
        ]
      }
    },
@ -137,14 +137,14 @@ GET my_index/_search
            "user.first": {}
          }
        }
      }

    }
  }
}
--------------------------------------------------
// AUTOSENSE
<1> The `user` field is mapped as type `nested` instead of type `object`.
-<2> This query doesn't match because `Alice` and `White` are not in the same nested object.
+<2> This query doesn't match because `Alice` and `Smith` are not in the same nested object.
<3> This query matches because `Alice` and `White` are in the same nested object.
<4> `inner_hits` allows us to highlight the matching nested documents.
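
For context, callouts <2> and <3> assume the mapping and sample document from earlier on this page, along the lines
of the following sketch (index, type, and field names are illustrative; only the `nested` mapping is essential):

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "user": { "type": "nested" }
      }
    }
  }
}

PUT my_index/my_type/1
{
  "group": "fans",
  "user": [
    { "first": "John",  "last": "Smith" },
    { "first": "Alice", "last": "White" }
  ]
}
--------------------------------------------------

Because each `user` entry is indexed as its own hidden nested document, `first` and `last` values from different
entries can never satisfy the same `bool` query inside a single `nested` clause, which is why `Alice` plus `Smith`
fails while `Alice` plus `White` matches.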
@ -37,7 +37,6 @@ import java.util.*;

import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;

/**
 * <pre>
@ -65,7 +64,6 @@ public class AttachmentMapper extends FieldMapper {
    public static final String CONTENT_TYPE = "attachment";

    public static class Defaults {
-        public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;

        public static final AttachmentFieldType FIELD_TYPE = new AttachmentFieldType();
        static {
@ -108,8 +106,6 @@ public class AttachmentMapper extends FieldMapper {

    public static class Builder extends FieldMapper.Builder<Builder, AttachmentMapper> {

-        private ContentPath.Type pathType = Defaults.PATH_TYPE;
-
        private Boolean ignoreErrors = null;

        private Integer defaultIndexedChars = null;
@ -140,11 +136,6 @@ public class AttachmentMapper extends FieldMapper {
            this.contentBuilder = stringField(FieldNames.CONTENT);
        }

-        public Builder pathType(ContentPath.Type pathType) {
-            this.pathType = pathType;
-            return this;
-        }
-
        public Builder content(Mapper.Builder content) {
            this.contentBuilder = content;
            return this;
@ -192,8 +183,6 @@ public class AttachmentMapper extends FieldMapper {

        @Override
        public AttachmentMapper build(BuilderContext context) {
-            ContentPath.Type origPathType = context.path().pathType();
-            context.path().pathType(pathType);

            FieldMapper contentMapper;
            if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
@ -220,8 +209,6 @@ public class AttachmentMapper extends FieldMapper {
            FieldMapper language = (FieldMapper) languageBuilder.build(context);
            context.path().remove();

-            context.path().pathType(origPathType);
-
            if (defaultIndexedChars == null && context.indexSettings() != null) {
                defaultIndexedChars = context.indexSettings().getAsInt("index.mapping.attachment.indexed_chars", 100000);
            }
@ -257,7 +244,7 @@ public class AttachmentMapper extends FieldMapper {

            defaultFieldType.freeze();
            this.setupFieldType(context);
-            return new AttachmentMapper(name, fieldType, defaultFieldType, pathType, defaultIndexedChars, ignoreErrors, langDetect, contentMapper,
+            return new AttachmentMapper(name, fieldType, defaultFieldType, defaultIndexedChars, ignoreErrors, langDetect, contentMapper,
                    dateMapper, titleMapper, nameMapper, authorMapper, keywordsMapper, contentTypeMapper, contentLength,
                    language, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
        }
@ -309,10 +296,7 @@ public class AttachmentMapper extends FieldMapper {
            Map.Entry<String, Object> entry = iterator.next();
            String fieldName = entry.getKey();
            Object fieldNode = entry.getValue();
-            if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                builder.pathType(parsePathType(name, fieldNode.toString()));
-                iterator.remove();
-            } else if (fieldName.equals("fields")) {
+            if (fieldName.equals("fields")) {
                Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
                for (Iterator<Map.Entry<String, Object>> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) {
                    Map.Entry<String, Object> entry1 = fieldsIterator.next();
@ -375,8 +359,6 @@ public class AttachmentMapper extends FieldMapper {
        }
    }

-    private final ContentPath.Type pathType;
-
    private final int defaultIndexedChars;

    private final boolean ignoreErrors;
@ -401,13 +383,12 @@ public class AttachmentMapper extends FieldMapper {

    private final FieldMapper languageMapper;

-    public AttachmentMapper(String simpleName, MappedFieldType type, MappedFieldType defaultFieldType, ContentPath.Type pathType, int defaultIndexedChars, Boolean ignoreErrors,
+    public AttachmentMapper(String simpleName, MappedFieldType type, MappedFieldType defaultFieldType, int defaultIndexedChars, Boolean ignoreErrors,
                            Boolean defaultLangDetect, FieldMapper contentMapper,
                            FieldMapper dateMapper, FieldMapper titleMapper, FieldMapper nameMapper, FieldMapper authorMapper,
                            FieldMapper keywordsMapper, FieldMapper contentTypeMapper, FieldMapper contentLengthMapper,
                            FieldMapper languageMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, type, defaultFieldType, indexSettings, multiFields, copyTo);
-        this.pathType = pathType;
        this.defaultIndexedChars = defaultIndexedChars;
        this.ignoreErrors = ignoreErrors;
        this.defaultLangDetect = defaultLangDetect;
@ -626,9 +607,6 @@ public class AttachmentMapper extends FieldMapper {
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(simpleName());
        builder.field("type", CONTENT_TYPE);
-        if (indexCreatedBefore2x) {
-            builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
-        }

        builder.startObject("fields");
        contentMapper.toXContent(builder, params);
@ -168,6 +168,7 @@ public class HttpRequestBuilder {
            logger.trace("sending request \n{}", stringBuilder.toString());
        }
        for (Map.Entry<String, String> entry : this.headers.entrySet()) {
+            logger.trace("adding header [{} => {}]", entry.getKey(), entry.getValue());
            httpUriRequest.addHeader(entry.getKey(), entry.getValue());
        }
        try (CloseableHttpResponse closeableHttpResponse = httpClient.execute(httpUriRequest)) {
|
Loading…
x
Reference in New Issue
Block a user