From 08ee4a87b3b4cc7fff1204623014b359861628a7 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 21 May 2015 12:36:44 +0200 Subject: [PATCH 01/37] Internal: tighten up our compression framework. We have a compression framework that we use internally, mainly to compress some xcontent bytes. However it is quite lenient: for instance it relies on the assumption that detection of the compression format can only be called on either some compressed xcontent bytes or some raw xcontent bytes, but nothing checks this. By the way, we are misusing it in BinaryFieldMapper so that if someone indexes a binary field which happens to have the same header as a LZF stream, then at read time, we will try to decompress it. It also simplifies the API by removing block compression (only streaming) and some code duplication caused by some methods accepting a byte[] and other methods a BytesReference. --- .../TransportReplicationAction.java | 4 +- .../elasticsearch/cluster/ClusterState.java | 4 +- .../cluster/metadata/AliasMetaData.java | 22 ++-- .../cluster/metadata/IndexMetaData.java | 4 +- .../metadata/IndexTemplateMetaData.java | 24 ++-- .../cluster/metadata/MappingMetaData.java | 20 +-- .../metadata/MetaDataCreateIndexService.java | 8 +- .../metadata/MetaDataMappingService.java | 16 +-- .../common/bytes/PagedBytesReference.java | 13 +- .../common/compress/CompressedIndexInput.java | 6 +- .../compress/CompressedStreamInput.java | 13 +- .../compress/CompressedStreamOutput.java | 6 +- ...sedString.java => CompressedXContent.java} | 39 +++--- .../common/compress/Compressor.java | 24 ++-- .../common/compress/CompressorFactory.java | 68 +++++----- ...ntext.java => NotCompressedException.java} | 12 +- ...Context.java => NotXContentException.java} | 14 +- .../compress/lzf/LZFCompressedIndexInput.java | 4 +- .../lzf/LZFCompressedStreamInput.java | 4 +- .../lzf/LZFCompressedStreamOutput.java | 4 +- .../common/compress/lzf/LZFCompressor.java | 19 --- 
.../io/stream/InputStreamStreamInput.java | 10 ++ .../common/xcontent/XContentFactory.java | 121 ++++++++++-------- .../common/xcontent/XContentHelper.java | 75 +++-------- .../gateway/MetaDataStateFormat.java | 16 ++- .../index/aliases/IndexAlias.java | 8 +- .../index/aliases/IndexAliasesService.java | 8 +- .../index/mapper/DocumentMapper.java | 8 +- .../index/mapper/DocumentMapperParser.java | 8 +- .../index/mapper/MapperService.java | 8 +- .../index/mapper/core/BinaryFieldMapper.java | 15 ++- .../mapper/internal/SourceFieldMapper.java | 10 +- .../cluster/IndicesClusterStateService.java | 8 +- .../blobstore/BlobStoreRepository.java | 11 +- .../search/lookup/SourceLookup.java | 8 -- .../netty/MessageChannelHandler.java | 7 +- .../metadata/MappingMetaDataParserTests.java | 36 +++--- ...ests.java => CompressedXContentTests.java} | 34 ++--- .../common/xcontent/XContentFactoryTests.java | 2 +- .../aliases/IndexAliasesServiceTests.java | 7 +- .../fielddata/ParentChildFieldDataTests.java | 6 +- .../mapper/binary/BinaryMappingTests.java | 4 +- .../mapper/merge/TestMergeMapperTests.java | 6 +- .../mapper/multifield/MultiFieldTests.java | 2 +- .../source/CompressSourceMappingTests.java | 9 +- .../source/DefaultSourceMappingTests.java | 8 +- .../timestamp/TimestampMappingTests.java | 10 +- .../index/mapper/ttl/TTLMappingTests.java | 43 ++++--- .../mapper/update/UpdateMappingTests.java | 28 ++-- ...QueryParserFilterDateRangeFormatTests.java | 4 +- ...eryParserFilterDateRangeTimezoneTests.java | 4 +- .../query/SimpleIndexQueryParserTests.java | 4 +- .../search/child/AbstractChildTests.java | 6 +- .../template/SimpleIndexTemplateTests.java | 4 + .../bucket/nested/NestedAggregatorTest.java | 4 +- 55 files changed, 436 insertions(+), 434 deletions(-) rename src/main/java/org/elasticsearch/common/compress/{CompressedString.java => CompressedXContent.java} (74%) rename src/main/java/org/elasticsearch/common/compress/{CompressorContext.java => NotCompressedException.java} (73%) 
rename src/main/java/org/elasticsearch/common/compress/{lzf/LZFCompressorContext.java => NotXContentException.java} (69%) rename src/test/java/org/elasticsearch/common/compress/{CompressedStringTests.java => CompressedXContentTests.java} (72%) diff --git a/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 2e2a9e7abf3..d3f7a5b9356 100644 --- a/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -43,7 +43,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.settings.Settings; @@ -1080,7 +1080,7 @@ public abstract class TransportReplicationAction { builder.endObject(); builder.startObject("mappings"); - for (ObjectObjectCursor cursor1 : templateMetaData.mappings()) { + for (ObjectObjectCursor cursor1 : templateMetaData.mappings()) { byte[] mappingSource = cursor1.value.uncompressed(); XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource); Map mapping = parser.map(); diff --git a/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java index 0f7e55c8087..fb640eedc5a 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableSet; import 
org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -45,7 +45,7 @@ public class AliasMetaData extends AbstractDiffable { private final String alias; - private final CompressedString filter; + private final CompressedXContent filter; private final String indexRouting; @@ -53,7 +53,7 @@ public class AliasMetaData extends AbstractDiffable { private final Set searchRoutingValues; - private AliasMetaData(String alias, CompressedString filter, String indexRouting, String searchRouting) { + private AliasMetaData(String alias, CompressedXContent filter, String indexRouting, String searchRouting) { this.alias = alias; this.filter = filter; this.indexRouting = indexRouting; @@ -77,11 +77,11 @@ public class AliasMetaData extends AbstractDiffable { return alias(); } - public CompressedString filter() { + public CompressedXContent filter() { return filter; } - public CompressedString getFilter() { + public CompressedXContent getFilter() { return filter(); } @@ -176,9 +176,9 @@ public class AliasMetaData extends AbstractDiffable { @Override public AliasMetaData readFrom(StreamInput in) throws IOException { String alias = in.readString(); - CompressedString filter = null; + CompressedXContent filter = null; if (in.readBoolean()) { - filter = CompressedString.readCompressedString(in); + filter = CompressedXContent.readCompressedString(in); } String indexRouting = null; if (in.readBoolean()) { @@ -195,7 +195,7 @@ public class AliasMetaData extends AbstractDiffable { private final String alias; - private CompressedString filter; + private CompressedXContent filter; private String indexRouting; @@ -217,7 
+217,7 @@ public class AliasMetaData extends AbstractDiffable { return alias; } - public Builder filter(CompressedString filter) { + public Builder filter(CompressedXContent filter) { this.filter = filter; return this; } @@ -244,7 +244,7 @@ public class AliasMetaData extends AbstractDiffable { } try { XContentBuilder builder = XContentFactory.jsonBuilder().map(filter); - this.filter = new CompressedString(builder.bytes()); + this.filter = new CompressedXContent(builder.bytes()); return this; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to build json for alias request", e); @@ -324,7 +324,7 @@ public class AliasMetaData extends AbstractDiffable { } } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { if ("filter".equals(currentFieldName)) { - builder.filter(new CompressedString(parser.binaryValue())); + builder.filter(new CompressedXContent(parser.binaryValue())); } } else if (token == XContentParser.Token.VALUE_STRING) { if ("routing".equals(currentFieldName)) { diff --git a/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 07703bca591..2f3f6c889f8 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -35,7 +35,7 @@ import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -874,7 +874,7 @@ public class IndexMetaData implements Diffable { if ("mappings".equals(currentFieldName)) { while 
((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { - builder.putMapping(new MappingMetaData(new CompressedString(parser.binaryValue()))); + builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue()))); } else { Map mapping = parser.mapOrdered(); if (mapping.size() == 1) { diff --git a/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index ae555a54e75..d91d0817cfc 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -24,7 +24,7 @@ import com.google.common.collect.Sets; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; @@ -54,13 +54,13 @@ public class IndexTemplateMetaData extends AbstractDiffable mappings; + private final ImmutableOpenMap mappings; private final ImmutableOpenMap aliases; private final ImmutableOpenMap customs; - public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap mappings, + public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs) { this.name = name; this.order = order; @@ -103,11 +103,11 @@ public class IndexTemplateMetaData extends AbstractDiffable mappings() { + public ImmutableOpenMap mappings() { return this.mappings; } - public ImmutableOpenMap getMappings() { + public 
ImmutableOpenMap getMappings() { return this.mappings; } @@ -170,7 +170,7 @@ public class IndexTemplateMetaData extends AbstractDiffable cursor : mappings) { + for (ObjectObjectCursor cursor : mappings) { out.writeString(cursor.key); cursor.value.writeTo(out); } @@ -223,7 +223,7 @@ public class IndexTemplateMetaData extends AbstractDiffable mappings; + private final ImmutableOpenMap.Builder mappings; private final ImmutableOpenMap.Builder aliases; @@ -276,13 +276,13 @@ public class IndexTemplateMetaData extends AbstractDiffable cursor : indexTemplateMetaData.mappings()) { + for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { byte[] mappingSource = cursor.value.uncompressed(); XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource); Map mapping = parser.map(); @@ -341,7 +341,7 @@ public class IndexTemplateMetaData extends AbstractDiffable cursor : indexTemplateMetaData.mappings()) { + for (ObjectObjectCursor cursor : indexTemplateMetaData.mappings()) { byte[] data = cursor.value.uncompressed(); XContentParser parser = XContentFactory.xContent(data).createParser(data); Map mapping = parser.mapOrderedAndClose(); diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index e6067c46817..2d8054d748f 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.joda.FormatDateTimeFormatter; @@ -276,7 +276,7 @@ public class MappingMetaData extends AbstractDiffable { private final String type; - private final CompressedString source; + private final CompressedXContent source; private Id id; private Routing routing; @@ -294,9 +294,9 @@ public class MappingMetaData extends AbstractDiffable { this.hasParentField = docMapper.parentFieldMapper().active(); } - public MappingMetaData(CompressedString mapping) throws IOException { + public MappingMetaData(CompressedXContent mapping) throws IOException { this.source = mapping; - Map mappingMap = XContentHelper.createParser(mapping.compressed(), 0, mapping.compressed().length).mapOrderedAndClose(); + Map mappingMap = XContentHelper.createParser(mapping.compressedReference()).mapOrderedAndClose(); if (mappingMap.size() != 1) { throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string()); } @@ -311,7 +311,7 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData(String type, Map mapping) throws IOException { this.type = type; XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping); - this.source = new CompressedString(mappingBuilder.bytes()); + this.source = new CompressedXContent(mappingBuilder.bytes()); Map withoutType = mapping; if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); @@ -322,7 +322,7 @@ public class MappingMetaData extends AbstractDiffable { private MappingMetaData() { this.type = ""; try { - this.source = new CompressedString(""); + this.source = new CompressedXContent("{}"); } catch (IOException ex) { throw new IllegalStateException("Cannot create MappingMetaData prototype", ex); } @@ -393,7 +393,7 @@ public class MappingMetaData extends AbstractDiffable { } } - public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) { + public 
MappingMetaData(String type, CompressedXContent source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) { this.type = type; this.source = source; this.id = id; @@ -418,7 +418,7 @@ public class MappingMetaData extends AbstractDiffable { return this.type; } - public CompressedString source() { + public CompressedXContent source() { return this.source; } @@ -430,7 +430,7 @@ public class MappingMetaData extends AbstractDiffable { * Converts the serialized compressed form of the mappings into a parsed map. */ public Map sourceAsMap() throws IOException { - Map mapping = XContentHelper.convertToMap(source.compressed(), 0, source.compressed().length, true).v2(); + Map mapping = XContentHelper.convertToMap(source.compressedReference(), true).v2(); if (mapping.size() == 1 && mapping.containsKey(type())) { // the type name is the root value, reduce it mapping = (Map) mapping.get(type()); @@ -599,7 +599,7 @@ public class MappingMetaData extends AbstractDiffable { public MappingMetaData readFrom(StreamInput in) throws IOException { String type = in.readString(); - CompressedString source = CompressedString.readCompressedString(in); + CompressedXContent source = CompressedXContent.readCompressedString(in); // id Id id = new Id(in.readBoolean() ? 
in.readString() : null); // routing diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 41e310a95ad..612989bb451 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -46,7 +46,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.regex.Regex; @@ -254,7 +254,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { // apply templates, merging the mappings into the request mapping if exists for (IndexTemplateMetaData template : templates) { templateNames.add(template.getName()); - for (ObjectObjectCursor cursor : template.mappings()) { + for (ObjectObjectCursor cursor : template.mappings()) { if (mappings.containsKey(cursor.key)) { XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string())); } else { @@ -357,7 +357,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { // first, add the default mapping if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { try { - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false); } catch (Exception e) { removalReason = "failed on parsing default 
mapping on index creation"; throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e); @@ -369,7 +369,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { } try { // apply the default here, its the first time we parse it - mapperService.merge(entry.getKey(), new CompressedString(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true); + mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true); } catch (Exception e) { removalReason = "failed on parsing mappings on index creation"; throw new MapperParsingException("mapping [" + entry.getKey() + "]", e); diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 0e39e7a613d..4406decfe0c 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -32,7 +32,7 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; @@ -91,11 +91,11 @@ public class MetaDataMappingService extends AbstractComponent { static class UpdateTask extends MappingTask { final String type; - final CompressedString mappingSource; + final CompressedXContent mappingSource; final String nodeId; // null fr unknown final ActionListener listener; - UpdateTask(String index, String indexUUID, String type, CompressedString mappingSource, String nodeId, ActionListener listener) { + 
UpdateTask(String index, String indexUUID, String type, CompressedXContent mappingSource, String nodeId, ActionListener listener) { super(index, indexUUID); this.type = type; this.mappingSource = mappingSource; @@ -254,7 +254,7 @@ public class MetaDataMappingService extends AbstractComponent { UpdateTask updateTask = (UpdateTask) task; try { String type = updateTask.type; - CompressedString mappingSource = updateTask.mappingSource; + CompressedXContent mappingSource = updateTask.mappingSource; MappingMetaData mappingMetaData = builder.mapping(type); if (mappingMetaData != null && mappingMetaData.source().equals(mappingSource)) { @@ -376,9 +376,9 @@ public class MetaDataMappingService extends AbstractComponent { DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type()); if (MapperService.DEFAULT_MAPPING.equals(request.type())) { // _default_ types do not go through merging, but we do test the new settings. Also don't apply the old default - newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), false); + newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false); } else { - newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), existingMapper == null); + newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null); if (existingMapper != null) { // first, simulate MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true); @@ -415,12 +415,12 @@ public class MetaDataMappingService extends AbstractComponent { continue; } - CompressedString existingSource = null; + CompressedXContent existingSource = null; if (existingMappers.containsKey(entry.getKey())) { existingSource = existingMappers.get(entry.getKey()).mappingSource(); } DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), 
newMapper.mappingSource(), false); - CompressedString updatedSource = mergedMapper.mappingSource(); + CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { if (existingSource.equals(updatedSource)) { diff --git a/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java b/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java index 1bc370cd894..add383b75fa 100644 --- a/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java +++ b/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java @@ -352,6 +352,7 @@ public class PagedBytesReference implements BytesReference { private final int offset; private final int length; private int pos; + private int mark; public PagedBytesReferenceStreamInput(ByteArray bytearray, int offset, int length) { this.bytearray = bytearray; @@ -420,9 +421,19 @@ public class PagedBytesReference implements BytesReference { return copiedBytes; } + @Override + public boolean markSupported() { + return true; + } + + @Override + public void mark(int readlimit) { + this.mark = pos; + } + @Override public void reset() throws IOException { - pos = 0; + pos = mark; } @Override diff --git a/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java b/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java index 06ec2a2f48f..12094108932 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedIndexInput.java @@ -30,10 +30,9 @@ import java.io.IOException; * @deprecated Used only for backward comp. 
to read old compressed files, since we now use codec based compression */ @Deprecated -public abstract class CompressedIndexInput extends IndexInput { +public abstract class CompressedIndexInput extends IndexInput { private IndexInput in; - protected final T context; private int version; private long totalUncompressedLength; @@ -48,10 +47,9 @@ public abstract class CompressedIndexInput extends private int currentOffsetIdx; private long currentUncompressedChunkPointer; - public CompressedIndexInput(IndexInput in, T context) throws IOException { + public CompressedIndexInput(IndexInput in) throws IOException { super("compressed(" + in.toString() + ")"); this.in = in; - this.context = context; readHeader(in); this.version = in.readInt(); long metaDataPosition = in.readLong(); diff --git a/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java b/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java index 3df98a7f718..82eefe13a4c 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedStreamInput.java @@ -27,10 +27,9 @@ import java.io.IOException; /** */ -public abstract class CompressedStreamInput extends StreamInput { +public abstract class CompressedStreamInput extends StreamInput { private final StreamInput in; - protected final CompressorContext context; private boolean closed; @@ -38,9 +37,8 @@ public abstract class CompressedStreamInput extends private int position = 0; private int valid = 0; - public CompressedStreamInput(StreamInput in, T context) throws IOException { + public CompressedStreamInput(StreamInput in) throws IOException { this.in = in; - this.context = context; super.setVersion(in.getVersion()); readHeader(in); } @@ -51,13 +49,6 @@ public abstract class CompressedStreamInput extends return super.setVersion(version); } - /** - * Expert!, resets to buffer start, without the need to decompress it again. 
- */ - public void resetToBufferStart() { - this.position = 0; - } - /** * Method is overridden to report number of bytes that can now be read * from decoded data buffer, without reading bytes from the underlying diff --git a/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java b/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java index 009fddc074c..9e0763816a8 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedStreamOutput.java @@ -26,10 +26,9 @@ import java.io.IOException; /** */ -public abstract class CompressedStreamOutput extends StreamOutput { +public abstract class CompressedStreamOutput extends StreamOutput { private final StreamOutput out; - protected final T context; protected byte[] uncompressed; protected int uncompressedLength; @@ -37,9 +36,8 @@ public abstract class CompressedStreamOutput extend private boolean closed; - public CompressedStreamOutput(StreamOutput out, T context) throws IOException { + public CompressedStreamOutput(StreamOutput out) throws IOException { this.out = out; - this.context = context; super.setVersion(out.getVersion()); writeHeader(out); } diff --git a/src/main/java/org/elasticsearch/common/compress/CompressedString.java b/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java similarity index 74% rename from src/main/java/org/elasticsearch/common/compress/CompressedString.java rename to src/main/java/org/elasticsearch/common/compress/CompressedXContent.java index aca1d45f86d..09ced0e29b2 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressedString.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.compress; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -34,33 +35,32 @@ import java.util.Arrays; * memory. Note that the compressed string might still sometimes need to be * decompressed in order to perform equality checks or to compute hash codes. */ -public final class CompressedString { +public final class CompressedXContent { private final byte[] bytes; private int hashCode; - public CompressedString(BytesReference data) throws IOException { + public CompressedXContent(BytesReference data) throws IOException { Compressor compressor = CompressorFactory.compressor(data); if (compressor != null) { // already compressed... this.bytes = data.toBytes(); } else { - BytesArray bytesArray = data.toBytesArray(); - this.bytes = CompressorFactory.defaultCompressor().compress(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length()); - assert CompressorFactory.compressor(bytes) != null; + BytesStreamOutput out = new BytesStreamOutput(); + try (StreamOutput compressedOutput = CompressorFactory.defaultCompressor().streamOutput(out)) { + data.writeTo(compressedOutput); + } + this.bytes = out.bytes().toBytes(); + assert CompressorFactory.compressor(new BytesArray(bytes)) != null; } } - public CompressedString(byte[] data, int offset, int length) throws IOException { - this(new BytesArray(data, offset, length)); + public CompressedXContent(byte[] data) throws IOException { + this(new BytesArray(data)); } - public CompressedString(byte[] data) throws IOException { - this(data, 0, data.length); - } - - public CompressedString(String str) throws IOException { + public CompressedXContent(String str) throws IOException { this(new BytesArray(new BytesRef(str))); } @@ -69,12 +69,15 @@ public final class CompressedString { return this.bytes; } + /** Return the compressed bytes as a {@link BytesReference}. 
*/ + public BytesReference compressedReference() { + return new BytesArray(bytes); + } + /** Return the uncompressed bytes. */ public byte[] uncompressed() { - Compressor compressor = CompressorFactory.compressor(bytes); - assert compressor != null; try { - return compressor.uncompress(bytes, 0, bytes.length); + return CompressorFactory.uncompress(new BytesArray(bytes)).toBytes(); } catch (IOException e) { throw new IllegalStateException("Cannot decompress compressed string", e); } @@ -84,10 +87,10 @@ public final class CompressedString { return new BytesRef(uncompressed()).utf8ToString(); } - public static CompressedString readCompressedString(StreamInput in) throws IOException { + public static CompressedXContent readCompressedString(StreamInput in) throws IOException { byte[] bytes = new byte[in.readVInt()]; in.readBytes(bytes, 0, bytes.length); - return new CompressedString(bytes); + return new CompressedXContent(bytes); } public void writeTo(StreamOutput out) throws IOException { @@ -100,7 +103,7 @@ public final class CompressedString { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - CompressedString that = (CompressedString) o; + CompressedXContent that = (CompressedXContent) o; if (Arrays.equals(compressed(), that.compressed())) { return true; diff --git a/src/main/java/org/elasticsearch/common/compress/Compressor.java b/src/main/java/org/elasticsearch/common/compress/Compressor.java index 8d0199703ac..d8f0ae82bf6 100644 --- a/src/main/java/org/elasticsearch/common/compress/Compressor.java +++ b/src/main/java/org/elasticsearch/common/compress/Compressor.java @@ -38,26 +38,18 @@ public interface Compressor { boolean isCompressed(BytesReference bytes); - boolean isCompressed(byte[] data, int offset, int length); - boolean isCompressed(ChannelBuffer buffer); + StreamInput streamInput(StreamInput in) throws IOException; + + StreamOutput streamOutput(StreamOutput out) throws IOException; + + /** + * @deprecated Used for 
backward comp. since we now use Lucene compressed codec. + */ + @Deprecated boolean isCompressed(IndexInput in) throws IOException; - /** - * Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}. - */ - byte[] uncompress(byte[] data, int offset, int length) throws IOException; - - /** - * Compresses the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}. - */ - byte[] compress(byte[] data, int offset, int length) throws IOException; - - CompressedStreamInput streamInput(StreamInput in) throws IOException; - - CompressedStreamOutput streamOutput(StreamOutput out) throws IOException; - /** * @deprecated Used for backward comp. since we now use Lucene compressed codec. */ diff --git a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java index 9eb9c9d7212..9873123e558 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java @@ -21,9 +21,9 @@ package org.elasticsearch.common.compress; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; + import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.lzf.LZFCompressor; @@ -32,6 +32,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.jboss.netty.buffer.ChannelBuffer; import 
java.io.IOException; @@ -92,14 +94,6 @@ public class CompressorFactory { return compressor(bytes) != null; } - public static boolean isCompressed(byte[] data) { - return compressor(data, 0, data.length) != null; - } - - public static boolean isCompressed(byte[] data, int offset, int length) { - return compressor(data, offset, length) != null; - } - public static boolean isCompressed(IndexInput in) throws IOException { return compressor(in) != null; } @@ -108,35 +102,29 @@ public class CompressorFactory { public static Compressor compressor(BytesReference bytes) { for (Compressor compressor : compressors) { if (compressor.isCompressed(bytes)) { + // bytes should be either detected as compressed or as xcontent, + // if we have bytes that can be either detected as compressed or + // as a xcontent, we have a problem + assert XContentFactory.xContentType(bytes) == null; return compressor; } } - return null; - } - @Nullable - public static Compressor compressor(byte[] data) { - return compressor(data, 0, data.length); - } - - @Nullable - public static Compressor compressor(byte[] data, int offset, int length) { - for (Compressor compressor : compressors) { - if (compressor.isCompressed(data, offset, length)) { - return compressor; - } + XContentType contentType = XContentFactory.xContentType(bytes); + if (contentType == null) { + throw new NotXContentException("Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes"); } + return null; } - @Nullable public static Compressor compressor(ChannelBuffer buffer) { for (Compressor compressor : compressors) { if (compressor.isCompressed(buffer)) { return compressor; } } - return null; + throw new NotCompressedException(); } @Nullable @@ -158,16 +146,30 @@ public class CompressorFactory { */ public static BytesReference uncompressIfNeeded(BytesReference bytes) throws IOException { Compressor compressor = compressor(bytes); + BytesReference uncompressed; if (compressor != null) { - if 
(bytes.hasArray()) { - return new BytesArray(compressor.uncompress(bytes.array(), bytes.arrayOffset(), bytes.length())); - } - StreamInput compressed = compressor.streamInput(bytes.streamInput()); - BytesStreamOutput bStream = new BytesStreamOutput(); - Streams.copy(compressed, bStream); - compressed.close(); - return bStream.bytes(); + uncompressed = uncompress(bytes, compressor); + } else { + uncompressed = bytes; } - return bytes; + + return uncompressed; + } + + /** Decompress the provided {@link BytesReference}. */ + public static BytesReference uncompress(BytesReference bytes) throws IOException { + Compressor compressor = compressor(bytes); + if (compressor == null) { + throw new IllegalArgumentException("Bytes are not compressed"); + } + return uncompress(bytes, compressor); + } + + private static BytesReference uncompress(BytesReference bytes, Compressor compressor) throws IOException { + StreamInput compressed = compressor.streamInput(bytes.streamInput()); + BytesStreamOutput bStream = new BytesStreamOutput(); + Streams.copy(compressed, bStream); + compressed.close(); + return bStream.bytes(); } } diff --git a/src/main/java/org/elasticsearch/common/compress/CompressorContext.java b/src/main/java/org/elasticsearch/common/compress/NotCompressedException.java similarity index 73% rename from src/main/java/org/elasticsearch/common/compress/CompressorContext.java rename to src/main/java/org/elasticsearch/common/compress/NotCompressedException.java index 9ad70554046..653483fc586 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressorContext.java +++ b/src/main/java/org/elasticsearch/common/compress/NotCompressedException.java @@ -19,7 +19,13 @@ package org.elasticsearch.common.compress; -/** - */ -public interface CompressorContext { +/** Exception indicating that we were expecting something compressed, which + * was not compressed or corrupted so that the compression format could not + * be detected. 
*/ +public class NotCompressedException extends RuntimeException { + + public NotCompressedException() { + super(); + } + } diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressorContext.java b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java similarity index 69% rename from src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressorContext.java rename to src/main/java/org/elasticsearch/common/compress/NotXContentException.java index 89c7b182c47..bca35c317d8 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressorContext.java +++ b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.common.compress.lzf; +package org.elasticsearch.common.compress; -import org.elasticsearch.common.compress.CompressorContext; +/** Exception indicating that we were expecting some xcontent bytes, but the + * content type of the bytes could not be detected as any known xcontent + * format. 
*/ +public class NotXContentException extends RuntimeException { -/** - */ -public class LZFCompressorContext implements CompressorContext { + public NotXContentException(String message) { + super(message); + } - public static final LZFCompressorContext INSTANCE = new LZFCompressorContext(); } diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java index 326eceb77c4..93bd583662b 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedIndexInput.java @@ -32,14 +32,14 @@ import java.util.Arrays; /** */ @Deprecated -public class LZFCompressedIndexInput extends CompressedIndexInput { +public class LZFCompressedIndexInput extends CompressedIndexInput { private final ChunkDecoder decoder; // scratch area buffer private byte[] inputBuffer; public LZFCompressedIndexInput(IndexInput in, ChunkDecoder decoder) throws IOException { - super(in, LZFCompressorContext.INSTANCE); + super(in); this.decoder = decoder; this.uncompressed = new byte[LZFChunk.MAX_CHUNK_LEN]; diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java index caaaadbeb3e..baefcaa8928 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamInput.java @@ -29,7 +29,7 @@ import java.io.IOException; /** */ -public class LZFCompressedStreamInput extends CompressedStreamInput { +public class LZFCompressedStreamInput extends CompressedStreamInput { private final BufferRecycler recycler; @@ -39,7 +39,7 @@ public class LZFCompressedStreamInput extends CompressedStreamInput { +public class LZFCompressedStreamOutput extends CompressedStreamOutput { private final 
BufferRecycler recycler; private final ChunkEncoder encoder; public LZFCompressedStreamOutput(StreamOutput out) throws IOException { - super(out, LZFCompressorContext.INSTANCE); + super(out); this.recycler = BufferRecycler.instance(); this.uncompressed = this.recycler.allocOutputBuffer(LZFChunk.MAX_CHUNK_LEN); this.uncompressedLength = LZFChunk.MAX_CHUNK_LEN; diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java index 57771a1e3a8..c5c937ccbb9 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.compress.lzf; import com.ning.compress.lzf.ChunkDecoder; import com.ning.compress.lzf.LZFChunk; -import com.ning.compress.lzf.LZFEncoder; import com.ning.compress.lzf.util.ChunkDecoderFactory; import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.bytes.BytesReference; @@ -69,14 +68,6 @@ public class LZFCompressor implements Compressor { (bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); } - @Override - public boolean isCompressed(byte[] data, int offset, int length) { - return length >= 3 && - data[offset] == LZFChunk.BYTE_Z && - data[offset + 1] == LZFChunk.BYTE_V && - (data[offset + 2] == LZFChunk.BLOCK_TYPE_COMPRESSED || data[offset + 2] == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); - } - @Override public boolean isCompressed(ChannelBuffer buffer) { int offset = buffer.readerIndex(); @@ -103,16 +94,6 @@ public class LZFCompressor implements Compressor { return true; } - @Override - public byte[] uncompress(byte[] data, int offset, int length) throws IOException { - return decoder.decode(data, offset, length); - } - - @Override - public byte[] compress(byte[] data, int offset, int length) throws IOException { - return LZFEncoder.safeEncode(data, 
offset, length); - } - @Override public CompressedStreamInput streamInput(StreamInput in) throws IOException { return new LZFCompressedStreamInput(in, decoder); diff --git a/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java b/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java index ffe8d297ba4..e9aa52cf4d0 100644 --- a/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java +++ b/src/main/java/org/elasticsearch/common/io/stream/InputStreamStreamInput.java @@ -59,6 +59,16 @@ public class InputStreamStreamInput extends StreamInput { is.reset(); } + @Override + public boolean markSupported() { + return is.markSupported(); + } + + @Override + public void mark(int readlimit) { + is.mark(readlimit); + } + @Override public void close() throws IOException { is.close(); diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java b/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java index 9ae1a03a67d..75e57509948 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent; import com.fasterxml.jackson.dataformat.cbor.CBORConstants; import com.fasterxml.jackson.dataformat.smile.SmileConstants; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -163,6 +164,9 @@ public class XContentFactory { if (c == '{') { return XContentType.JSON; } + if (Character.isWhitespace(c) == false) { + break; + } } return null; } @@ -204,65 +208,76 @@ public class XContentFactory { } /** - * Guesses the content type based on the provided input stream. + * Guesses the content type based on the provided input stream without consuming it. 
*/ public static XContentType xContentType(InputStream si) throws IOException { - final int firstInt = si.read(); // this must be an int since we need to respect the method contract - if (firstInt == -1) { - return null; + if (si.markSupported() == false) { + throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass()); } - - final int secondInt = si.read(); // this must be an int since we need to respect the method contract - if (secondInt == -1) { - return null; - } - final byte first = (byte) (0xff & firstInt); - final byte second = (byte) (0xff & secondInt); - if (first == SmileConstants.HEADER_BYTE_1 && second == SmileConstants.HEADER_BYTE_2) { - int third = si.read(); - if (third == SmileConstants.HEADER_BYTE_3) { - return XContentType.SMILE; - } - } - if (first == '{' || second == '{') { - return XContentType.JSON; - } - if (first == '-' && second == '-') { - int third = si.read(); - if (third == '-') { - return XContentType.YAML; - } - } - // CBOR logic similar to CBORFactory#hasCBORFormat - if (first == CBORConstants.BYTE_OBJECT_INDEFINITE){ - return XContentType.CBOR; - } - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first)) { - // Actually, specific "self-describe tag" is a very good indicator - int third = si.read(); - if (third == -1) { + si.mark(GUESS_HEADER_LENGTH); + try { + final int firstInt = si.read(); // this must be an int since we need to respect the method contract + if (firstInt == -1) { return null; } - if (first == (byte) 0xD9 && second == (byte) 0xD9 && third == (byte) 0xF7) { - return XContentType.CBOR; - } - } - // for small objects, some encoders just encode as major type object, we can safely - // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) { - return XContentType.CBOR; - } - for (int i = 2; i < GUESS_HEADER_LENGTH; i++) { - int val = si.read(); - if (val 
== -1) { - return null; + final int secondInt = si.read(); // this must be an int since we need to respect the method contract + if (secondInt == -1) { + return null; } - if (val == '{') { + final byte first = (byte) (0xff & firstInt); + final byte second = (byte) (0xff & secondInt); + if (first == SmileConstants.HEADER_BYTE_1 && second == SmileConstants.HEADER_BYTE_2) { + int third = si.read(); + if (third == SmileConstants.HEADER_BYTE_3) { + return XContentType.SMILE; + } + } + if (first == '{' || second == '{') { return XContentType.JSON; } + if (first == '-' && second == '-') { + int third = si.read(); + if (third == '-') { + return XContentType.YAML; + } + } + // CBOR logic similar to CBORFactory#hasCBORFormat + if (first == CBORConstants.BYTE_OBJECT_INDEFINITE){ + return XContentType.CBOR; + } + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first)) { + // Actually, specific "self-describe tag" is a very good indicator + int third = si.read(); + if (third == -1) { + return null; + } + if (first == (byte) 0xD9 && second == (byte) 0xD9 && third == (byte) 0xF7) { + return XContentType.CBOR; + } + } + // for small objects, some encoders just encode as major type object, we can safely + // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) { + return XContentType.CBOR; + } + + for (int i = 2; i < GUESS_HEADER_LENGTH; i++) { + int val = si.read(); + if (val == -1) { + return null; + } + if (val == '{') { + return XContentType.JSON; + } + if (Character.isWhitespace(val) == false) { + break; + } + } + return null; + } finally { + si.reset(); } - return null; } /** @@ -284,7 +299,7 @@ public class XContentFactory { * Guesses the content type based on the provided bytes. */ public static XContentType xContentType(BytesReference bytes) { - int length = bytes.length() < GUESS_HEADER_LENGTH ? 
bytes.length() : GUESS_HEADER_LENGTH; + int length = bytes.length(); if (length == 0) { return null; } @@ -316,9 +331,13 @@ public class XContentFactory { // a last chance for JSON for (int i = 0; i < length; i++) { - if (bytes.get(i) == '{') { + byte b = bytes.get(i); + if (b == '{') { return XContentType.JSON; } + if (Character.isWhitespace(b) == false) { + break; + } } return null; } diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index d196d459fbd..5325950e202 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -28,14 +28,14 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedStreamInput; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.ToXContent.Params; +import java.io.BufferedInputStream; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -49,45 +49,30 @@ import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; public class XContentHelper { public static XContentParser createParser(BytesReference bytes) throws IOException { - if (bytes.hasArray()) { - return createParser(bytes.array(), bytes.arrayOffset(), bytes.length()); - } Compressor compressor = CompressorFactory.compressor(bytes); if (compressor != null) { - CompressedStreamInput compressedInput = compressor.streamInput(bytes.streamInput()); + InputStream compressedInput = 
compressor.streamInput(bytes.streamInput()); + if (compressedInput.markSupported() == false) { + compressedInput = new BufferedInputStream(compressedInput); + } XContentType contentType = XContentFactory.xContentType(compressedInput); - compressedInput.resetToBufferStart(); return XContentFactory.xContent(contentType).createParser(compressedInput); } else { return XContentFactory.xContent(bytes).createParser(bytes.streamInput()); } } - - public static XContentParser createParser(byte[] data, int offset, int length) throws IOException { - Compressor compressor = CompressorFactory.compressor(data, offset, length); - if (compressor != null) { - CompressedStreamInput compressedInput = compressor.streamInput(StreamInput.wrap(data, offset, length)); - XContentType contentType = XContentFactory.xContentType(compressedInput); - compressedInput.resetToBufferStart(); - return XContentFactory.xContent(contentType).createParser(compressedInput); - } else { - return XContentFactory.xContent(data, offset, length).createParser(data, offset, length); - } - } - public static Tuple> convertToMap(BytesReference bytes, boolean ordered) throws ElasticsearchParseException { - if (bytes.hasArray()) { - return convertToMap(bytes.array(), bytes.arrayOffset(), bytes.length(), ordered); - } try { XContentParser parser; XContentType contentType; Compressor compressor = CompressorFactory.compressor(bytes); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(bytes.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(bytes.streamInput()); + if (compressedStreamInput.markSupported() == false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput); } else { contentType = XContentFactory.xContentType(bytes); @@ 
-103,34 +88,6 @@ public class XContentHelper { } } - public static Tuple> convertToMap(byte[] data, boolean ordered) throws ElasticsearchParseException { - return convertToMap(data, 0, data.length, ordered); - } - - public static Tuple> convertToMap(byte[] data, int offset, int length, boolean ordered) throws ElasticsearchParseException { - try { - XContentParser parser; - XContentType contentType; - Compressor compressor = CompressorFactory.compressor(data, offset, length); - if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(StreamInput.wrap(data, offset, length)); - contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); - parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput); - } else { - contentType = XContentFactory.xContentType(data, offset, length); - parser = XContentFactory.xContent(contentType).createParser(data, offset, length); - } - if (ordered) { - return Tuple.tuple(contentType, parser.mapOrderedAndClose()); - } else { - return Tuple.tuple(contentType, parser.mapAndClose()); - } - } catch (IOException e) { - throw new ElasticsearchParseException("Failed to parse content to map", e); - } - } - public static String convertToJson(BytesReference bytes, boolean reformatJson) throws IOException { return convertToJson(bytes, reformatJson, false); } @@ -426,9 +383,11 @@ public class XContentHelper { public static void writeDirect(BytesReference source, XContentBuilder rawBuilder, ToXContent.Params params) throws IOException { Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(source.streamInput()); + if (compressedStreamInput.markSupported() == false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } 
XContentType contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); if (contentType == rawBuilder.contentType()) { Streams.copy(compressedStreamInput, rawBuilder.stream()); } else { @@ -457,9 +416,11 @@ public class XContentHelper { public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException { Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(source.streamInput()); XContentType contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); + if (compressedStreamInput.markSupported() == false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } if (contentType == builder.contentType()) { builder.rawField(field, compressedStreamInput); } else { diff --git a/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 9d88d84f64a..9ea7cf5e60b 100644 --- a/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -21,16 +21,26 @@ package org.elasticsearch.gateway; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; + import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.store.*; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import 
org.apache.lucene.store.OutputStreamIndexOutput; +import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.io.OutputStream; @@ -280,7 +290,7 @@ public abstract class MetaDataStateFormat { logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); continue; } - parser = XContentHelper.createParser(data, 0, data.length); + parser = XContentHelper.createParser(new BytesArray(data)); state = fromXContent(parser); if (state == null) { logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); diff --git a/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java b/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java index 3d02731dbfa..48ebc4239ac 100644 --- a/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java +++ b/src/main/java/org/elasticsearch/index/aliases/IndexAlias.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.aliases; import org.apache.lucene.search.Query; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; /** * @@ -30,11 +30,11 @@ public class IndexAlias { private final String alias; - private final CompressedString filter; + private final CompressedXContent filter; private final Query parsedFilter; - public IndexAlias(String alias, @Nullable 
CompressedString filter, @Nullable Query parsedFilter) { + public IndexAlias(String alias, @Nullable CompressedXContent filter, @Nullable Query parsedFilter) { this.alias = alias; this.filter = filter; this.parsedFilter = parsedFilter; @@ -45,7 +45,7 @@ public class IndexAlias { } @Nullable - public CompressedString filter() { + public CompressedXContent filter() { return filter; } diff --git a/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java b/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java index a097a01675b..21d6582e03f 100644 --- a/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java +++ b/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java @@ -23,7 +23,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -63,11 +63,11 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera return aliases.get(alias); } - public IndexAlias create(String alias, @Nullable CompressedString filter) { + public IndexAlias create(String alias, @Nullable CompressedXContent filter) { return new IndexAlias(alias, filter, parse(alias, filter)); } - public void add(String alias, @Nullable CompressedString filter) { + public void add(String alias, @Nullable CompressedXContent filter) { add(new IndexAlias(alias, filter, parse(alias, filter))); } @@ -120,7 +120,7 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera aliases.remove(alias); } - private Query parse(String alias, CompressedString filter) { + private Query parse(String alias, 
CompressedXContent filter) { if (filter == null) { return null; } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 45da4be320b..2c4bd053251 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -35,7 +35,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; @@ -150,7 +150,7 @@ public class DocumentMapper implements ToXContent { private final String type; private final StringAndBytesText typeText; - private volatile CompressedString mappingSource; + private volatile CompressedXContent mappingSource; private final Mapping mapping; @@ -235,7 +235,7 @@ public class DocumentMapper implements ToXContent { return mapping.meta; } - public CompressedString mappingSource() { + public CompressedXContent mappingSource() { return this.mappingSource; } @@ -473,7 +473,7 @@ public class DocumentMapper implements ToXContent { toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); builder.close(); - mappingSource = new CompressedString(bStream.bytes()); + mappingSource = new CompressedXContent(bStream.bytes()); } catch (Exception e) { throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 9084e17d60b..4bb9e8d830e 100644 --- 
a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -194,15 +194,15 @@ public class DocumentMapperParser extends AbstractIndexComponent { return parse(type, mapping, defaultSource); } - public DocumentMapper parseCompressed(@Nullable String type, CompressedString source) throws MapperParsingException { + public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source) throws MapperParsingException { return parseCompressed(type, source, null); } @SuppressWarnings({"unchecked"}) - public DocumentMapper parseCompressed(@Nullable String type, CompressedString source, String defaultSource) throws MapperParsingException { + public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException { Map mapping = null; if (source != null) { - Map root = XContentHelper.convertToMap(source.compressed(), true).v2(); + Map root = XContentHelper.convertToMap(source.compressedReference(), true).v2(); Tuple> t = extractMapping(type, root); type = t.v1(); mapping = t.v2(); diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b63df2d6cc4..8847550c6c9 100755 --- a/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -43,7 +43,7 @@ import 
org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; @@ -214,7 +214,7 @@ public class MapperService extends AbstractIndexComponent { typeListeners.remove(listener); } - public DocumentMapper merge(String type, CompressedString mappingSource, boolean applyDefault) { + public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault) { if (DEFAULT_MAPPING.equals(type)) { // verify we can parse it DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource); @@ -293,7 +293,7 @@ public class MapperService extends AbstractIndexComponent { private boolean assertSerialization(DocumentMapper mapper) { // capture the source now, it may change due to concurrent parsing - final CompressedString mappingSource = mapper.mappingSource(); + final CompressedXContent mappingSource = mapper.mappingSource(); DocumentMapper newMapper = parse(mapper.type(), mappingSource, false); if (newMapper.mappingSource().equals(mappingSource) == false) { @@ -328,7 +328,7 @@ public class MapperService extends AbstractIndexComponent { this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers); } - public DocumentMapper parse(String mappingType, CompressedString mappingSource, boolean applyDefault) throws MapperParsingException { + public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { String defaultMappingSource; if (PercolatorService.TYPE_NAME.equals(mappingType)) { defaultMappingSource = this.defaultPercolatorMappingSource; diff --git 
a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 9972ca45cbd..91375efed47 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -36,6 +36,9 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.compress.NotXContentException; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; @@ -144,10 +147,16 @@ public class BinaryFieldMapper extends AbstractFieldMapper { } try { if (indexCreatedBefore2x) { - return CompressorFactory.uncompressIfNeeded(bytes); - } else { - return bytes; + try { + return CompressorFactory.uncompressIfNeeded(bytes); + } catch (NotXContentException e) { + // This is a BUG! We try to decompress by detecting a header in + // the stored bytes but since we accept arbitrary bytes, we have + // no guarantee that uncompressed bytes will be detected as + // compressed! 
+ } } + return bytes; } catch (IOException e) { throw new ElasticsearchParseException("failed to decompress source", e); } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 7c8ae58d5fd..d5d745f263f 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.base.Objects; + import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.StoredField; @@ -31,7 +32,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.compress.CompressedStreamInput; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -53,7 +53,9 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; +import java.io.BufferedInputStream; import java.io.IOException; +import java.io.InputStream; import java.util.Arrays; import java.util.Iterator; import java.util.List; @@ -324,9 +326,11 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper // see if we need to convert the content type Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { - CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); + InputStream compressedStreamInput = compressor.streamInput(source.streamInput()); + if (compressedStreamInput.markSupported() == 
false) { + compressedStreamInput = new BufferedInputStream(compressedStreamInput); + } XContentType contentType = XContentFactory.xContentType(compressedStreamInput); - compressedStreamInput.resetToBufferStart(); if (contentType != formatContentType) { // we need to reread and store back, compressed.... BytesStreamOutput bStream = new BytesStreamOutput(); diff --git a/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 77832bb052d..6995ecae587 100644 --- a/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -41,7 +41,7 @@ import org.elasticsearch.cluster.routing.*; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -369,7 +369,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent cursor : indexMetaData.mappings().values()) { MappingMetaData mappingMd = cursor.value; String mappingType = mappingMd.type(); - CompressedString mappingSource = mappingMd.source(); + CompressedXContent mappingSource = mappingMd.source(); if (mappingType.equals(MapperService.DEFAULT_MAPPING)) { // we processed _default_ first continue; } @@ -396,7 +396,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent(index, mappingType))) { seenMappings.put(new Tuple<>(index, mappingType), true); } @@ -484,7 +484,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent cursor : aliases) { AliasMetaData aliasMd = cursor.value; 
String alias = aliasMd.alias(); - CompressedString filter = aliasMd.filter(); + CompressedXContent filter = aliasMd.filter(); try { if (!indexAliasesService.hasAlias(alias)) { if (logger.isDebugEnabled()) { diff --git a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 180f6595521..83baf8d1e2d 100644 --- a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.io.ByteStreams; + import org.apache.lucene.store.RateLimiter; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -35,6 +35,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.compress.CompressorFactory; @@ -407,7 +408,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent snapshots = new ArrayList<>(); - try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) { + try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) { if (parser.nextToken() == XContentParser.Token.START_OBJECT) { if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { String currentFieldName = parser.currentName(); diff --git 
a/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java b/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java index ee7018d397e..7c3a2aa4e2d 100644 --- a/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java +++ b/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java @@ -91,14 +91,6 @@ public class SourceLookup implements Map { return sourceAsMapAndType(source).v2(); } - public static Tuple> sourceAsMapAndType(byte[] bytes, int offset, int length) throws ElasticsearchParseException { - return XContentHelper.convertToMap(bytes, offset, length, false); - } - - public static Map sourceAsMap(byte[] bytes, int offset, int length) throws ElasticsearchParseException { - return sourceAsMapAndType(bytes, offset, length).v2(); - } - public void setSegmentAndDocument(LeafReaderContext context, int docId) { if (this.reader == context.reader() && this.docId == docId) { // if we are called with the same document, don't invalidate source diff --git a/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index b0689df6f2c..aa2d69dac6b 100644 --- a/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.compress.NotCompressedException; import org.elasticsearch.common.io.ThrowableObjectInputStream; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.ESLogger; @@ -91,8 +92,10 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { StreamInput wrappedStream; if (TransportStatus.isCompress(status) && hasMessageBytesToRead && 
buffer.readable()) { - Compressor compressor = CompressorFactory.compressor(buffer); - if (compressor == null) { + Compressor compressor; + try { + compressor = CompressorFactory.compressor(buffer); + } catch (NotCompressedException ex) { int maxToRead = Math.min(buffer.readableBytes(), 10); int offset = buffer.readerIndex(); StringBuilder sb = new StringBuilder("stream marked as compressed, but no compressor found, first [").append(maxToRead).append("] content bytes out of [").append(buffer.readableBytes()).append("] readable bytes with message size [").append(size).append("] ").append("] are ["); diff --git a/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java b/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java index 230889699a4..18ebbc8c430 100644 --- a/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java +++ b/src/test/java/org/elasticsearch/cluster/metadata/MappingMetaDataParserTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; @@ -34,7 +34,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -52,7 +52,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void 
testFailIfIdIsNoValue() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -79,7 +79,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseRoutingAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -97,7 +97,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseTimestampAlone() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -115,11 +115,11 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseTimestampEquals() throws Exception { - MappingMetaData md1 = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md1 = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); - 
MappingMetaData md2 = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md2 = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -128,7 +128,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestamp() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -143,7 +143,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -161,7 +161,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ 
-182,7 +182,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseRoutingWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -203,7 +203,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseTimestampWithPath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -224,7 +224,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithinSamePath() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -242,7 +242,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithinSamePathAndMoreLevels() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new 
MappingMetaData.Id("obj1.obj0.id"), new MappingMetaData.Routing(true, "obj1.obj2.routing"), new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -271,7 +271,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseIdAndRoutingAndTimestampWithSameRepeatedObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("obj1.id"), new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -291,7 +291,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { // @Test public void testParseIdRoutingTimestampWithRepeatedField() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("field1"), new MappingMetaData.Routing(true, "field1.field1"), new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -314,7 +314,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase { @Test public void testParseNoIdRoutingWithRepeatedFieldAndObject() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id("id"), new MappingMetaData.Routing(true, "field1.field1.field2"), new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); @@ -337,7 +337,7 @@ public class MappingMetaDataParserTests extends 
ElasticsearchTestCase { @Test public void testParseRoutingWithRepeatedFieldAndValidRouting() throws Exception { - MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), + MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"), new MappingMetaData.Id(null), new MappingMetaData.Routing(true, "field1.field2"), new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); diff --git a/src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java b/src/test/java/org/elasticsearch/common/compress/CompressedXContentTests.java similarity index 72% rename from src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java rename to src/test/java/org/elasticsearch/common/compress/CompressedXContentTests.java index 4d9de6736e7..670ad10aaf2 100644 --- a/src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java +++ b/src/test/java/org/elasticsearch/common/compress/CompressedXContentTests.java @@ -37,14 +37,14 @@ import static org.hamcrest.Matchers.not; /** * */ -public class CompressedStringTests extends ElasticsearchTestCase { +public class CompressedXContentTests extends ElasticsearchTestCase { @Test public void simpleTestsLZF() throws IOException { simpleTests("lzf"); } - private void assertEquals(CompressedString s1, CompressedString s2) { + private void assertEquals(CompressedXContent s1, CompressedXContent s2) { Assert.assertEquals(s1, s2); assertArrayEquals(s1.uncompressed(), s2.uncompressed()); assertEquals(s1.hashCode(), s2.hashCode()); @@ -52,16 +52,16 @@ public class CompressedStringTests extends ElasticsearchTestCase { public void simpleTests(String compressor) throws IOException { CompressorFactory.configure(Settings.settingsBuilder().put("compress.default.type", compressor).build()); - String str = "this is a simple string"; - CompressedString cstr = new CompressedString(str); + String str = "---\nf:this is a 
simple string"; + CompressedXContent cstr = new CompressedXContent(str); assertThat(cstr.string(), equalTo(str)); - assertThat(new CompressedString(str), equalTo(cstr)); + assertThat(new CompressedXContent(str), equalTo(cstr)); - String str2 = "this is a simple string 2"; - CompressedString cstr2 = new CompressedString(str2); + String str2 = "---\nf:this is a simple string 2"; + CompressedXContent cstr2 = new CompressedXContent(str2); assertThat(cstr2.string(), not(equalTo(str))); - assertThat(new CompressedString(str2), not(equalTo(cstr))); - assertEquals(new CompressedString(str2), cstr2); + assertThat(new CompressedXContent(str2), not(equalTo(cstr))); + assertEquals(new CompressedXContent(str2), cstr2); } public void testRandom() throws IOException { @@ -70,13 +70,15 @@ public class CompressedStringTests extends ElasticsearchTestCase { Random r = getRandom(); for (int i = 0; i < 1000; i++) { String string = TestUtil.randomUnicodeString(r, 10000); - CompressedString compressedString = new CompressedString(string); - assertThat(compressedString.string(), equalTo(string)); + // hack to make it detected as YAML + string = "---\n" + string; + CompressedXContent compressedXContent = new CompressedXContent(string); + assertThat(compressedXContent.string(), equalTo(string)); } } public void testDifferentCompressedRepresentation() throws Exception { - byte[] b = "abcdefghijabcdefghij".getBytes("UTF-8"); + byte[] b = "---\nf:abcdefghijabcdefghij".getBytes("UTF-8"); CompressorFactory.defaultCompressor(); Compressor compressor = CompressorFactory.defaultCompressor(); @@ -100,14 +102,14 @@ public class CompressedStringTests extends ElasticsearchTestCase { // of different size are being used assertFalse(b1.equals(b2)); // we used the compressed representation directly and did not recompress - assertArrayEquals(b1.toBytes(), new CompressedString(b1).compressed()); - assertArrayEquals(b2.toBytes(), new CompressedString(b2).compressed()); + assertArrayEquals(b1.toBytes(), new 
CompressedXContent(b1).compressed()); + assertArrayEquals(b2.toBytes(), new CompressedXContent(b2).compressed()); // but compressedstring instances are still equal - assertEquals(new CompressedString(b1), new CompressedString(b2)); + assertEquals(new CompressedXContent(b1), new CompressedXContent(b2)); } public void testHashCode() throws IOException { - assertFalse(new CompressedString("a").hashCode() == new CompressedString("b").hashCode()); + assertFalse(new CompressedXContent("{\"a\":\"b\"}").hashCode() == new CompressedXContent("{\"a\":\"c\"}").hashCode()); } } diff --git a/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java index e904116221f..f3d89e86ca9 100644 --- a/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java +++ b/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java @@ -82,7 +82,7 @@ public class XContentFactoryTests extends ElasticsearchTestCase { // this if for {"foo" : 5} in python CBOR bytes = new byte[] {(byte) 0xA1, (byte) 0x63, (byte) 0x66, (byte) 0x6f, (byte) 0x6f, (byte) 0x5}; assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.CBOR)); - assertThat(((Number) XContentHelper.convertToMap(bytes, true).v2().get("foo")).intValue(), equalTo(5)); + assertThat(((Number) XContentHelper.convertToMap(new BytesArray(bytes), true).v2().get("foo")).intValue(), equalTo(5)); // also make sure major type check doesn't collide with SMILE and JSON, just in case assertThat(CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, SmileConstants.HEADER_BYTE_1), equalTo(false)); diff --git a/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java b/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java index 3115edeca26..484e5c92270 100644 --- a/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java +++ 
b/src/test/java/org/elasticsearch/index/aliases/IndexAliasesServiceTests.java @@ -19,7 +19,8 @@ package org.elasticsearch.index.aliases; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -47,11 +48,11 @@ public class IndexAliasesServiceTests extends ElasticsearchSingleNodeTest { return indexService.aliasesService(); } - public static CompressedString filter(QueryBuilder filterBuilder) throws IOException { + public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - return new CompressedString(builder.string()); + return new CompressedXContent(builder.string()); } @Test diff --git a/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index 144d137f46d..bdd3b716259 100644 --- a/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; import org.elasticsearch.index.mapper.Uid; @@ -63,10 +63,10 @@ public class 
ParentChildFieldDataTests extends AbstractFieldDataTests { @Before public void before() throws Exception { mapperService.merge( - childType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true + childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true ); mapperService.merge( - grandChildType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true + grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true ); Document d = new Document(); diff --git a/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java index a7f8017efa7..efe22e0eaf8 100644 --- a/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java @@ -82,7 +82,7 @@ public class BinaryMappingTests extends ElasticsearchSingleNodeTest { new BytesArray(binaryValue1).writeTo(compressed); } final byte[] binaryValue2 = out.bytes().toBytes(); - assertTrue(CompressorFactory.isCompressed(binaryValue2)); + assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { ParsedDocument doc = mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes()); @@ -114,7 +114,7 @@ public class BinaryMappingTests extends ElasticsearchSingleNodeTest { new BytesArray(original).writeTo(compressed); } final byte[] binaryValue = out.bytes().toBytes(); - assertTrue(CompressorFactory.isCompressed(binaryValue)); + assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue))); ParsedDocument doc = 
mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", binaryValue).endObject().bytes()); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); diff --git a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 620847559ee..98d17ba150d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper.merge; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -160,7 +160,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { public void testConcurrentMergeTest() throws Throwable { final MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("test", new CompressedString("{\"test\":{}}"), true); + mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true); final DocumentMapper documentMapper = mapperService.documentMapper("test"); DocumentFieldMappers dfm = documentMapper.mappers(); @@ -186,7 +186,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { Mapping update = doc.dynamicMappingsUpdate(); assert update != null; lastIntroducedFieldName.set(fieldName); - mapperService.merge("test", new CompressedString(update.toString()), false); + mapperService.merge("test", new CompressedXContent(update.toString()), false); } } catch (Throwable t) { error.set(t); diff --git 
a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index e7df72c3dcd..89e88469913 100644 --- a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -432,7 +432,7 @@ public class MultiFieldTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); Arrays.sort(multiFieldNames); - Map sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressed(), true).v2(); + Map sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressedReference(), true).v2(); @SuppressWarnings("unchecked") Map multiFields = (Map) XContentMapValues.extractValue("type.properties.my_field.fields", sourceAsMap); assertThat(multiFields.size(), equalTo(multiFieldNames.length)); diff --git a/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java index 90da64b3fbd..e08562cfb78 100644 --- a/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/source/CompressSourceMappingTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.source; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -51,7 +52,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .field("field2", "value2") .endObject().bytes()); BytesRef bytes = 
doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(false)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false)); } @Test @@ -68,7 +69,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().bytes()); BytesRef bytes = doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(true)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true)); } @Test @@ -84,7 +85,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().bytes()); BytesRef bytes = doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(false)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false)); doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject() .field("field1", "value1") @@ -95,6 +96,6 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().bytes()); bytes = doc.rootDoc().getBinaryValue("_source"); - assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(true)); + assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true)); } } diff --git a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 0760255c2a8..10f33c9025d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -23,7 +23,7 @@ import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; 
import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -193,7 +193,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true); DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1(); assertThat(mapper.type(), equalTo("my_type")); @@ -206,12 +206,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true); String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() .endObject().endObject().string(); - mapperService.merge("my_type", new CompressedString(mapping), true); + mapperService.merge("my_type", new CompressedXContent(mapping), true); DocumentMapper mapper = mapperService.documentMapper("my_type"); assertThat(mapper.type(), equalTo("my_type")); diff --git a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index e415ea76c07..c5e17a7fe6b 100644 --- 
a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -27,7 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.joda.Joda; @@ -450,7 +450,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { { MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null); - MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), + MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); @@ -467,7 +467,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { { MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null); - MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), + MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); @@ -484,7 +484,7 @@ public class TimestampMappingTests extends 
ElasticsearchSingleNodeTest { { MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false); - MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), + MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); BytesStreamOutput out = new BytesStreamOutput(); @@ -652,7 +652,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject().endObject().string(); // This was causing a NPE - new MappingMetaData(new CompressedString(mapping)); + new MappingMetaData(new CompressedXContent(mapping)); } @Test diff --git a/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index dca10c636fe..32b75094a8d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -27,7 +27,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -196,7 +197,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { IndexService indexService = createIndex("testindex", 
Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean()); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean()); assertFalse(mergeResult.hasConflicts()); } @@ -204,7 +205,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean()); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean()); assertFalse(mergeResult.hasConflicts()); } @@ -213,23 +214,23 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false); + 
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); - CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @Test public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); - CompressedString mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterCreation, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); + assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false); + MergeResult mergeResult = 
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); - CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @Test @@ -238,12 +239,12 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { //check if default ttl changed when simulate set to true XContentBuilder mappingWithTtl = getMappingWithTtlEnabled("6d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl); - CompressedString mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d"); - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDifferentDefault.string()), true).mapping(), true); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - no mappings applied - CompressedString mappingAfterMerge = 
indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge)); client().admin().indices().prepareDelete("testindex").get(); @@ -252,7 +253,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled(); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true); + mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - no mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); @@ -264,7 +265,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource(); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true); + mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true); 
assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - no mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); @@ -275,21 +276,21 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { mappingWithoutTtl = getMappingWithTtlDisabled("6d"); indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false); + mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); client().admin().indices().prepareDelete("testindex").get(); // check if switching simulate flag off works if nothing was applied in the beginning indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false); + mergeResult = 
indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false); assertFalse(mergeResult.hasConflicts()); // make sure simulate flag actually worked - mappings applied mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); + assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -348,4 +349,4 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest { .startObject("properties").field("field").startObject().field("type", "string").endObject().endObject() .endObject().endObject(); } -} \ No newline at end of file +} diff --git a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index 1090f6df85a..35ae3dbb8c4 100644 --- a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.mapper.update; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -80,11 +80,11 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, 
XContentBuilder expectedMapping) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), false); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false); // assure we have no conflicts assertThat(mergeResult.buildConflicts().length, equalTo(0)); // make sure mappings applied - CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string())); } @@ -102,13 +102,13 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mapping, XContentBuilder mappingUpdate) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); - CompressedString mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping - MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), true); + MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new 
CompressedXContent(mappingUpdate.bytes()), true).mapping(), true); // assure we have conflicts assertThat(mergeResult.buildConflicts().length, equalTo(1)); // make sure simulate flag actually worked - no mappings applied - CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate)); } @@ -124,9 +124,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject() .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); - documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled)); } @@ -146,11 +146,11 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject() .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); - documentMapper = indexService.mapperService().parse("type", new 
CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); @@ -168,10 +168,10 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .endObject() .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); assertThat(documentMapper.sizeFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.sizeFieldMapper().fieldType().stored()); - documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.sizeFieldMapper().enabled(), equalTo(enabled)); } @@ -179,9 +179,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { public void testSizeTimestampIndexParsing() throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build()); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json"); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(mapping), true); + DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true); assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); - documentMapper = 
indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); + documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); } diff --git a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java index 9431f8a8cf6..5111b36969e 100644 --- a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java +++ b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java @@ -23,7 +23,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; @@ -57,7 +57,7 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin MapperService mapperService = indexService.mapperService(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedString(mapping), true); + mapperService.merge("person", new CompressedXContent(mapping), true); ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); diff --git 
a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java index 6d1a3dbb344..fde771c5457 100644 --- a/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java +++ b/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java @@ -23,7 +23,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; @@ -58,7 +58,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ElasticsearchS MapperService mapperService = indexService.mapperService(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedString(mapping), true); + mapperService.merge("person", new CompressedXContent(mapping), true); ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); diff --git a/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 992f1777a2c..b42a622fcf1 100644 --- a/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ 
b/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -70,7 +70,7 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.lucene.search.Queries; @@ -209,7 +209,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest { MapperService mapperService = indexService.mapperService(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); - mapperService.merge("person", new CompressedString(mapping), true); + mapperService.merge("person", new CompressedXContent(mapping), true); ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); diff --git a/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java b/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java index d6aa83c341b..8eabad9735d 100644 --- a/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java +++ b/src/test/java/org/elasticsearch/index/search/child/AbstractChildTests.java @@ -31,7 +31,7 @@ import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.compress.CompressedString; +import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; @@ -67,8 +67,8 @@ public abstract class AbstractChildTests extends ElasticsearchSingleNodeTest { MapperService mapperService = indexService.mapperService(); // Parent/child parsers require that the parent and child type to be presented in mapping // Sometimes we want a nested object field in the parent type that triggers nonNestedDocsFilter to be used - mapperService.merge(parentType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true); - mapperService.merge(childType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true); + mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? 
"type=nested" : "type=object").string()), true); + mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true); return createSearchContext(indexService); } diff --git a/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java b/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java index 6907736d3cb..131e8ad73df 100644 --- a/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java +++ b/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.template; import com.google.common.collect.Lists; import com.google.common.collect.Sets; + import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestValidationException; @@ -32,6 +33,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; @@ -42,6 +44,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.junit.Test; +import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Set; @@ -668,4 +671,5 @@ public class SimpleIndexTemplateTests extends ElasticsearchIntegrationTest { assertThat(response.getItems()[0].isFailed(), equalTo(true)); assertThat(response.getItems()[0].getFailureMessage(), containsString("failed to parse filter for alias [alias4]")); } + } diff --git 
a/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java b/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java index 855f21de852..f07d7790ff4 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java +++ b/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTest.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.compress.CompressedString; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; @@ -117,7 +117,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeTest { IndexSearcher searcher = new IndexSearcher(directoryReader); IndexService indexService = createIndex("test"); - indexService.mapperService().merge("test", new CompressedString(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true); + indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true); SearchContext searchContext = createSearchContext(indexService); AggregationContext context = new AggregationContext(searchContext); From 5a9694783b657e58ee2c3e3c84b9696a0fdd4eeb Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 29 May 2015 13:52:26 +0200 Subject: [PATCH 02/37] Consolidate shard level modules without logic into IndexShardModule We have a lot of module classes that don't contain any actual logic, only declarative bind actions. 
These classes are unnecessary and can be consolidated into the already existings IndexShardModule --- .../metadata/MetaDataCreateIndexService.java | 4 +- .../org/elasticsearch/index/IndexService.java | 39 +++++++----------- .../bitset/ShardBitsetFilterCacheModule.java | 32 --------------- .../cache/filter/ShardFilterCacheModule.java | 38 ----------------- .../cache/query/ShardQueryCacheModule.java | 32 --------------- .../gateway/IndexShardGatewayModule.java | 35 ---------------- .../index/get/ShardGetModule.java | 32 --------------- .../index/indexing/ShardIndexingModule.java | 34 --------------- .../percolator/PercolatorShardModule.java | 34 --------------- .../index/search/stats/ShardSearchModule.java | 34 --------------- .../index/shard/IndexShardModule.java | 41 ++++++++++++++++++- .../snapshots/IndexShardSnapshotModule.java | 33 --------------- .../index/store/StoreModule.java | 2 - .../termvectors/ShardTermVectorsModule.java | 32 --------------- .../index/shard/IndexShardModuleTests.java | 8 ++-- 15 files changed, 59 insertions(+), 371 deletions(-) delete mode 100644 src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java delete mode 100644 src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java delete mode 100644 src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java delete mode 100644 src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java delete mode 100644 src/main/java/org/elasticsearch/index/get/ShardGetModule.java delete mode 100644 src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java delete mode 100644 src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java delete mode 100644 src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java delete mode 100644 src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java delete mode 100644 
src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 41e310a95ad..b16edc21dd7 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -87,7 +87,6 @@ public class MetaDataCreateIndexService extends AbstractComponent { public final static int MAX_INDEX_NAME_BYTES = 255; private static final DefaultIndexTemplateFilter DEFAULT_INDEX_TEMPLATE_FILTER = new DefaultIndexTemplateFilter(); - private final Environment environment; private final ThreadPool threadPool; private final ClusterService clusterService; private final IndicesService indicesService; @@ -100,12 +99,11 @@ public class MetaDataCreateIndexService extends AbstractComponent { private final NodeEnvironment nodeEnv; @Inject - public MetaDataCreateIndexService(Settings settings, Environment environment, ThreadPool threadPool, ClusterService clusterService, + public MetaDataCreateIndexService(Settings settings, ThreadPool threadPool, ClusterService clusterService, IndicesService indicesService, AllocationService allocationService, MetaDataService metaDataService, Version version, @RiverIndexName String riverIndexName, AliasValidator aliasValidator, Set indexTemplateFilters, NodeEnvironment nodeEnv) { super(settings); - this.environment = environment; this.threadPool = threadPool; this.clusterService = clusterService; this.indicesService = indicesService; diff --git a/src/main/java/org/elasticsearch/index/IndexService.java b/src/main/java/org/elasticsearch/index/IndexService.java index 09335126c73..e6ff7f232f2 100644 --- a/src/main/java/org/elasticsearch/index/IndexService.java +++ b/src/main/java/org/elasticsearch/index/IndexService.java @@ -36,36 +36,37 @@ import 
org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCacheModule; +import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache; import org.elasticsearch.index.cache.filter.ShardFilterCache; -import org.elasticsearch.index.cache.filter.ShardFilterCacheModule; -import org.elasticsearch.index.cache.query.ShardQueryCacheModule; +import org.elasticsearch.index.cache.query.ShardQueryCache; import org.elasticsearch.index.deletionpolicy.DeletionPolicyModule; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.fielddata.ShardFieldDataModule; -import org.elasticsearch.index.gateway.IndexShardGatewayModule; +import org.elasticsearch.index.fielddata.ShardFieldData; +import org.elasticsearch.index.gateway.IndexShardGateway; import org.elasticsearch.index.gateway.IndexShardGatewayService; -import org.elasticsearch.index.get.ShardGetModule; -import org.elasticsearch.index.indexing.ShardIndexingModule; +import org.elasticsearch.index.get.ShardGetService; +import org.elasticsearch.index.indexing.ShardIndexingService; +import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.policy.MergePolicyModule; import org.elasticsearch.index.merge.policy.MergePolicyProvider; import org.elasticsearch.index.merge.scheduler.MergeSchedulerModule; import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; -import org.elasticsearch.index.percolator.PercolatorShardModule; +import org.elasticsearch.index.percolator.stats.ShardPercolateService; import org.elasticsearch.index.query.IndexQueryParserService; -import 
org.elasticsearch.index.search.stats.ShardSearchModule; +import org.elasticsearch.index.search.slowlog.ShardSlowLogSearchService; +import org.elasticsearch.index.search.stats.ShardSearchService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; import org.elasticsearch.index.shard.*; import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.index.snapshots.IndexShardSnapshotModule; +import org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreModule; -import org.elasticsearch.index.suggest.SuggestShardModule; -import org.elasticsearch.index.termvectors.ShardTermVectorsModule; +import org.elasticsearch.index.suggest.stats.ShardSuggestService; +import org.elasticsearch.index.termvectors.ShardTermVectorsService; import org.elasticsearch.index.translog.TranslogService; import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesService; @@ -307,24 +308,12 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone final ShardFilterCache shardFilterCache = new ShardFilterCache(shardId, injector.getInstance(IndicesFilterCache.class)); ModulesBuilder modules = new ModulesBuilder(); modules.add(new ShardsPluginsModule(indexSettings, pluginsService)); - modules.add(new IndexShardModule(shardId, primary, indexSettings)); - modules.add(new ShardIndexingModule()); - modules.add(new ShardSearchModule()); - modules.add(new ShardGetModule()); + modules.add(new IndexShardModule(shardId, primary, indexSettings, shardFilterCache)); modules.add(new StoreModule(injector.getInstance(IndexStore.class).shardDirectory(), lock, new StoreCloseListener(shardId, canDeleteShardContent, shardFilterCache), path)); modules.add(new DeletionPolicyModule(indexSettings)); modules.add(new 
MergePolicyModule(indexSettings)); modules.add(new MergeSchedulerModule(indexSettings)); - modules.add(new ShardFilterCacheModule(shardFilterCache)); - modules.add(new ShardQueryCacheModule()); - modules.add(new ShardBitsetFilterCacheModule()); - modules.add(new ShardFieldDataModule()); - modules.add(new IndexShardGatewayModule()); - modules.add(new PercolatorShardModule()); - modules.add(new ShardTermVectorsModule()); - modules.add(new IndexShardSnapshotModule()); - modules.add(new SuggestShardModule()); try { shardInjector = modules.createChildInjector(injector); } catch (CreationException e) { diff --git a/src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java b/src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java deleted file mode 100644 index c0087119f66..00000000000 --- a/src/main/java/org/elasticsearch/index/cache/bitset/ShardBitsetFilterCacheModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.cache.bitset; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardBitsetFilterCacheModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardBitsetFilterCache.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java b/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java deleted file mode 100644 index 37bcb805768..00000000000 --- a/src/main/java/org/elasticsearch/index/cache/filter/ShardFilterCacheModule.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.cache.filter; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardFilterCacheModule extends AbstractModule { - - private final ShardFilterCache shardFilterCache; - - public ShardFilterCacheModule(ShardFilterCache shardFilterCache) { - this.shardFilterCache = shardFilterCache; - } - - @Override - protected void configure() { - bind(ShardFilterCache.class).toInstance(shardFilterCache); - } -} diff --git a/src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java b/src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java deleted file mode 100644 index 938f016a8c3..00000000000 --- a/src/main/java/org/elasticsearch/index/cache/query/ShardQueryCacheModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.cache.query; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardQueryCacheModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardQueryCache.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java b/src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java deleted file mode 100644 index 11ff2cf717e..00000000000 --- a/src/main/java/org/elasticsearch/index/gateway/IndexShardGatewayModule.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.gateway; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class IndexShardGatewayModule extends AbstractModule { - - - @Override - protected void configure() { - bind(IndexShardGateway.class).asEagerSingleton(); - bind(IndexShardGatewayService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/get/ShardGetModule.java b/src/main/java/org/elasticsearch/index/get/ShardGetModule.java deleted file mode 100644 index bc1df275dc5..00000000000 --- a/src/main/java/org/elasticsearch/index/get/ShardGetModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.get; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - */ -public class ShardGetModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardGetService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java b/src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java deleted file mode 100644 index 4d5e29daca3..00000000000 --- a/src/main/java/org/elasticsearch/index/indexing/ShardIndexingModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.indexing; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService; - -/** - */ -public class ShardIndexingModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardIndexingService.class).asEagerSingleton(); - bind(ShardSlowLogIndexingService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java b/src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java deleted file mode 100644 index aba7e10fb2e..00000000000 --- a/src/main/java/org/elasticsearch/index/percolator/PercolatorShardModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.percolator; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.index.percolator.stats.ShardPercolateService; - -/** - * - */ -public class PercolatorShardModule extends AbstractModule { - - @Override - protected void configure() { - bind(PercolatorQueriesRegistry.class).asEagerSingleton(); - bind(ShardPercolateService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java b/src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java deleted file mode 100644 index 28f8c09c8c2..00000000000 --- a/src/main/java/org/elasticsearch/index/search/stats/ShardSearchModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.stats; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.index.search.slowlog.ShardSlowLogSearchService; - -/** - */ -public class ShardSearchModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardSearchService.class).asEagerSingleton(); - bind(ShardSlowLogSearchService.class).asEagerSingleton(); - } -} diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java b/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java index fc44f11eab9..672b63bfb1c 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java @@ -22,8 +22,27 @@ package org.elasticsearch.index.shard; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.ShardLock; +import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache; +import org.elasticsearch.index.cache.filter.ShardFilterCache; +import org.elasticsearch.index.cache.query.ShardQueryCache; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.fielddata.ShardFieldData; +import org.elasticsearch.index.gateway.IndexShardGateway; +import org.elasticsearch.index.gateway.IndexShardGatewayService; +import org.elasticsearch.index.get.ShardGetService; +import org.elasticsearch.index.indexing.ShardIndexingService; +import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService; +import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; +import org.elasticsearch.index.percolator.stats.ShardPercolateService; +import org.elasticsearch.index.search.slowlog.ShardSlowLogSearchService; +import org.elasticsearch.index.search.stats.ShardSearchService; +import 
org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService; +import org.elasticsearch.index.store.DirectoryService; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.suggest.stats.ShardSuggestService; +import org.elasticsearch.index.termvectors.ShardTermVectorsService; import org.elasticsearch.index.translog.TranslogService; import org.elasticsearch.index.warmer.ShardIndexWarmerService; @@ -43,9 +62,11 @@ public class IndexShardModule extends AbstractModule { private final ShardId shardId; private final Settings settings; private final boolean primary; + private final ShardFilterCache shardFilterCache; - public IndexShardModule(ShardId shardId, boolean primary, Settings settings) { + public IndexShardModule(ShardId shardId, boolean primary, Settings settings, ShardFilterCache shardFilterCache) { this.settings = settings; + this.shardFilterCache = shardFilterCache; this.shardId = shardId; this.primary = primary; if (settings.get("index.translog.type") != null) { @@ -69,7 +90,25 @@ public class IndexShardModule extends AbstractModule { } bind(EngineFactory.class).to(settings.getAsClass(ENGINE_FACTORY, DEFAULT_ENGINE_FACTORY_CLASS, ENGINE_PREFIX, ENGINE_SUFFIX)); + bind(ShardIndexWarmerService.class).asEagerSingleton(); + bind(ShardIndexingService.class).asEagerSingleton(); + bind(ShardSlowLogIndexingService.class).asEagerSingleton(); + bind(ShardSearchService.class).asEagerSingleton(); + bind(ShardSlowLogSearchService.class).asEagerSingleton(); + bind(ShardGetService.class).asEagerSingleton(); + bind(ShardFilterCache.class).toInstance(shardFilterCache); + bind(ShardQueryCache.class).asEagerSingleton(); + bind(ShardBitsetFilterCache.class).asEagerSingleton(); + bind(ShardFieldData.class).asEagerSingleton(); + bind(IndexShardGateway.class).asEagerSingleton(); + bind(IndexShardGatewayService.class).asEagerSingleton(); + bind(PercolatorQueriesRegistry.class).asEagerSingleton(); + bind(ShardPercolateService.class).asEagerSingleton(); 
+ bind(ShardTermVectorsService.class).asEagerSingleton(); + bind(IndexShardSnapshotAndRestoreService.class).asEagerSingleton(); + bind(ShardSuggestService.class).asEagerSingleton(); } + } \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java b/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java deleted file mode 100644 index c0cf9788400..00000000000 --- a/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotModule.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.snapshots; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * This shard-level module configures {@link IndexShardSnapshotAndRestoreService} - */ -public class IndexShardSnapshotModule extends AbstractModule { - - @Override - protected void configure() { - bind(IndexShardSnapshotAndRestoreService.class).asEagerSingleton(); - } -} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/index/store/StoreModule.java b/src/main/java/org/elasticsearch/index/store/StoreModule.java index fe0e314ba16..fccd2de2e43 100644 --- a/src/main/java/org/elasticsearch/index/store/StoreModule.java +++ b/src/main/java/org/elasticsearch/index/store/StoreModule.java @@ -27,8 +27,6 @@ import org.elasticsearch.index.shard.ShardPath; * */ public class StoreModule extends AbstractModule { - - private final ShardLock lock; private final Store.OnClose closeCallback; private final ShardPath path; diff --git a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java b/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java deleted file mode 100644 index 45a7d14b703..00000000000 --- a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsModule.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.termvectors; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class ShardTermVectorsModule extends AbstractModule { - - @Override - protected void configure() { - bind(ShardTermVectorsService.class).asEagerSingleton(); - } -} diff --git a/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java b/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java index b5ac0cce405..8238c159df2 100644 --- a/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java +++ b/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java @@ -41,10 +41,10 @@ public class IndexShardModuleTests extends ElasticsearchTestCase { .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) .build(); - IndexShardModule ism1 = new IndexShardModule(shardId, true, regularSettings); - IndexShardModule ism2 = new IndexShardModule(shardId, false, regularSettings); - IndexShardModule ism3 = new IndexShardModule(shardId, true, shadowSettings); - IndexShardModule ism4 = new IndexShardModule(shardId, false, shadowSettings); + IndexShardModule ism1 = new IndexShardModule(shardId, true, regularSettings, null); + IndexShardModule ism2 = new IndexShardModule(shardId, false, regularSettings, null); + IndexShardModule ism3 = new IndexShardModule(shardId, true, shadowSettings, null); + IndexShardModule ism4 = new IndexShardModule(shardId, false, shadowSettings, null); assertFalse("no shadow replicas for normal settings", ism1.useShadowEngine()); assertFalse("no shadow replicas for normal settings", ism2.useShadowEngine()); From c7ca64cc080ba9426e82e9223f71d0e9177a7868 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 26 May 2015 16:14:54 +0200 Subject: [PATCH 03/37] Fix typed parameters in IndexRequestBuilder and CreateIndexRequestBuilder 
IndexRequestBuilder#setSource as well as CreateIndexRequestBuilder#setSettings and CreateIndexRequestBuilder#setSource() will not work with a Map argument although the API looks like it should. This PR fixes the problem by introducing correct wildcard parameters and adds tests. Closes #10825 --- .../indices/create/CreateIndexRequest.java | 4 +- .../create/CreateIndexRequestBuilder.java | 4 +- .../action/index/IndexRequestBuilder.java | 4 +- .../create/CreateIndexRequestBuilderTest.java | 112 ++++++++++++++++++ .../action/index/IndexRequestBuilderTest.java | 87 ++++++++++++++ .../HeadersAndContextCopyClientTests.java | 52 +++----- .../org/elasticsearch/rest/NoOpClient.java | 54 +++++++++ 7 files changed, 276 insertions(+), 41 deletions(-) create mode 100644 src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java create mode 100644 src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java create mode 100644 src/test/java/org/elasticsearch/rest/NoOpClient.java diff --git a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 873e6e9e8ca..3a174484ef9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -377,9 +377,9 @@ public class CreateIndexRequest extends AcknowledgedRequest * Sets the settings and mappings as a single source.
*/ @SuppressWarnings("unchecked") - public CreateIndexRequest source(Map source) { + public CreateIndexRequest source(Map source) { boolean found = false; - for (Map.Entry entry : source.entrySet()) { + for (Map.Entry entry : source.entrySet()) { String name = entry.getKey(); if (name.equals("settings")) { found = true; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 12648db563a..637c6d7ba08 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -93,7 +93,7 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder source) { + public CreateIndexRequestBuilder setSettings(Map source) { request.settings(source); return this; } @@ -223,7 +223,7 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder source) { + public CreateIndexRequestBuilder setSource(Map source) { request.source(source); return this; } diff --git a/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index cf494358628..5b6674e38a1 100644 --- a/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -90,7 +90,7 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder source) { + public IndexRequestBuilder setSource(Map source) { request.source(source); return this; } @@ -100,7 +100,7 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder source, XContentType contentType) { + public IndexRequestBuilder setSource(Map source, XContentType contentType) { request.source(source, contentType); return this; } diff --git 
a/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java b/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java new file mode 100644 index 00000000000..31576c38d06 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTest.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.create; + +import org.elasticsearch.action.index.IndexRequestBuilderTest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.rest.NoOpClient; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class CreateIndexRequestBuilderTest extends ElasticsearchTestCase { + + private static final String KEY = "my.settings.key"; + private static final String VALUE = "my.settings.value"; + private NoOpClient testClient; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + this.testClient = new NoOpClient(getTestName()); + } + + @Override + @After + public void tearDown() throws Exception { + this.testClient.close(); + super.tearDown(); + } + + /** + * test setting the source with available setters + */ + @Test + public void testSetSource() throws IOException { + CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient, CreateIndexAction.INSTANCE); + builder.setSource("{\""+KEY+"\" : \""+VALUE+"\"}"); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + XContentBuilder xContent = XContentFactory.jsonBuilder().startObject().field(KEY, VALUE).endObject(); + xContent.close(); + builder.setSource(xContent); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + ByteArrayOutputStream docOut = new ByteArrayOutputStream(); + XContentBuilder doc = XContentFactory.jsonBuilder(docOut).startObject().field(KEY, VALUE).endObject(); + doc.close(); + builder.setSource(docOut.toByteArray()); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + Map settingsMap = new HashMap<>(); + settingsMap.put(KEY, 
VALUE); + builder.setSettings(settingsMap); + assertEquals(VALUE, builder.request().settings().get(KEY)); + } + + /** + * test setting the settings with available setters + */ + @Test + public void testSetSettings() throws IOException { + CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient, CreateIndexAction.INSTANCE); + builder.setSettings(KEY, VALUE); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + builder.setSettings("{\""+KEY+"\" : \""+VALUE+"\"}"); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + builder.setSettings(Settings.builder().put(KEY, VALUE)); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + builder.setSettings(Settings.builder().put(KEY, VALUE).build()); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + Map settingsMap = new HashMap<>(); + settingsMap.put(KEY, VALUE); + builder.setSettings(settingsMap); + assertEquals(VALUE, builder.request().settings().get(KEY)); + + XContentBuilder xContent = XContentFactory.jsonBuilder().startObject().field(KEY, VALUE).endObject(); + xContent.close(); + builder.setSettings(xContent); + assertEquals(VALUE, builder.request().settings().get(KEY)); + } + +} diff --git a/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java b/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java new file mode 100644 index 00000000000..478e12051d6 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTest.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.index; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.NoOpClient; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.ByteArrayOutputStream; +import java.util.HashMap; +import java.util.Map; + +public class IndexRequestBuilderTest extends ElasticsearchTestCase { + + private static final String EXPECTED_SOURCE = "{\"SomeKey\":\"SomeValue\"}"; + private NoOpClient testClient; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + this.testClient = new NoOpClient(getTestName()); + } + + @Override + @After + public void tearDown() throws Exception { + this.testClient.close(); + super.tearDown(); + } + + /** + * test setting the source for the request with different available setters + */ + @Test + public void testSetSource() throws Exception { + IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(this.testClient, IndexAction.INSTANCE); + Map source = new HashMap<>(); + source.put("SomeKey", "SomeValue"); + indexRequestBuilder.setSource(source); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + indexRequestBuilder.setSource(source, XContentType.JSON); + assertEquals(EXPECTED_SOURCE, 
XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + indexRequestBuilder.setSource("SomeKey", "SomeValue"); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + // force the Object... setter + indexRequestBuilder.setSource((Object) "SomeKey", "SomeValue"); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + ByteArrayOutputStream docOut = new ByteArrayOutputStream(); + XContentBuilder doc = XContentFactory.jsonBuilder(docOut).startObject().field("SomeKey", "SomeValue").endObject(); + doc.close(); + indexRequestBuilder.setSource(docOut.toByteArray()); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + + doc = XContentFactory.jsonBuilder().startObject().field("SomeKey", "SomeValue").endObject(); + doc.close(); + indexRequestBuilder.setSource(doc); + assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); + } +} diff --git a/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java b/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java index 9d87de9a354..6a110cd5da4 100644 --- a/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java +++ b/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java @@ -20,8 +20,9 @@ package org.elasticsearch.rest; import com.google.common.collect.Maps; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.*; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; @@ -31,24 +32,26 @@ import 
org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.*; -import org.elasticsearch.client.support.AbstractClient; -import org.elasticsearch.client.support.Headers; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.Requests; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ElasticsearchTestCase; import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.threadpool.ThreadPool; import org.junit.Test; -import java.util.*; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.is; public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { @@ -108,7 +111,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { SearchRequest searchRequest = Requests.searchRequest(); putHeaders(searchRequest, transportHeaders); @@ -154,7 +157,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new 
NoOpClient(), new FakeRestRequest(restHeaders, expectedContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, expectedContext), usefulRestHeaders)) { ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest(); putHeaders(clusterHealthRequest, transportHeaders); @@ -200,7 +203,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { CreateIndexRequest createIndexRequest = Requests.createIndexRequest("test"); putHeaders(createIndexRequest, transportHeaders); @@ -246,7 +249,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ client.prepareIndex("index", "type"), @@ -287,7 +290,7 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ client.admin().cluster().prepareNodesInfo(), @@ -327,7 +330,7 @@ public 
class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { expectedContext.putAll(transportContext); expectedContext.putAll(restContext); - try (Client client = client(new NoOpClient(), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { + try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) { ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{ client.admin().indices().prepareValidateQuery(), @@ -420,25 +423,4 @@ public class HeadersAndContextCopyClientTests extends ElasticsearchTestCase { } } } - - private class NoOpClient extends AbstractClient { - - public NoOpClient() { - super(Settings.EMPTY, new ThreadPool(getTestName()), Headers.EMPTY); - } - - @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { - listener.onResponse(null); - } - - @Override - public void close() { - try { - terminate(threadPool()); - } catch (Throwable t) { - throw new ElasticsearchException(t.getMessage(), t); - } - } - } } diff --git a/src/test/java/org/elasticsearch/rest/NoOpClient.java b/src/test/java/org/elasticsearch/rest/NoOpClient.java new file mode 100644 index 00000000000..245bdb96a33 --- /dev/null +++ b/src/test/java/org/elasticsearch/rest/NoOpClient.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.support.AbstractClient; +import org.elasticsearch.client.support.Headers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.concurrent.TimeUnit; + +public class NoOpClient extends AbstractClient { + + public NoOpClient(String testName) { + super(Settings.EMPTY, new ThreadPool(testName), Headers.EMPTY); + } + + @Override + protected > void doExecute(Action action, Request request, ActionListener listener) { + listener.onResponse(null); + } + + @Override + public void close() { + try { + ThreadPool.terminate(threadPool(), 10, TimeUnit.SECONDS); + } catch (Throwable t) { + throw new ElasticsearchException(t.getMessage(), t); + } + } +} \ No newline at end of file From b6a3952036b1e3297ae56de385aecf7adeab8f37 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 May 2015 17:56:51 +0200 Subject: [PATCH 04/37] Internal: Use DEFLATE instead of LZF for compression. LZF only stays for backward-compatibility reasons and can only read, not write. DEFLATE is configured to use level=3, which is a nice trade-off between speed and compression ratio and is the same as we use for Lucene's high compression codec. 
--- .../client/transport/TransportClient.java | 11 +- .../common/compress/Compressor.java | 5 - .../common/compress/CompressorFactory.java | 64 ++----- .../common/compress/NotXContentException.java | 7 +- .../compress/deflate/DeflateCompressor.java | 156 ++++++++++++++++++ .../common/compress/lzf/LZFCompressor.java | 22 +-- .../common/xcontent/XContentHelper.java | 2 +- .../publish/PublishClusterStateAction.java | 20 +-- .../index/mapper/DocumentMapper.java | 10 +- .../index/mapper/core/BinaryFieldMapper.java | 12 +- .../java/org/elasticsearch/node/Node.java | 2 - .../blobstore/BlobStoreRepository.java | 68 +++++--- ...ava => AbstractCompressedStreamTests.java} | 48 +++--- ...a => AbstractCompressedXContentTests.java} | 63 +++---- .../deflate/DeflateCompressedStreamTests.java | 30 ++++ .../deflate/DeflateXContentTests.java | 30 ++++ .../compress/lzf}/CompressedStreamOutput.java | 2 +- .../lzf/LZFCompressedStreamOutput.java | 2 +- .../lzf/LZFCompressedStreamTests.java | 30 ++++ .../compress/lzf/LZFTestCompressor.java | 34 ++++ .../common/compress/lzf/LZFXContentTests.java | 30 ++++ .../compress/SearchSourceCompressTests.java | 16 +- 22 files changed, 480 insertions(+), 184 deletions(-) create mode 100644 src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java rename src/test/java/org/elasticsearch/common/compress/{CompressedStreamTests.java => AbstractCompressedStreamTests.java} (97%) rename src/test/java/org/elasticsearch/common/compress/{CompressedXContentTests.java => AbstractCompressedXContentTests.java} (62%) create mode 100644 src/test/java/org/elasticsearch/common/compress/deflate/DeflateCompressedStreamTests.java create mode 100644 src/test/java/org/elasticsearch/common/compress/deflate/DeflateXContentTests.java rename src/{main/java/org/elasticsearch/common/compress => test/java/org/elasticsearch/common/compress/lzf}/CompressedStreamOutput.java (98%) rename src/{main => 
test}/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamOutput.java (97%) create mode 100644 src/test/java/org/elasticsearch/common/compress/lzf/LZFCompressedStreamTests.java create mode 100644 src/test/java/org/elasticsearch/common/compress/lzf/LZFTestCompressor.java create mode 100644 src/test/java/org/elasticsearch/common/compress/lzf/LZFXContentTests.java diff --git a/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 9e3cb1f0f80..d63e94d2ffe 100644 --- a/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -20,8 +20,14 @@ package org.elasticsearch.client.transport; import com.google.common.collect.ImmutableList; + import org.elasticsearch.Version; -import org.elasticsearch.action.*; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.client.support.Headers; @@ -30,7 +36,6 @@ import org.elasticsearch.cluster.ClusterNameModule; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.network.NetworkModule; @@ -122,8 +127,6 @@ public class TransportClient extends AbstractClient { Version version = Version.CURRENT; - CompressorFactory.configure(this.settings); - final ThreadPool threadPool = new 
ThreadPool(settings); boolean success = false; diff --git a/src/main/java/org/elasticsearch/common/compress/Compressor.java b/src/main/java/org/elasticsearch/common/compress/Compressor.java index d8f0ae82bf6..252fad09807 100644 --- a/src/main/java/org/elasticsearch/common/compress/Compressor.java +++ b/src/main/java/org/elasticsearch/common/compress/Compressor.java @@ -23,7 +23,6 @@ import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; @@ -32,10 +31,6 @@ import java.io.IOException; */ public interface Compressor { - String type(); - - void configure(Settings settings); - boolean isCompressed(BytesReference bytes); boolean isCompressed(ChannelBuffer buffer); diff --git a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java index 9873123e558..72c57a97a01 100644 --- a/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java +++ b/src/main/java/org/elasticsearch/common/compress/CompressorFactory.java @@ -19,70 +19,36 @@ package org.elasticsearch.common.compress; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; - import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.compress.deflate.DeflateCompressor; import org.elasticsearch.common.compress.lzf.LZFCompressor; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.logging.Loggers; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; -import java.util.List; -import java.util.Locale; /** */ public class CompressorFactory { - private static final LZFCompressor LZF = new LZFCompressor(); - private static final Compressor[] compressors; - private static final ImmutableMap compressorsByType; - private static Compressor defaultCompressor; + private static volatile Compressor defaultCompressor; static { - List compressorsX = Lists.newArrayList(); - compressorsX.add(LZF); - - compressors = compressorsX.toArray(new Compressor[compressorsX.size()]); - MapBuilder compressorsByTypeX = MapBuilder.newMapBuilder(); - for (Compressor compressor : compressors) { - compressorsByTypeX.put(compressor.type(), compressor); - } - compressorsByType = compressorsByTypeX.immutableMap(); - - defaultCompressor = LZF; + compressors = new Compressor[] { + new LZFCompressor(), + new DeflateCompressor() + }; + defaultCompressor = new DeflateCompressor(); } - public static synchronized void configure(Settings settings) { - for (Compressor compressor : compressors) { - compressor.configure(settings); - } - String defaultType = settings.get("compress.default.type", "lzf").toLowerCase(Locale.ENGLISH); - boolean found = false; - for (Compressor compressor : compressors) { - if (defaultType.equalsIgnoreCase(compressor.type())) { - defaultCompressor = compressor; - found = true; - break; - } - } - if (!found) { - Loggers.getLogger(CompressorFactory.class).warn("failed to find default type [{}]", defaultType); - } - } - - public static synchronized void setDefaultCompressor(Compressor defaultCompressor) { + public static void setDefaultCompressor(Compressor defaultCompressor) { CompressorFactory.defaultCompressor = defaultCompressor; } @@ -94,6 +60,10 @@ public class CompressorFactory { return 
compressor(bytes) != null; } + /** + * @deprecated we don't compress lucene indexes anymore and rely on lucene codecs + */ + @Deprecated public static boolean isCompressed(IndexInput in) throws IOException { return compressor(in) != null; } @@ -127,6 +97,10 @@ public class CompressorFactory { throw new NotCompressedException(); } + /** + * @deprecated we don't compress lucene indexes anymore and rely on lucene codecs + */ + @Deprecated @Nullable public static Compressor compressor(IndexInput in) throws IOException { for (Compressor compressor : compressors) { @@ -137,10 +111,6 @@ public class CompressorFactory { return null; } - public static Compressor compressor(String type) { - return compressorsByType.get(type); - } - /** * Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}. */ @@ -160,7 +130,7 @@ public class CompressorFactory { public static BytesReference uncompress(BytesReference bytes) throws IOException { Compressor compressor = compressor(bytes); if (compressor == null) { - throw new IllegalArgumentException("Bytes are not compressed"); + throw new NotCompressedException(); } return uncompress(bytes, compressor); } diff --git a/src/main/java/org/elasticsearch/common/compress/NotXContentException.java b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java index bca35c317d8..68bbf4da81c 100644 --- a/src/main/java/org/elasticsearch/common/compress/NotXContentException.java +++ b/src/main/java/org/elasticsearch/common/compress/NotXContentException.java @@ -19,9 +19,10 @@ package org.elasticsearch.common.compress; -/** Exception indicating that we were expecting something compressed, which - * was not compressed or corrupted so that the compression format could not - * be detected. */ +import org.elasticsearch.common.xcontent.XContent; + +/** Exception indicating that we were expecting some {@link XContent} but could + * not detect its type. 
*/ public class NotXContentException extends RuntimeException { public NotXContentException(String message) { diff --git a/src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java b/src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java new file mode 100644 index 00000000000..b2aea1fa0ce --- /dev/null +++ b/src/main/java/org/elasticsearch/common/compress/deflate/DeflateCompressor.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.compress.deflate; + +import org.apache.lucene.store.IndexInput; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedIndexInput; +import org.elasticsearch.common.compress.Compressor; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.jboss.netty.buffer.ChannelBuffer; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.zip.Deflater; +import java.util.zip.DeflaterOutputStream; +import java.util.zip.Inflater; +import java.util.zip.InflaterInputStream; + +/** + * {@link Compressor} implementation based on the DEFLATE compression algorithm. 
+ */ +public class DeflateCompressor implements Compressor { + + // An arbitrary header that we use to identify compressed streams + // It needs to be different from other compressors and to not be specific + // enough so that no stream starting with these bytes could be detected as + // an XContent + private static final byte[] HEADER = new byte[] { 'D', 'F', 'L', '\0' }; + // 3 is a good trade-off between speed and compression ratio + private static final int LEVEL = 3; + // We use buffering on the input and output of in/def-laters in order to + // limit the number of JNI calls + private static final int BUFFER_SIZE = 4096; + + @Override + public boolean isCompressed(BytesReference bytes) { + if (bytes.length() < HEADER.length) { + return false; + } + for (int i = 0; i < HEADER.length; ++i) { + if (bytes.get(i) != HEADER[i]) { + return false; + } + } + return true; + } + + @Override + public boolean isCompressed(ChannelBuffer buffer) { + if (buffer.readableBytes() < HEADER.length) { + return false; + } + final int offset = buffer.readerIndex(); + for (int i = 0; i < HEADER.length; ++i) { + if (buffer.getByte(offset + i) != HEADER[i]) { + return false; + } + } + return true; + } + + @Override + public StreamInput streamInput(StreamInput in) throws IOException { + final byte[] headerBytes = new byte[HEADER.length]; + int len = 0; + while (len < headerBytes.length) { + final int read = in.read(headerBytes, len, headerBytes.length - len); + if (read == -1) { + break; + } + len += read; + } + if (len != HEADER.length || Arrays.equals(headerBytes, HEADER) == false) { + throw new IllegalArgumentException("Input stream is not compressed with DEFLATE!"); + } + + final boolean nowrap = true; + final Inflater inflater = new Inflater(nowrap); + InputStream decompressedIn = new InflaterInputStream(in, inflater, BUFFER_SIZE); + decompressedIn = new BufferedInputStream(decompressedIn, BUFFER_SIZE); + return new InputStreamStreamInput(decompressedIn) { + private boolean closed =
false; + + public void close() throws IOException { + try { + super.close(); + } finally { + if (closed == false) { + // important to release native memory + inflater.end(); + closed = true; + } + } + } + }; + } + + @Override + public StreamOutput streamOutput(StreamOutput out) throws IOException { + out.writeBytes(HEADER); + final boolean nowrap = true; + final Deflater deflater = new Deflater(LEVEL, nowrap); + final boolean syncFlush = true; + OutputStream compressedOut = new DeflaterOutputStream(out, deflater, BUFFER_SIZE, syncFlush); + compressedOut = new BufferedOutputStream(compressedOut, BUFFER_SIZE); + return new OutputStreamStreamOutput(compressedOut) { + private boolean closed = false; + + public void close() throws IOException { + try { + super.close(); + } finally { + if (closed == false) { + // important to release native memory + deflater.end(); + closed = true; + } + } + } + }; + } + + @Override + public boolean isCompressed(IndexInput in) throws IOException { + return false; + } + + @Override + public CompressedIndexInput indexInput(IndexInput in) throws IOException { + throw new UnsupportedOperationException(); + } +} diff --git a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java index c5c937ccbb9..3646595f724 100644 --- a/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java +++ b/src/main/java/org/elasticsearch/common/compress/lzf/LZFCompressor.java @@ -25,25 +25,23 @@ import com.ning.compress.lzf.util.ChunkDecoderFactory; import org.apache.lucene.store.IndexInput; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedIndexInput; -import org.elasticsearch.common.compress.CompressedStreamInput; -import org.elasticsearch.common.compress.CompressedStreamOutput; import org.elasticsearch.common.compress.Compressor; +import org.elasticsearch.common.compress.deflate.DeflateCompressor; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; import org.jboss.netty.buffer.ChannelBuffer; import java.io.IOException; /** + * @deprecated Use {@link DeflateCompressor} instead */ +@Deprecated public class LZFCompressor implements Compressor { static final byte[] LUCENE_HEADER = {'L', 'Z', 'F', 0}; - public static final String TYPE = "lzf"; - private ChunkDecoder decoder; public LZFCompressor() { @@ -52,14 +50,6 @@ public class LZFCompressor implements Compressor { this.decoder.getClass().getSimpleName()); } - @Override - public String type() { - return TYPE; - } - - @Override - public void configure(Settings settings) {} - @Override public boolean isCompressed(BytesReference bytes) { return bytes.length() >= 3 && @@ -95,13 +85,13 @@ public class LZFCompressor implements Compressor { } @Override - public CompressedStreamInput streamInput(StreamInput in) throws IOException { + public StreamInput streamInput(StreamInput in) throws IOException { return new LZFCompressedStreamInput(in, decoder); } @Override - public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException { - return new LZFCompressedStreamOutput(out); + public StreamOutput streamOutput(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("LZF is only here for back compat, no write support"); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 5325950e202..4efd18e8fa9 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -417,10 +417,10 @@ public class XContentHelper { Compressor compressor = CompressorFactory.compressor(source); if (compressor != null) { InputStream 
compressedStreamInput = compressor.streamInput(source.streamInput()); - XContentType contentType = XContentFactory.xContentType(compressedStreamInput); if (compressedStreamInput.markSupported() == false) { compressedStreamInput = new BufferedInputStream(compressedStreamInput); } + XContentType contentType = XContentFactory.xContentType(compressedStreamInput); if (contentType == builder.contentType()) { builder.rawField(field, compressedStreamInput); } else { diff --git a/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java index 92d5bad4bf6..7fd585a6a41 100644 --- a/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java +++ b/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java @@ -227,21 +227,21 @@ public class PublishClusterStateAction extends AbstractComponent { public static BytesReference serializeFullClusterState(ClusterState clusterState, Version nodeVersion) throws IOException { BytesStreamOutput bStream = new BytesStreamOutput(); - StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream); - stream.setVersion(nodeVersion); - stream.writeBoolean(true); - clusterState.writeTo(stream); - stream.close(); + try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) { + stream.setVersion(nodeVersion); + stream.writeBoolean(true); + clusterState.writeTo(stream); + } return bStream.bytes(); } public static BytesReference serializeDiffClusterState(Diff diff, Version nodeVersion) throws IOException { BytesStreamOutput bStream = new BytesStreamOutput(); - StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream); - stream.setVersion(nodeVersion); - stream.writeBoolean(false); - diff.writeTo(stream); - stream.close(); + try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) { + 
stream.setVersion(nodeVersion); + stream.writeBoolean(false); + diff.writeTo(stream); + } return bStream.bytes(); } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 2c4bd053251..7f8bb8ffa0a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -468,11 +468,11 @@ public class DocumentMapper implements ToXContent { private void refreshSource() throws ElasticsearchGenerationException { try { BytesStreamOutput bStream = new BytesStreamOutput(); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream)); - builder.startObject(); - toXContent(builder, ToXContent.EMPTY_PARAMS); - builder.endObject(); - builder.close(); + try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream))) { + builder.startObject(); + toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + } mappingSource = new CompressedXContent(bStream.bytes()); } catch (Exception e) { throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 91375efed47..cda0877fdae 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -37,8 +37,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.NotXContentException; -import org.elasticsearch.common.io.stream.BytesStreamOutput; 
-import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; @@ -150,10 +148,12 @@ public class BinaryFieldMapper extends AbstractFieldMapper { try { return CompressorFactory.uncompressIfNeeded(bytes); } catch (NotXContentException e) { - // This is a BUG! We try to decompress by detecting a header in - // the stored bytes but since we accept arbitrary bytes, we have - // no guarantee that uncompressed bytes will be detected as - // compressed! + // NOTE: previous versions of Elasticsearch used to try to detect if + // data was compressed. However this could cause decompression failures + // as a user may have submitted arbitrary data which looks like it is + // compressed to elasticsearch but is not. So we removed the ability to + // compress binary fields and keep this empty catch block for backward + // compatibility with 1.x } } return bytes; diff --git a/src/main/java/org/elasticsearch/node/Node.java b/src/main/java/org/elasticsearch/node/Node.java index 820c3a84534..355bea50643 100644 --- a/src/main/java/org/elasticsearch/node/Node.java +++ b/src/main/java/org/elasticsearch/node/Node.java @@ -36,7 +36,6 @@ import org.elasticsearch.common.StopWatch; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.lease.Releasable; @@ -151,7 +150,6 @@ public class Node implements Releasable { // create the environment based on the finalized (processed) view of the settings this.environment = new Environment(this.settings()); - CompressorFactory.configure(settings); final NodeEnvironment nodeEnvironment; try { 
nodeEnvironment = new NodeEnvironment(this.settings, this.environment); diff --git a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 83baf8d1e2d..2cf35a9905d 100644 --- a/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -26,12 +26,13 @@ import com.google.common.collect.Maps; import com.google.common.io.ByteStreams; import org.apache.lucene.store.RateLimiter; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.SnapshotId; -import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; @@ -39,13 +40,19 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.compress.NotXContentException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; 
+import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.shard.IndexShardException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -55,14 +62,21 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.RepositoryVerificationException; -import org.elasticsearch.snapshots.*; +import org.elasticsearch.snapshots.InvalidSnapshotNameException; +import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotCreationException; +import org.elasticsearch.snapshots.SnapshotException; +import org.elasticsearch.snapshots.SnapshotMissingException; +import org.elasticsearch.snapshots.SnapshotShardFailure; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.NoSuchFileException; -import java.util.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import static com.google.common.collect.Lists.newArrayList; @@ -230,19 +244,15 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent Date: Tue, 12 May 2015 10:37:22 +0100 Subject: [PATCH 05/37] Scripting: Unify script and template requests across codebase This change unifies the way scripts and templates are specified for all instances in the codebase. It builds on the Script class added previously and adds request building and parsing support as well as the ability to transfer script objects between nodes. 
It also adds a Template class which aims to provide the same functionality for template APIs Closes #11091 --- .../scripted-metric-aggregation.asciidoc | 22 +- docs/java-api/update.asciidoc | 4 +- docs/reference/aggregations.asciidoc | 2 - .../bucket/range-aggregation.asciidoc | 29 +- .../significantterms-aggregation.asciidoc | 7 - .../bucket/terms-aggregation.asciidoc | 22 +- .../metrics/avg-aggregation.asciidoc | 32 +- .../metrics/cardinality-aggregation.asciidoc | 23 +- .../extendedstats-aggregation.asciidoc | 32 +- .../metrics/max-aggregation.asciidoc | 30 +- .../metrics/min-aggregation.asciidoc | 30 +- .../metrics/percentile-aggregation.asciidoc | 30 +- .../percentile-rank-aggregation.asciidoc | 32 +- .../scripted-metric-aggregation.asciidoc | 45 +- .../metrics/stats-aggregation.asciidoc | 32 +- .../metrics/sum-aggregation.asciidoc | 27 +- .../metrics/valuecount-aggregation.asciidoc | 24 +- docs/reference/docs/bulk.asciidoc | 2 +- docs/reference/docs/update.asciidoc | 46 +- docs/reference/mapping/transform.asciidoc | 12 +- docs/reference/modules/scripting.asciidoc | 20 +- .../query-dsl/function-score-query.asciidoc | 14 +- .../reference/query-dsl/script-query.asciidoc | 8 +- .../query-dsl/template-query.asciidoc | 4 +- .../search/request/script-fields.asciidoc | 8 +- docs/reference/search/request/sort.asciidoc | 8 +- .../reference/search/search-template.asciidoc | 18 +- .../action/bulk/BulkRequest.java | 9 +- .../action/search/SearchRequest.java | 90 +- .../action/search/SearchRequestBuilder.java | 68 +- .../action/update/UpdateHelper.java | 10 +- .../action/update/UpdateRequest.java | 201 ++- .../action/update/UpdateRequestBuilder.java | 35 +- .../search/function/ScriptScoreFunction.java | 15 +- .../common/xcontent/XContentType.java | 18 + .../index/mapper/DocumentMapper.java | 80 +- .../index/mapper/DocumentMapperParser.java | 23 +- .../index/query/QueryBuilders.java | 21 + .../index/query/ScriptQueryBuilder.java | 57 +- 
.../index/query/ScriptQueryParser.java | 66 +- .../index/query/TemplateQueryBuilder.java | 60 +- .../index/query/TemplateQueryParser.java | 94 +- .../functionscore/ScoreFunctionBuilders.java | 21 + .../script/ScriptScoreFunctionBuilder.java | 54 +- .../script/ScriptScoreFunctionParser.java | 30 +- .../query/support/BaseInnerHitBuilder.java | 50 +- .../rest/action/update/RestUpdateAction.java | 25 +- .../script/AbstractScriptParser.java | 196 +++ .../java/org/elasticsearch/script/Script.java | 268 +++- .../script/ScriptParameterParser.java | 19 +- .../elasticsearch/script/ScriptService.java | 83 +- .../org/elasticsearch/script/Template.java | 198 +++ .../elasticsearch/search/SearchService.java | 35 +- .../ValuesSourceAggregationBuilder.java | 80 +- .../significant/SignificantTermsBuilder.java | 2 +- .../significant/heuristics/ChiSquare.java | 3 +- .../bucket/significant/heuristics/GND.java | 3 +- .../significant/heuristics/JLHScore.java | 3 +- .../heuristics/MutualInformation.java | 3 +- .../heuristics/PercentageScore.java | 3 +- .../heuristics/ScriptHeuristic.java | 160 ++- .../SignificanceHeuristicBuilder.java | 8 +- .../scripted/InternalScriptedMetric.java | 47 +- .../scripted/ScriptedMetricAggregator.java | 101 +- .../scripted/ScriptedMetricBuilder.java | 144 ++- .../scripted/ScriptedMetricParser.java | 89 +- .../metrics/tophits/TopHitsBuilder.java | 50 +- .../support/ValuesSourceParser.java | 37 +- .../search/builder/SearchSourceBuilder.java | 56 +- .../script/ScriptFieldsParseElement.java | 40 +- .../internal/ShardSearchLocalRequest.java | 39 +- .../search/internal/ShardSearchRequest.java | 9 +- .../internal/ShardSearchTransportRequest.java | 17 +- .../search/sort/ScriptSortBuilder.java | 88 +- .../search/sort/ScriptSortParser.java | 42 +- .../search/sort/SortBuilders.java | 16 + .../suggest/phrase/PhraseSuggestParser.java | 19 +- .../action/IndicesRequestTests.java | 28 +- .../action/bulk/BulkRequestTests.java | 16 +- .../action/update/UpdateRequestTests.java 
| 108 +- .../expression/ScriptComparisonBenchmark.java | 4 +- .../scripts/score/BasicScriptBenchmark.java | 16 +- ...TimeDataHistogramAggregationBenchmark.java | 11 +- .../cluster/NoMasterNodeTests.java | 25 +- .../function/ScriptScoreFunctionTests.java | 3 +- .../org/elasticsearch/document/BulkTests.java | 273 ++++ .../index/query/TemplateQueryBuilderTest.java | 21 +- .../index/query/TemplateQueryParserTest.java | 8 +- .../index/query/TemplateQueryTest.java | 406 +++++- .../nested/SimpleNestedTests.java | 144 +++ .../percolator/PercolatorTests.java | 18 +- .../routing/AliasRoutingTests.java | 4 +- .../script/CustomScriptContextTests.java | 23 +- .../script/GroovyScriptTests.java | 31 +- .../script/IndexLookupTests.java | 579 ++++++++- .../script/IndexedScriptTests.java | 62 +- .../script/NativeScriptTests.java | 7 +- .../script/OnDiskScriptTests.java | 33 + .../script/ScriptFieldTests.java | 32 + .../script/ScriptParameterParserTest.java | 46 +- .../script/ScriptServiceTests.java | 35 +- .../expression/ExpressionScriptTests.java | 11 +- .../search/aggregations/EquivalenceTests.java | 9 +- .../bucket/DateHistogramTests.java | 411 +++++- .../aggregations/bucket/DateRangeTests.java | 453 ++++++- .../aggregations/bucket/DoubleTermsTests.java | 538 ++++++-- .../aggregations/bucket/HistogramTests.java | 296 ++++- .../aggregations/bucket/IPv4RangeTests.java | 452 ++++++- .../aggregations/bucket/LongTermsTests.java | 276 +++- .../aggregations/bucket/MinDocCountTests.java | 3 +- .../aggregations/bucket/RangeTests.java | 491 ++++++- ...ignificantTermsSignificanceScoreTests.java | 151 ++- .../aggregations/bucket/StringTermsTests.java | 1141 ++++++++++------- .../aggregations/bucket/TopHitsTests.java | 132 +- .../metrics/ScriptedMetricTests.java | 741 ++++++++++- .../child/SimpleChildQuerySearchTests.java | 13 +- .../search/fields/SearchFieldsTests.java | 256 +++- .../functionscore/ExplainableScriptTests.java | 11 +- .../functionscore/FunctionScoreTests.java | 297 ++++- 
.../RandomScoreFunctionTests.java | 95 +- .../search/geo/GeoDistanceTests.java | 137 +- .../search/innerhits/InnerHitsTests.java | 246 +++- .../search/query/SearchQueryTests.java | 64 +- .../search/rescore/QueryRescorerTests.java | 54 +- .../scriptfilter/ScriptQuerySearchTests.java | 80 +- .../search/sort/SimpleSortTests.java | 487 ++++++- .../search/stats/SearchStatsTests.java | 15 +- .../search/timeout/SearchTimeoutTests.java | 5 +- .../update/UpdateByNativeScriptTests.java | 30 +- .../org/elasticsearch/update/UpdateTests.java | 826 +++++++++++- 130 files changed, 10901 insertions(+), 2201 deletions(-) create mode 100644 src/main/java/org/elasticsearch/script/AbstractScriptParser.java create mode 100644 src/main/java/org/elasticsearch/script/Template.java diff --git a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc index c2776b84797..e9c79ed59d8 100644 --- a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -30,10 +30,10 @@ MetricsAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") .initScript("_agg['heights'] = []") - .mapScript("if (doc['gender'].value == \"male\") " + + .mapScript(new Script("if (doc['gender'].value == \"male\") " + "{ _agg.heights.add(doc['height'].value) } " + "else " + - "{ _agg.heights.add(-1 * doc['height'].value) }"); + "{ _agg.heights.add(-1 * doc['height'].value) }")); -------------------------------------------------- You can also specify a `combine` script which will be executed on each shard: @@ -43,12 +43,12 @@ You can also specify a `combine` script which will be executed on each shard: MetricsAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript("_agg['heights'] = []") - .mapScript("if (doc['gender'].value == \"male\") " + + .initScript(new Script("_agg['heights'] = []")) + 
.mapScript(new Script("if (doc['gender'].value == \"male\") " + "{ _agg.heights.add(doc['height'].value) } " + "else " + - "{ _agg.heights.add(-1 * doc['height'].value) }") - .combineScript("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum"); + "{ _agg.heights.add(-1 * doc['height'].value) }")) + .combineScript(new Script("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum")); -------------------------------------------------- You can also specify a `reduce` script which will be executed on the node which gets the request: @@ -58,13 +58,13 @@ You can also specify a `reduce` script which will be executed on the node which MetricsAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript("_agg['heights'] = []") - .mapScript("if (doc['gender'].value == \"male\") " + + .initScript(new Script("_agg['heights'] = []")) + .mapScript(new Script("if (doc['gender'].value == \"male\") " + "{ _agg.heights.add(doc['height'].value) } " + "else " + - "{ _agg.heights.add(-1 * doc['height'].value) }") - .combineScript("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum") - .reduceScript("heights_sum = 0; for (a in _aggs) { heights_sum += a }; return heights_sum"); + "{ _agg.heights.add(-1 * doc['height'].value) }")) + .combineScript(new Script("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum")) + .reduceScript(new Script("heights_sum = 0; for (a in _aggs) { heights_sum += a }; return heights_sum")); -------------------------------------------------- diff --git a/docs/java-api/update.asciidoc b/docs/java-api/update.asciidoc index 2de835755c6..ea25ec0c2d2 100644 --- a/docs/java-api/update.asciidoc +++ b/docs/java-api/update.asciidoc @@ -22,7 +22,7 @@ Or you can use `prepareUpdate()` method: [source,java] -------------------------------------------------- client.prepareUpdate("ttl", "doc", "1") - .setScript("ctx._source.gender = 
\"male\"" <1> , ScriptService.ScriptType.INLINE) + .setScript(new Script("ctx._source.gender = \"male\"" <1> , ScriptService.ScriptType.INLINE, null, null)) .get(); client.prepareUpdate("ttl", "doc", "1") @@ -46,7 +46,7 @@ The update API allows to update a document based on a script provided: [source,java] -------------------------------------------------- UpdateRequest updateRequest = new UpdateRequest("ttl", "doc", "1") - .script("ctx._source.gender = \"male\""); + .script(new Script("ctx._source.gender = \"male\"")); client.update(updateRequest).get(); -------------------------------------------------- diff --git a/docs/reference/aggregations.asciidoc b/docs/reference/aggregations.asciidoc index 2464873b452..b1b34ee66df 100644 --- a/docs/reference/aggregations.asciidoc +++ b/docs/reference/aggregations.asciidoc @@ -73,8 +73,6 @@ Some aggregations work on values extracted from the aggregated documents. Typica a specific document field which is set using the `field` key for the aggregations. It is also possible to define a <> which will generate the values (per document). -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. - When both `field` and `script` settings are configured for the aggregation, the script will be treated as a `value script`. While normal scripts are evaluated on a document level (i.e. the script has access to all the data associated with the document), value scripts are evaluated on the *value* level. 
In this mode, the values are extracted diff --git a/docs/reference/aggregations/bucket/range-aggregation.asciidoc b/docs/reference/aggregations/bucket/range-aggregation.asciidoc index f7bfcab0644..d428d44523f 100644 --- a/docs/reference/aggregations/bucket/range-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/range-aggregation.asciidoc @@ -128,8 +128,6 @@ It is also possible to customize the key for each range: ==== Script -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. - [source,js] -------------------------------------------------- { @@ -148,6 +146,33 @@ TIP: The `script` parameter expects an inline script. Use `script_id` for indexe } -------------------------------------------------- +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "price_ranges" : { + "range" : { + "script" : { + "file": "my_script", + "params": { + "field": "price" + } + }, + "ranges" : [ + { "to" : 50 }, + { "from" : 50, "to" : 100 }, + { "from" : 100 } + ] + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. + ==== Value Script Lets say the product prices are in USD but we would like to get the price ranges in EURO. 
We can use value script to convert the prices prior the aggregation (assuming conversion rate of 0.8) diff --git a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc index 1e329db1df4..80c747e61a5 100644 --- a/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/significantterms-aggregation.asciidoc @@ -358,13 +358,6 @@ Customized scores can be implemented via a script: -------------------------------------------------- Scripts can be inline (as in above example), indexed or stored on disk. For details on the options, see <>. -Parameters need to be set as follows: - -[horizontal] -`script`:: Inline script, name of script file or name of indexed script. Mandatory. -`script_type`:: One of "inline" (default), "indexed" or "file". -`lang`:: Script language (default "groovy") -`params`:: Script parameters (default empty). Available parameters in the script are diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index cf401126c8a..70bdb00d184 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -441,7 +441,27 @@ Generating the terms using a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. 
To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "genders" : { + "terms" : { + "script" : { + "file": "my_script", + "params": { + "field": "gender" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. ==== Value Script diff --git a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc index 8e0d2b4b5e7..f81cd3eee33 100644 --- a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc @@ -47,7 +47,29 @@ Computing the average grade based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "avg_grade" : { + "avg" : { + "script" : { + "file": "my_script", + "params": { + "field": "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
===== Value Script @@ -63,9 +85,11 @@ It turned out that the exam was way above the level of the students and a grade "avg_corrected_grade" : { "avg" : { "field" : "grade", - "script" : "_value * correction", - "params" : { - "correction" : 1.2 + "script" : { + "inline": "_value * correction", + "params" : { + "correction" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc index 8e34e16f7a8..0b484288b1c 100644 --- a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc @@ -153,7 +153,28 @@ however since hashes need to be computed on the fly. } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "author_count" : { + "cardinality" : { + "script" : { + "file": "my_script", + "params": { + "first_name_field": "author.first_name", + "last_name_field": "author.last_name" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
==== Missing value diff --git a/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc b/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc index 0f65b7670cf..30a5acf6809 100644 --- a/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc @@ -91,7 +91,29 @@ Computing the grades stats based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "grades_stats" : { + "extended_stats" : { + "script" : { + "file": "my_script", + "params": { + "field": "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
===== Value Script @@ -107,9 +129,11 @@ It turned out that the exam was way above the level of the students and a grade "grades_stats" : { "extended_stats" : { "field" : "grade", - "script" : "_value * correction", - "params" : { - "correction" : 1.2 + "script" : { + "inline": "_value * correction", + "params" : { + "correction" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/max-aggregation.asciidoc b/docs/reference/aggregations/metrics/max-aggregation.asciidoc index 856adc4b03d..2a641fda5dc 100644 --- a/docs/reference/aggregations/metrics/max-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/max-aggregation.asciidoc @@ -44,7 +44,27 @@ Computing the max price value across all document, this time using a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "max_price" : { + "max" : { + "script" : { + "file": "my_script", + "params": { + "field": "price" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
==== Value Script @@ -57,9 +77,11 @@ Let's say that the prices of the documents in our index are in USD, but we would "max_price_in_euros" : { "max" : { "field" : "price", - "script" : "_value * conversion_rate", - "params" : { - "conversion_rate" : 1.2 + "script" : { + "inline": "_value * conversion_rate", + "params" : { + "conversion_rate" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/min-aggregation.asciidoc b/docs/reference/aggregations/metrics/min-aggregation.asciidoc index c7424d5570b..7698a41202c 100644 --- a/docs/reference/aggregations/metrics/min-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/min-aggregation.asciidoc @@ -44,7 +44,27 @@ Computing the min price value across all document, this time using a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "min_price" : { + "min" : { + "script" : { + "file": "my_script", + "params": { + "field": "price" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
==== Value Script @@ -57,9 +77,11 @@ Let's say that the prices of the documents in our index are in USD, but we would "min_price_in_euros" : { "min" : { "field" : "price", - "script" : "_value * conversion_rate", - "params" : { - "conversion_rate" : 1.2 + "script" : { + "inline": "_value * conversion_rate", + "params" : { + "conversion_rate" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index d5262beb6ef..ecad363886d 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -100,9 +100,11 @@ a script to convert them on-the-fly: "aggs" : { "load_time_outlier" : { "percentiles" : { - "script" : "doc['load_time'].value / timeUnit", <1> - "params" : { - "timeUnit" : 1000 <2> + "script" : { + "inline": "doc['load_time'].value / timeUnit", <1> + "params" : { + "timeUnit" : 1000 <2> + } } } } @@ -113,7 +115,27 @@ a script to convert them on-the-fly: script to generate values which percentiles are calculated on <2> Scripting supports parameterized input just like any other script -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "load_time_outlier" : { + "percentiles" : { + "script" : { + "file": "my_script", + "params" : { + "timeUnit" : 1000 + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. 
[[search-aggregations-metrics-percentile-aggregation-approximation]] ==== Percentiles are (usually) approximate diff --git a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc index a494a0a5d00..5da59061e0b 100644 --- a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc @@ -72,9 +72,11 @@ a script to convert them on-the-fly: "load_time_outlier" : { "percentile_ranks" : { "values" : [3, 5], - "script" : "doc['load_time'].value / timeUnit", <1> - "params" : { - "timeUnit" : 1000 <2> + "script" : { + "inline": "doc['load_time'].value / timeUnit", <1> + "params" : { + "timeUnit" : 1000 <2> + } } } } @@ -85,7 +87,28 @@ a script to convert them on-the-fly: script to generate values which percentile ranks are calculated on <2> Scripting supports parameterized input just like any other script -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "load_time_outlier" : { + "percentile_ranks" : { + "values" : [3, 5], + "script" : { + "file": "my_script", + "params" : { + "timeUnit" : 1000 + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. ==== Missing value @@ -108,3 +131,4 @@ had a value. -------------------------------------------------- <1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`. 
+ diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index a775d545409..6db8c82a9e8 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -45,6 +45,42 @@ The response for the above aggregation: } -------------------------------------------------- +The above example can also be specified using file scripts as follows: + +[source,js] +-------------------------------------------------- +{ + "query" : { + "match_all" : {} + }, + "aggs": { + "profit": { + "scripted_metric": { + "init_script" : { + "file": "my_init_script" + }, + "map_script" : { + "file": "my_map_script" + }, + "combine_script" : { + "file": "my_combine_script" + }, + "params": { + "field": "amount" <1> + }, + "reduce_script" : { + "file": "my_reduce_script" + } + } + } + } +} +-------------------------------------------------- + +<1> script parameters for init, map and combine scripts must be specified in a global `params` object so that it can be shared between the scripts + +For more details on specifying scripts see <>. + ==== Scope of scripts The scripted metric aggregation uses scripts at 4 stages of its execution: @@ -225,13 +261,4 @@ params:: Optional. An object whose contents will be passed as variable -------------------------------------------------- reduce_params:: Optional. An object whose contents will be passed as variables to the `reduce_script`. This can be useful to allow the user to control the behavior of the reduce phase. If this is not specified the variable will be undefined in the reduce_script execution. -lang:: Optional. The script language used for the scripts. If this is not specified the default scripting language is used. -init_script_file:: Optional. Can be used in place of the `init_script` parameter to provide the script using in a file. 
-init_script_id:: Optional. Can be used in place of the `init_script` parameter to provide the script using an indexed script. -map_script_file:: Optional. Can be used in place of the `map_script` parameter to provide the script using in a file. -map_script_id:: Optional. Can be used in place of the `map_script` parameter to provide the script using an indexed script. -combine_script_file:: Optional. Can be used in place of the `combine_script` parameter to provide the script using in a file. -combine_script_id:: Optional. Can be used in place of the `combine_script` parameter to provide the script using an indexed script. -reduce_script_file:: Optional. Can be used in place of the `reduce_script` parameter to provide the script using in a file. -reduce_script_id:: Optional. Can be used in place of the `reduce_script` parameter to provide the script using an indexed script. diff --git a/docs/reference/aggregations/metrics/stats-aggregation.asciidoc b/docs/reference/aggregations/metrics/stats-aggregation.asciidoc index 429be4b8c4d..852c1c3f7a9 100644 --- a/docs/reference/aggregations/metrics/stats-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/stats-aggregation.asciidoc @@ -53,7 +53,29 @@ Computing the grades stats based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. 
To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "grades_stats" : { + "stats" : { + "script" : { + "file": "my_script", + "params" : { + "field" : "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. ===== Value Script @@ -69,9 +91,11 @@ It turned out that the exam was way above the level of the students and a grade "grades_stats" : { "stats" : { "field" : "grade", - "script" : "_value * correction", - "params" : { - "correction" : 1.2 + "script" : { + "inline": "_value * correction", + "params" : { + "correction" : 1.2 + } } } } diff --git a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc index 2d16129d15f..98286e9396f 100644 --- a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc @@ -55,7 +55,29 @@ Computing the intraday return based on a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. 
===== Value Script @@ -71,7 +93,8 @@ Computing the sum of squares over all stock tick changes: "daytime_return" : { "sum" : { "field" : "change", - "script" : "_value * _value" } + "script" : "_value * _value" + } } } } diff --git a/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc b/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc index ed5e23ee339..fa2bfdbbb9d 100644 --- a/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc @@ -48,4 +48,26 @@ Counting the values generated by a script: } -------------------------------------------------- -TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory. +This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: + +[source,js] +-------------------------------------------------- +{ + ..., + + "aggs" : { + "grades_count" : { + "value_count" : { + "script" : { + "file": "my_script", + "params" : { + "field" : "grade" + } + } + } + } + } +} +-------------------------------------------------- + +TIP: for indexed scripts replace the `file` parameter with an `id` parameter. diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 2760a125ff8..aaf12079747 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -187,7 +187,7 @@ the options. 
Curl example with update actions: { "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "doc" : {"field" : "value"} } { "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } -{ "script" : "ctx._source.counter += param1", "lang" : "js", "params" : {"param1" : 1}, "upsert" : {"counter" : 1}} +{ "script" : { "inline": "ctx._source.counter += param1", "lang" : "js", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} { "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "doc" : {"field" : "value"}, "doc_as_upsert" : true } -------------------------------------------------- diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 4236e06754a..485b31cb037 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -28,9 +28,11 @@ Now, we can execute a script that would increment the counter: [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.counter += count", - "params" : { - "count" : 4 + "script" : { + "inline": "ctx._source.counter += count", + "params" : { + "count" : 4 + } } }' -------------------------------------------------- @@ -41,9 +43,11 @@ will still add it, since its a list): [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.tags += tag", - "params" : { - "tag" : "blue" + "script" : { + "inline": "ctx._source.tags += tag", + "params" : { + "tag" : "blue" + } } }' -------------------------------------------------- @@ -71,9 +75,11 @@ And, we can delete the doc if the tags contain blue, or ignore (noop): [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.tags.contains(tag) ? 
ctx.op = \"delete\" : ctx.op = \"none\"", - "params" : { - "tag" : "blue" + "script" : { + "inline": "ctx._source.tags.contains(tag) ? ctx.op = \"delete\" : ctx.op = \"none\"", + "params" : { + "tag" : "blue" + } } }' -------------------------------------------------- @@ -136,9 +142,11 @@ index the fresh doc: [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ - "script" : "ctx._source.counter += count", - "params" : { - "count" : 4 + "script" : { + "inline": "ctx._source.counter += count", + "params" : { + "count" : 4 + } }, "upsert" : { "counter" : 1 @@ -153,13 +161,15 @@ new `scripted_upsert` parameter with the value `true`. [source,js] -------------------------------------------------- curl -XPOST 'localhost:9200/sessions/session/dh3sgudg8gsrgl/_update' -d '{ - "script_id" : "my_web_session_summariser", "scripted_upsert":true, - "params" : { - "pageViewEvent" : { - "url":"foo.com/bar", - "response":404, - "time":"2014-01-01 12:32" + "script" : { + "id": "my_web_session_summariser", + "params" : { + "pageViewEvent" : { + "url":"foo.com/bar", + "response":404, + "time":"2014-01-01 12:32" + } } }, "upsert" : { diff --git a/docs/reference/mapping/transform.asciidoc b/docs/reference/mapping/transform.asciidoc index 5235afcfd96..9377336518a 100644 --- a/docs/reference/mapping/transform.asciidoc +++ b/docs/reference/mapping/transform.asciidoc @@ -10,11 +10,13 @@ field. 
Example: { "example" : { "transform" : { - "script" : "if (ctx._source['title']?.startsWith('t')) ctx._source['suggest'] = ctx._source['content']", - "params" : { - "variable" : "not used but an example anyway" - }, - "lang": "groovy" + "script" : { + "inline": "if (ctx._source['title']?.startsWith('t')) ctx._source['suggest'] = ctx._source['content']", + "params" : { + "variable" : "not used but an example anyway" + }, + "lang": "groovy" + } }, "properties": { "title": { "type": "string" }, diff --git a/docs/reference/modules/scripting.asciidoc b/docs/reference/modules/scripting.asciidoc index 750802c4ec2..0550542b4a2 100644 --- a/docs/reference/modules/scripting.asciidoc +++ b/docs/reference/modules/scripting.asciidoc @@ -29,7 +29,7 @@ GET /_search { "script_fields": { "my_field": { - "script": "1 + my_var", + "inline": "1 + my_var", "params": { "my_var": 2 } @@ -38,7 +38,7 @@ GET /_search } ----------------------------------- -Save the contents of the script as a file called `config/scripts/my_script.groovy` +Save the contents of the `inline` field as a file called `config/scripts/my_script.groovy` on every data node in the cluster: [source,js] @@ -54,7 +54,7 @@ GET /_search { "script_fields": { "my_field": { - "script_file": "my_script", + "file": "my_script", "params": { "my_var": 2 } @@ -67,9 +67,9 @@ GET /_search Additional `lang` plugins are provided to allow to execute scripts in -different languages. All places where a `script` parameter can be used, a `lang` parameter -(on the same level) can be provided to define the language of the -script. The following are the supported scripting languages: +different languages. All places where a script can be used, a `lang` parameter +can be provided to define the language of the script. 
The following are the +supported scripting languages: [cols="<,<,<",options="header",] |======================================================================= @@ -120,7 +120,7 @@ curl -XPOST localhost:9200/_search -d '{ { "script_score": { "lang": "groovy", - "script_file": "calculate-score", + "file": "calculate-score", "params": { "my_modifier": 8 } @@ -162,8 +162,8 @@ curl -XPOST localhost:9200/_scripts/groovy/indexedCalculateScore -d '{ This will create a document with id: `indexedCalculateScore` and type: `groovy` in the `.scripts` index. The type of the document is the language used by the script. -This script can be accessed at query time by appending `_id` to -the script parameter and passing the script id. So `script` becomes `script_id`.: +This script can be accessed at query time by using the `id` script parameter and passing +the script id: [source,js] -------------------------------------------------- @@ -178,7 +178,7 @@ curl -XPOST localhost:9200/_search -d '{ "functions": [ { "script_score": { - "script_id": "indexedCalculateScore", + "id": "indexedCalculateScore", "lang" : "groovy", "params": { "my_modifier": 8 diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 4588b4c7858..a5618a23c48 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -120,12 +120,14 @@ script, and provide parameters to it: [source,js] -------------------------------------------------- "script_score": { - "lang": "lang", - "params": { - "param1": value1, - "param2": value2 - }, - "script": "_score * doc['my_numeric_field'].value / pow(param1, param2)" + "script": { + "lang": "lang", + "params": { + "param1": value1, + "param2": value2 + }, + "inline": "_score * doc['my_numeric_field'].value / pow(param1, param2)" + } } -------------------------------------------------- diff --git 
a/docs/reference/query-dsl/script-query.asciidoc b/docs/reference/query-dsl/script-query.asciidoc index 899f176578e..4c307f2556f 100644 --- a/docs/reference/query-dsl/script-query.asciidoc +++ b/docs/reference/query-dsl/script-query.asciidoc @@ -34,9 +34,11 @@ to use the ability to pass parameters to the script itself, for example: }, "filter" : { "script" : { - "script" : "doc['num1'].value > param1" - "params" : { - "param1" : 5 + "script" : { + "inline" : "doc['num1'].value > param1" + "params" : { + "param1" : 5 + } } } } diff --git a/docs/reference/query-dsl/template-query.asciidoc b/docs/reference/query-dsl/template-query.asciidoc index 5d68992ff54..31728fe9993 100644 --- a/docs/reference/query-dsl/template-query.asciidoc +++ b/docs/reference/query-dsl/template-query.asciidoc @@ -12,7 +12,7 @@ GET /_search { "query": { "template": { - "query": { "match": { "text": "{{query_string}}" }}, + "inline": { "match": { "text": "{{query_string}}" }}, "params" : { "query_string" : "all about search" } @@ -45,7 +45,7 @@ GET /_search { "query": { "template": { - "query": "{ \"match\": { \"text\": \"{{query_string}}\" }}", <1> + "inline": "{ \"match\": { \"text\": \"{{query_string}}\" }}", <1> "params" : { "query_string" : "all about search" } diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc index 46b169838a0..596aba31d82 100644 --- a/docs/reference/search/request/script-fields.asciidoc +++ b/docs/reference/search/request/script-fields.asciidoc @@ -15,9 +15,11 @@ evaluation>> (based on different fields) for each hit, for example: "script" : "doc['my_field_name'].value * 2" }, "test2" : { - "script" : "doc['my_field_name'].value * factor", - "params" : { - "factor" : 2.0 + "script" : { + "inline": "doc['my_field_name'].value * factor", + "params" : { + "factor" : 2.0 + } } } } diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 
1e4218bb61d..58f42d8fdd8 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -318,10 +318,12 @@ Allow to sort based on custom scripts, here is an example: }, "sort" : { "_script" : { - "script" : "doc['field_name'].value * factor", "type" : "number", - "params" : { - "factor" : 1.1 + "script" : { + "inline": "doc['field_name'].value * factor", + "params" : { + "factor" : 1.1 + } }, "order" : "asc" } diff --git a/docs/reference/search/search-template.asciidoc b/docs/reference/search/search-template.asciidoc index bb33628ba3b..b92dbfaa795 100644 --- a/docs/reference/search/search-template.asciidoc +++ b/docs/reference/search/search-template.asciidoc @@ -8,7 +8,7 @@ before they are executed and fill existing templates with template parameters. ------------------------------------------ GET /_search/template { - "template" : { + "inline" : { "query": { "match" : { "{{my_field}}" : "{{my_value}}" } }, "size" : "{{my_size}}" }, @@ -40,7 +40,7 @@ disable scripts per language, source and operation as described in ------------------------------------------ GET /_search/template { - "template": { + "inline": { "query": { "match": { "title": "{{query_string}}" @@ -60,7 +60,7 @@ GET /_search/template ------------------------------------------ GET /_search/template { - "template": { + "inline": { "query": { "terms": { "status": [ @@ -97,7 +97,7 @@ A default value is written as `{{var}}{{^var}}default{{/var}}` for instance: [source,js] ------------------------------------------ { - "template": { + "inline": { "query": { "range": { "line_no": { @@ -212,7 +212,7 @@ via the REST API, should be written as a string: [source,json] -------------------- -"template": "{\"query\":{\"filtered\":{\"query\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}" +"inline": 
"{\"query\":{\"filtered\":{\"query\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}" -------------------- ================================== @@ -229,9 +229,7 @@ In order to execute the stored template, reference it by it's name under the `te ------------------------------------------ GET /_search/template { - "template": { - "file": "storedTemplate" <1> - }, + "file": "storedTemplate", <1> "params": { "query_string": "search for these words" } @@ -293,9 +291,7 @@ To use an indexed template at search time use: ------------------------------------------ GET /_search/template { - "template": { - "id": "templateName" <1> - }, + "id": "templateName", <1> "params": { "query_string": "search for these words" } diff --git a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 715c1d716d9..a562dc046b2 100644 --- a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -20,7 +20,12 @@ package org.elasticsearch.action.bulk; import com.google.common.collect.Lists; -import org.elasticsearch.action.*; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -140,7 +145,7 @@ public class BulkRequest extends ActionRequest implements Composite sizeInBytes += request.upsertRequest().source().length(); } if (request.script() != null) { - sizeInBytes += request.script().length() * 2; + sizeInBytes += 
request.script().getScript().length() * 2; } return this; } diff --git a/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 8e1da31affa..90ceee99f90 100644 --- a/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -35,11 +35,13 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.script.Template; +import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; import java.io.IOException; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.search.Scroll.readScroll; @@ -69,9 +71,7 @@ public class SearchRequest extends ActionRequest implements Indic private String preference; private BytesReference templateSource; - private String templateName; - private ScriptService.ScriptType templateType; - private Map templateParams = Collections.emptyMap(); + private Template template; private BytesReference source; @@ -100,9 +100,7 @@ public class SearchRequest extends ActionRequest implements Indic this.routing = searchRequest.routing; this.preference = searchRequest.preference; this.templateSource = searchRequest.templateSource; - this.templateName = searchRequest.templateName; - this.templateType = searchRequest.templateType; - this.templateParams = searchRequest.templateParams; + this.template = searchRequest.template; this.source = searchRequest.source; this.extraSource = searchRequest.extraSource; this.queryCache = searchRequest.queryCache; @@ -390,42 +388,92 @@ public class SearchRequest extends ActionRequest implements 
Indic } /** - * The name of the stored template + * The stored template */ + public void template(Template template) { + this.template = template; + } + + /** + * The stored template + */ + public Template template() { + return template; + } + + /** + * The name of the stored template + * + * @deprecated use {@link #template(Template))} instead. + */ + @Deprecated public void templateName(String templateName) { - this.templateName = templateName; + updateOrCreateScript(templateName, null, null, null); } + /** + * The type of the stored template + * + * @deprecated use {@link #template(Template))} instead. + */ + @Deprecated public void templateType(ScriptService.ScriptType templateType) { - this.templateType = templateType; + updateOrCreateScript(null, templateType, null, null); } /** * Template parameters used for rendering + * + * @deprecated use {@link #template(Template))} instead. */ + @Deprecated public void templateParams(Map params) { - this.templateParams = params; + updateOrCreateScript(null, null, null, params); } /** * The name of the stored template + * + * @deprecated use {@link #template()} instead. */ + @Deprecated public String templateName() { - return templateName; + return template == null ? null : template.getScript(); } /** * The name of the stored template + * + * @deprecated use {@link #template()} instead. */ + @Deprecated public ScriptService.ScriptType templateType() { - return templateType; + return template == null ? null : template.getType(); } /** * Template parameters used for rendering + * + * @deprecated use {@link #template()} instead. */ + @Deprecated public Map templateParams() { - return templateParams; + return template == null ? null : template.getParams(); + } + + private void updateOrCreateScript(String templateContent, ScriptType type, String lang, Map params) { + Template template = template(); + if (template == null) { + template = new Template(templateContent == null ? "" : templateContent, type == null ? 
ScriptType.INLINE : type, lang, null, + params); + } else { + String newTemplateContent = templateContent == null ? template.getScript() : templateContent; + ScriptType newTemplateType = type == null ? template.getType() : type; + String newTemplateLang = lang == null ? template.getLang() : lang; + Map newTemplateParams = params == null ? template.getParams() : params; + template = new Template(newTemplateContent, newTemplateType, MustacheScriptEngineService.NAME, null, newTemplateParams); + } + template(template); } /** @@ -517,10 +565,8 @@ public class SearchRequest extends ActionRequest implements Indic indicesOptions = IndicesOptions.readIndicesOptions(in); templateSource = in.readBytesReference(); - templateName = in.readOptionalString(); - templateType = ScriptService.ScriptType.readFrom(in); if (in.readBoolean()) { - templateParams = (Map) in.readGenericValue(); + template = Template.readTemplate(in); } queryCache = in.readOptionalBoolean(); } @@ -550,12 +596,10 @@ public class SearchRequest extends ActionRequest implements Indic indicesOptions.writeIndicesOptions(out); out.writeBytesReference(templateSource); - out.writeOptionalString(templateName); - ScriptService.ScriptType.writeTo(templateType, out); - boolean existTemplateParams = templateParams != null; - out.writeBoolean(existTemplateParams); - if (existTemplateParams) { - out.writeGenericValue(templateParams); + boolean hasTemplate = template != null; + out.writeBoolean(hasTemplate); + if (hasTemplate) { + template.writeTo(out); } out.writeOptionalBoolean(queryCache); diff --git a/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index f7e84b0733a..690ad43b466 100644 --- a/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -29,7 +29,9 @@ import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -423,33 +425,60 @@ public class SearchRequestBuilder extends ActionRequestBuilder params) { sourceBuilder().scriptField(name, script, params); return this; } /** - * Adds a script based field to load and return. The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. * - * @param name The name that will represent this value in the return hit - * @param lang The language of the script - * @param script The script to use - * @param params Parameters that the script can use (can be null). + * @param name + * The name that will represent this value in the return hit + * @param lang + * The language of the script + * @param script + * The script to use + * @param params + * Parameters that the script can use (can be null). + * @deprecated Use {@link #addScriptField(String, Script)} instead. 
*/ + @Deprecated public SearchRequestBuilder addScriptField(String name, String lang, String script, Map params) { sourceBuilder().scriptField(name, lang, script, params); return this; @@ -939,16 +968,33 @@ public class SearchRequestBuilder extends ActionRequestBuilder templateParams) { request.templateParams(templateParams); return this; diff --git a/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index ef8144fc095..173587bbbb7 100644 --- a/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.lookup.SourceLookup; @@ -94,7 +93,7 @@ public class UpdateHelper extends AbstractComponent { ctx.put("op", "create"); ctx.put("_source", upsertDoc); try { - ExecutableScript script = scriptService.executable(new Script(request.scriptLang, request.script, request.scriptType, request.scriptParams), ScriptContext.Standard.UPDATE); + ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE); script.setNextVar("ctx", ctx); script.run(); // we need to unwrap the ctx... 
@@ -111,7 +110,8 @@ public class UpdateHelper extends AbstractComponent { // (the default) or "none", meaning abort upsert if (!"create".equals(scriptOpChoice)) { if (!"none".equals(scriptOpChoice)) { - logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, request.script); + logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, + request.script.getScript()); } UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false); @@ -193,7 +193,7 @@ public class UpdateHelper extends AbstractComponent { ctx.put("_source", sourceAndContent.v2()); try { - ExecutableScript script = scriptService.executable(new Script(request.scriptLang, request.script, request.scriptType, request.scriptParams), ScriptContext.Standard.UPDATE); + ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE); script.setNextVar("ctx", ctx); script.run(); // we need to unwrap the ctx... 
@@ -246,7 +246,7 @@ public class UpdateHelper extends AbstractComponent { update.setGetResult(extractGetResult(request, indexShard.indexService().index().name(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } else { - logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script); + logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript()); UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } diff --git a/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index c3f03db7944..ac1b52a5493 100644 --- a/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -19,14 +19,12 @@ package org.elasticsearch.action.update; -import com.google.common.collect.Maps; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocumentRequest; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -37,11 +35,14 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import 
org.elasticsearch.index.VersionType; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptService.ScriptType; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -59,13 +60,7 @@ public class UpdateRequest extends InstanceShardOperationRequest private String parent; @Nullable - String script; - @Nullable - ScriptService.ScriptType scriptType; - @Nullable - String scriptLang; - @Nullable - Map scriptParams; + Script script; private String[] fields; @@ -205,105 +200,171 @@ public class UpdateRequest extends InstanceShardOperationRequest return this.shardId; } - public String script() { + public Script script() { return this.script; } - public ScriptService.ScriptType scriptType() { return this.scriptType; } + /** + * The script to execute. Note, make sure not to send different script each times and instead + * use script params if possible with the same (automatically compiled) script. + */ + public UpdateRequest script(Script script) { + this.script = script; + return this; + } + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated + public String scriptString() { + return this.script == null ? null : this.script.getScript(); + } + + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated + public ScriptService.ScriptType scriptType() { + return this.script == null ? null : this.script.getType(); + } + + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated public Map scriptParams() { - return this.scriptParams; + return this.script == null ? null : this.script.getParams(); } /** - * The script to execute. 
Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest script(String script, ScriptService.ScriptType scriptType) { - this.script = script; - this.scriptType = scriptType; + updateOrCreateScript(script, scriptType, null, null); return this; } /** - * The script to execute. Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest script(String script) { - this.script = script; - this.scriptType = ScriptService.ScriptType.INLINE; + updateOrCreateScript(script, ScriptType.INLINE, null, null); return this; } - /** * The language of the script to execute. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest scriptLang(String scriptLang) { - this.scriptLang = scriptLang; + updateOrCreateScript(null, null, scriptLang, null); return this; } + /** + * @deprecated Use {@link #script()} instead + */ + @Deprecated public String scriptLang() { - return scriptLang; + return script == null ? null : script.getLang(); } /** * Add a script parameter. 
+ * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest addScriptParam(String name, Object value) { - if (scriptParams == null) { - scriptParams = Maps.newHashMap(); + Script script = script(); + if (script == null) { + HashMap scriptParams = new HashMap(); + scriptParams.put(name, value); + updateOrCreateScript(null, null, null, scriptParams); + } else { + Map scriptParams = script.getParams(); + if (scriptParams == null) { + scriptParams = new HashMap(); + scriptParams.put(name, value); + updateOrCreateScript(null, null, null, scriptParams); + } else { + scriptParams.put(name, value); + } } - scriptParams.put(name, value); return this; } /** * Sets the script parameters to use with the script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest scriptParams(Map scriptParams) { - if (this.scriptParams == null) { - this.scriptParams = scriptParams; - } else { - this.scriptParams.putAll(scriptParams); - } + updateOrCreateScript(null, null, null, scriptParams); return this; } + private void updateOrCreateScript(String scriptContent, ScriptType type, String lang, Map params) { + Script script = script(); + if (script == null) { + script = new Script(scriptContent == null ? "" : scriptContent, type == null ? ScriptType.INLINE : type, lang, params); + } else { + String newScriptContent = scriptContent == null ? script.getScript() : scriptContent; + ScriptType newScriptType = type == null ? script.getType() : type; + String newScriptLang = lang == null ? script.getLang() : lang; + Map newScriptParams = params == null ? script.getParams() : params; + script = new Script(newScriptContent, newScriptType, newScriptLang, newScriptParams); + } + script(script); + } + /** - * The script to execute. Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. 
Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + * + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated public UpdateRequest script(String script, ScriptService.ScriptType scriptType, @Nullable Map scriptParams) { - this.script = script; - this.scriptType = scriptType; - if (this.scriptParams != null) { - this.scriptParams.putAll(scriptParams); - } else { - this.scriptParams = scriptParams; - } + this.script = new Script(script, scriptType, null, scriptParams); return this; } /** - * The script to execute. Note, make sure not to send different script each times and instead - * use script params if possible with the same (automatically compiled) script. + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. * - * @param script The script to execute - * @param scriptLang The script language - * @param scriptType The script type - * @param scriptParams The script parameters + * @param script + * The script to execute + * @param scriptLang + * The script language + * @param scriptType + * The script type + * @param scriptParams + * The script parameters + * + * @deprecated Use {@link #script(Script)} instead */ - public UpdateRequest script(String script, @Nullable String scriptLang, ScriptService.ScriptType scriptType, @Nullable Map scriptParams) { - this.script = script; - this.scriptLang = scriptLang; - this.scriptType = scriptType; - if (this.scriptParams != null) { - this.scriptParams.putAll(scriptParams); - } else { - this.scriptParams = scriptParams; - } + @Deprecated + public UpdateRequest script(String script, @Nullable String scriptLang, ScriptService.ScriptType scriptType, + @Nullable Map scriptParams) { + this.script = new Script(script, scriptType, scriptLang, scriptParams); return this; } @@ -574,6 +635,7 @@ 
public class UpdateRequest extends InstanceShardOperationRequest public UpdateRequest source(BytesReference source) throws Exception { ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); + Map scriptParams = null; XContentType xContentType = XContentFactory.xContentType(source); try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) { XContentParser.Token token = parser.nextToken(); @@ -584,6 +646,8 @@ public class UpdateRequest extends InstanceShardOperationRequest while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); + } else if ("script".equals(currentFieldName) && token == XContentParser.Token.START_OBJECT) { + script = Script.parse(parser); } else if ("params".equals(currentFieldName)) { scriptParams = parser.map(); } else if ("scripted_upsert".equals(currentFieldName)) { @@ -604,12 +668,13 @@ public class UpdateRequest extends InstanceShardOperationRequest scriptParameterParser.token(currentFieldName, token, parser); } } - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if (scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); + // Don't have a script using the new API so see if it is specified with the old API + if (script == null) { + ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); + if (scriptValue != null) { + script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), scriptParams); + } } - scriptLang = scriptParameterParser.lang(); } return this; } @@ -639,12 +704,9 @@ public class UpdateRequest extends InstanceShardOperationRequest id = in.readString(); routing = in.readOptionalString(); parent = in.readOptionalString(); - script = in.readOptionalString(); - if(Strings.hasLength(script)) { - scriptType = 
ScriptService.ScriptType.readFrom(in); + if (in.readBoolean()) { + script = Script.readScript(in); } - scriptLang = in.readOptionalString(); - scriptParams = in.readMap(); retryOnConflict = in.readVInt(); refresh = in.readBoolean(); if (in.readBoolean()) { @@ -677,12 +739,11 @@ public class UpdateRequest extends InstanceShardOperationRequest out.writeString(id); out.writeOptionalString(routing); out.writeOptionalString(parent); - out.writeOptionalString(script); - if (Strings.hasLength(script)) { - ScriptService.ScriptType.writeTo(scriptType, out); + boolean hasScript = script != null; + out.writeBoolean(hasScript); + if (hasScript) { + script.writeTo(out); } - out.writeOptionalString(scriptLang); - out.writeMap(scriptParams); out.writeVInt(retryOnConflict); out.writeBoolean(refresh); if (doc == null) { diff --git a/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index 3bcb9c640df..7c30c47dd7c 100644 --- a/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -80,21 +81,43 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilderctx, which is bound to the entry, * e.g. ctx._source.mycounter += 1. * + */ + public UpdateRequestBuilder setScript(Script script) { + request.script(script); + return this; + } + + /** + * The script to execute. Note, make sure not to send different script each + * times and instead use script params if possible with the same + * (automatically compiled) script. + *

+ * The script works with the variable ctx, which is bound to + * the entry, e.g. ctx._source.mycounter += 1. + * * @see #setScriptLang(String) * @see #setScriptParams(Map) + * + * @deprecated use {@link #setScript(Script)} instead */ + @Deprecated public UpdateRequestBuilder setScript(String script, ScriptService.ScriptType scriptType) { request.script(script, scriptType); return this; } /** - * The language of the script to execute. - * Valid options are: mvel, js, groovy, python, and native (Java)
+ * The language of the script to execute. Valid options are: mvel, js, + * groovy, python, and native (Java)
* Default: groovy *

- * Ref: http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-scripting.html + * Ref: + * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current + * /modules-scripting.html + * + * @deprecated use {@link #setScript(Script)} instead */ + @Deprecated public UpdateRequestBuilder setScriptLang(String scriptLang) { request.scriptLang(scriptLang); return this; @@ -102,7 +125,10 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder scriptParams) { request.scriptParams(scriptParams); return this; @@ -110,7 +136,10 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder params; + private final Script sScript; private final SearchScript script; - public ScriptScoreFunction(String sScript, Map params, SearchScript script) { + public ScriptScoreFunction(Script sScript, SearchScript script) { super(CombineFunction.REPLACE); this.sScript = sScript; - this.params = params; this.script = script; } @@ -114,8 +111,8 @@ public class ScriptScoreFunction extends ScoreFunction { } else { double score = score(docId, subQueryScore.getValue()); String explanation = "script score function, computed with script:\"" + sScript; - if (params != null) { - explanation += "\" and parameters: \n" + params.toString(); + if (sScript.getParams() != null) { + explanation += "\" and parameters: \n" + sScript.getParams().toString(); } Explanation scoreExp = Explanation.match( subQueryScore.getValue(), "_score: ", @@ -131,7 +128,7 @@ public class ScriptScoreFunction extends ScoreFunction { @Override public String toString() { - return "script[" + sScript + "], params [" + params + "]"; + return "script" + sScript.toString(); } } \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentType.java b/src/main/java/org/elasticsearch/common/xcontent/XContentType.java index 4acee241603..329bad87265 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentType.java +++ 
b/src/main/java/org/elasticsearch/common/xcontent/XContentType.java @@ -19,11 +19,15 @@ package org.elasticsearch.common.xcontent; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.cbor.CborXContent; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.smile.SmileXContent; import org.elasticsearch.common.xcontent.yaml.YamlXContent; +import java.io.IOException; + /** * The content type of {@link org.elasticsearch.common.xcontent.XContent}. */ @@ -144,4 +148,18 @@ public enum XContentType { public abstract String shortName(); public abstract XContent xContent(); + + public static XContentType readFrom(StreamInput in) throws IOException { + int index = in.readVInt(); + for (XContentType contentType : values()) { + if (index == contentType.index) { + return contentType; + } + } + throw new IllegalStateException("Unknown XContentType with index [" + index + "]"); + } + + public static void writeTo(XContentType contentType, StreamOutput out) throws IOException { + out.writeVInt(contentType.index); + } } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 7f8bb8ffa0a..b306396700a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -134,8 +134,18 @@ public class DocumentMapper implements ToXContent { return this; } - public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map parameters) { - sourceTransforms.add(new ScriptTransform(scriptService, script, scriptType, language, parameters)); + public Builder transform(ScriptService scriptService, Script script) { + sourceTransforms.add(new ScriptTransform(scriptService, script)); + return this; + } + + /** + * @deprecated Use 
{@link #transform(ScriptService, Script)} instead. + */ + @Deprecated + public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, + Map parameters) { + sourceTransforms.add(new ScriptTransform(scriptService, new Script(script, scriptType, language, parameters))); return this; } @@ -388,20 +398,20 @@ public class DocumentMapper implements ToXContent { private void addFieldMappers(Collection fieldMappers) { assert mappingLock.isWriteLockedByCurrentThread(); - this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers); + this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers); mapperService.addFieldMappers(fieldMappers); } private void addObjectMappers(Collection objectMappers) { assert mappingLock.isWriteLockedByCurrentThread(); - MapBuilder builder = MapBuilder.newMapBuilder(this.objectMappers); - for (ObjectMapper objectMapper : objectMappers) { - builder.put(objectMapper.fullPath(), objectMapper); - if (objectMapper.nested().isNested()) { - hasNestedObjects = true; + MapBuilder builder = MapBuilder.newMapBuilder(this.objectMappers); + for (ObjectMapper objectMapper : objectMappers) { + builder.put(objectMapper.fullPath(), objectMapper); + if (objectMapper.nested().isNested()) { + hasNestedObjects = true; + } } - } - this.objectMappers = builder.immutableMap(); + this.objectMappers = builder.immutableMap(); mapperService.addObjectMappers(objectMappers); } @@ -454,15 +464,15 @@ public class DocumentMapper implements ToXContent { public MergeResult merge(Mapping mapping, boolean simulate) { try (ReleasableLock lock = mappingWriteLock.acquire()) { - final MergeResult mergeResult = newMergeContext(simulate); - this.mapping.merge(mapping, mergeResult); - if (simulate == false) { - addFieldMappers(mergeResult.getNewFieldMappers()); - addObjectMappers(mergeResult.getNewObjectMappers()); - refreshSource(); - } - return mergeResult; + final MergeResult mergeResult = newMergeContext(simulate); + 
this.mapping.merge(mapping, mergeResult); + if (simulate == false) { + addFieldMappers(mergeResult.getNewFieldMappers()); + addObjectMappers(mergeResult.getNewObjectMappers()); + refreshSource(); } + return mergeResult; + } } private void refreshSource() throws ElasticsearchGenerationException { @@ -498,28 +508,13 @@ public class DocumentMapper implements ToXContent { private static class ScriptTransform implements SourceTransform { private final ScriptService scriptService; /** - * Contents of the script to transform the source document before indexing. + * The script to transform the source document before indexing. */ - private final String script; - /** - * The type of the script to run. - */ - private final ScriptType scriptType; - /** - * Language of the script to transform the source document before indexing. - */ - private final String language; - /** - * Parameters passed to the transform script. - */ - private final Map parameters; + private final Script script; - public ScriptTransform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map parameters) { + public ScriptTransform(ScriptService scriptService, Script script) { this.scriptService = scriptService; this.script = script; - this.scriptType = scriptType; - this.language = language; - this.parameters = parameters; } @Override @@ -527,7 +522,7 @@ public class DocumentMapper implements ToXContent { public Map transformSourceAsMap(Map sourceAsMap) { try { // We use the ctx variable and the _source name to be consistent with the update api. 
- ExecutableScript executable = scriptService.executable(new Script(language, script, scriptType, parameters), ScriptContext.Standard.MAPPING); + ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.MAPPING); Map ctx = new HashMap<>(1); ctx.put("_source", sourceAsMap); executable.setNextVar("ctx", ctx); @@ -541,16 +536,7 @@ public class DocumentMapper implements ToXContent { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("script", script); - if (language != null) { - builder.field("lang", language); - } - if (parameters != null) { - builder.field("params", parameters); - } - builder.endObject(); - return builder; + return script.toXContent(builder, params); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java index 4bb9e8d830e..d5a3ff1f9ad 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java @@ -71,10 +71,8 @@ import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.similarity.SimilarityLookupService; -import org.elasticsearch.script.ScriptParameterParser; -import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.ScriptService.ScriptType; import java.util.Iterator; import java.util.List; @@ -238,7 +236,6 @@ public class DocumentMapperParser extends AbstractIndexComponent { Object fieldNode = entry.getValue(); if ("transform".equals(fieldName)) { - iterator.remove(); if (fieldNode instanceof Map) { parseTransform(docBuilder, (Map) 
fieldNode, parserContext.indexVersionCreated()); } else if (fieldNode instanceof List) { @@ -251,6 +248,7 @@ public class DocumentMapperParser extends AbstractIndexComponent { } else { throw new MapperParsingException("Transform must be an object or an array but was: " + fieldNode); } + iterator.remove(); } else { Mapper.TypeParser typeParser = rootTypeParsers.get(fieldName); if (typeParser != null) { @@ -296,23 +294,10 @@ public class DocumentMapperParser extends AbstractIndexComponent { return remainingFields.toString(); } - @SuppressWarnings("unchecked") private void parseTransform(DocumentMapper.Builder docBuilder, Map transformConfig, Version indexVersionCreated) { - ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); - scriptParameterParser.parseConfig(transformConfig, true); - - String script = null; - ScriptType scriptType = null; - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if (scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); - } - + Script script = Script.parse(transformConfig, true); if (script != null) { - String scriptLang = scriptParameterParser.lang(); - Map params = (Map)transformConfig.remove("params"); - docBuilder.transform(scriptService, script, scriptType, scriptLang, params); + docBuilder.transform(scriptService, script); } checkNoRemainingFields(transformConfig, indexVersionCreated, "Transform config has unsupported parameters: "); } diff --git a/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index fe2e572522b..c9070a24c37 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -27,7 +27,9 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.ShapeBuilder; import 
org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; import java.util.Collection; import java.util.Map; @@ -562,6 +564,13 @@ public abstract class QueryBuilders { return new GeoShapeQueryBuilder(name, shape); } + /** + * Facilitates creating template query requests using an inline script + */ + public static TemplateQueryBuilder templateQuery(Template template) { + return new TemplateQueryBuilder(template); + } + /** * Facilitates creating template query requests using an inline script */ @@ -596,6 +605,18 @@ public abstract class QueryBuilders { * * @param script The script to filter by. */ + public static ScriptQueryBuilder scriptQuery(Script script) { + return new ScriptQueryBuilder(script); + } + + /** + * A builder for filter based on a script. + * + * @param script + * The script to filter by. + * @deprecated Use {@link #scriptQuery(Script)} instead. 
+ */ + @Deprecated public static ScriptQueryBuilder scriptQuery(String script) { return new ScriptQueryBuilder(script); } diff --git a/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 9ae05159953..8a6f72190c2 100644 --- a/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -20,34 +20,56 @@ package org.elasticsearch.index.query; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import java.io.IOException; +import java.util.HashMap; import java.util.Map; -import static com.google.common.collect.Maps.newHashMap; - public class ScriptQueryBuilder extends QueryBuilder { - private final String script; + private Script script; + @Deprecated + private String scriptString; + + @Deprecated private Map params; + @Deprecated private String lang; private String queryName; - public ScriptQueryBuilder(String script) { + public ScriptQueryBuilder(Script script) { this.script = script; } + /** + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. + */ + @Deprecated + public ScriptQueryBuilder(String script) { + this.scriptString = script; + } + + /** + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. + */ + @Deprecated public ScriptQueryBuilder addParam(String name, Object value) { if (params == null) { - params = newHashMap(); + params = new HashMap<>(); } params.put(name, value); return this; } + /** + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. + */ + @Deprecated public ScriptQueryBuilder params(Map params) { if (this.params == null) { this.params = params; @@ -59,7 +81,10 @@ public class ScriptQueryBuilder extends QueryBuilder { /** * Sets the script language. + * + * @deprecated Use {@link #ScriptQueryBuilder(Script)} instead. 
*/ + @Deprecated public ScriptQueryBuilder lang(String lang) { this.lang = lang; return this; @@ -74,15 +99,23 @@ public class ScriptQueryBuilder extends QueryBuilder { } @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { + protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException { + builder.startObject(ScriptQueryParser.NAME); - builder.field("script", script); - if (this.params != null) { - builder.field("params", this.params); - } - if (this.lang != null) { - builder.field("lang", lang); + if (script != null) { + builder.field(ScriptField.SCRIPT.getPreferredName(), script); + } else { + if (this.scriptString != null) { + builder.field("script", scriptString); + } + if (this.params != null) { + builder.field("params", this.params); + } + if (this.lang != null) { + builder.field("lang", lang); + } } + if (queryName != null) { builder.field("_name", queryName); } diff --git a/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java b/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java index 84dad2b5d92..9912cb94bac 100644 --- a/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.query; +import com.google.common.base.Objects; + import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -29,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; @@ -38,7 +41,6 @@ import 
org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.util.Map; -import java.util.Objects; import static com.google.common.collect.Maps.newHashMap; @@ -55,7 +57,7 @@ public class ScriptQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NAME}; + return new String[] { NAME }; } @Override @@ -66,13 +68,11 @@ public class ScriptQueryParser implements QueryParser { XContentParser.Token token; // also, when caching, since its isCacheable is false, will result in loading all bit set... - String script = null; - String scriptLang; + Script script = null; Map params = null; String queryName = null; String currentFieldName = null; - ScriptService.ScriptType scriptType = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -80,7 +80,9 @@ public class ScriptQueryParser implements QueryParser { } else if (parseContext.isDeprecatedSetting(currentFieldName)) { // skip } else if (token == XContentParser.Token.START_OBJECT) { - if ("params".equals(currentFieldName)) { + if (ScriptField.SCRIPT.match(currentFieldName)) { + script = Script.parse(parser); + } else if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs) params = parser.map(); } else { throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]"); @@ -88,27 +90,29 @@ public class ScriptQueryParser implements QueryParser { } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); - } else if (!scriptParameterParser.token(currentFieldName, token, parser)){ + } else if (!scriptParameterParser.token(currentFieldName, token, parser)) { throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]"); } } } - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if 
(scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); + if (script == null) { // Didn't find anything using the new API so try using the old one instead + ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); + if (scriptValue != null) { + if (params == null) { + params = newHashMap(); + } + script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params); + } + } else if (params != null) { + throw new QueryParsingException(parseContext, "script params must be specified inside script object in a [script] filter"); } - scriptLang = scriptParameterParser.lang(); if (script == null) { throw new QueryParsingException(parseContext, "script must be provided with a [script] filter"); } - if (params == null) { - params = newHashMap(); - } - Query query = new ScriptQuery(scriptLang, script, scriptType, params, parseContext.scriptService(), parseContext.lookup()); + Query query = new ScriptQuery(script, parseContext.scriptService(), parseContext.lookup()); if (queryName != null) { parseContext.addNamedQuery(queryName, query); } @@ -117,14 +121,13 @@ public class ScriptQueryParser implements QueryParser { static class ScriptQuery extends Query { - private final String script; - private final Map params; + private final Script script; + private final SearchScript searchScript; - private ScriptQuery(String scriptLang, String script, ScriptService.ScriptType scriptType, Map params, ScriptService scriptService, SearchLookup searchLookup) { + public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) { this.script = script; - this.params = params; - this.searchScript = scriptService.search(searchLookup, new Script(scriptLang, script, scriptType, newHashMap(params)), ScriptContext.Standard.SEARCH); + this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH); } @Override @@ -137,23 +140,20 @@ 
public class ScriptQueryParser implements QueryParser { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (super.equals(o) == false) return false; - - ScriptQuery that = (ScriptQuery) o; - - if (params != null ? !params.equals(that.params) : that.params != null) return false; - if (script != null ? !script.equals(that.script) : that.script != null) return false; - - return true; + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + ScriptQuery other = (ScriptQuery) obj; + return Objects.equal(script, other.script); } @Override public int hashCode() { + final int prime = 31; int result = super.hashCode(); - result = 31 * result + Objects.hashCode(script); - result = 31 * result + Objects.hashCode(params); + result = prime * result + Objects.hashCode(script); return result; } diff --git a/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java b/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java index 7154fe93fc0..852977fa0db 100644 --- a/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.Template; import java.io.IOException; import java.util.Map; @@ -29,51 +30,58 @@ import java.util.Map; * */ public class TemplateQueryBuilder extends QueryBuilder { + /** Template to fill. */ + private Template template; /** Parameters to fill the template with. */ private Map vars; /** Template to fill.*/ - private String template; + private String templateString; private ScriptService.ScriptType templateType; /** - * @param template the template to use for that query. - * @param vars the parameters to fill the template with. 
+ * @param template + * the template to use for that query. * */ + public TemplateQueryBuilder(Template template) { + this.template = template; + } + + /** + * @param template + * the template to use for that query. + * @param vars + * the parameters to fill the template with. + * @deprecated Use {@link #TemplateQueryBuilder(Template)} instead. + * */ + @Deprecated public TemplateQueryBuilder(String template, Map vars) { this(template, ScriptService.ScriptType.INLINE, vars); } /** - * @param template the template to use for that query. - * @param vars the parameters to fill the template with. - * @param templateType what kind of template (INLINE,FILE,ID) + * @param template + * the template to use for that query. + * @param vars + * the parameters to fill the template with. + * @param templateType + * what kind of template (INLINE,FILE,ID) + * @deprecated Use {@link #TemplateQueryBuilder(Template)} instead. * */ + @Deprecated public TemplateQueryBuilder(String template, ScriptService.ScriptType templateType, Map vars) { - this.template = template; - this.vars =vars; + this.templateString = template; + this.vars = vars; this.templateType = templateType; } @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(TemplateQueryParser.NAME); - String fieldname; - switch(templateType){ - case FILE: - fieldname = "file"; - break; - case INDEXED: - fieldname = "id"; - break; - case INLINE: - fieldname = TemplateQueryParser.QUERY; - break; - default: - throw new IllegalArgumentException("Unknown template type " + templateType); + protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException { + builder.field(TemplateQueryParser.NAME); + if (template == null) { + new Template(templateString, templateType, null, null, this.vars).toXContent(builder, builderParams); + } else { + template.toXContent(builder, builderParams); } - builder.field(fieldname, template); - 
builder.field(TemplateQueryParser.PARAMS, vars); - builder.endObject(); } } diff --git a/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java b/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java index 32872f8f7a0..040df24ec9f 100644 --- a/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java @@ -22,22 +22,20 @@ import org.apache.lucene.search.Query; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.script.Template; import java.io.IOException; import java.util.HashMap; import java.util.Map; /** - * In the simplest case, parse template string and variables from the request, compile the template and - * execute the template against the given variables. + * In the simplest case, parse template string and variables from the request, + * compile the template and execute the template against the given variables. * */ public class TemplateQueryParser implements QueryParser { @@ -45,12 +43,10 @@ public class TemplateQueryParser implements QueryParser { public static final String NAME = "template"; /** Name of query parameter containing the template string. */ public static final String QUERY = "query"; - /** Name of query parameter containing the template parameters. 
*/ - public static final String PARAMS = "params"; private final ScriptService scriptService; - private final static Map parametersToTypes = new HashMap<>(); + private final static Map parametersToTypes = new HashMap<>(); static { parametersToTypes.put("query", ScriptService.ScriptType.INLINE); parametersToTypes.put("file", ScriptService.ScriptType.FILE); @@ -64,21 +60,23 @@ public class TemplateQueryParser implements QueryParser { @Override public String[] names() { - return new String[] {NAME}; + return new String[] { NAME }; } /** - * Parses the template query replacing template parameters with provided values. - * Handles both submitting the template as part of the request as well as - * referencing only the template name. - * @param parseContext parse context containing the templated query. + * Parses the template query replacing template parameters with provided + * values. Handles both submitting the template as part of the request as + * well as referencing only the template name. + * + * @param parseContext + * parse context containing the templated query. */ @Override @Nullable public Query parse(QueryParseContext parseContext) throws IOException { XContentParser parser = parseContext.parser(); - TemplateContext templateContext = parse(parser, PARAMS, parametersToTypes); - ExecutableScript executable = this.scriptService.executable(new Script(MustacheScriptEngineService.NAME, templateContext.template(), templateContext.scriptType(), templateContext.params()), ScriptContext.Standard.SEARCH); + Template template = parse(parser); + ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH); BytesReference querySource = (BytesReference) executable.run(); @@ -89,72 +87,20 @@ public class TemplateQueryParser implements QueryParser { } } - public static TemplateContext parse(XContentParser parser, String paramsFieldname, String ... 
parameters) throws IOException { + public static Template parse(XContentParser parser, String... parameters) throws IOException { - Map parameterMap = new HashMap<>(parametersToTypes); + Map parameterMap = new HashMap<>(parametersToTypes); for (String parameter : parameters) { parameterMap.put(parameter, ScriptService.ScriptType.INLINE); } - return parse(parser,paramsFieldname,parameterMap); + return parse(parser, parameterMap); } - public static TemplateContext parse(XContentParser parser, String paramsFieldname) throws IOException { - return parse(parser,paramsFieldname,parametersToTypes); + public static Template parse(XContentParser parser) throws IOException { + return parse(parser, parametersToTypes); } - public static TemplateContext parse(XContentParser parser, String paramsFieldname, Map parameterMap) throws IOException { - Map params = null; - String templateNameOrTemplateContent = null; - - String currentFieldName = null; - XContentParser.Token token; - ScriptService.ScriptType type = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (parameterMap.containsKey(currentFieldName)) { - type = parameterMap.get(currentFieldName); - if (token == XContentParser.Token.START_OBJECT) { - XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent()); - builder.copyCurrentStructure(parser); - templateNameOrTemplateContent = builder.string(); - } else { - templateNameOrTemplateContent = parser.text(); - } - } else if (paramsFieldname.equals(currentFieldName)) { - params = parser.map(); - } - } - - return new TemplateContext(type, templateNameOrTemplateContent, params); - } - - public static class TemplateContext { - private Map params; - private String template; - private ScriptService.ScriptType type; - - public TemplateContext(ScriptService.ScriptType type, String template, Map params) { - this.params = params; 
- this.template = template; - this.type = type; - } - - public Map params() { - return params; - } - - public String template() { - return template; - } - - public ScriptService.ScriptType scriptType(){ - return type; - } - - @Override - public String toString(){ - return type + " " + template; - } + public static Template parse(XContentParser parser, Map parameterMap) throws IOException { + return Template.parse(parser, parameterMap); } } diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java b/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java index 8ae38b5008f..ef9865395b3 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionBuilders.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilde import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; +import org.elasticsearch.script.Script; import java.util.Map; @@ -56,18 +57,38 @@ public class ScoreFunctionBuilders { return new LinearDecayFunctionBuilder(fieldName, null, scale); } + public static ScriptScoreFunctionBuilder scriptFunction(Script script) { + return (new ScriptScoreFunctionBuilder()).script(script); + } + + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. + */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script) { return (new ScriptScoreFunctionBuilder()).script(script); } + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. 
+ */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script, String lang) { return (new ScriptScoreFunctionBuilder()).script(script).lang(lang); } + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. + */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script, String lang, Map params) { return (new ScriptScoreFunctionBuilder()).script(script).lang(lang).params(params); } + /** + * @deprecated Use {@link #scriptFunction(Script)} instead. + */ + @Deprecated public static ScriptScoreFunctionBuilder scriptFunction(String script, Map params) { return (new ScriptScoreFunctionBuilder()).script(script).params(params); } diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java index 3f715512bab..20dca88788a 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionBuilder.java @@ -19,12 +19,13 @@ package org.elasticsearch.index.query.functionscore.script; -import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; - -import com.google.common.collect.Maps; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import java.io.IOException; +import java.util.HashMap; import java.util.Map; /** @@ -33,7 +34,9 @@ import java.util.Map; */ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { - private String script; + private Script script; + + private String scriptString; private String lang; @@ -43,22 +46,35 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { } - public ScriptScoreFunctionBuilder 
script(String script) { + public ScriptScoreFunctionBuilder script(Script script) { this.script = script; return this; } /** - * Sets the language of the script. + * @deprecated Use {@link #script(Script)} instead */ + @Deprecated + public ScriptScoreFunctionBuilder script(String script) { + this.scriptString = script; + return this; + } + + /** + * Sets the language of the script.@deprecated Use {@link #script(Script)} + * instead + */ + @Deprecated public ScriptScoreFunctionBuilder lang(String lang) { this.lang = lang; return this; } /** - * Additional parameters that can be provided to the script. + * Additional parameters that can be provided to the script.@deprecated Use + * {@link #script(Script)} instead */ + @Deprecated public ScriptScoreFunctionBuilder params(Map params) { if (this.params == null) { this.params = params; @@ -69,11 +85,13 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { } /** - * Additional parameters that can be provided to the script. + * Additional parameters that can be provided to the script.@deprecated Use + * {@link #script(Script)} instead */ + @Deprecated public ScriptScoreFunctionBuilder param(String key, Object value) { if (params == null) { - params = Maps.newHashMap(); + params = new HashMap<>(); } params.put(key, value); return this; @@ -82,12 +100,18 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder { @Override public void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); - builder.field("script", script); - if (lang != null) { - builder.field("lang", lang); - } - if (this.params != null) { - builder.field("params", this.params); + if (script != null) { + builder.field(ScriptField.SCRIPT.getPreferredName(), script); + } else { + if (scriptString != null) { + builder.field("script", scriptString); + } + if (lang != null) { + builder.field("lang", lang); + } + if (this.params != null) { + builder.field("params", this.params); + 
} } builder.endObject(); } diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java index b01eaee3615..72a592da5b3 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/script/ScriptScoreFunctionParser.java @@ -29,15 +29,17 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.script.Script; +import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; import java.io.IOException; import java.util.Map; +import static com.google.common.collect.Maps.newHashMap; + /** * */ @@ -57,16 +59,17 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser { @Override public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException { ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); - String script = null; + Script script = null; Map vars = null; - ScriptService.ScriptType scriptType = null; String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { - if ("params".equals(currentFieldName)) { + if (ScriptField.SCRIPT.match(currentFieldName)) { + script = Script.parse(parser); + } else 
if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs) vars = parser.map(); } else { throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]"); @@ -78,19 +81,26 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser { } } - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if (scriptValue != null) { - script = scriptValue.script(); - scriptType = scriptValue.scriptType(); + if (script == null) { // Didn't find anything using the new API so try using the old one instead + ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); + if (scriptValue != null) { + if (vars == null) { + vars = newHashMap(); + } + script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), vars); + } + } else if (vars != null) { + throw new QueryParsingException(parseContext, "script params must be specified inside script object"); } + if (script == null) { throw new QueryParsingException(parseContext, NAMES[0] + " requires 'script' field"); } SearchScript searchScript; try { - searchScript = parseContext.scriptService().search(parseContext.lookup(), new Script(scriptParameterParser.lang(), script, scriptType, vars), ScriptContext.Standard.SEARCH); - return new ScriptScoreFunction(script, vars, searchScript); + searchScript = parseContext.scriptService().search(parseContext.lookup(), script, ScriptContext.Standard.SEARCH); + return new ScriptScoreFunction(script, searchScript); } catch (Exception e) { throw new QueryParsingException(parseContext, NAMES[0] + " the script could not be loaded", e); } diff --git a/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java b/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java index 0987e5dd4f6..8e991c8f130 100644 --- 
a/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java +++ b/src/main/java/org/elasticsearch/index/query/support/BaseInnerHitBuilder.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.script.Script; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; @@ -148,33 +149,60 @@ public abstract class BaseInnerHitBuilder impleme * @param name The name that will represent this value in the return hit * @param script The script to use */ + public T addScriptField(String name, Script script) { + sourceBuilder().scriptField(name, script); + return (T) this; + } + + /** + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. + * + * @param name + * The name that will represent this value in the return hit + * @param script + * The script to use + * @deprecated Use {@link #addScriptField(String, Script)} instead. + */ + @Deprecated public T addScriptField(String name, String script) { sourceBuilder().scriptField(name, script); return (T) this; } /** - * Adds a script based field to load and return. The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. * - * @param name The name that will represent this value in the return hit - * @param script The script to use - * @param params Parameters that the script can use. 
+ * @param name + * The name that will represent this value in the return hit + * @param script + * The script to use + * @param params + * Parameters that the script can use. + * @deprecated Use {@link #addScriptField(String, Script)} instead. */ + @Deprecated public T addScriptField(String name, String script, Map params) { sourceBuilder().scriptField(name, script, params); return (T) this; } /** - * Adds a script based field to load and return. The field does not have to be stored, - * but its recommended to use non analyzed or numeric fields. + * Adds a script based field to load and return. The field does not have to + * be stored, but its recommended to use non analyzed or numeric fields. * - * @param name The name that will represent this value in the return hit - * @param lang The language of the script - * @param script The script to use - * @param params Parameters that the script can use (can be null). + * @param name + * The name that will represent this value in the return hit + * @param lang + * The language of the script + * @param script + * The script to use + * @param params + * Parameters that the script can use (can be null). + * @deprecated Use {@link #addScriptField(String, Script)} instead. 
*/ + @Deprecated public T addScriptField(String name, String lang, String script, Map params) { sourceBuilder().scriptField(name, lang, script, params); return (T) this; diff --git a/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index d019e598cac..a23780db62e 100644 --- a/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -31,12 +31,20 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; -import org.elasticsearch.rest.*; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -68,16 +76,13 @@ public class RestUpdateAction extends BaseRestHandler { scriptParameterParser.parseParams(request); ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); if (scriptValue != null) { - updateRequest.script(scriptValue.script(), scriptValue.scriptType()); - } - String scriptLang = scriptParameterParser.lang(); - if (scriptLang != null) { - updateRequest.scriptLang(scriptLang); - } - for (Map.Entry entry : 
request.params().entrySet()) { - if (entry.getKey().startsWith("sp_")) { - updateRequest.addScriptParam(entry.getKey().substring(3), entry.getValue()); + Map scriptParams = new HashMap<>(); + for (Map.Entry entry : request.params().entrySet()) { + if (entry.getKey().startsWith("sp_")) { + scriptParams.put(entry.getKey().substring(3), entry.getValue()); + } } + updateRequest.script(new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), scriptParams)); } String sField = request.param("fields"); if (sField != null) { diff --git a/src/main/java/org/elasticsearch/script/AbstractScriptParser.java b/src/main/java/org/elasticsearch/script/AbstractScriptParser.java new file mode 100644 index 00000000000..8198dd18ebc --- /dev/null +++ b/src/main/java/org/elasticsearch/script/AbstractScriptParser.java @@ -0,0 +1,196 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script.ScriptField; +import org.elasticsearch.script.Script.ScriptParseException; +import org.elasticsearch.script.ScriptService.ScriptType; + +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; + +public abstract class AbstractScriptParser { + + protected abstract String parseInlineScript(XContentParser parser) throws IOException; + + protected abstract S createScript(String script, ScriptType type, String lang, Map params); + + protected abstract S createSimpleScript(XContentParser parser) throws IOException; + + @Deprecated + protected Map getAdditionalScriptParameters() { + return Collections.emptyMap(); + } + + public S parse(XContentParser parser) throws IOException { + + XContentParser.Token token = parser.currentToken(); + // If the parser hasn't yet been pushed to the first token, do it now + if (token == null) { + token = parser.nextToken(); + } + + if (token == XContentParser.Token.VALUE_STRING) { + return createSimpleScript(parser); + } + if (token != XContentParser.Token.START_OBJECT) { + throw new ScriptParseException("expected a string value or an object, but found [{}] instead", token); + } + + String script = null; + ScriptType type = null; + String lang = getDefaultScriptLang(); + Map params = null; + + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (ScriptType.INLINE.getParseField().match(currentFieldName) || ScriptService.SCRIPT_INLINE.match(currentFieldName)) { + type = ScriptType.INLINE; + script = parseInlineScript(parser); + } else if (ScriptType.FILE.getParseField().match(currentFieldName) || ScriptService.SCRIPT_FILE.match(currentFieldName)) { + type = 
ScriptType.FILE; + if (token == XContentParser.Token.VALUE_STRING) { + script = parser.text(); + } else { + throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token); + } + } else if (ScriptType.INDEXED.getParseField().match(currentFieldName) || ScriptService.SCRIPT_ID.match(currentFieldName)) { + type = ScriptType.INDEXED; + if (token == XContentParser.Token.VALUE_STRING) { + script = parser.text(); + } else { + throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token); + } + } else if (ScriptField.LANG.match(currentFieldName) || ScriptService.SCRIPT_LANG.match(currentFieldName)) { + if (token == XContentParser.Token.VALUE_STRING) { + lang = parser.text(); + } else { + throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token); + } + } else if (ScriptField.PARAMS.match(currentFieldName)) { + if (token == XContentParser.Token.START_OBJECT) { + params = parser.map(); + } else { + throw new ScriptParseException("expected an object for field [{}], but found [{}]", currentFieldName, token); + } + } else { + // TODO remove this in 2.0 + ScriptType paramScriptType = getAdditionalScriptParameters().get(currentFieldName); + if (paramScriptType != null) { + script = parseInlineScript(parser); + type = paramScriptType; + } else { + throw new ScriptParseException("unexpected field [{}]", currentFieldName); + } + } + } + if (script == null) { + throw new ScriptParseException("expected one of [{}], [{}] or [{}] fields, but found none", ScriptType.INLINE.getParseField() + .getPreferredName(), ScriptType.FILE.getParseField().getPreferredName(), ScriptType.INDEXED.getParseField() + .getPreferredName()); + } + assert type != null : "if script is not null, type should definitely not be null"; + return createScript(script, type, lang, params); + + } + + /** + * @return the default script language for this parser or null 
+ * to use the default set in the ScriptService */ + protected String getDefaultScriptLang() { + return null; + } + + public Script parse(Map config, boolean removeMatchedEntries) { + String script = null; + ScriptType type = null; + String lang = null; + Map params = null; + for (Iterator> itr = config.entrySet().iterator(); itr.hasNext();) { + Entry entry = itr.next(); + String parameterName = entry.getKey(); + Object parameterValue = entry.getValue(); + if (ScriptField.LANG.match(parameterName) || ScriptService.SCRIPT_LANG.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + lang = (String) parameterValue; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (ScriptField.PARAMS.match(parameterName)) { + if (parameterValue instanceof Map || parameterValue == null) { + params = (Map) parameterValue; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type Map: [" + parameterName + "]"); + } + } else if (ScriptType.INLINE.getParseField().match(parameterName) || ScriptService.SCRIPT_INLINE.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptType.INLINE; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (ScriptType.FILE.getParseField().match(parameterName) || ScriptService.SCRIPT_FILE.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptType.FILE; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if 
(ScriptType.INDEXED.getParseField().match(parameterName) || ScriptService.SCRIPT_ID.match(parameterName)) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptType.INDEXED; + if (removeMatchedEntries) { + itr.remove(); + } + } else { + throw new ScriptParseException("Value must be of type String: [" + parameterName + "]"); + } + } + } + if (script == null) { + throw new ScriptParseException("expected one of [{}], [{}] or [{}] fields, but found none", ScriptType.INLINE.getParseField() + .getPreferredName(), ScriptType.FILE.getParseField().getPreferredName(), ScriptType.INDEXED.getParseField() + .getPreferredName()); + } + assert type != null : "if script is not null, type should definitely not be null"; + return createScript(script, type, lang, params); + } + +} \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/script/Script.java b/src/main/java/org/elasticsearch/script/Script.java index 655ff82c08e..d826eaad8ed 100644 --- a/src/main/java/org/elasticsearch/script/Script.java +++ b/src/main/java/org/elasticsearch/script/Script.java @@ -19,52 +19,93 @@ package org.elasticsearch.script; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.logging.support.LoggerMessageFormat; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.ScriptService.ScriptType; + +import java.io.IOException; import java.util.Map; - -import static org.elasticsearch.script.ScriptService.ScriptType; - /** * Script holds all the parameters necessary to compile or find in cache and 
then execute a script. */ -public class Script { +public class Script implements ToXContent, Streamable { - private final String lang; - private final String script; - private final ScriptType type; - private final Map params; + public static final ScriptType DEFAULT_TYPE = ScriptType.INLINE; + private static final ScriptParser PARSER = new ScriptParser(); + + private String script; + private @Nullable ScriptType type; + private @Nullable String lang; + private @Nullable Map params; + + /** + * For Serialization + */ + Script() { + } + + /** + * Constructor for simple inline script. The script will have no lang or + * params set. + * + * @param script + * The inline script to execute. + */ + public Script(String script) { + if (script == null) { + throw new IllegalArgumentException("The parameter script (String) must not be null in Script."); + } + this.script = script; + } + + /** + * For sub-classes to use to override the default language + */ + protected Script(String script, String lang) { + if (script == null) { + throw new IllegalArgumentException("The parameter script (String) must not be null in Script."); + } + this.script = script; + this.lang = lang; + } /** * Constructor for Script. - * @param lang The language of the script to be compiled/executed. - * @param script The cache key of the script to be compiled/executed. For dynamic scripts this is the actual - * script source code. For indexed scripts this is the id used in the request. For on disk scripts - * this is the file name. - * @param type The type of script -- dynamic, indexed, or file. - * @param params The map of parameters the script will be executed with. + * + * @param script + * The cache key of the script to be compiled/executed. For + * inline scripts this is the actual script source code. For + * indexed scripts this is the id used in the request. For on + * file scripts this is the file name. + * @param type + * The type of script -- dynamic, indexed, or file. 
+ * @param lang + * The language of the script to be compiled/executed. + * @param params + * The map of parameters the script will be executed with. */ - public Script(String lang, String script, ScriptType type, Map params) { + public Script(String script, ScriptType type, @Nullable String lang, @Nullable Map params) { if (script == null) { throw new IllegalArgumentException("The parameter script (String) must not be null in Script."); } if (type == null) { throw new IllegalArgumentException("The parameter type (ScriptType) must not be null in Script."); } - - this.lang = lang; this.script = script; this.type = type; + this.lang = lang; this.params = params; } - /** - * Method for getting language. - * @return The language of the script to be compiled/executed. - */ - public String getLang() { - return lang; - } - /** * Method for getting the script. * @return The cache key of the script to be compiled/executed. For dynamic scripts this is the actual @@ -77,17 +118,190 @@ public class Script { /** * Method for getting the type. - * @return The type of script -- dynamic, indexed, or file. + * + * @return The type of script -- inline, indexed, or file. */ public ScriptType getType() { - return type; + return type == null ? DEFAULT_TYPE : type; + } + + /** + * Method for getting language. + * + * @return The language of the script to be compiled/executed. + */ + public String getLang() { + return lang; } /** * Method for getting the parameters. + * * @return The map of parameters the script will be executed with. 
*/ public Map getParams() { return params; } + + @Override + public final void readFrom(StreamInput in) throws IOException { + script = in.readString(); + if (in.readBoolean()) { + type = ScriptType.readFrom(in); + } + lang = in.readOptionalString(); + if (in.readBoolean()) { + params = in.readMap(); + } + doReadFrom(in); + } + + protected void doReadFrom(StreamInput in) throws IOException { + // For sub-classes to Override + } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + out.writeString(script); + boolean hasType = type != null; + out.writeBoolean(hasType); + if (hasType) { + ScriptType.writeTo(type, out); + } + out.writeOptionalString(lang); + boolean hasParams = params != null; + out.writeBoolean(hasParams); + if (hasParams) { + out.writeMap(params); + } + doWriteTo(out); + } + + protected void doWriteTo(StreamOutput out) throws IOException { + // For sub-classes to Override + } + + @Override + public final XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { + if (type == null) { + return builder.value(script); + } + + builder.startObject(); + scriptFieldToXContent(script, type, builder, builderParams); + if (lang != null) { + builder.field(ScriptField.LANG.getPreferredName(), lang); + } + if (params != null) { + builder.field(ScriptField.PARAMS.getPreferredName(), params); + } + builder.endObject(); + return builder; + } + + protected XContentBuilder scriptFieldToXContent(String script, ScriptType type, XContentBuilder builder, Params builderParams) + throws IOException { + builder.field(type.getParseField().getPreferredName(), script); + return builder; + } + + public static Script readScript(StreamInput in) throws IOException { + Script script = new Script(); + script.readFrom(in); + return script; + } + + public static Script parse(Map config, boolean removeMatchedEntries) { + return PARSER.parse(config, removeMatchedEntries); + } + + public static Script parse(XContentParser 
parser) throws IOException { + return PARSER.parse(parser); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((lang == null) ? 0 : lang.hashCode()); + result = prime * result + ((params == null) ? 0 : params.hashCode()); + result = prime * result + ((script == null) ? 0 : script.hashCode()); + result = prime * result + ((type == null) ? 0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Script other = (Script) obj; + if (lang == null) { + if (other.lang != null) + return false; + } else if (!lang.equals(other.lang)) + return false; + if (params == null) { + if (other.params != null) + return false; + } else if (!params.equals(other.params)) + return false; + if (script == null) { + if (other.script != null) + return false; + } else if (!script.equals(other.script)) + return false; + if (type != other.type) + return false; + return true; + } + + @Override + public String toString() { + return "[script: " + script + ", type: " + type.getParseField().getPreferredName() + ", lang: " + lang + ", params: " + params + + "]"; + } + + private static class ScriptParser extends AbstractScriptParser