Merge pull request #11279 from jpountz/fix/simplify_compression

Internal: tighten up our compression framework.
Adrien Grand 2015-05-29 17:23:07 +02:00
commit 0f3206e60c
66 changed files with 891 additions and 593 deletions
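At a high level, this commit replaces CompressedString with CompressedXContent and moves compression onto a stream-based API, with DEFLATE as the new default codec. A minimal sketch of how the renamed class is used, based only on the constructors and accessors visible in the diff below (the sample mapping JSON is made up):

import org.elasticsearch.common.compress.CompressedXContent;

public class CompressedXContentExample {
    public static void main(String[] args) throws Exception {
        // The mapping is compressed eagerly and kept compressed in memory.
        CompressedXContent mapping = new CompressedXContent("{\"my_type\":{\"properties\":{}}}");

        byte[] compressedBytes = mapping.compressed();   // raw compressed representation
        byte[] originalBytes = mapping.uncompressed();   // decompressed on demand
        String json = mapping.string();                  // decompressed and decoded as UTF-8

        System.out.println(json);                        // {"my_type":{"properties":{}}}
        System.out.println(compressedBytes.length + " compressed / " + originalBytes.length + " uncompressed bytes");
    }
}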

View File

@@ -43,7 +43,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.*;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.settings.Settings;
@@ -1080,7 +1080,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
                 // to find a _meta document
                 // So we have no choice but to index first and send mappings afterwards
                 MapperService mapperService = indexShard.indexService().mapperService();
-                mapperService.merge(request.type(), new CompressedString(update.toBytes()), true);
+                mapperService.merge(request.type(), new CompressedXContent(update.toBytes()), true);
                 created = operation.execute(indexShard);
                 mappingUpdatedAction.updateMappingOnMasterAsynchronously(indexName, request.type(), update);
             } else {

View File

@@ -20,8 +20,14 @@
 package org.elasticsearch.client.transport;
 
 import com.google.common.collect.ImmutableList;
 import org.elasticsearch.Version;
-import org.elasticsearch.action.*;
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionModule;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.client.support.AbstractClient;
 import org.elasticsearch.client.support.Headers;
@@ -30,7 +36,6 @@ import org.elasticsearch.cluster.ClusterNameModule;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.LifecycleComponent;
-import org.elasticsearch.common.compress.CompressorFactory;
 import org.elasticsearch.common.inject.Injector;
 import org.elasticsearch.common.inject.ModulesBuilder;
 import org.elasticsearch.common.network.NetworkModule;
@@ -122,8 +127,6 @@ public class TransportClient extends AbstractClient {
         Version version = Version.CURRENT;
 
-        CompressorFactory.configure(this.settings);
-
         final ThreadPool threadPool = new ThreadPool(settings);
         boolean success = false;

View File

@@ -37,7 +37,7 @@ import org.elasticsearch.cluster.service.InternalClusterService;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -422,7 +422,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
                 builder.endObject();
                 builder.startObject("mappings");
-                for (ObjectObjectCursor<String, CompressedString> cursor1 : templateMetaData.mappings()) {
+                for (ObjectObjectCursor<String, CompressedXContent> cursor1 : templateMetaData.mappings()) {
                     byte[] mappingSource = cursor1.value.uncompressed();
                     XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
                     Map<String, Object> mapping = parser.map();

View File

@@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableSet;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -45,7 +45,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
     private final String alias;
-    private final CompressedString filter;
+    private final CompressedXContent filter;
     private final String indexRouting;
@@ -53,7 +53,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
     private final Set<String> searchRoutingValues;
-    private AliasMetaData(String alias, CompressedString filter, String indexRouting, String searchRouting) {
+    private AliasMetaData(String alias, CompressedXContent filter, String indexRouting, String searchRouting) {
         this.alias = alias;
         this.filter = filter;
         this.indexRouting = indexRouting;
@@ -77,11 +77,11 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
         return alias();
     }
-    public CompressedString filter() {
+    public CompressedXContent filter() {
         return filter;
     }
-    public CompressedString getFilter() {
+    public CompressedXContent getFilter() {
         return filter();
     }
@@ -176,9 +176,9 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
     @Override
     public AliasMetaData readFrom(StreamInput in) throws IOException {
         String alias = in.readString();
-        CompressedString filter = null;
+        CompressedXContent filter = null;
         if (in.readBoolean()) {
-            filter = CompressedString.readCompressedString(in);
+            filter = CompressedXContent.readCompressedString(in);
         }
         String indexRouting = null;
         if (in.readBoolean()) {
@@ -195,7 +195,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
         private final String alias;
-        private CompressedString filter;
+        private CompressedXContent filter;
         private String indexRouting;
@@ -217,7 +217,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
             return alias;
         }
-        public Builder filter(CompressedString filter) {
+        public Builder filter(CompressedXContent filter) {
             this.filter = filter;
             return this;
         }
@@ -244,7 +244,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
            }
            try {
                XContentBuilder builder = XContentFactory.jsonBuilder().map(filter);
-                this.filter = new CompressedString(builder.bytes());
+                this.filter = new CompressedXContent(builder.bytes());
                return this;
            } catch (IOException e) {
                throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
@@ -324,7 +324,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
                    }
                } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                    if ("filter".equals(currentFieldName)) {
-                        builder.filter(new CompressedString(parser.binaryValue()));
+                        builder.filter(new CompressedXContent(parser.binaryValue()));
                    }
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    if ("routing".equals(currentFieldName)) {

View File

@@ -35,7 +35,7 @@ import org.elasticsearch.cluster.routing.Murmur3HashFunction;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
@@ -874,7 +874,7 @@ public class IndexMetaData implements Diffable<IndexMetaData> {
             if ("mappings".equals(currentFieldName)) {
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
-                        builder.putMapping(new MappingMetaData(new CompressedString(parser.binaryValue())));
+                        builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue())));
                     } else {
                         Map<String, Object> mapping = parser.mapOrdered();
                         if (mapping.size() == 1) {

View File

@@ -24,7 +24,7 @@ import com.google.common.collect.Sets;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
@@ -54,13 +54,13 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
     private final Settings settings;
     // the mapping source should always include the type as top level
-    private final ImmutableOpenMap<String, CompressedString> mappings;
+    private final ImmutableOpenMap<String, CompressedXContent> mappings;
     private final ImmutableOpenMap<String, AliasMetaData> aliases;
     private final ImmutableOpenMap<String, IndexMetaData.Custom> customs;
-    public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap<String, CompressedString> mappings,
+    public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap<String, CompressedXContent> mappings,
                                  ImmutableOpenMap<String, AliasMetaData> aliases, ImmutableOpenMap<String, IndexMetaData.Custom> customs) {
         this.name = name;
         this.order = order;
@@ -103,11 +103,11 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
         return settings();
     }
-    public ImmutableOpenMap<String, CompressedString> mappings() {
+    public ImmutableOpenMap<String, CompressedXContent> mappings() {
         return this.mappings;
     }
-    public ImmutableOpenMap<String, CompressedString> getMappings() {
+    public ImmutableOpenMap<String, CompressedXContent> getMappings() {
         return this.mappings;
     }
@@ -170,7 +170,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
         builder.settings(Settings.readSettingsFromStream(in));
         int mappingsSize = in.readVInt();
         for (int i = 0; i < mappingsSize; i++) {
-            builder.putMapping(in.readString(), CompressedString.readCompressedString(in));
+            builder.putMapping(in.readString(), CompressedXContent.readCompressedString(in));
         }
         int aliasesSize = in.readVInt();
         for (int i = 0; i < aliasesSize; i++) {
@@ -193,7 +193,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
         out.writeString(template);
         Settings.writeSettingsToStream(settings, out);
         out.writeVInt(mappings.size());
-        for (ObjectObjectCursor<String, CompressedString> cursor : mappings) {
+        for (ObjectObjectCursor<String, CompressedXContent> cursor : mappings) {
             out.writeString(cursor.key);
             cursor.value.writeTo(out);
         }
@@ -223,7 +223,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
         private Settings settings = Settings.Builder.EMPTY_SETTINGS;
-        private final ImmutableOpenMap.Builder<String, CompressedString> mappings;
+        private final ImmutableOpenMap.Builder<String, CompressedXContent> mappings;
         private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases;
@@ -276,13 +276,13 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
             return this;
         }
-        public Builder putMapping(String mappingType, CompressedString mappingSource) throws IOException {
+        public Builder putMapping(String mappingType, CompressedXContent mappingSource) throws IOException {
             mappings.put(mappingType, mappingSource);
             return this;
         }
         public Builder putMapping(String mappingType, String mappingSource) throws IOException {
-            mappings.put(mappingType, new CompressedString(mappingSource));
+            mappings.put(mappingType, new CompressedXContent(mappingSource));
             return this;
         }
@@ -327,7 +327,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
            if (params.paramAsBoolean("reduce_mappings", false)) {
                builder.startObject("mappings");
-                for (ObjectObjectCursor<String, CompressedString> cursor : indexTemplateMetaData.mappings()) {
+                for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetaData.mappings()) {
                    byte[] mappingSource = cursor.value.uncompressed();
                    XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
                    Map<String, Object> mapping = parser.map();
@@ -341,7 +341,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
                builder.endObject();
            } else {
                builder.startArray("mappings");
-                for (ObjectObjectCursor<String, CompressedString> cursor : indexTemplateMetaData.mappings()) {
+                for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetaData.mappings()) {
                    byte[] data = cursor.value.uncompressed();
                    XContentParser parser = XContentFactory.xContent(data).createParser(data);
                    Map<String, Object> mapping = parser.mapOrderedAndClose();

View File

@@ -23,7 +23,7 @@ import org.elasticsearch.action.TimestampParsingException;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
@@ -276,7 +276,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
     private final String type;
-    private final CompressedString source;
+    private final CompressedXContent source;
     private Id id;
     private Routing routing;
@@ -294,9 +294,9 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
         this.hasParentField = docMapper.parentFieldMapper().active();
     }
-    public MappingMetaData(CompressedString mapping) throws IOException {
+    public MappingMetaData(CompressedXContent mapping) throws IOException {
         this.source = mapping;
-        Map<String, Object> mappingMap = XContentHelper.createParser(mapping.compressed(), 0, mapping.compressed().length).mapOrderedAndClose();
+        Map<String, Object> mappingMap = XContentHelper.createParser(mapping.compressedReference()).mapOrderedAndClose();
         if (mappingMap.size() != 1) {
             throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string());
         }
@@ -311,7 +311,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
     public MappingMetaData(String type, Map<String, Object> mapping) throws IOException {
         this.type = type;
         XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping);
-        this.source = new CompressedString(mappingBuilder.bytes());
+        this.source = new CompressedXContent(mappingBuilder.bytes());
         Map<String, Object> withoutType = mapping;
         if (mapping.size() == 1 && mapping.containsKey(type)) {
             withoutType = (Map<String, Object>) mapping.get(type);
@@ -322,7 +322,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
     private MappingMetaData() {
         this.type = "";
         try {
-            this.source = new CompressedString("");
+            this.source = new CompressedXContent("{}");
         } catch (IOException ex) {
             throw new IllegalStateException("Cannot create MappingMetaData prototype", ex);
         }
@@ -393,7 +393,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
         }
     }
-    public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
+    public MappingMetaData(String type, CompressedXContent source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
         this.type = type;
         this.source = source;
         this.id = id;
@@ -418,7 +418,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
         return this.type;
     }
-    public CompressedString source() {
+    public CompressedXContent source() {
         return this.source;
     }
@@ -430,7 +430,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
     * Converts the serialized compressed form of the mappings into a parsed map.
     */
    public Map<String, Object> sourceAsMap() throws IOException {
-        Map<String, Object> mapping = XContentHelper.convertToMap(source.compressed(), 0, source.compressed().length, true).v2();
+        Map<String, Object> mapping = XContentHelper.convertToMap(source.compressedReference(), true).v2();
        if (mapping.size() == 1 && mapping.containsKey(type())) {
            // the type name is the root value, reduce it
            mapping = (Map<String, Object>) mapping.get(type());
@@ -599,7 +599,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
     public MappingMetaData readFrom(StreamInput in) throws IOException {
         String type = in.readString();
-        CompressedString source = CompressedString.readCompressedString(in);
+        CompressedXContent source = CompressedXContent.readCompressedString(in);
         // id
         Id id = new Id(in.readBoolean() ? in.readString() : null);
         // routing

View File

@@ -46,7 +46,7 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.regex.Regex;
@@ -252,7 +252,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                 // apply templates, merging the mappings into the request mapping if exists
                 for (IndexTemplateMetaData template : templates) {
                     templateNames.add(template.getName());
-                    for (ObjectObjectCursor<String, CompressedString> cursor : template.mappings()) {
+                    for (ObjectObjectCursor<String, CompressedXContent> cursor : template.mappings()) {
                         if (mappings.containsKey(cursor.key)) {
                             XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string()));
                         } else {
@@ -355,7 +355,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                 // first, add the default mapping
                 if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
                     try {
-                        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false);
+                        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false);
                     } catch (Exception e) {
                         removalReason = "failed on parsing default mapping on index creation";
                         throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e);
@@ -367,7 +367,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
                     }
                     try {
                         // apply the default here, its the first time we parse it
-                        mapperService.merge(entry.getKey(), new CompressedString(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true);
+                        mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true);
                     } catch (Exception e) {
                         removalReason = "failed on parsing mappings on index creation";
                         throw new MapperParsingException("mapping [" + entry.getKey() + "]", e);

View File

@@ -32,7 +32,7 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
@@ -91,11 +91,11 @@
     static class UpdateTask extends MappingTask {
         final String type;
-        final CompressedString mappingSource;
+        final CompressedXContent mappingSource;
         final String nodeId; // null fr unknown
         final ActionListener<ClusterStateUpdateResponse> listener;
-        UpdateTask(String index, String indexUUID, String type, CompressedString mappingSource, String nodeId, ActionListener<ClusterStateUpdateResponse> listener) {
+        UpdateTask(String index, String indexUUID, String type, CompressedXContent mappingSource, String nodeId, ActionListener<ClusterStateUpdateResponse> listener) {
             super(index, indexUUID);
             this.type = type;
             this.mappingSource = mappingSource;
@@ -254,7 +254,7 @@
                 UpdateTask updateTask = (UpdateTask) task;
                 try {
                     String type = updateTask.type;
-                    CompressedString mappingSource = updateTask.mappingSource;
+                    CompressedXContent mappingSource = updateTask.mappingSource;
                     MappingMetaData mappingMetaData = builder.mapping(type);
                     if (mappingMetaData != null && mappingMetaData.source().equals(mappingSource)) {
@@ -376,9 +376,9 @@
                     DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type());
                     if (MapperService.DEFAULT_MAPPING.equals(request.type())) {
                         // _default_ types do not go through merging, but we do test the new settings. Also don't apply the old default
-                        newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), false);
+                        newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false);
                     } else {
-                        newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), existingMapper == null);
+                        newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null);
                         if (existingMapper != null) {
                             // first, simulate
                             MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true);
@@ -415,12 +415,12 @@
                         continue;
                     }
-                    CompressedString existingSource = null;
+                    CompressedXContent existingSource = null;
                     if (existingMappers.containsKey(entry.getKey())) {
                         existingSource = existingMappers.get(entry.getKey()).mappingSource();
                     }
                     DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false);
-                    CompressedString updatedSource = mergedMapper.mappingSource();
+                    CompressedXContent updatedSource = mergedMapper.mappingSource();
                     if (existingSource != null) {
                         if (existingSource.equals(updatedSource)) {

View File

@@ -352,6 +352,7 @@ public class PagedBytesReference implements BytesReference {
         private final int offset;
         private final int length;
         private int pos;
+        private int mark;
 
         public PagedBytesReferenceStreamInput(ByteArray bytearray, int offset, int length) {
             this.bytearray = bytearray;
@@ -420,9 +421,19 @@
             return copiedBytes;
         }
 
+        @Override
+        public boolean markSupported() {
+            return true;
+        }
+
+        @Override
+        public void mark(int readlimit) {
+            this.mark = pos;
+        }
+
         @Override
         public void reset() throws IOException {
-            pos = 0;
+            pos = mark;
         }
 
         @Override
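With this change, reset() returns to the most recently marked position rather than rewinding to the start of the stream. A schematic sketch of the new contract (the stream creation is an assumption for illustration; PagedBytesReferenceStreamInput itself is constructed internally by PagedBytesReference):

// Hypothetical usage of the new mark/reset behaviour on the stream input.
StreamInput in = pagedBytesReference.streamInput();
in.readByte();   // advance the position
in.mark(0);      // remember the current position (readlimit is ignored)
in.readByte();
in.readByte();
in.reset();      // back to the marked position, not to position 0 as before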

View File

@@ -30,10 +30,9 @@ import java.io.IOException;
  * @deprecated Used only for backward comp. to read old compressed files, since we now use codec based compression
  */
 @Deprecated
-public abstract class CompressedIndexInput<T extends CompressorContext> extends IndexInput {
+public abstract class CompressedIndexInput extends IndexInput {
 
     private IndexInput in;
-    protected final T context;
 
     private int version;
     private long totalUncompressedLength;
@@ -48,10 +47,9 @@ public abstract class CompressedIndexInput<T extends CompressorContext> extends
     private int currentOffsetIdx;
     private long currentUncompressedChunkPointer;
 
-    public CompressedIndexInput(IndexInput in, T context) throws IOException {
+    public CompressedIndexInput(IndexInput in) throws IOException {
         super("compressed(" + in.toString() + ")");
         this.in = in;
-        this.context = context;
         readHeader(in);
         this.version = in.readInt();
         long metaDataPosition = in.readLong();

View File

@@ -27,10 +27,9 @@ import java.io.IOException;
 /**
  */
-public abstract class CompressedStreamInput<T extends CompressorContext> extends StreamInput {
+public abstract class CompressedStreamInput extends StreamInput {
 
     private final StreamInput in;
-    protected final CompressorContext context;
 
     private boolean closed;
@@ -38,9 +37,8 @@ public abstract class CompressedStreamInput<T extends CompressorContext> extends
     private int position = 0;
     private int valid = 0;
 
-    public CompressedStreamInput(StreamInput in, T context) throws IOException {
+    public CompressedStreamInput(StreamInput in) throws IOException {
         this.in = in;
-        this.context = context;
         super.setVersion(in.getVersion());
         readHeader(in);
     }
@@ -51,13 +49,6 @@ public abstract class CompressedStreamInput<T extends CompressorContext> extends
         return super.setVersion(version);
     }
 
-    /**
-     * Expert!, resets to buffer start, without the need to decompress it again.
-     */
-    public void resetToBufferStart() {
-        this.position = 0;
-    }
-
     /**
      * Method is overridden to report number of bytes that can now be read
      * from decoded data buffer, without reading bytes from the underlying

View File

@@ -22,6 +22,7 @@ package org.elasticsearch.common.compress;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -34,33 +35,32 @@
  * memory. Note that the compressed string might still sometimes need to be
  * decompressed in order to perform equality checks or to compute hash codes.
  */
-public final class CompressedString {
+public final class CompressedXContent {
 
     private final byte[] bytes;
     private int hashCode;
 
-    public CompressedString(BytesReference data) throws IOException {
+    public CompressedXContent(BytesReference data) throws IOException {
         Compressor compressor = CompressorFactory.compressor(data);
         if (compressor != null) {
             // already compressed...
             this.bytes = data.toBytes();
         } else {
-            BytesArray bytesArray = data.toBytesArray();
-            this.bytes = CompressorFactory.defaultCompressor().compress(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
-            assert CompressorFactory.compressor(bytes) != null;
+            BytesStreamOutput out = new BytesStreamOutput();
+            try (StreamOutput compressedOutput = CompressorFactory.defaultCompressor().streamOutput(out)) {
+                data.writeTo(compressedOutput);
+            }
+            this.bytes = out.bytes().toBytes();
+            assert CompressorFactory.compressor(new BytesArray(bytes)) != null;
         }
     }
 
-    public CompressedString(byte[] data, int offset, int length) throws IOException {
-        this(new BytesArray(data, offset, length));
-    }
-
-    public CompressedString(byte[] data) throws IOException {
-        this(data, 0, data.length);
-    }
-
-    public CompressedString(String str) throws IOException {
+    public CompressedXContent(byte[] data) throws IOException {
+        this(new BytesArray(data));
+    }
+
+    public CompressedXContent(String str) throws IOException {
         this(new BytesArray(new BytesRef(str)));
     }
@@ -69,12 +69,15 @@ public final class CompressedString {
         return this.bytes;
     }
 
+    /** Return the compressed bytes as a {@link BytesReference}. */
+    public BytesReference compressedReference() {
+        return new BytesArray(bytes);
+    }
+
     /** Return the uncompressed bytes. */
     public byte[] uncompressed() {
-        Compressor compressor = CompressorFactory.compressor(bytes);
-        assert compressor != null;
         try {
-            return compressor.uncompress(bytes, 0, bytes.length);
+            return CompressorFactory.uncompress(new BytesArray(bytes)).toBytes();
         } catch (IOException e) {
             throw new IllegalStateException("Cannot decompress compressed string", e);
         }
@@ -84,10 +87,10 @@ public final class CompressedString {
         return new BytesRef(uncompressed()).utf8ToString();
     }
 
-    public static CompressedString readCompressedString(StreamInput in) throws IOException {
+    public static CompressedXContent readCompressedString(StreamInput in) throws IOException {
         byte[] bytes = new byte[in.readVInt()];
         in.readBytes(bytes, 0, bytes.length);
-        return new CompressedString(bytes);
+        return new CompressedXContent(bytes);
     }
 
     public void writeTo(StreamOutput out) throws IOException {
@@ -100,7 +103,7 @@ public final class CompressedString {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
 
-        CompressedString that = (CompressedString) o;
+        CompressedXContent that = (CompressedXContent) o;
 
        if (Arrays.equals(compressed(), that.compressed())) {
            return true;

View File

@@ -23,7 +23,6 @@ import org.apache.lucene.store.IndexInput;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Settings;
 import org.jboss.netty.buffer.ChannelBuffer;
 
 import java.io.IOException;
@@ -32,32 +31,20 @@
  */
 public interface Compressor {
 
-    String type();
-
-    void configure(Settings settings);
-
     boolean isCompressed(BytesReference bytes);
 
-    boolean isCompressed(byte[] data, int offset, int length);
-
     boolean isCompressed(ChannelBuffer buffer);
 
+    StreamInput streamInput(StreamInput in) throws IOException;
+
+    StreamOutput streamOutput(StreamOutput out) throws IOException;
+
+    /**
+     * @deprecated Used for backward comp. since we now use Lucene compressed codec.
+     */
+    @Deprecated
     boolean isCompressed(IndexInput in) throws IOException;
 
-    /**
-     * Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}.
-     */
-    byte[] uncompress(byte[] data, int offset, int length) throws IOException;
-
-    /**
-     * Compresses the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}.
-     */
-    byte[] compress(byte[] data, int offset, int length) throws IOException;
-
-    CompressedStreamInput streamInput(StreamInput in) throws IOException;
-
-    CompressedStreamOutput streamOutput(StreamOutput out) throws IOException;
-
     /**
      * @deprecated Used for backward comp. since we now use Lucene compressed codec.
     */

View File

@@ -19,68 +19,36 @@
 package org.elasticsearch.common.compress;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
 import org.apache.lucene.store.IndexInput;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.MapBuilder;
+import org.elasticsearch.common.compress.deflate.DeflateCompressor;
 import org.elasticsearch.common.compress.lzf.LZFCompressor;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.jboss.netty.buffer.ChannelBuffer;
 
 import java.io.IOException;
-import java.util.List;
-import java.util.Locale;
 
 /**
  */
 public class CompressorFactory {
 
-    private static final LZFCompressor LZF = new LZFCompressor();
-
     private static final Compressor[] compressors;
-    private static final ImmutableMap<String, Compressor> compressorsByType;
-    private static Compressor defaultCompressor;
+    private static volatile Compressor defaultCompressor;
 
     static {
-        List<Compressor> compressorsX = Lists.newArrayList();
-        compressorsX.add(LZF);
-
-        compressors = compressorsX.toArray(new Compressor[compressorsX.size()]);
-        MapBuilder<String, Compressor> compressorsByTypeX = MapBuilder.newMapBuilder();
-        for (Compressor compressor : compressors) {
-            compressorsByTypeX.put(compressor.type(), compressor);
-        }
-        compressorsByType = compressorsByTypeX.immutableMap();
-
-        defaultCompressor = LZF;
+        compressors = new Compressor[] {
+                new LZFCompressor(),
+                new DeflateCompressor()
+        };
+        defaultCompressor = new DeflateCompressor();
     }
 
-    public static synchronized void configure(Settings settings) {
-        for (Compressor compressor : compressors) {
-            compressor.configure(settings);
-        }
-        String defaultType = settings.get("compress.default.type", "lzf").toLowerCase(Locale.ENGLISH);
-        boolean found = false;
-        for (Compressor compressor : compressors) {
-            if (defaultType.equalsIgnoreCase(compressor.type())) {
-                defaultCompressor = compressor;
-                found = true;
-                break;
-            }
-        }
-        if (!found) {
-            Loggers.getLogger(CompressorFactory.class).warn("failed to find default type [{}]", defaultType);
-        }
-    }
-
-    public static synchronized void setDefaultCompressor(Compressor defaultCompressor) {
+    public static void setDefaultCompressor(Compressor defaultCompressor) {
         CompressorFactory.defaultCompressor = defaultCompressor;
     }
@@ -92,14 +60,10 @@ public class CompressorFactory {
         return compressor(bytes) != null;
     }
 
-    public static boolean isCompressed(byte[] data) {
-        return compressor(data, 0, data.length) != null;
-    }
-
-    public static boolean isCompressed(byte[] data, int offset, int length) {
-        return compressor(data, offset, length) != null;
-    }
-
+    /**
+     * @deprecated we don't compress lucene indexes anymore and rely on lucene codecs
+     */
+    @Deprecated
     public static boolean isCompressed(IndexInput in) throws IOException {
         return compressor(in) != null;
     }
@@ -108,37 +72,35 @@
     public static Compressor compressor(BytesReference bytes) {
         for (Compressor compressor : compressors) {
             if (compressor.isCompressed(bytes)) {
+                // bytes should be either detected as compressed or as xcontent,
+                // if we have bytes that can be either detected as compressed or
+                // as a xcontent, we have a problem
+                assert XContentFactory.xContentType(bytes) == null;
                 return compressor;
             }
         }
+
+        XContentType contentType = XContentFactory.xContentType(bytes);
+        if (contentType == null) {
+            throw new NotXContentException("Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes");
+        }
+
         return null;
     }
 
-    @Nullable
-    public static Compressor compressor(byte[] data) {
-        return compressor(data, 0, data.length);
-    }
-
-    @Nullable
-    public static Compressor compressor(byte[] data, int offset, int length) {
-        for (Compressor compressor : compressors) {
-            if (compressor.isCompressed(data, offset, length)) {
-                return compressor;
-            }
-        }
-        return null;
-    }
-
-    @Nullable
     public static Compressor compressor(ChannelBuffer buffer) {
         for (Compressor compressor : compressors) {
             if (compressor.isCompressed(buffer)) {
                 return compressor;
             }
         }
-        return null;
+        throw new NotCompressedException();
     }
 
+    /**
+     * @deprecated we don't compress lucene indexes anymore and rely on lucene codecs
+     */
+    @Deprecated
     @Nullable
     public static Compressor compressor(IndexInput in) throws IOException {
         for (Compressor compressor : compressors) {
@@ -149,25 +111,35 @@
         return null;
     }
 
-    public static Compressor compressor(String type) {
-        return compressorsByType.get(type);
-    }
-
     /**
      * Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}.
     */
    public static BytesReference uncompressIfNeeded(BytesReference bytes) throws IOException {
        Compressor compressor = compressor(bytes);
+        BytesReference uncompressed;
        if (compressor != null) {
-            if (bytes.hasArray()) {
-                return new BytesArray(compressor.uncompress(bytes.array(), bytes.arrayOffset(), bytes.length()));
+            uncompressed = uncompress(bytes, compressor);
+        } else {
+            uncompressed = bytes;
        }
+        return uncompressed;
+    }
+
+    /** Decompress the provided {@link BytesReference}. */
+    public static BytesReference uncompress(BytesReference bytes) throws IOException {
+        Compressor compressor = compressor(bytes);
+        if (compressor == null) {
+            throw new NotCompressedException();
+        }
+        return uncompress(bytes, compressor);
+    }
+
+    private static BytesReference uncompress(BytesReference bytes, Compressor compressor) throws IOException {
        StreamInput compressed = compressor.streamInput(bytes.streamInput());
        BytesStreamOutput bStream = new BytesStreamOutput();
        Streams.copy(compressed, bStream);
        compressed.close();
        return bStream.bytes();
    }
-        return bytes;
-    }
 }
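In short, CompressorFactory.compressor(BytesReference) now only accepts bytes that are either compressed or valid xcontent: it returns null for plain xcontent, throws NotXContentException for anything else, and uncompressIfNeeded passes plain xcontent through untouched. A small sketch of that contract, using the methods shown above (the sample byte values are invented):

BytesReference json = new BytesArray("{\"foo\":\"bar\"}");
CompressorFactory.compressor(json);           // returns null: valid xcontent, not compressed
CompressorFactory.uncompressIfNeeded(json);   // returns the bytes unchanged

BytesReference garbage = new BytesArray(new byte[] { 1, 2, 3, 4 });
CompressorFactory.compressor(garbage);        // throws NotXContentException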

View File

@@ -19,7 +19,13 @@
 package org.elasticsearch.common.compress;
 
-/**
- */
-public interface CompressorContext {
+/** Exception indicating that we were expecting something compressed, which
+ * was not compressed or corrupted so that the compression format could not
+ * be detected. */
+public class NotCompressedException extends RuntimeException {
+
+    public NotCompressedException() {
+        super();
+    }
 }

View File

@@ -0,0 +1,32 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress;
import org.elasticsearch.common.xcontent.XContent;
/** Exception indicating that we were expecting some {@link XContent} but could
* not detect its type. */
public class NotXContentException extends RuntimeException {
public NotXContentException(String message) {
super(message);
}
}

View File

@@ -0,0 +1,156 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.deflate;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedIndexInput;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.jboss.netty.buffer.ChannelBuffer;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
/**
* {@link Compressor} implementation based on the DEFLATE compression algorithm.
*/
public class DeflateCompressor implements Compressor {
// An arbitrary header that we use to identify compressed streams
// It needs to be different from other compressors and to not be specific
// enough so that no stream starting with these bytes could be detected as
// a XContent
private static final byte[] HEADER = new byte[] { 'D', 'F', 'L', '\0' };
// 3 is a good trade-off between speed and compression ratio
private static final int LEVEL = 3;
// We use buffering on the input and ouput of in/def-laters in order to
// limit the number of JNI calls
private static final int BUFFER_SIZE = 4096;
@Override
public boolean isCompressed(BytesReference bytes) {
if (bytes.length() < HEADER.length) {
return false;
}
for (int i = 0; i < HEADER.length; ++i) {
if (bytes.get(i) != HEADER[i]) {
return false;
}
}
return true;
}
@Override
public boolean isCompressed(ChannelBuffer buffer) {
if (buffer.readableBytes() < HEADER.length) {
return false;
}
final int offset = buffer.readerIndex();
for (int i = 0; i < HEADER.length; ++i) {
if (buffer.getByte(offset + i) != HEADER[i]) {
return false;
}
}
return true;
}
@Override
public StreamInput streamInput(StreamInput in) throws IOException {
final byte[] headerBytes = new byte[HEADER.length];
int len = 0;
while (len < headerBytes.length) {
final int read = in.read(headerBytes, len, headerBytes.length - len);
if (read == -1) {
break;
}
len += read;
}
if (len != HEADER.length || Arrays.equals(headerBytes, HEADER) == false) {
throw new IllegalArgumentException("Input stream is not compressed with DEFLATE!");
}
final boolean nowrap = true;
final Inflater inflater = new Inflater(nowrap);
InputStream decompressedIn = new InflaterInputStream(in, inflater, BUFFER_SIZE);
decompressedIn = new BufferedInputStream(decompressedIn, BUFFER_SIZE);
return new InputStreamStreamInput(decompressedIn) {
private boolean closed = false;
public void close() throws IOException {
try {
super.close();
} finally {
if (closed == false) {
// important to release native memory
inflater.end();
closed = true;
}
}
}
};
}
@Override
public StreamOutput streamOutput(StreamOutput out) throws IOException {
out.writeBytes(HEADER);
final boolean nowrap = true;
final Deflater deflater = new Deflater(LEVEL, nowrap);
final boolean syncFlush = true;
OutputStream compressedOut = new DeflaterOutputStream(out, deflater, BUFFER_SIZE, syncFlush);
compressedOut = new BufferedOutputStream(compressedOut, BUFFER_SIZE);
return new OutputStreamStreamOutput(compressedOut) {
private boolean closed = false;
public void close() throws IOException {
try {
super.close();
} finally {
if (closed == false) {
// important to release native memory
deflater.end();
closed = true;
}
}
}
};
}
@Override
public boolean isCompressed(IndexInput in) throws IOException {
return false;
}
@Override
public CompressedIndexInput indexInput(IndexInput in) throws IOException {
throw new UnsupportedOperationException();
}
}
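A hedged sketch of round-tripping bytes through the new stream-based API of DeflateCompressor, using only calls that appear in this commit (the sample payload string is made up); note that closing the wrapping stream flushes the deflater and releases its native memory, which is why the try-with-resources matters:

import org.elasticsearch.common.compress.deflate.DeflateCompressor;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

public class DeflateRoundTrip {
    public static void main(String[] args) throws Exception {
        DeflateCompressor compressor = new DeflateCompressor();

        // Compress: write through the wrapping StreamOutput, then close to flush.
        BytesStreamOutput out = new BytesStreamOutput();
        try (StreamOutput compressed = compressor.streamOutput(out)) {
            compressed.writeBytes("hello deflate".getBytes("UTF-8"));
        }

        // The DFL header written by streamOutput() lets isCompressed() detect the format.
        System.out.println(compressor.isCompressed(out.bytes())); // true

        // Decompress: wrap the stored bytes back into a StreamInput.
        try (StreamInput in = compressor.streamInput(out.bytes().streamInput())) {
            byte[] restored = new byte[13];
            in.readBytes(restored, 0, restored.length);
            System.out.println(new String(restored, "UTF-8"));    // hello deflate
        }
    }
}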

View File

@@ -32,14 +32,14 @@ import java.util.Arrays;
 /**
  */
 @Deprecated
-public class LZFCompressedIndexInput extends CompressedIndexInput<LZFCompressorContext> {
+public class LZFCompressedIndexInput extends CompressedIndexInput {
 
     private final ChunkDecoder decoder;
     // scratch area buffer
     private byte[] inputBuffer;
 
     public LZFCompressedIndexInput(IndexInput in, ChunkDecoder decoder) throws IOException {
-        super(in, LZFCompressorContext.INSTANCE);
+        super(in);
         this.decoder = decoder;
         this.uncompressed = new byte[LZFChunk.MAX_CHUNK_LEN];

View File

@ -29,7 +29,7 @@ import java.io.IOException;
/** /**
*/ */
public class LZFCompressedStreamInput extends CompressedStreamInput<LZFCompressorContext> { public class LZFCompressedStreamInput extends CompressedStreamInput {
private final BufferRecycler recycler; private final BufferRecycler recycler;
@ -39,7 +39,7 @@ public class LZFCompressedStreamInput extends CompressedStreamInput<LZFCompresso
private byte[] inputBuffer; private byte[] inputBuffer;
public LZFCompressedStreamInput(StreamInput in, ChunkDecoder decoder) throws IOException { public LZFCompressedStreamInput(StreamInput in, ChunkDecoder decoder) throws IOException {
super(in, LZFCompressorContext.INSTANCE); super(in);
this.recycler = BufferRecycler.instance(); this.recycler = BufferRecycler.instance();
this.decoder = decoder; this.decoder = decoder;

View File

@ -21,30 +21,27 @@ package org.elasticsearch.common.compress.lzf;
import com.ning.compress.lzf.ChunkDecoder; import com.ning.compress.lzf.ChunkDecoder;
import com.ning.compress.lzf.LZFChunk; import com.ning.compress.lzf.LZFChunk;
import com.ning.compress.lzf.LZFEncoder;
import com.ning.compress.lzf.util.ChunkDecoderFactory; import com.ning.compress.lzf.util.ChunkDecoderFactory;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedIndexInput; import org.elasticsearch.common.compress.CompressedIndexInput;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.CompressedStreamOutput;
import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.deflate.DeflateCompressor;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffer;
import java.io.IOException; import java.io.IOException;
/** /**
* @deprecated Use {@link DeflateCompressor} instead
*/ */
@Deprecated
public class LZFCompressor implements Compressor { public class LZFCompressor implements Compressor {
static final byte[] LUCENE_HEADER = {'L', 'Z', 'F', 0}; static final byte[] LUCENE_HEADER = {'L', 'Z', 'F', 0};
public static final String TYPE = "lzf";
private ChunkDecoder decoder; private ChunkDecoder decoder;
public LZFCompressor() { public LZFCompressor() {
@ -53,14 +50,6 @@ public class LZFCompressor implements Compressor {
this.decoder.getClass().getSimpleName()); this.decoder.getClass().getSimpleName());
} }
@Override
public String type() {
return TYPE;
}
@Override
public void configure(Settings settings) {}
@Override @Override
public boolean isCompressed(BytesReference bytes) { public boolean isCompressed(BytesReference bytes) {
return bytes.length() >= 3 && return bytes.length() >= 3 &&
@ -69,14 +58,6 @@ public class LZFCompressor implements Compressor {
(bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED); (bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED);
} }
@Override
public boolean isCompressed(byte[] data, int offset, int length) {
return length >= 3 &&
data[offset] == LZFChunk.BYTE_Z &&
data[offset + 1] == LZFChunk.BYTE_V &&
(data[offset + 2] == LZFChunk.BLOCK_TYPE_COMPRESSED || data[offset + 2] == LZFChunk.BLOCK_TYPE_NON_COMPRESSED);
}
@Override @Override
public boolean isCompressed(ChannelBuffer buffer) { public boolean isCompressed(ChannelBuffer buffer) {
int offset = buffer.readerIndex(); int offset = buffer.readerIndex();
@ -104,23 +85,13 @@ public class LZFCompressor implements Compressor {
} }
@Override @Override
public byte[] uncompress(byte[] data, int offset, int length) throws IOException { public StreamInput streamInput(StreamInput in) throws IOException {
return decoder.decode(data, offset, length);
}
@Override
public byte[] compress(byte[] data, int offset, int length) throws IOException {
return LZFEncoder.safeEncode(data, offset, length);
}
@Override
public CompressedStreamInput streamInput(StreamInput in) throws IOException {
return new LZFCompressedStreamInput(in, decoder); return new LZFCompressedStreamInput(in, decoder);
} }
@Override @Override
public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException { public StreamOutput streamOutput(StreamOutput out) throws IOException {
return new LZFCompressedStreamOutput(out); throw new UnsupportedOperationException("LZF is only here for back compat, no write support");
} }
@Override @Override

View File

@ -59,6 +59,16 @@ public class InputStreamStreamInput extends StreamInput {
is.reset(); is.reset();
} }
@Override
public boolean markSupported() {
return is.markSupported();
}
@Override
public void mark(int readlimit) {
is.mark(readlimit);
}
@Override @Override
public void close() throws IOException { public void close() throws IOException {
is.close(); is.close();

View File

@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent;
import com.fasterxml.jackson.dataformat.cbor.CBORConstants; import com.fasterxml.jackson.dataformat.cbor.CBORConstants;
import com.fasterxml.jackson.dataformat.smile.SmileConstants; import com.fasterxml.jackson.dataformat.smile.SmileConstants;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
@ -163,6 +164,9 @@ public class XContentFactory {
if (c == '{') { if (c == '{') {
return XContentType.JSON; return XContentType.JSON;
} }
if (Character.isWhitespace(c) == false) {
break;
}
} }
return null; return null;
} }
@ -204,9 +208,14 @@ public class XContentFactory {
} }
/** /**
* Guesses the content type based on the provided input stream. * Guesses the content type based on the provided input stream without consuming it.
*/ */
public static XContentType xContentType(InputStream si) throws IOException { public static XContentType xContentType(InputStream si) throws IOException {
if (si.markSupported() == false) {
throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass());
}
si.mark(GUESS_HEADER_LENGTH);
try {
final int firstInt = si.read(); // this must be an int since we need to respect the method contract final int firstInt = si.read(); // this must be an int since we need to respect the method contract
if (firstInt == -1) { if (firstInt == -1) {
return null; return null;
@ -261,8 +270,14 @@ public class XContentFactory {
if (val == '{') { if (val == '{') {
return XContentType.JSON; return XContentType.JSON;
} }
if (Character.isWhitespace(val) == false) {
break;
}
} }
return null; return null;
} finally {
si.reset();
}
} }
/** /**
@ -284,7 +299,7 @@ public class XContentFactory {
* Guesses the content type based on the provided bytes. * Guesses the content type based on the provided bytes.
*/ */
public static XContentType xContentType(BytesReference bytes) { public static XContentType xContentType(BytesReference bytes) {
int length = bytes.length() < GUESS_HEADER_LENGTH ? bytes.length() : GUESS_HEADER_LENGTH; int length = bytes.length();
if (length == 0) { if (length == 0) {
return null; return null;
} }
@ -316,9 +331,13 @@ public class XContentFactory {
// a last chance for JSON // a last chance for JSON
for (int i = 0; i < length; i++) { for (int i = 0; i < length; i++) {
if (bytes.get(i) == '{') { byte b = bytes.get(i);
if (b == '{') {
return XContentType.JSON; return XContentType.JSON;
} }
if (Character.isWhitespace(b) == false) {
break;
}
} }
return null; return null;
} }
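
A practical consequence of the new guard in xContentType(InputStream) is that callers must supply a stream with mark/reset support, or wrap it first. Below is a hypothetical helper (the class and method names are illustrative, not part of the commit) mirroring the pattern this change applies in XContentHelper and SourceFieldMapper:

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

class XContentTypeSniffer {
    // Wrap streams that cannot mark/reset so the header peek can be undone.
    static XContentType guess(InputStream in) throws IOException {
        if (in.markSupported() == false) {
            in = new BufferedInputStream(in);
        }
        // marks the stream, reads at most GUESS_HEADER_LENGTH bytes, then resets it
        return XContentFactory.xContentType(in);
    }
}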

View File

@ -28,14 +28,14 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContent.Params;
import java.io.BufferedInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -49,45 +49,30 @@ import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;
public class XContentHelper { public class XContentHelper {
public static XContentParser createParser(BytesReference bytes) throws IOException { public static XContentParser createParser(BytesReference bytes) throws IOException {
if (bytes.hasArray()) {
return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
}
Compressor compressor = CompressorFactory.compressor(bytes); Compressor compressor = CompressorFactory.compressor(bytes);
if (compressor != null) { if (compressor != null) {
CompressedStreamInput compressedInput = compressor.streamInput(bytes.streamInput()); InputStream compressedInput = compressor.streamInput(bytes.streamInput());
if (compressedInput.markSupported() == false) {
compressedInput = new BufferedInputStream(compressedInput);
}
XContentType contentType = XContentFactory.xContentType(compressedInput); XContentType contentType = XContentFactory.xContentType(compressedInput);
compressedInput.resetToBufferStart();
return XContentFactory.xContent(contentType).createParser(compressedInput); return XContentFactory.xContent(contentType).createParser(compressedInput);
} else { } else {
return XContentFactory.xContent(bytes).createParser(bytes.streamInput()); return XContentFactory.xContent(bytes).createParser(bytes.streamInput());
} }
} }
public static XContentParser createParser(byte[] data, int offset, int length) throws IOException {
Compressor compressor = CompressorFactory.compressor(data, offset, length);
if (compressor != null) {
CompressedStreamInput compressedInput = compressor.streamInput(StreamInput.wrap(data, offset, length));
XContentType contentType = XContentFactory.xContentType(compressedInput);
compressedInput.resetToBufferStart();
return XContentFactory.xContent(contentType).createParser(compressedInput);
} else {
return XContentFactory.xContent(data, offset, length).createParser(data, offset, length);
}
}
public static Tuple<XContentType, Map<String, Object>> convertToMap(BytesReference bytes, boolean ordered) throws ElasticsearchParseException { public static Tuple<XContentType, Map<String, Object>> convertToMap(BytesReference bytes, boolean ordered) throws ElasticsearchParseException {
if (bytes.hasArray()) {
return convertToMap(bytes.array(), bytes.arrayOffset(), bytes.length(), ordered);
}
try { try {
XContentParser parser; XContentParser parser;
XContentType contentType; XContentType contentType;
Compressor compressor = CompressorFactory.compressor(bytes); Compressor compressor = CompressorFactory.compressor(bytes);
if (compressor != null) { if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(bytes.streamInput()); InputStream compressedStreamInput = compressor.streamInput(bytes.streamInput());
if (compressedStreamInput.markSupported() == false) {
compressedStreamInput = new BufferedInputStream(compressedStreamInput);
}
contentType = XContentFactory.xContentType(compressedStreamInput); contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput); parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);
} else { } else {
contentType = XContentFactory.xContentType(bytes); contentType = XContentFactory.xContentType(bytes);
@ -103,34 +88,6 @@ public class XContentHelper {
} }
} }
public static Tuple<XContentType, Map<String, Object>> convertToMap(byte[] data, boolean ordered) throws ElasticsearchParseException {
return convertToMap(data, 0, data.length, ordered);
}
public static Tuple<XContentType, Map<String, Object>> convertToMap(byte[] data, int offset, int length, boolean ordered) throws ElasticsearchParseException {
try {
XContentParser parser;
XContentType contentType;
Compressor compressor = CompressorFactory.compressor(data, offset, length);
if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(StreamInput.wrap(data, offset, length));
contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);
} else {
contentType = XContentFactory.xContentType(data, offset, length);
parser = XContentFactory.xContent(contentType).createParser(data, offset, length);
}
if (ordered) {
return Tuple.tuple(contentType, parser.mapOrderedAndClose());
} else {
return Tuple.tuple(contentType, parser.mapAndClose());
}
} catch (IOException e) {
throw new ElasticsearchParseException("Failed to parse content to map", e);
}
}
public static String convertToJson(BytesReference bytes, boolean reformatJson) throws IOException { public static String convertToJson(BytesReference bytes, boolean reformatJson) throws IOException {
return convertToJson(bytes, reformatJson, false); return convertToJson(bytes, reformatJson, false);
} }
@ -426,9 +383,11 @@ public class XContentHelper {
public static void writeDirect(BytesReference source, XContentBuilder rawBuilder, ToXContent.Params params) throws IOException { public static void writeDirect(BytesReference source, XContentBuilder rawBuilder, ToXContent.Params params) throws IOException {
Compressor compressor = CompressorFactory.compressor(source); Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) { if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
if (compressedStreamInput.markSupported() == false) {
compressedStreamInput = new BufferedInputStream(compressedStreamInput);
}
XContentType contentType = XContentFactory.xContentType(compressedStreamInput); XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
if (contentType == rawBuilder.contentType()) { if (contentType == rawBuilder.contentType()) {
Streams.copy(compressedStreamInput, rawBuilder.stream()); Streams.copy(compressedStreamInput, rawBuilder.stream());
} else { } else {
@ -457,9 +416,11 @@ public class XContentHelper {
public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException { public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException {
Compressor compressor = CompressorFactory.compressor(source); Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) { if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
if (compressedStreamInput.markSupported() == false) {
compressedStreamInput = new BufferedInputStream(compressedStreamInput);
}
XContentType contentType = XContentFactory.xContentType(compressedStreamInput); XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
if (contentType == builder.contentType()) { if (contentType == builder.contentType()) {
builder.rawField(field, compressedStreamInput); builder.rawField(field, compressedStreamInput);
} else { } else {
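
With the byte[] overloads of createParser and convertToMap removed, callers that still hold a raw array wrap it in a BytesArray, as the MetaDataStateFormat and BlobStoreRepository hunks below do. A short migration sketch, with hypothetical variable names:

// data is any raw byte[] (e.g. read from a blob or a state file)
XContentParser parser = XContentHelper.createParser(new BytesArray(data));
Tuple<XContentType, Map<String, Object>> mapAndType =
        XContentHelper.convertToMap(new BytesArray(data), true);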

View File

@ -227,21 +227,21 @@ public class PublishClusterStateAction extends AbstractComponent {
public static BytesReference serializeFullClusterState(ClusterState clusterState, Version nodeVersion) throws IOException { public static BytesReference serializeFullClusterState(ClusterState clusterState, Version nodeVersion) throws IOException {
BytesStreamOutput bStream = new BytesStreamOutput(); BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream); try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) {
stream.setVersion(nodeVersion); stream.setVersion(nodeVersion);
stream.writeBoolean(true); stream.writeBoolean(true);
clusterState.writeTo(stream); clusterState.writeTo(stream);
stream.close(); }
return bStream.bytes(); return bStream.bytes();
} }
public static BytesReference serializeDiffClusterState(Diff diff, Version nodeVersion) throws IOException { public static BytesReference serializeDiffClusterState(Diff diff, Version nodeVersion) throws IOException {
BytesStreamOutput bStream = new BytesStreamOutput(); BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream); try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) {
stream.setVersion(nodeVersion); stream.setVersion(nodeVersion);
stream.writeBoolean(false); stream.writeBoolean(false);
diff.writeTo(stream); diff.writeTo(stream);
stream.close(); }
return bStream.bytes(); return bStream.bytes();
} }

View File

@ -21,16 +21,26 @@ package org.elasticsearch.gateway;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.collect.Collections2; import com.google.common.collect.Collections2;
import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.store.*; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.OutputStreamIndexOutput;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.xcontent.*; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
@ -280,7 +290,7 @@ public abstract class MetaDataStateFormat<T> {
logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
continue; continue;
} }
parser = XContentHelper.createParser(data, 0, data.length); parser = XContentHelper.createParser(new BytesArray(data));
state = fromXContent(parser); state = fromXContent(parser);
if (state == null) { if (state == null) {
logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());

View File

@ -21,7 +21,7 @@ package org.elasticsearch.index.aliases;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
/** /**
* *
@ -30,11 +30,11 @@ public class IndexAlias {
private final String alias; private final String alias;
private final CompressedString filter; private final CompressedXContent filter;
private final Query parsedFilter; private final Query parsedFilter;
public IndexAlias(String alias, @Nullable CompressedString filter, @Nullable Query parsedFilter) { public IndexAlias(String alias, @Nullable CompressedXContent filter, @Nullable Query parsedFilter) {
this.alias = alias; this.alias = alias;
this.filter = filter; this.filter = filter;
this.parsedFilter = parsedFilter; this.parsedFilter = parsedFilter;
@ -45,7 +45,7 @@ public class IndexAlias {
} }
@Nullable @Nullable
public CompressedString filter() { public CompressedXContent filter() {
return filter; return filter;
} }

View File

@ -23,7 +23,7 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
@ -63,11 +63,11 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera
return aliases.get(alias); return aliases.get(alias);
} }
public IndexAlias create(String alias, @Nullable CompressedString filter) { public IndexAlias create(String alias, @Nullable CompressedXContent filter) {
return new IndexAlias(alias, filter, parse(alias, filter)); return new IndexAlias(alias, filter, parse(alias, filter));
} }
public void add(String alias, @Nullable CompressedString filter) { public void add(String alias, @Nullable CompressedXContent filter) {
add(new IndexAlias(alias, filter, parse(alias, filter))); add(new IndexAlias(alias, filter, parse(alias, filter)));
} }
@ -120,7 +120,7 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera
aliases.remove(alias); aliases.remove(alias);
} }
private Query parse(String alias, CompressedString filter) { private Query parse(String alias, CompressedXContent filter) {
if (filter == null) { if (filter == null) {
return null; return null;
} }

View File

@ -35,7 +35,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -150,7 +150,7 @@ public class DocumentMapper implements ToXContent {
private final String type; private final String type;
private final StringAndBytesText typeText; private final StringAndBytesText typeText;
private volatile CompressedString mappingSource; private volatile CompressedXContent mappingSource;
private final Mapping mapping; private final Mapping mapping;
@ -235,7 +235,7 @@ public class DocumentMapper implements ToXContent {
return mapping.meta; return mapping.meta;
} }
public CompressedString mappingSource() { public CompressedXContent mappingSource() {
return this.mappingSource; return this.mappingSource;
} }
@ -468,12 +468,12 @@ public class DocumentMapper implements ToXContent {
private void refreshSource() throws ElasticsearchGenerationException { private void refreshSource() throws ElasticsearchGenerationException {
try { try {
BytesStreamOutput bStream = new BytesStreamOutput(); BytesStreamOutput bStream = new BytesStreamOutput();
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream)); try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream))) {
builder.startObject(); builder.startObject();
toXContent(builder, ToXContent.EMPTY_PARAMS); toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject(); builder.endObject();
builder.close(); }
mappingSource = new CompressedString(bStream.bytes()); mappingSource = new CompressedXContent(bStream.bytes());
} catch (Exception e) { } catch (Exception e) {
throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e); throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
} }

View File

@ -27,7 +27,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
@ -194,15 +194,15 @@ public class DocumentMapperParser extends AbstractIndexComponent {
return parse(type, mapping, defaultSource); return parse(type, mapping, defaultSource);
} }
public DocumentMapper parseCompressed(@Nullable String type, CompressedString source) throws MapperParsingException { public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source) throws MapperParsingException {
return parseCompressed(type, source, null); return parseCompressed(type, source, null);
} }
@SuppressWarnings({"unchecked"}) @SuppressWarnings({"unchecked"})
public DocumentMapper parseCompressed(@Nullable String type, CompressedString source, String defaultSource) throws MapperParsingException { public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException {
Map<String, Object> mapping = null; Map<String, Object> mapping = null;
if (source != null) { if (source != null) {
Map<String, Object> root = XContentHelper.convertToMap(source.compressed(), true).v2(); Map<String, Object> root = XContentHelper.convertToMap(source.compressedReference(), true).v2();
Tuple<String, Map<String, Object>> t = extractMapping(type, root); Tuple<String, Map<String, Object>> t = extractMapping(type, root);
type = t.v1(); type = t.v1();
mapping = t.v2(); mapping = t.v2();

View File

@ -43,7 +43,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.regex.Regex;
@ -214,7 +214,7 @@ public class MapperService extends AbstractIndexComponent {
typeListeners.remove(listener); typeListeners.remove(listener);
} }
public DocumentMapper merge(String type, CompressedString mappingSource, boolean applyDefault) { public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault) {
if (DEFAULT_MAPPING.equals(type)) { if (DEFAULT_MAPPING.equals(type)) {
// verify we can parse it // verify we can parse it
DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource); DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource);
@ -293,7 +293,7 @@ public class MapperService extends AbstractIndexComponent {
private boolean assertSerialization(DocumentMapper mapper) { private boolean assertSerialization(DocumentMapper mapper) {
// capture the source now, it may change due to concurrent parsing // capture the source now, it may change due to concurrent parsing
final CompressedString mappingSource = mapper.mappingSource(); final CompressedXContent mappingSource = mapper.mappingSource();
DocumentMapper newMapper = parse(mapper.type(), mappingSource, false); DocumentMapper newMapper = parse(mapper.type(), mappingSource, false);
if (newMapper.mappingSource().equals(mappingSource) == false) { if (newMapper.mappingSource().equals(mappingSource) == false) {
@ -328,7 +328,7 @@ public class MapperService extends AbstractIndexComponent {
this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers); this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers);
} }
public DocumentMapper parse(String mappingType, CompressedString mappingSource, boolean applyDefault) throws MapperParsingException { public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {
String defaultMappingSource; String defaultMappingSource;
if (PercolatorService.TYPE_NAME.equals(mappingType)) { if (PercolatorService.TYPE_NAME.equals(mappingType)) {
defaultMappingSource = this.defaultPercolatorMappingSource; defaultMappingSource = this.defaultPercolatorMappingSource;

View File

@ -36,6 +36,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.NotXContentException;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -144,10 +145,18 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
} }
try { try {
if (indexCreatedBefore2x) { if (indexCreatedBefore2x) {
try {
return CompressorFactory.uncompressIfNeeded(bytes); return CompressorFactory.uncompressIfNeeded(bytes);
} else { } catch (NotXContentException e) {
return bytes; // NOTE: previous versions of Elasticsearch used to try to detect if
// data was compressed. However this could cause decompression failures
// as a user may have submitted arbitrary data which looks like it is
// compressed to elasticsearch but is not. So we removed the ability to
// compress binary fields and keep this empty catch block for backward
// compatibility with 1.x
} }
}
return bytes;
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e); throw new ElasticsearchParseException("failed to decompress source", e);
} }
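
To make the note above concrete: binary field values are opaque user data, so nothing stops them from starting with a compressor header. A hypothetical value that the LZF isCompressed check shown earlier would have matched:

// Starts with LZFChunk.BYTE_Z, BYTE_V and a "compressed block" marker, so older
// versions detected it as LZF and tried to decode it, which could fail; indices
// created on 2.x and later skip the detection and return the bytes verbatim.
byte[] userPayload = new byte[] { 'Z', 'V', 0x01, 0x12, 0x34, 0x56 };
BytesReference value = new BytesArray(userPayload);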

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper.internal; package org.elasticsearch.index.mapper.internal;
import com.google.common.base.Objects; import com.google.common.base.Objects;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StoredField;
@ -31,7 +32,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -53,7 +53,9 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import java.io.BufferedInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
@ -324,9 +326,11 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
// see if we need to convert the content type // see if we need to convert the content type
Compressor compressor = CompressorFactory.compressor(source); Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) { if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput()); InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
if (compressedStreamInput.markSupported() == false) {
compressedStreamInput = new BufferedInputStream(compressedStreamInput);
}
XContentType contentType = XContentFactory.xContentType(compressedStreamInput); XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
if (contentType != formatContentType) { if (contentType != formatContentType) {
// we need to reread and store back, compressed.... // we need to reread and store back, compressed....
BytesStreamOutput bStream = new BytesStreamOutput(); BytesStreamOutput bStream = new BytesStreamOutput();

View File

@ -41,7 +41,7 @@ import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
@ -369,7 +369,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
for (ObjectCursor<MappingMetaData> cursor : indexMetaData.mappings().values()) { for (ObjectCursor<MappingMetaData> cursor : indexMetaData.mappings().values()) {
MappingMetaData mappingMd = cursor.value; MappingMetaData mappingMd = cursor.value;
String mappingType = mappingMd.type(); String mappingType = mappingMd.type();
CompressedString mappingSource = mappingMd.source(); CompressedXContent mappingSource = mappingMd.source();
if (mappingType.equals(MapperService.DEFAULT_MAPPING)) { // we processed _default_ first if (mappingType.equals(MapperService.DEFAULT_MAPPING)) { // we processed _default_ first
continue; continue;
} }
@ -396,7 +396,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
} }
} }
private boolean processMapping(String index, MapperService mapperService, String mappingType, CompressedString mappingSource) throws Throwable { private boolean processMapping(String index, MapperService mapperService, String mappingType, CompressedXContent mappingSource) throws Throwable {
if (!seenMappings.containsKey(new Tuple<>(index, mappingType))) { if (!seenMappings.containsKey(new Tuple<>(index, mappingType))) {
seenMappings.put(new Tuple<>(index, mappingType), true); seenMappings.put(new Tuple<>(index, mappingType), true);
} }
@ -484,7 +484,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
for (ObjectCursor<AliasMetaData> cursor : aliases) { for (ObjectCursor<AliasMetaData> cursor : aliases) {
AliasMetaData aliasMd = cursor.value; AliasMetaData aliasMd = cursor.value;
String alias = aliasMd.alias(); String alias = aliasMd.alias();
CompressedString filter = aliasMd.filter(); CompressedXContent filter = aliasMd.filter();
try { try {
if (!indexAliasesService.hasAlias(alias)) { if (!indexAliasesService.hasAlias(alias)) {
if (logger.isDebugEnabled()) { if (logger.isDebugEnabled()) {

View File

@ -36,7 +36,6 @@ import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
@ -151,7 +150,6 @@ public class Node implements Releasable {
// create the environment based on the finalized (processed) view of the settings // create the environment based on the finalized (processed) view of the settings
this.environment = new Environment(this.settings()); this.environment = new Environment(this.settings());
CompressorFactory.configure(settings);
final NodeEnvironment nodeEnvironment; final NodeEnvironment nodeEnvironment;
try { try {
nodeEnvironment = new NodeEnvironment(this.settings, this.environment); nodeEnvironment = new NodeEnvironment(this.settings, this.environment);

View File

@ -24,27 +24,35 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams; import com.google.common.io.ByteStreams;
import org.apache.lucene.store.RateLimiter; import org.apache.lucene.store.RateLimiter;
import org.elasticsearch.ElasticsearchException; import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.NotXContentException;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.*; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.shard.IndexShardException; import org.elasticsearch.index.shard.IndexShardException;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardRepository;
@ -54,14 +62,21 @@ import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.RepositoryException;
import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.RepositorySettings;
import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.RepositoryVerificationException;
import org.elasticsearch.snapshots.*; import org.elasticsearch.snapshots.InvalidSnapshotNameException;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotCreationException;
import org.elasticsearch.snapshots.SnapshotException;
import org.elasticsearch.snapshots.SnapshotMissingException;
import org.elasticsearch.snapshots.SnapshotShardFailure;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.nio.file.NoSuchFileException; import java.nio.file.NoSuchFileException;
import java.util.*; import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Lists.newArrayList;
@ -229,19 +244,15 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
} }
// Write Global MetaData // Write Global MetaData
// TODO: Check if metadata needs to be written // TODO: Check if metadata needs to be written
try (OutputStream output = snapshotsBlobContainer.createOutput(metaDataBlobName(snapshotId))) { try (StreamOutput output = compressIfNeeded(snapshotsBlobContainer.createOutput(metaDataBlobName(snapshotId)))) {
writeGlobalMetaData(metaData, output); writeGlobalMetaData(metaData, output);
} }
for (String index : indices) { for (String index : indices) {
final IndexMetaData indexMetaData = metaData.index(index); final IndexMetaData indexMetaData = metaData.index(index);
final BlobPath indexPath = basePath().add("indices").add(index); final BlobPath indexPath = basePath().add("indices").add(index);
final BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath); final BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath);
try (OutputStream output = indexMetaDataBlobContainer.createOutput(snapshotBlobName(snapshotId))) { try (StreamOutput output = compressIfNeeded(indexMetaDataBlobContainer.createOutput(snapshotBlobName(snapshotId)))) {
StreamOutput stream = new OutputStreamStreamOutput(output); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, output);
if (isCompress()) {
stream = CompressorFactory.defaultCompressor().streamOutput(stream);
}
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, stream);
builder.startObject(); builder.startObject();
IndexMetaData.Builder.toXContent(indexMetaData, builder, ToXContent.EMPTY_PARAMS); IndexMetaData.Builder.toXContent(indexMetaData, builder, ToXContent.EMPTY_PARAMS);
builder.endObject(); builder.endObject();
@ -317,6 +328,22 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
} }
} }
private StreamOutput compressIfNeeded(OutputStream output) throws IOException {
StreamOutput out = null;
boolean success = false;
try {
out = new OutputStreamStreamOutput(output);
if (isCompress()) {
out = CompressorFactory.defaultCompressor().streamOutput(out);
}
success = true;
return out;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(out, output);
}
}
}
/** /**
* {@inheritDoc} * {@inheritDoc}
@ -327,7 +354,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
String tempBlobName = tempSnapshotBlobName(snapshotId); String tempBlobName = tempSnapshotBlobName(snapshotId);
String blobName = snapshotBlobName(snapshotId); String blobName = snapshotBlobName(snapshotId);
Snapshot blobStoreSnapshot = new Snapshot(snapshotId.getSnapshot(), indices, startTime, failure, System.currentTimeMillis(), totalShards, shardFailures); Snapshot blobStoreSnapshot = new Snapshot(snapshotId.getSnapshot(), indices, startTime, failure, System.currentTimeMillis(), totalShards, shardFailures);
try (OutputStream output = snapshotsBlobContainer.createOutput(tempBlobName)) { try (StreamOutput output = compressIfNeeded(snapshotsBlobContainer.createOutput(tempBlobName))) {
writeSnapshot(blobStoreSnapshot, output); writeSnapshot(blobStoreSnapshot, output);
} }
snapshotsBlobContainer.move(tempBlobName, blobName); snapshotsBlobContainer.move(tempBlobName, blobName);
@ -386,7 +413,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
} }
} catch (FileNotFoundException | NoSuchFileException ex) { } catch (FileNotFoundException | NoSuchFileException ex) {
throw new SnapshotMissingException(snapshotId, ex); throw new SnapshotMissingException(snapshotId, ex);
} catch (IOException ex) { } catch (IOException | NotXContentException ex) {
throw new SnapshotException(snapshotId, "failed to get snapshots", ex); throw new SnapshotException(snapshotId, "failed to get snapshots", ex);
} }
} }
@ -407,7 +434,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath); BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath);
try (InputStream blob = indexMetaDataBlobContainer.openInput(snapshotBlobName(snapshotId))) { try (InputStream blob = indexMetaDataBlobContainer.openInput(snapshotBlobName(snapshotId))) {
byte[] data = ByteStreams.toByteArray(blob); byte[] data = ByteStreams.toByteArray(blob);
try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) { try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
XContentParser.Token token; XContentParser.Token token;
if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) { if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) {
IndexMetaData indexMetaData = IndexMetaData.Builder.fromXContent(parser); IndexMetaData indexMetaData = IndexMetaData.Builder.fromXContent(parser);
@ -459,7 +486,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
* @throws IOException parse exceptions * @throws IOException parse exceptions
*/ */
public Snapshot readSnapshot(byte[] data) throws IOException { public Snapshot readSnapshot(byte[] data) throws IOException {
try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) { try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
XContentParser.Token token; XContentParser.Token token;
if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) { if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) {
if ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) { if ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) {
@ -484,7 +511,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
* @throws IOException parse exceptions * @throws IOException parse exceptions
*/ */
private MetaData readMetaData(byte[] data) throws IOException { private MetaData readMetaData(byte[] data) throws IOException {
try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) { try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
XContentParser.Token token; XContentParser.Token token;
if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) { if ((token = parser.nextToken()) == XContentParser.Token.START_OBJECT) {
if ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) { if ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) {
@ -536,12 +563,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
* @return BytesStreamOutput representing JSON serialized Snapshot * @return BytesStreamOutput representing JSON serialized Snapshot
* @throws IOException * @throws IOException
*/ */
private void writeSnapshot(Snapshot snapshot, OutputStream outputStream) throws IOException { private void writeSnapshot(Snapshot snapshot, StreamOutput outputStream) throws IOException {
StreamOutput stream = new OutputStreamStreamOutput(outputStream); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, outputStream);
if (isCompress()) {
stream = CompressorFactory.defaultCompressor().streamOutput(stream);
}
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, stream);
builder.startObject(); builder.startObject();
snapshot.toXContent(builder, snapshotOnlyFormatParams); snapshot.toXContent(builder, snapshotOnlyFormatParams);
builder.endObject(); builder.endObject();
@ -555,12 +578,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
* @return BytesStreamOutput representing JSON serialized global MetaData * @return BytesStreamOutput representing JSON serialized global MetaData
* @throws IOException * @throws IOException
*/ */
private void writeGlobalMetaData(MetaData metaData, OutputStream outputStream) throws IOException { private void writeGlobalMetaData(MetaData metaData, StreamOutput outputStream) throws IOException {
StreamOutput stream = new OutputStreamStreamOutput(outputStream); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, outputStream);
if (isCompress()) {
stream = CompressorFactory.defaultCompressor().streamOutput(stream);
}
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, stream);
builder.startObject(); builder.startObject();
MetaData.Builder.toXContent(metaData, builder, snapshotOnlyFormatParams); MetaData.Builder.toXContent(metaData, builder, snapshotOnlyFormatParams);
builder.endObject(); builder.endObject();
@ -608,7 +627,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
try (InputStream blob = snapshotsBlobContainer.openInput(SNAPSHOTS_FILE)) { try (InputStream blob = snapshotsBlobContainer.openInput(SNAPSHOTS_FILE)) {
final byte[] data = ByteStreams.toByteArray(blob); final byte[] data = ByteStreams.toByteArray(blob);
ArrayList<SnapshotId> snapshots = new ArrayList<>(); ArrayList<SnapshotId> snapshots = new ArrayList<>();
try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) { try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
if (parser.nextToken() == XContentParser.Token.START_OBJECT) { if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { if (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
String currentFieldName = parser.currentName(); String currentFieldName = parser.currentName();

View File

@ -91,14 +91,6 @@ public class SourceLookup implements Map {
return sourceAsMapAndType(source).v2(); return sourceAsMapAndType(source).v2();
} }
public static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(byte[] bytes, int offset, int length) throws ElasticsearchParseException {
return XContentHelper.convertToMap(bytes, offset, length, false);
}
public static Map<String, Object> sourceAsMap(byte[] bytes, int offset, int length) throws ElasticsearchParseException {
return sourceAsMapAndType(bytes, offset, length).v2();
}
public void setSegmentAndDocument(LeafReaderContext context, int docId) { public void setSegmentAndDocument(LeafReaderContext context, int docId) {
if (this.reader == context.reader() && this.docId == docId) { if (this.reader == context.reader() && this.docId == docId) {
// if we are called with the same document, don't invalidate source // if we are called with the same document, don't invalidate source

View File

@ -23,6 +23,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.Lifecycle;
import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.NotCompressedException;
import org.elasticsearch.common.io.ThrowableObjectInputStream; import org.elasticsearch.common.io.ThrowableObjectInputStream;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
@ -91,8 +92,10 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler {
StreamInput wrappedStream; StreamInput wrappedStream;
if (TransportStatus.isCompress(status) && hasMessageBytesToRead && buffer.readable()) { if (TransportStatus.isCompress(status) && hasMessageBytesToRead && buffer.readable()) {
Compressor compressor = CompressorFactory.compressor(buffer); Compressor compressor;
if (compressor == null) { try {
compressor = CompressorFactory.compressor(buffer);
} catch (NotCompressedException ex) {
int maxToRead = Math.min(buffer.readableBytes(), 10); int maxToRead = Math.min(buffer.readableBytes(), 10);
int offset = buffer.readerIndex(); int offset = buffer.readerIndex();
StringBuilder sb = new StringBuilder("stream marked as compressed, but no compressor found, first [").append(maxToRead).append("] content bytes out of [").append(buffer.readableBytes()).append("] readable bytes with message size [").append(size).append("] are ["); StringBuilder sb = new StringBuilder("stream marked as compressed, but no compressor found, first [").append(maxToRead).append("] content bytes out of [").append(buffer.readableBytes()).append("] readable bytes with message size [").append(size).append("] are [");
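The hunk above switches compressor detection from a null check to an exception: CompressorFactory.compressor(...) now throws NotCompressedException when the header is not recognized. A minimal sketch of that pattern on raw bytes; the BytesReference overload of compressor(...) is assumed to behave like the channel-buffer overload shown above, and the class and method names are illustrative only.

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.NotCompressedException;

class CompressionDetectionSketch {
    // Returns true if some registered compressor recognizes the payload's header.
    static boolean looksCompressed(byte[] payload) {
        BytesReference bytes = new BytesArray(payload);
        try {
            CompressorFactory.compressor(bytes); // assumed overload; throws if unrecognized
            return true;
        } catch (NotCompressedException e) {
            return false;
        }
    }
}

When the Compressor instance itself is not needed, CompressorFactory.isCompressed(new BytesArray(payload)), as used in the mapping tests later in this diff, is the simpler check.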

View File

@ -19,7 +19,7 @@
package org.elasticsearch.cluster.metadata; package org.elasticsearch.cluster.metadata;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
@ -34,7 +34,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseIdAlone() throws Exception { public void testParseIdAlone() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Routing(true, "routing"),
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -52,7 +52,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testFailIfIdIsNoValue() throws Exception { public void testFailIfIdIsNoValue() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Routing(true, "routing"),
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -79,7 +79,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseRoutingAlone() throws Exception { public void testParseRoutingAlone() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Routing(true, "routing"),
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -97,7 +97,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseTimestampAlone() throws Exception { public void testParseTimestampAlone() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Routing(true, "routing"),
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -115,11 +115,11 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseTimestampEquals() throws Exception { public void testParseTimestampEquals() throws Exception {
MappingMetaData md1 = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md1 = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Routing(true, "routing"),
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
MappingMetaData md2 = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md2 = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Routing(true, "routing"),
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -128,7 +128,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseIdAndRoutingAndTimestamp() throws Exception { public void testParseIdAndRoutingAndTimestamp() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "routing"), new MappingMetaData.Routing(true, "routing"),
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -143,7 +143,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseIdAndRoutingAndTimestampWithPath() throws Exception { public void testParseIdAndRoutingAndTimestampWithPath() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("obj1.id"), new MappingMetaData.Id("obj1.id"),
new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Routing(true, "obj1.routing"),
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -161,7 +161,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseIdWithPath() throws Exception { public void testParseIdWithPath() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("obj1.id"), new MappingMetaData.Id("obj1.id"),
new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Routing(true, "obj1.routing"),
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -182,7 +182,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseRoutingWithPath() throws Exception { public void testParseRoutingWithPath() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("obj1.id"), new MappingMetaData.Id("obj1.id"),
new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Routing(true, "obj1.routing"),
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -203,7 +203,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseTimestampWithPath() throws Exception { public void testParseTimestampWithPath() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("obj1.id"), new MappingMetaData.Id("obj1.id"),
new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Routing(true, "obj1.routing"),
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -224,7 +224,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseIdAndRoutingAndTimestampWithinSamePath() throws Exception { public void testParseIdAndRoutingAndTimestampWithinSamePath() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("obj1.id"), new MappingMetaData.Id("obj1.id"),
new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Routing(true, "obj1.routing"),
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -242,7 +242,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseIdAndRoutingAndTimestampWithinSamePathAndMoreLevels() throws Exception { public void testParseIdAndRoutingAndTimestampWithinSamePathAndMoreLevels() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("obj1.obj0.id"), new MappingMetaData.Id("obj1.obj0.id"),
new MappingMetaData.Routing(true, "obj1.obj2.routing"), new MappingMetaData.Routing(true, "obj1.obj2.routing"),
new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -271,7 +271,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseIdAndRoutingAndTimestampWithSameRepeatedObject() throws Exception { public void testParseIdAndRoutingAndTimestampWithSameRepeatedObject() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("obj1.id"), new MappingMetaData.Id("obj1.id"),
new MappingMetaData.Routing(true, "obj1.routing"), new MappingMetaData.Routing(true, "obj1.routing"),
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -291,7 +291,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
// //
@Test @Test
public void testParseIdRoutingTimestampWithRepeatedField() throws Exception { public void testParseIdRoutingTimestampWithRepeatedField() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("field1"), new MappingMetaData.Id("field1"),
new MappingMetaData.Routing(true, "field1.field1"), new MappingMetaData.Routing(true, "field1.field1"),
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -314,7 +314,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseNoIdRoutingWithRepeatedFieldAndObject() throws Exception { public void testParseNoIdRoutingWithRepeatedFieldAndObject() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id("id"), new MappingMetaData.Id("id"),
new MappingMetaData.Routing(true, "field1.field1.field2"), new MappingMetaData.Routing(true, "field1.field1.field2"),
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);
@ -337,7 +337,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
@Test @Test
public void testParseRoutingWithRepeatedFieldAndValidRouting() throws Exception { public void testParseRoutingWithRepeatedFieldAndValidRouting() throws Exception {
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""), MappingMetaData md = new MappingMetaData("type1", new CompressedXContent("{}"),
new MappingMetaData.Id(null), new MappingMetaData.Id(null),
new MappingMetaData.Routing(true, "field1.field2"), new MappingMetaData.Routing(true, "field1.field2"),
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false); new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime", TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null), false);

View File

@ -38,12 +38,12 @@ import java.util.concurrent.CountDownLatch;
/** /**
* Test streaming compression (e.g. used for recovery) * Test streaming compression (e.g. used for recovery)
*/ */
public class CompressedStreamTests extends ElasticsearchTestCase { public abstract class AbstractCompressedStreamTests extends ElasticsearchTestCase {
@Override private final Compressor compressor;
public void setUp() throws Exception {
super.setUp(); protected AbstractCompressedStreamTests(Compressor compressor) {
CompressorFactory.configure(Settings.settingsBuilder().put("compress.default.type", "lzf").build()); this.compressor = compressor;
} }
public void testRandom() throws IOException { public void testRandom() throws IOException {
@ -391,7 +391,7 @@ public class CompressedStreamTests extends ElasticsearchTestCase {
private void doTest(byte bytes[]) throws IOException { private void doTest(byte bytes[]) throws IOException {
ByteBuffer bb = ByteBuffer.wrap(bytes); ByteBuffer bb = ByteBuffer.wrap(bytes);
StreamInput rawIn = new ByteBufferStreamInput(bb); StreamInput rawIn = new ByteBufferStreamInput(bb);
Compressor c = CompressorFactory.defaultCompressor(); Compressor c = compressor;
ByteArrayOutputStream bos = new ByteArrayOutputStream(); ByteArrayOutputStream bos = new ByteArrayOutputStream();
OutputStreamStreamOutput rawOs = new OutputStreamStreamOutput(bos); OutputStreamStreamOutput rawOs = new OutputStreamStreamOutput(bos);
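The stream tests are now parameterized by a Compressor instance rather than wired through the "compress.default.type" setting. A minimal write-side sketch using the DeflateCompressor added in this commit; the read side (a streamInput counterpart) is not shown in this excerpt, so only compression and header detection are illustrated, and the class name is made up.

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.deflate.DeflateCompressor;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

class CompressedWriteSketch {
    public static void main(String[] args) throws IOException {
        Compressor compressor = new DeflateCompressor();
        BytesStreamOutput raw = new BytesStreamOutput();
        StreamOutput compressed = compressor.streamOutput(raw);
        compressed.writeBytes("some recovery payload".getBytes(StandardCharsets.UTF_8));
        compressed.close(); // flushes the remaining compressed frames into `raw`
        byte[] onWire = raw.bytes().toBytes();
        // the header written by streamOutput() should be recognized by the factory
        System.out.println(CompressorFactory.isCompressed(new BytesArray(onWire)));
    }
}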

View File

@ -23,10 +23,8 @@ import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ElasticsearchTestCase; import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.Random; import java.util.Random;
@ -37,49 +35,58 @@ import static org.hamcrest.Matchers.not;
/** /**
* *
*/ */
public class CompressedStringTests extends ElasticsearchTestCase { public abstract class AbstractCompressedXContentTests extends ElasticsearchTestCase {
@Test private final Compressor compressor;
public void simpleTestsLZF() throws IOException {
simpleTests("lzf"); protected AbstractCompressedXContentTests(Compressor compressor) {
this.compressor = compressor;
} }
private void assertEquals(CompressedString s1, CompressedString s2) { private void assertEquals(CompressedXContent s1, CompressedXContent s2) {
Assert.assertEquals(s1, s2); Assert.assertEquals(s1, s2);
assertArrayEquals(s1.uncompressed(), s2.uncompressed()); assertArrayEquals(s1.uncompressed(), s2.uncompressed());
assertEquals(s1.hashCode(), s2.hashCode()); assertEquals(s1.hashCode(), s2.hashCode());
} }
public void simpleTests(String compressor) throws IOException { public void simpleTests() throws IOException {
CompressorFactory.configure(Settings.settingsBuilder().put("compress.default.type", compressor).build()); Compressor defaultCompressor = CompressorFactory.defaultCompressor();
String str = "this is a simple string"; try {
CompressedString cstr = new CompressedString(str); CompressorFactory.setDefaultCompressor(compressor);
String str = "---\nf:this is a simple string";
CompressedXContent cstr = new CompressedXContent(str);
assertThat(cstr.string(), equalTo(str)); assertThat(cstr.string(), equalTo(str));
assertThat(new CompressedString(str), equalTo(cstr)); assertThat(new CompressedXContent(str), equalTo(cstr));
String str2 = "this is a simple string 2"; String str2 = "---\nf:this is a simple string 2";
CompressedString cstr2 = new CompressedString(str2); CompressedXContent cstr2 = new CompressedXContent(str2);
assertThat(cstr2.string(), not(equalTo(str))); assertThat(cstr2.string(), not(equalTo(str)));
assertThat(new CompressedString(str2), not(equalTo(cstr))); assertThat(new CompressedXContent(str2), not(equalTo(cstr)));
assertEquals(new CompressedString(str2), cstr2); assertEquals(new CompressedXContent(str2), cstr2);
} finally {
CompressorFactory.setDefaultCompressor(defaultCompressor);
}
} }
public void testRandom() throws IOException { public void testRandom() throws IOException {
String compressor = "lzf"; Compressor defaultCompressor = CompressorFactory.defaultCompressor();
CompressorFactory.configure(Settings.settingsBuilder().put("compress.default.type", compressor).build()); try {
CompressorFactory.setDefaultCompressor(compressor);
Random r = getRandom(); Random r = getRandom();
for (int i = 0; i < 1000; i++) { for (int i = 0; i < 1000; i++) {
String string = TestUtil.randomUnicodeString(r, 10000); String string = TestUtil.randomUnicodeString(r, 10000);
CompressedString compressedString = new CompressedString(string); // hack to make it detected as YAML
assertThat(compressedString.string(), equalTo(string)); string = "---\n" + string;
CompressedXContent compressedXContent = new CompressedXContent(string);
assertThat(compressedXContent.string(), equalTo(string));
}
} finally {
CompressorFactory.setDefaultCompressor(defaultCompressor);
} }
} }
public void testDifferentCompressedRepresentation() throws Exception { public void testDifferentCompressedRepresentation() throws Exception {
byte[] b = "abcdefghijabcdefghij".getBytes("UTF-8"); byte[] b = "---\nf:abcdefghijabcdefghij".getBytes("UTF-8");
CompressorFactory.defaultCompressor();
Compressor compressor = CompressorFactory.defaultCompressor();
BytesStreamOutput bout = new BytesStreamOutput(); BytesStreamOutput bout = new BytesStreamOutput();
StreamOutput out = compressor.streamOutput(bout); StreamOutput out = compressor.streamOutput(bout);
out.writeBytes(b); out.writeBytes(b);
@ -100,14 +107,14 @@ public class CompressedStringTests extends ElasticsearchTestCase {
// of different size are being used // of different size are being used
assertFalse(b1.equals(b2)); assertFalse(b1.equals(b2));
// we used the compressed representation directly and did not recompress // we used the compressed representation directly and did not recompress
assertArrayEquals(b1.toBytes(), new CompressedString(b1).compressed()); assertArrayEquals(b1.toBytes(), new CompressedXContent(b1).compressed());
assertArrayEquals(b2.toBytes(), new CompressedString(b2).compressed()); assertArrayEquals(b2.toBytes(), new CompressedXContent(b2).compressed());
// but compressedstring instances are still equal // but compressedstring instances are still equal
assertEquals(new CompressedString(b1), new CompressedString(b2)); assertEquals(new CompressedXContent(b1), new CompressedXContent(b2));
} }
public void testHashCode() throws IOException { public void testHashCode() throws IOException {
assertFalse(new CompressedString("a").hashCode() == new CompressedString("b").hashCode()); assertFalse(new CompressedXContent("{\"a\":\"b\"}").hashCode() == new CompressedXContent("{\"a\":\"c\"}").hashCode());
} }
} }

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.deflate;
import org.elasticsearch.common.compress.AbstractCompressedStreamTests;
public class DeflateCompressedStreamTests extends AbstractCompressedStreamTests {
public DeflateCompressedStreamTests() {
super(new DeflateCompressor());
}
}

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.deflate;
import org.elasticsearch.common.compress.AbstractCompressedXContentTests;
public class DeflateXContentTests extends AbstractCompressedXContentTests {
public DeflateXContentTests() {
super(new DeflateCompressor());
}
}

View File

@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
package org.elasticsearch.common.compress; package org.elasticsearch.common.compress.lzf;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -26,10 +26,9 @@ import java.io.IOException;
/** /**
*/ */
public abstract class CompressedStreamOutput<T extends CompressorContext> extends StreamOutput { public abstract class CompressedStreamOutput extends StreamOutput {
private final StreamOutput out; private final StreamOutput out;
protected final T context;
protected byte[] uncompressed; protected byte[] uncompressed;
protected int uncompressedLength; protected int uncompressedLength;
@ -37,9 +36,8 @@ public abstract class CompressedStreamOutput<T extends CompressorContext> extend
private boolean closed; private boolean closed;
public CompressedStreamOutput(StreamOutput out, T context) throws IOException { public CompressedStreamOutput(StreamOutput out) throws IOException {
this.out = out; this.out = out;
this.context = context;
super.setVersion(out.getVersion()); super.setVersion(out.getVersion());
writeHeader(out); writeHeader(out);
} }

View File

@ -23,18 +23,18 @@ import com.ning.compress.BufferRecycler;
import com.ning.compress.lzf.ChunkEncoder; import com.ning.compress.lzf.ChunkEncoder;
import com.ning.compress.lzf.LZFChunk; import com.ning.compress.lzf.LZFChunk;
import com.ning.compress.lzf.util.ChunkEncoderFactory; import com.ning.compress.lzf.util.ChunkEncoderFactory;
import org.elasticsearch.common.compress.CompressedStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException; import java.io.IOException;
public class LZFCompressedStreamOutput extends CompressedStreamOutput<LZFCompressorContext> { public class LZFCompressedStreamOutput extends CompressedStreamOutput {
private final BufferRecycler recycler; private final BufferRecycler recycler;
private final ChunkEncoder encoder; private final ChunkEncoder encoder;
public LZFCompressedStreamOutput(StreamOutput out) throws IOException { public LZFCompressedStreamOutput(StreamOutput out) throws IOException {
super(out, LZFCompressorContext.INSTANCE); super(out);
this.recycler = BufferRecycler.instance(); this.recycler = BufferRecycler.instance();
this.uncompressed = this.recycler.allocOutputBuffer(LZFChunk.MAX_CHUNK_LEN); this.uncompressed = this.recycler.allocOutputBuffer(LZFChunk.MAX_CHUNK_LEN);
this.uncompressedLength = LZFChunk.MAX_CHUNK_LEN; this.uncompressedLength = LZFChunk.MAX_CHUNK_LEN;

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.lzf;
import org.elasticsearch.common.compress.AbstractCompressedStreamTests;
public class LZFCompressedStreamTests extends AbstractCompressedStreamTests {
public LZFCompressedStreamTests() {
super(new LZFTestCompressor());
}
}

View File

@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.compress.lzf;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
// LZF compressor with write support, for testing only
public class LZFTestCompressor extends LZFCompressor {
@Override
public StreamOutput streamOutput(StreamOutput out) throws IOException {
return new LZFCompressedStreamOutput(out);
}
}

View File

@ -19,11 +19,12 @@
package org.elasticsearch.common.compress.lzf; package org.elasticsearch.common.compress.lzf;
import org.elasticsearch.common.compress.CompressorContext; import org.elasticsearch.common.compress.AbstractCompressedXContentTests;
/** public class LZFXContentTests extends AbstractCompressedXContentTests {
*/
public class LZFCompressorContext implements CompressorContext { public LZFXContentTests() {
super(new LZFTestCompressor());
}
public static final LZFCompressorContext INSTANCE = new LZFCompressorContext();
} }

View File

@ -82,7 +82,7 @@ public class XContentFactoryTests extends ElasticsearchTestCase {
// this if for {"foo" : 5} in python CBOR // this if for {"foo" : 5} in python CBOR
bytes = new byte[] {(byte) 0xA1, (byte) 0x63, (byte) 0x66, (byte) 0x6f, (byte) 0x6f, (byte) 0x5}; bytes = new byte[] {(byte) 0xA1, (byte) 0x63, (byte) 0x66, (byte) 0x6f, (byte) 0x6f, (byte) 0x5};
assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.CBOR)); assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.CBOR));
assertThat(((Number) XContentHelper.convertToMap(bytes, true).v2().get("foo")).intValue(), equalTo(5)); assertThat(((Number) XContentHelper.convertToMap(new BytesArray(bytes), true).v2().get("foo")).intValue(), equalTo(5));
// also make sure major type check doesn't collide with SMILE and JSON, just in case // also make sure major type check doesn't collide with SMILE and JSON, just in case
assertThat(CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, SmileConstants.HEADER_BYTE_1), equalTo(false)); assertThat(CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, SmileConstants.HEADER_BYTE_1), equalTo(false));
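As in the CBOR assertion above, XContentHelper.convertToMap(...) now takes a BytesReference and returns the detected content type together with the parsed map. A minimal sketch; the class name and sample document are illustrative only.

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;

import java.nio.charset.StandardCharsets;
import java.util.Map;

class ConvertToMapSketch {
    public static void main(String[] args) {
        byte[] json = "{\"foo\":5}".getBytes(StandardCharsets.UTF_8);
        Tuple<XContentType, Map<String, Object>> typeAndMap =
                XContentHelper.convertToMap(new BytesArray(json), true); // second argument: keep keys ordered
        System.out.println(typeAndMap.v1());            // detected content type (JSON here)
        System.out.println(typeAndMap.v2().get("foo")); // 5
    }
}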

View File

@ -19,7 +19,8 @@
package org.elasticsearch.index.aliases; package org.elasticsearch.index.aliases;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -47,11 +48,11 @@ public class IndexAliasesServiceTests extends ElasticsearchSingleNodeTest {
return indexService.aliasesService(); return indexService.aliasesService();
} }
public static CompressedString filter(QueryBuilder filterBuilder) throws IOException { public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close(); builder.close();
return new CompressedString(builder.string()); return new CompressedXContent(builder.string());
} }
@Test @Test

View File

@ -33,7 +33,7 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.Uid;
@ -63,10 +63,10 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTests {
@Before @Before
public void before() throws Exception { public void before() throws Exception {
mapperService.merge( mapperService.merge(
childType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true
); );
mapperService.merge( mapperService.merge(
grandChildType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true
); );
Document d = new Document(); Document d = new Document();

View File

@ -82,7 +82,7 @@ public class BinaryMappingTests extends ElasticsearchSingleNodeTest {
new BytesArray(binaryValue1).writeTo(compressed); new BytesArray(binaryValue1).writeTo(compressed);
} }
final byte[] binaryValue2 = out.bytes().toBytes(); final byte[] binaryValue2 = out.bytes().toBytes();
assertTrue(CompressorFactory.isCompressed(binaryValue2)); assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2)));
for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) {
ParsedDocument doc = mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes()); ParsedDocument doc = mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", value).endObject().bytes());
@ -114,7 +114,7 @@ public class BinaryMappingTests extends ElasticsearchSingleNodeTest {
new BytesArray(original).writeTo(compressed); new BytesArray(original).writeTo(compressed);
} }
final byte[] binaryValue = out.bytes().toBytes(); final byte[] binaryValue = out.bytes().toBytes();
assertTrue(CompressorFactory.isCompressed(binaryValue)); assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue)));
ParsedDocument doc = mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", binaryValue).endObject().bytes()); ParsedDocument doc = mapper.parse("type", "id", XContentFactory.jsonBuilder().startObject().field("field", binaryValue).endObject().bytes());
BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");

View File

@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper.merge;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer;
@ -160,7 +160,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
public void testConcurrentMergeTest() throws Throwable { public void testConcurrentMergeTest() throws Throwable {
final MapperService mapperService = createIndex("test").mapperService(); final MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("test", new CompressedString("{\"test\":{}}"), true); mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true);
final DocumentMapper documentMapper = mapperService.documentMapper("test"); final DocumentMapper documentMapper = mapperService.documentMapper("test");
DocumentFieldMappers dfm = documentMapper.mappers(); DocumentFieldMappers dfm = documentMapper.mappers();
@ -186,7 +186,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
Mapping update = doc.dynamicMappingsUpdate(); Mapping update = doc.dynamicMappingsUpdate();
assert update != null; assert update != null;
lastIntroducedFieldName.set(fieldName); lastIntroducedFieldName.set(fieldName);
mapperService.merge("test", new CompressedString(update.toString()), false); mapperService.merge("test", new CompressedXContent(update.toString()), false);
} }
} catch (Throwable t) { } catch (Throwable t) {
error.set(t); error.set(t);

View File

@ -432,7 +432,7 @@ public class MultiFieldTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Arrays.sort(multiFieldNames); Arrays.sort(multiFieldNames);
Map<String, Object> sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressed(), true).v2(); Map<String, Object> sourceAsMap = XContentHelper.convertToMap(docMapper.mappingSource().compressedReference(), true).v2();
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
Map<String, Object> multiFields = (Map<String, Object>) XContentMapValues.extractValue("type.properties.my_field.fields", sourceAsMap); Map<String, Object> multiFields = (Map<String, Object>) XContentMapValues.extractValue("type.properties.my_field.fields", sourceAsMap);
assertThat(multiFields.size(), equalTo(multiFieldNames.length)); assertThat(multiFields.size(), equalTo(multiFieldNames.length));

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.source;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -51,7 +52,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest {
.field("field2", "value2") .field("field2", "value2")
.endObject().bytes()); .endObject().bytes());
BytesRef bytes = doc.rootDoc().getBinaryValue("_source"); BytesRef bytes = doc.rootDoc().getBinaryValue("_source");
assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(false)); assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false));
} }
@Test @Test
@ -68,7 +69,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest {
.endObject().bytes()); .endObject().bytes());
BytesRef bytes = doc.rootDoc().getBinaryValue("_source"); BytesRef bytes = doc.rootDoc().getBinaryValue("_source");
assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(true)); assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true));
} }
@Test @Test
@ -84,7 +85,7 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest {
.endObject().bytes()); .endObject().bytes());
BytesRef bytes = doc.rootDoc().getBinaryValue("_source"); BytesRef bytes = doc.rootDoc().getBinaryValue("_source");
assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(false)); assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(false));
doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject() doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
.field("field1", "value1") .field("field1", "value1")
@ -95,6 +96,6 @@ public class CompressSourceMappingTests extends ElasticsearchSingleNodeTest {
.endObject().bytes()); .endObject().bytes());
bytes = doc.rootDoc().getBinaryValue("_source"); bytes = doc.rootDoc().getBinaryValue("_source");
assertThat(CompressorFactory.isCompressed(bytes.bytes, bytes.offset, bytes.length), equalTo(true)); assertThat(CompressorFactory.isCompressed(new BytesArray(bytes)), equalTo(true));
} }
} }

View File

@ -23,7 +23,7 @@ import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -193,7 +193,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string(); .endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService(); MapperService mapperService = createIndex("test").mapperService();
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true);
DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1(); DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1();
assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.type(), equalTo("my_type"));
@ -206,12 +206,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string(); .endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService(); MapperService mapperService = createIndex("test").mapperService();
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true);
String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type")
.startObject("_source").field("enabled", true).endObject() .startObject("_source").field("enabled", true).endObject()
.endObject().endObject().string(); .endObject().endObject().string();
mapperService.merge("my_type", new CompressedString(mapping), true); mapperService.merge("my_type", new CompressedXContent(mapping), true);
DocumentMapper mapper = mapperService.documentMapper("my_type"); DocumentMapper mapper = mapperService.documentMapper("my_type");
assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.type(), equalTo("my_type"));

View File

@ -27,7 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.joda.Joda;
@ -450,7 +450,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
{ {
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null,
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null); TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null);
MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)),
new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false);
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
@ -467,7 +467,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
{ {
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null,
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null); TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null);
MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)),
new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false);
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
@ -484,7 +484,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
{ {
MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null, MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, null,
TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false); TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false);
MappingMetaData expected = new MappingMetaData("type", new CompressedString("{}".getBytes(StandardCharsets.UTF_8)), MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)),
new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false); new MappingMetaData.Id(null), new MappingMetaData.Routing(false, null), timestamp, false);
BytesStreamOutput out = new BytesStreamOutput(); BytesStreamOutput out = new BytesStreamOutput();
@ -652,7 +652,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject().string();
// This was causing a NPE // This was causing a NPE
new MappingMetaData(new CompressedString(mapping)); new MappingMetaData(new CompressedXContent(mapping));
} }
@Test @Test

View File

@ -27,7 +27,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -196,7 +197,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean()); MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean());
assertFalse(mergeResult.hasConflicts()); assertFalse(mergeResult.hasConflicts());
} }
@ -204,7 +205,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean()); MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean());
assertFalse(mergeResult.hasConflicts()); assertFalse(mergeResult.hasConflicts());
} }
@ -213,23 +214,23 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false); MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
assertFalse(mergeResult.hasConflicts()); assertFalse(mergeResult.hasConflicts());
CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-        assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+        assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
     }
     @Test
     public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception {
         XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d");
         IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
-        CompressedString mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
+        CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
-        assertThat(mappingAfterCreation, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+        assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
         XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
         assertFalse(mergeResult.hasConflicts());
-        CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
+        CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-        assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+        assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
     }
     @Test
@@ -238,12 +239,12 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         //check if default ttl changed when simulate set to true
         XContentBuilder mappingWithTtl = getMappingWithTtlEnabled("6d");
         IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl);
-        CompressedString mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
+        CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
         XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d");
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDifferentDefault.string()), true).mapping(), true);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - no mappings applied
-        CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
+        CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
         assertThat(mappingAfterMerge, equalTo(mappingBeforeMerge));
         client().admin().indices().prepareDelete("testindex").get();
@@ -252,7 +253,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
         mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
         XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled();
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - no mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@@ -264,7 +265,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
         mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
         mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), true);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - no mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@@ -275,21 +276,21 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         mappingWithoutTtl = getMappingWithTtlDisabled("6d");
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
         mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-        assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+        assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
         client().admin().indices().prepareDelete("testindex").get();
         // check if switching simulate flag off works if nothing was applied in the beginning
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
         mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), false);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
-        assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
+        assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":604800000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
     }
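The tests above all drive the same shape of call: DocumentMapper.merge with a simulate flag, where the proposed mapping update is now parsed from a CompressedXContent instead of a CompressedString. A minimal sketch of that sequence, assuming the ElasticsearchSingleNodeTest fixture and the getMappingWithTtlEnabled(...) helper from this test class (illustrative only, not part of the commit):

    IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", getMappingWithTtlEnabled("6d"));
    DocumentMapper mapper = indexService.mapperService().documentMapper("type");

    // simulate == true: conflicts are reported, but mappingSource() is left untouched
    MergeResult dryRun = mapper.merge(indexService.mapperService().parse("type",
            new CompressedXContent(getMappingWithTtlEnabled("7d").string()), true).mapping(), true);
    assertFalse(dryRun.hasConflicts());

    // simulate == false: the update is applied, so mappingSource() now carries the 7d default
    MergeResult applied = mapper.merge(indexService.mapperService().parse("type",
            new CompressedXContent(getMappingWithTtlEnabled("7d").string()), true).mapping(), false);
    assertFalse(applied.hasConflicts());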

View File

@@ -20,7 +20,7 @@
 package org.elasticsearch.index.mapper.update;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -80,11 +80,11 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
     private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
         IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
         // simulate like in MetaDataMappingService#putMapping
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), false);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false);
         // assure we have no conflicts
         assertThat(mergeResult.buildConflicts().length, equalTo(0));
         // make sure mappings applied
-        CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
+        CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
         assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string()));
     }
@@ -102,13 +102,13 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
     protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mapping, XContentBuilder mappingUpdate) throws IOException {
         IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
-        CompressedString mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
+        CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
         // simulate like in MetaDataMappingService#putMapping
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), true);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true);
         // assure we have conflicts
         assertThat(mergeResult.buildConflicts().length, equalTo(1));
         // make sure simulate flag actually worked - no mappings applied
-        CompressedString mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
+        CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
         assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate));
     }
@@ -124,9 +124,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject()
             .endObject()
         .endObject();
-        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true);
+        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
         assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled));
-        documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true);
+        documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
         assertThat(documentMapper.indexMapper().enabled(), equalTo(enabled));
     }
@@ -146,11 +146,11 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject()
             .endObject()
         .endObject();
-        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true);
+        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
         assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
         assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
         assertTrue(documentMapper.timestampFieldMapper().hasDocValues());
-        documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true);
+        documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
         assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
         assertTrue(documentMapper.timestampFieldMapper().hasDocValues());
         assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
@@ -168,10 +168,10 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject()
             .endObject()
         .endObject();
-        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true);
+        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
         assertThat(documentMapper.sizeFieldMapper().enabled(), equalTo(enabled));
         assertTrue(documentMapper.sizeFieldMapper().fieldType().stored());
-        documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true);
+        documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
         assertThat(documentMapper.sizeFieldMapper().enabled(), equalTo(enabled));
     }
@@ -179,9 +179,9 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
     public void testSizeTimestampIndexParsing() throws IOException {
         IndexService indexService = createIndex("test", Settings.settingsBuilder().build());
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
-        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedString(mapping), true);
+        DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true);
         assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
-        documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true);
+        documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
         assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
     }

View File

@@ -23,7 +23,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.inject.Injector;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.MapperService;
@@ -57,7 +57,7 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
         MapperService mapperService = indexService.mapperService();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
-        mapperService.merge("person", new CompressedString(mapping), true);
+        mapperService.merge("person", new CompressedXContent(mapping), true);
         ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();

View File

@@ -23,7 +23,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.inject.Injector;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.MapperService;
@@ -58,7 +58,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ElasticsearchS
         MapperService mapperService = indexService.mapperService();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
-        mapperService.merge("person", new CompressedString(mapping), true);
+        mapperService.merge("person", new CompressedXContent(mapping), true);
         ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();

View File

@@ -70,7 +70,7 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest;
 import org.elasticsearch.action.termvectors.TermVectorsResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -209,7 +209,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
         MapperService mapperService = indexService.mapperService();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
-        mapperService.merge("person", new CompressedString(mapping), true);
+        mapperService.merge("person", new CompressedXContent(mapping), true);
         ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();

View File

@@ -31,7 +31,7 @@ import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
@@ -67,8 +67,8 @@ public abstract class AbstractChildTests extends ElasticsearchSingleNodeTest {
         MapperService mapperService = indexService.mapperService();
         // Parent/child parsers require that the parent and child type to be presented in mapping
         // Sometimes we want a nested object field in the parent type that triggers nonNestedDocsFilter to be used
-        mapperService.merge(parentType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true);
+        mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true);
-        mapperService.merge(childType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true);
+        mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true);
         return createSearchContext(indexService);
     }
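The helper above registers the parent and child mappings through MapperService.merge before a search context is built. Stripped of the randomization, that call shape with the renamed CompressedXContent looks roughly like the following sketch (illustrative only, not part of the commit; "myparent" and "mychild" are placeholder type names, and an IndexService from the single-node test fixture is assumed):

    MapperService mapperService = indexService.mapperService();
    // register a simplified parent mapping (with a nested field) under the parent type
    String parentSource = PutMappingRequest.buildFromSimplifiedDef("myparent", "nested_field", "type=nested").string();
    mapperService.merge("myparent", new CompressedXContent(parentSource), true);
    // the child mapping declares _parent so parent/child queries can resolve the join
    String childSource = PutMappingRequest.buildFromSimplifiedDef("mychild", "_parent", "type=myparent").string();
    mapperService.merge("mychild", new CompressedXContent(childSource), true);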

View File

@@ -20,6 +20,7 @@ package org.elasticsearch.indices.template;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionRequestValidationException;
@@ -32,6 +33,7 @@ import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.metadata.AliasMetaData;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -42,6 +44,7 @@ import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.junit.Test;
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
@@ -668,4 +671,5 @@ public class SimpleIndexTemplateTests extends ElasticsearchIntegrationTest {
         assertThat(response.getItems()[0].isFailed(), equalTo(true));
         assertThat(response.getItems()[0].getFailureMessage(), containsString("failed to parse filter for alias [alias4]"));
     }
 }

View File

@@ -33,7 +33,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
-import org.elasticsearch.common.compress.CompressedString;
+import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
@@ -117,7 +117,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeTest {
         IndexSearcher searcher = new IndexSearcher(directoryReader);
         IndexService indexService = createIndex("test");
-        indexService.mapperService().merge("test", new CompressedString(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true);
+        indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true);
         SearchContext searchContext = createSearchContext(indexService);
         AggregationContext context = new AggregationContext(searchContext);

View File

@@ -24,8 +24,9 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.compress.Compressor;
 import org.elasticsearch.common.compress.CompressorFactory;
-import org.elasticsearch.common.compress.lzf.LZFCompressor;
+import org.elasticsearch.common.compress.lzf.LZFTestCompressor;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -41,10 +42,15 @@ public class SearchSourceCompressTests extends ElasticsearchSingleNodeTest {
     @Test
     public void testSourceCompressionLZF() throws IOException {
-        CompressorFactory.setDefaultCompressor(new LZFCompressor());
-        verifySource(true);
-        verifySource(false);
-        verifySource(null);
+        final Compressor defaultCompressor = CompressorFactory.defaultCompressor();
+        try {
+            CompressorFactory.setDefaultCompressor(new LZFTestCompressor());
+            verifySource(true);
+            verifySource(false);
+            verifySource(null);
+        } finally {
+            CompressorFactory.setDefaultCompressor(defaultCompressor);
+        }
     }
     private void verifySource(Boolean compress) throws IOException {
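Unlike the old version, the rewritten test above no longer leaves a test-only compressor installed as the process-wide default. The save/restore idiom it relies on, shown in isolation (illustrative only, not part of the commit; LZFTestCompressor is the test compressor introduced elsewhere in this change):

    // remember whichever compressor the rest of the suite expects to be the default
    final Compressor defaultCompressor = CompressorFactory.defaultCompressor();
    try {
        // install the LZF test compressor only for the duration of this test
        CompressorFactory.setDefaultCompressor(new LZFTestCompressor());
        // ... index documents and verify that _source round-trips correctly ...
    } finally {
        // always restore the original default so later tests see the expected compressor
        CompressorFactory.setDefaultCompressor(defaultCompressor);
    }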