add the option to get a mapping as a parsed map; also refactor internally to share the code that parses content into a map

Shay Banon 2011-11-28 20:19:02 +02:00
parent b1707d219f
commit 6b894d9f53
7 changed files with 69 additions and 90 deletions

MappingMetaData.java

@@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
@@ -331,6 +332,25 @@ public class MappingMetaData {
return this.source;
}
/**
* Converts the serialized compressed form of the mappings into a parsed map.
*/
public Map<String, Object> sourceAsMap() throws IOException {
Map<String, Object> mapping = XContentHelper.convertToMap(source.compressed(), 0, source.compressed().length).v2();
if (mapping.size() == 1 && mapping.containsKey(type())) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(type());
}
return mapping;
}
/**
* Converts the serialized compressed form of the mappings into a parsed map.
*/
public Map<String, Object> getSourceAsMap() throws IOException {
return sourceAsMap();
}
public Id id() {
return this.id;
}

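The two new accessors above give callers a parsed view of the stored mapping without having to deal with the compression details themselves. A minimal sketch of a call site, assuming you already hold a MappingMetaData instance (the MappingDump helper and the "properties" lookup are illustrative, not part of this commit):

import java.io.IOException;
import java.util.Map;
import org.elasticsearch.cluster.metadata.MappingMetaData;

public class MappingDump {
    // Illustrative helper: pull the "properties" section out of a type's
    // mapping via the new parsed-map accessor, instead of decompressing
    // and parsing source() by hand.
    @SuppressWarnings("unchecked")
    public static Map<String, Object> properties(MappingMetaData mappingMd) throws IOException {
        Map<String, Object> mapping = mappingMd.sourceAsMap(); // decompresses, parses, strips the type-name root
        return (Map<String, Object>) mapping.get("properties");
    }
}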
XContentHelper.java

@@ -19,7 +19,13 @@
package org.elasticsearch.common.xcontent;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.base.Charsets;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import java.io.IOException;
import java.util.ArrayList;
@@ -32,6 +38,35 @@ import java.util.Map;
*/
public class XContentHelper {
public static XContentParser createParser(byte[] data, int offset, int length) throws IOException {
if (LZF.isCompressed(data, offset, length)) {
BytesStreamInput siBytes = new BytesStreamInput(data, offset, length);
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
return XContentFactory.xContent(contentType).createParser(siLzf);
} else {
return XContentFactory.xContent(data, offset, length).createParser(data, offset, length);
}
}
public static Tuple<XContentType, Map<String, Object>> convertToMap(byte[] data, int offset, int length) throws ElasticSearchParseException {
try {
if (LZF.isCompressed(data, offset, length)) {
BytesStreamInput siBytes = new BytesStreamInput(data, offset, length);
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
return Tuple.create(contentType, XContentFactory.xContent(contentType).createParser(siLzf).mapAndClose());
} else {
XContentType contentType = XContentFactory.xContentType(data, offset, length);
return Tuple.create(contentType, XContentFactory.xContent(contentType).createParser(data, offset, length).mapAndClose());
}
} catch (IOException e) {
throw new ElasticSearchParseException("Failed to parse content to map", e);
}
}
public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson) throws IOException {
XContentType xContentType = XContentFactory.xContentType(data, offset, length);
if (xContentType == XContentType.JSON && reformatJson) {

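Both helpers sniff the payload first: if the bytes carry an LZF header they are routed through a cached decompressing stream, otherwise they are parsed directly, so callers no longer branch on compression themselves. A sketch of the round trip follows; LZFEncoder.encode and XContentBuilder.copiedBytes are assumptions about the bundled LZF fork and the builder API of this era, not part of the commit:

import java.util.Map;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.lzf.LZFEncoder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;

public class ConvertToMapExample {
    public static void main(String[] args) throws Exception {
        byte[] json = XContentFactory.jsonBuilder()
                .startObject().field("user", "kimchy").endObject()
                .copiedBytes(); // assumed builder accessor

        // Plain bytes: the content type is detected from the payload itself.
        Tuple<XContentType, Map<String, Object>> plain =
                XContentHelper.convertToMap(json, 0, json.length);

        // LZF-compressed bytes: convertToMap spots the LZF header and
        // decompresses transparently, so both calls yield an equal map.
        byte[] compressed = LZFEncoder.encode(json, json.length); // assumed encoder API
        Tuple<XContentType, Map<String, Object>> unpacked =
                XContentHelper.convertToMap(compressed, 0, compressed.length);

        assert plain.v1() == XContentType.JSON;
        assert plain.v2().equals(unpacked.v2());
    }
}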
DocumentMapper.java

@@ -31,14 +31,11 @@ import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.analysis.NamedAnalyzer;
@@ -428,15 +425,7 @@ public class DocumentMapper implements ToXContent {
XContentParser parser = source.parser();
try {
if (parser == null) {
if (LZF.isCompressed(source.source(), source.sourceOffset(), source.sourceLength())) {
BytesStreamInput siBytes = new BytesStreamInput(source.source(), source.sourceOffset(), source.sourceLength());
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(siLzf);
} else {
parser = XContentFactory.xContent(source.source(), source.sourceOffset(), source.sourceLength()).createParser(source.source(), source.sourceOffset(), source.sourceLength());
}
parser = XContentHelper.createParser(source.source(), source.sourceOffset(), source.sourceLength());
}
context.reset(parser, new Document(), source, listener);

SourceFieldMapper.java

@@ -24,12 +24,10 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.compress.lzf.LZFDecoder;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
@@ -37,6 +35,7 @@ import org.elasticsearch.common.lucene.document.ResetFieldSelector;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.InternalMapper;
@@ -229,19 +228,8 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
if (filtered) {
// we don't update the context source if we filter, we want to keep it as is...
XContentType contentType;
Map<String, Object> sourceAsMap;
if (LZF.isCompressed(data, dataOffset, dataLength)) {
BytesStreamInput siBytes = new BytesStreamInput(data, dataOffset, dataLength);
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
sourceAsMap = XContentFactory.xContent(contentType).createParser(siLzf).mapAndClose();
} else {
contentType = XContentFactory.xContentType(data, dataOffset, dataLength);
sourceAsMap = XContentFactory.xContent(contentType).createParser(data, dataOffset, dataLength).mapAndClose();
}
Map<String, Object> filteredSource = XContentMapValues.filter(sourceAsMap, includes, excludes);
Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(data, dataOffset, dataLength);
Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
CachedStreamOutput.Entry cachedEntry = CachedStreamOutput.popEntry();
StreamOutput streamOutput;
if (compress != null && compress && (compressThreshold == -1 || dataLength > compressThreshold)) {
@@ -249,7 +237,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
} else {
streamOutput = cachedEntry.cachedBytes();
}
XContentBuilder builder = XContentFactory.contentBuilder(contentType, streamOutput).map(filteredSource);
XContentBuilder builder = XContentFactory.contentBuilder(mapTuple.v1(), streamOutput).map(filteredSource);
builder.close();
data = cachedEntry.bytes().copiedByteArray();

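The filtered branch above now gets its parsed source from XContentHelper.convertToMap and keeps only the requested fields via XContentMapValues.filter before re-serializing. A small standalone sketch of that filter call (the field names are made up):

import java.util.Map;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.xcontent.support.XContentMapValues;

public class SourceFilterExample {
    public static void main(String[] args) {
        Map<String, Object> source = Maps.newHashMap();
        source.put("title", "quick brown fox");
        source.put("body", "jumps over the lazy dog");
        source.put("internal_id", 42);

        // Keep only title/body and drop internal_id.
        Map<String, Object> filtered = XContentMapValues.filter(
                source,
                new String[]{"title", "body"},  // includes
                new String[]{"internal_id"});   // excludes

        System.out.println(filtered); // {title=quick brown fox, body=jumps over the lazy dog}
    }
}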
PercolatorExecutor.java

@@ -33,20 +33,16 @@ import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.IndexCache;
@@ -221,15 +217,7 @@ public class PercolatorExecutor extends AbstractIndexComponent {
public Query parseQuery(String name, byte[] source, int sourceOffset, int sourceLength) throws ElasticSearchException {
XContentParser parser = null;
try {
if (LZF.isCompressed(source, sourceOffset, sourceLength)) {
BytesStreamInput siBytes = new BytesStreamInput(source, sourceOffset, sourceLength);
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(siLzf);
} else {
parser = XContentFactory.xContent(source, sourceOffset, sourceLength).createParser(source, sourceOffset, sourceLength);
}
parser = XContentHelper.createParser(source, sourceOffset, sourceLength);
Query query = null;
String currentFieldName = null;
XContentParser.Token token = parser.nextToken(); // move the START_OBJECT

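parseQuery itself now only asks XContentHelper for a parser; the compression branch is gone from the call site. The token-walking loop that follows the createParser call looks roughly like this generic sketch (a field/value walker, not the percolator's actual query parsing):

import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;

public class TokenWalkSketch {
    // Generic sketch: createParser copes with plain or LZF-compressed bytes,
    // after which the caller iterates tokens until the stream is exhausted.
    public static void walk(byte[] source, int offset, int length) throws Exception {
        XContentParser parser = XContentHelper.createParser(source, offset, length);
        try {
            String currentFieldName = null;
            XContentParser.Token token;
            while ((token = parser.nextToken()) != null) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    System.out.println(currentFieldName + " = " + parser.text());
                }
            }
        } finally {
            parser.close();
        }
    }
}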
RestGetMappingAction.java

@@ -31,8 +31,6 @@ import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.indices.TypeMissingException;
@@ -45,7 +43,6 @@ import org.elasticsearch.rest.XContentThrowableRestResponse;
import org.elasticsearch.rest.action.support.RestXContentBuilder;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.rest.RestRequest.Method.*;
@@ -93,15 +90,8 @@ public class RestGetMappingAction extends BaseRestHandler {
continue;
}
foundType = true;
byte[] mappingSource = mappingMd.source().uncompressed();
XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
Map<String, Object> mapping = parser.map();
if (mapping.size() == 1 && mapping.containsKey(mappingMd.type())) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(mappingMd.type());
}
builder.field(mappingMd.type());
builder.map(mapping);
builder.map(mappingMd.sourceAsMap());
}
if (!foundType) {
channel.sendResponse(new XContentThrowableRestResponse(request, new TypeMissingException(new Index(indices[0]), types.iterator().next())));
@@ -116,15 +106,8 @@
// filter this type out...
continue;
}
byte[] mappingSource = mappingMd.source().uncompressed();
XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
Map<String, Object> mapping = parser.map();
if (mapping.size() == 1 && mapping.containsKey(mappingMd.type())) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(mappingMd.type());
}
builder.field(mappingMd.type());
builder.map(mapping);
builder.map(mappingMd.sourceAsMap());
}
builder.endObject();

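With sourceAsMap() on MappingMetaData, both response paths in this handler collapse to field(type) followed by map(mapping): the parsed mapping is written straight back out through the builder. A condensed sketch of that pattern for a single type (the wrapper class is illustrative):

import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class MappingResponseSketch {
    // Illustrative: emits {"<type>":{...parsed mapping...}} for one type.
    public static XContentBuilder render(MappingMetaData mappingMd) throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.field(mappingMd.type());
        builder.map(mappingMd.sourceAsMap());
        builder.endObject();
        return builder;
    }
}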
SourceLookup.java

@@ -24,13 +24,8 @@ import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.compress.lzf.LZF;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldSelector;
@@ -79,27 +74,8 @@ public class SourceLookup implements Map {
return this.source;
}
public static Map<String, Object> sourceAsMap(byte[] bytes, int offset, int length) {
XContentParser parser = null;
try {
if (LZF.isCompressed(bytes, offset, length)) {
BytesStreamInput siBytes = new BytesStreamInput(bytes, offset, length);
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(siLzf);
return parser.map();
} else {
parser = XContentFactory.xContent(bytes, offset, length).createParser(bytes, offset, length);
return parser.map();
}
} catch (Exception e) {
throw new ElasticSearchParseException("Failed to parse source to map", e);
} finally {
if (parser != null) {
parser.close();
}
}
public static Map<String, Object> sourceAsMap(byte[] bytes, int offset, int length) throws ElasticSearchParseException {
return XContentHelper.convertToMap(bytes, offset, length).v2();
}
public void setNextReader(IndexReader reader) {