refactor json handling to use byte[] instead of string for better performance, storage, and memory consumption
parent 8adcbb2832
commit c111e1ab80
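In rough terms, JSON sources that used to travel as String now travel as byte[] end to end (index requests, the translog, document mappers, and REST responses). A minimal usage sketch, assuming the APIs shown in the diff below (the literal JSON value and the getResponse instance are illustrative only):

    // before: source passed around as a String
    // new IndexRequest("index", "type", "1", "{\"field\":\"value\"}");

    // after: source is byte[]; String input is converted once up front
    IndexRequest request = new IndexRequest("index", "type", "1",
            Unicode.fromStringAsBytes("{\"field\":\"value\"}"));

    // GetResponse keeps the raw bytes and offers a String view on demand
    byte[] raw = getResponse.source();
    String json = getResponse.sourceAsString();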
@@ -65,6 +65,8 @@ public class SimpleEngineBenchmark {
 private String[] contentItems = new String[]{"test1", "test2", "test3"};
+private static byte[] TRANSLOG_PAYLOAD = new byte[12];
 private volatile int lastRefreshedId = 0;
@@ -152,7 +154,7 @@ public class SimpleEngineBenchmark {
 String sId = Integer.toString(id);
 Document doc = doc().add(field("_id", sId))
 .add(field("content", contentItem)).build();
-engine.index(new Engine.Index(new Term("_id", sId), doc, Lucene.STANDARD_ANALYZER, "type", sId, "{ ... }"));
+engine.index(new Engine.Index(new Term("_id", sId), doc, Lucene.STANDARD_ANALYZER, "type", sId, TRANSLOG_PAYLOAD));
 }
 engine.refresh(new Engine.Refresh(true));
 stopWatch.stop();
@@ -261,7 +263,7 @@ public class SimpleEngineBenchmark {
 String sId = Integer.toString(id);
 Document doc = doc().add(field("_id", sId))
 .add(field("content", content(id))).build();
-engine.index(new Engine.Index(new Term("_id", sId), doc, Lucene.STANDARD_ANALYZER, "type", sId, "{ ... }"));
+engine.index(new Engine.Index(new Term("_id", sId), doc, Lucene.STANDARD_ANALYZER, "type", sId, TRANSLOG_PAYLOAD));
 }
 } catch (Exception e) {
 System.out.println("Writer thread failed");
@@ -78,7 +78,7 @@ public class CountRequest extends BroadcastOperationRequest {
 }
 @Required public CountRequest querySource(QueryBuilder queryBuilder) {
-return querySource(queryBuilder.build());
+return querySource(queryBuilder.buildAsString());
 }
 public CountRequest querySource(String querySource) {
@@ -55,7 +55,7 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
 }
 @Required public DeleteByQueryRequest querySource(QueryBuilder queryBuilder) {
-return querySource(queryBuilder.build());
+return querySource(queryBuilder.buildAsString());
 }
 @Required public DeleteByQueryRequest querySource(String querySource) {
@@ -71,7 +71,7 @@ public class IndexDeleteByQueryRequest extends IndexReplicationOperationRequest
 }
 @Required public IndexDeleteByQueryRequest querySource(QueryBuilder queryBuilder) {
-return querySource(queryBuilder.build());
+return querySource(queryBuilder.buildAsString());
 }
 @Required public IndexDeleteByQueryRequest querySource(String querySource) {
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.get;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.util.Unicode;
 import org.elasticsearch.util.io.Streamable;
 import java.io.DataInput;
@@ -37,12 +38,12 @@ public class GetResponse implements ActionResponse, Streamable {
 private String id;
-private String source;
+private byte[] source;
 public GetResponse() {
 }
-public GetResponse(String index, String type, String id, String source) {
+public GetResponse(String index, String type, String id, byte[] source) {
 this.index = index;
 this.type = type;
 this.id = id;
@@ -65,16 +66,22 @@ public class GetResponse implements ActionResponse, Streamable {
 return id;
 }
-public String source() {
+public byte[] source() {
 return this.source;
 }
+public String sourceAsString() {
+return Unicode.fromBytes(source);
+}
 @Override public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
 index = in.readUTF();
 type = in.readUTF();
 id = in.readUTF();
-if (in.readBoolean()) {
-source = in.readUTF();
+int size = in.readInt();
+if (size > 0) {
+source = new byte[size];
+in.readFully(source);
 }
 }
@@ -83,10 +90,10 @@ public class GetResponse implements ActionResponse, Streamable {
 out.writeUTF(type);
 out.writeUTF(id);
 if (source == null) {
-out.writeBoolean(false);
+out.writeInt(0);
 } else {
-out.writeBoolean(true);
-out.writeUTF(source);
+out.writeInt(source.length);
+out.write(source);
 }
 }
 }
@@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.replication.ShardReplicationOperationRequest;
 import org.elasticsearch.util.Required;
 import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.Unicode;
 import org.elasticsearch.util.json.JsonBuilder;
 import java.io.DataInput;
@@ -72,14 +73,14 @@ public class IndexRequest extends ShardReplicationOperationRequest {
 private String type;
 private String id;
-private String source;
+private byte[] source;
 private OpType opType = OpType.INDEX;
 public IndexRequest(String index) {
 this.index = index;
 }
-public IndexRequest(String index, String type, String id, String source) {
+public IndexRequest(String index, String type, String id, byte[] source) {
 this.index = index;
 this.type = type;
 this.id = id;
@@ -128,19 +129,25 @@ public class IndexRequest extends ShardReplicationOperationRequest {
 return this;
 }
-String source() {
+byte[] source() {
 return source;
 }
+@Required public IndexRequest source(String source) {
+this.source = Unicode.fromStringAsBytes(source);
+return this;
+}
 @Required public IndexRequest source(JsonBuilder jsonBuilder) {
 try {
-return source(jsonBuilder.string());
+jsonBuilder.flush();
+return source(jsonBuilder.copiedBytes());
 } catch (IOException e) {
 throw new ElasticSearchIllegalArgumentException("Failed to build json for index request", e);
 }
 }
-@Required public IndexRequest source(String source) {
+@Required public IndexRequest source(byte[] source) {
 this.source = source;
 return this;
 }
@@ -163,7 +170,8 @@ public class IndexRequest extends ShardReplicationOperationRequest {
 super.readFrom(in);
 type = in.readUTF();
 id = in.readUTF();
-source = in.readUTF();
+source = new byte[in.readInt()];
+in.readFully(source, 0, source.length);
 opType = OpType.fromId(in.readByte());
 }
@@ -171,7 +179,8 @@ public class IndexRequest extends ShardReplicationOperationRequest {
 super.writeTo(out);
 out.writeUTF(type);
 out.writeUTF(id);
-out.writeUTF(source);
+out.writeInt(source.length);
+out.write(source);
 out.writeByte(opType.id());
 }
 }
@@ -27,6 +27,7 @@ import org.elasticsearch.util.MapBuilder;
 import org.elasticsearch.util.Nullable;
 import org.elasticsearch.util.concurrent.Immutable;
 import org.elasticsearch.util.json.JsonBuilder;
+import org.elasticsearch.util.json.StringJsonBuilder;
 import org.elasticsearch.util.json.ToJson;
 import org.elasticsearch.util.settings.Settings;
@@ -35,7 +36,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import static org.elasticsearch.util.MapBuilder.*;
-import static org.elasticsearch.util.json.JsonBuilder.*;
 /**
 * @author kimchy (Shay Banon)
@@ -126,7 +126,7 @@ public class MetaData implements Iterable<IndexMetaData> {
 }
 public static String toJson(MetaData metaData) throws IOException {
-JsonBuilder builder = jsonBuilder().prettyPrint();
+StringJsonBuilder builder = JsonBuilder.stringJsonBuilder().prettyPrint();
 builder.startObject();
 toJson(metaData, builder, ToJson.EMPTY_PARAMS);
 builder.endObject();
@@ -33,6 +33,7 @@ import org.elasticsearch.index.gateway.fs.FsIndexGatewayModule;
 import org.elasticsearch.util.component.AbstractComponent;
 import org.elasticsearch.util.component.Lifecycle;
 import org.elasticsearch.util.io.FileSystemUtils;
+import org.elasticsearch.util.json.BinaryJsonBuilder;
 import org.elasticsearch.util.json.Jackson;
 import org.elasticsearch.util.json.JsonBuilder;
 import org.elasticsearch.util.json.ToJson;
@@ -157,7 +158,7 @@ public class FsGateway extends AbstractComponent implements Gateway {
 FileOutputStream fileStream = new FileOutputStream(file);
-JsonBuilder builder = new JsonBuilder(Jackson.defaultJsonFactory().createJsonGenerator(fileStream, JsonEncoding.UTF8));
+JsonBuilder builder = new BinaryJsonBuilder(Jackson.defaultJsonFactory().createJsonGenerator(fileStream, JsonEncoding.UTF8));
 builder.prettyPrint();
 builder.startObject();
 MetaData.Builder.toJson(metaData, builder, ToJson.EMPTY_PARAMS);
@@ -20,6 +20,7 @@
 package org.elasticsearch.http.netty;
 import org.elasticsearch.http.HttpChannel;
+import org.elasticsearch.http.HttpException;
 import org.elasticsearch.http.HttpResponse;
 import org.elasticsearch.rest.RestResponse;
 import org.jboss.netty.buffer.ChannelBuffer;
@@ -29,6 +30,7 @@ import org.jboss.netty.channel.ChannelFuture;
 import org.jboss.netty.channel.ChannelFutureListener;
 import org.jboss.netty.handler.codec.http.*;
+import java.io.IOException;
 import java.util.Set;
 /**
@@ -64,10 +66,14 @@ public class NettyHttpChannel implements HttpChannel {
 }
 // Convert the response content to a ChannelBuffer.
 ChannelBuffer buf;
-if (response.contentThreadSafe()) {
-buf = ChannelBuffers.wrappedBuffer(response.content(), 0, response.contentLength());
-} else {
-buf = ChannelBuffers.copiedBuffer(response.content(), 0, response.contentLength());
+try {
+if (response.contentThreadSafe()) {
+buf = ChannelBuffers.wrappedBuffer(response.content(), 0, response.contentLength());
+} else {
+buf = ChannelBuffers.copiedBuffer(response.content(), 0, response.contentLength());
+}
+} catch (IOException e) {
+throw new HttpException("Failed to convert response to bytes", e);
 }
 if (response.prefixContent() != null || response.suffixContent() != null) {
 ChannelBuffer prefixBuf = ChannelBuffers.EMPTY_BUFFER;
@@ -25,10 +25,12 @@ import org.elasticsearch.http.HttpRequest;
 import org.elasticsearch.util.SizeValue;
 import org.elasticsearch.util.TimeValue;
 import org.jboss.netty.buffer.ChannelBuffer;
+import org.jboss.netty.buffer.ChannelBufferInputStream;
 import org.jboss.netty.handler.codec.http.HttpHeaders;
 import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.jboss.netty.handler.codec.http.QueryStringDecoder;
+import java.io.InputStream;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -81,6 +83,16 @@ public class NettyHttpRequest implements HttpRequest {
 return request.getContent().readableBytes() > 0;
 }
+@Override public InputStream contentAsStream() {
+return new ChannelBufferInputStream(request.getContent());
+}
+@Override public byte[] contentAsBytes() {
+byte[] data = new byte[request.getContent().readableBytes()];
+request.getContent().getBytes(request.getContent().readerIndex(), data);
+return data;
+}
 @Override public String contentAsString() {
 UnicodeUtil.UTF16Result result = utf16Result.get();
 ChannelBuffer content = request.getContent();
@@ -144,6 +156,10 @@ public class NettyHttpRequest implements HttpRequest {
 return parseSizeValue(param(key), defaultValue);
 }
+@Override public boolean hasParam(String key) {
+return queryStringDecoder.getParameters().containsKey(key);
+}
 @Override public String param(String key) {
 List<String> keyParams = params(key);
 if (keyParams == null || keyParams.isEmpty()) {
@@ -226,9 +226,9 @@ public interface Engine extends IndexShardComponent {
 private final Analyzer analyzer;
 private final String type;
 private final String id;
-private final String source;
+private final byte[] source;
-public Create(Document document, Analyzer analyzer, String type, String id, String source) {
+public Create(Document document, Analyzer analyzer, String type, String id, byte[] source) {
 this.document = document;
 this.analyzer = analyzer;
 this.type = type;
@@ -252,7 +252,7 @@ public interface Engine extends IndexShardComponent {
 return this.analyzer;
 }
-public String source() {
+public byte[] source() {
 return this.source;
 }
 }
@@ -263,9 +263,9 @@ public interface Engine extends IndexShardComponent {
 private final Analyzer analyzer;
 private final String type;
 private final String id;
-private final String source;
+private final byte[] source;
-public Index(Term uid, Document document, Analyzer analyzer, String type, String id, String source) {
+public Index(Term uid, Document document, Analyzer analyzer, String type, String id, byte[] source) {
 this.uid = uid;
 this.document = document;
 this.analyzer = analyzer;
@@ -294,7 +294,7 @@ public interface Engine extends IndexShardComponent {
 return this.type;
 }
-public String source() {
+public byte[] source() {
 return this.source;
 }
 }
@@ -77,12 +77,12 @@ public interface DocumentMapper {
 * <p>Validates that the source has the provided id and type. Note, most times
 * we will already have the id and the type even though they exist in the source as well.
 */
-ParsedDocument parse(@Nullable String type, @Nullable String id, String source) throws MapperParsingException;
+ParsedDocument parse(@Nullable String type, @Nullable String id, byte[] source) throws MapperParsingException;
 /**
 * Parses the source into the parsed document.
 */
-ParsedDocument parse(String source) throws MapperParsingException;
+ParsedDocument parse(byte[] source) throws MapperParsingException;
 /**
 * Adds a field mapper listener.
@@ -34,11 +34,11 @@ public class ParsedDocument {
 private final Document document;
-private final String source;
+private final byte[] source;
 private boolean mappersAdded;
-public ParsedDocument(String uid, String id, String type, Document document, String source, boolean mappersAdded) {
+public ParsedDocument(String uid, String id, String type, Document document, byte[] source, boolean mappersAdded) {
 this.uid = uid;
 this.id = id;
 this.type = type;
@@ -63,7 +63,7 @@ public class ParsedDocument {
 return this.document;
 }
-public String source() {
+public byte[] source() {
 return this.source;
 }
@@ -30,7 +30,7 @@ import org.elasticsearch.util.concurrent.ThreadSafe;
 * @author kimchy (Shay Banon)
 */
 @ThreadSafe
-public interface SourceFieldMapper extends FieldMapper<String>, InternalMapper {
+public interface SourceFieldMapper extends FieldMapper<byte[]>, InternalMapper {
 public final String NAME = StringHelper.intern("_source");
@@ -39,7 +39,7 @@ public interface SourceFieldMapper extends FieldMapper<String>, InternalMapper {
 */
 boolean enabled();
-String value(Document document);
+byte[] value(Document document);
 /**
 * A field selector that loads just the source field.
@@ -27,9 +27,9 @@ import org.codehaus.jackson.JsonToken;
 import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.util.Nullable;
 import org.elasticsearch.util.Preconditions;
-import org.elasticsearch.util.io.FastStringReader;
 import org.elasticsearch.util.json.Jackson;
 import org.elasticsearch.util.json.JsonBuilder;
+import org.elasticsearch.util.json.StringJsonBuilder;
 import org.elasticsearch.util.json.ToJson;
 import java.io.IOException;
@@ -247,11 +247,11 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
 return this.fieldMappers;
 }
-@Override public ParsedDocument parse(String source) {
+@Override public ParsedDocument parse(byte[] source) {
 return parse(null, null, source);
 }
-@Override public ParsedDocument parse(String type, String id, String source) {
+@Override public ParsedDocument parse(String type, String id, byte[] source) {
 JsonParseContext jsonContext = cache.get();
 if (type != null && !type.equals(this.type)) {
@@ -261,7 +261,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
 JsonParser jp = null;
 try {
-jp = jsonFactory.createJsonParser(new FastStringReader(source));
+jp = jsonFactory.createJsonParser(source);
 jsonContext.reset(jp, new Document(), type, source);
 // will result in JsonToken.START_OBJECT
@@ -354,7 +354,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
 @Override public String buildSource() throws FailedToGenerateSourceMapperException {
 try {
-JsonBuilder builder = jsonBuilder().prettyPrint();
+StringJsonBuilder builder = stringJsonBuilder().prettyPrint();
 builder.startObject();
 toJson(builder, ToJson.EMPTY_PARAMS);
 builder.endObject();
@@ -29,9 +29,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.util.io.FastStringReader;
-import org.elasticsearch.util.io.compression.GZIPCompressor;
-import org.elasticsearch.util.io.compression.LzfCompressor;
-import org.elasticsearch.util.io.compression.ZipCompressor;
 import org.elasticsearch.util.joda.FormatDateTimeFormatter;
 import org.elasticsearch.util.joda.Joda;
 import org.elasticsearch.util.json.Jackson;
@@ -182,20 +179,20 @@ public class JsonDocumentMapperParser implements DocumentMapperParser {
 Map.Entry<String, JsonNode> entry = fieldsIt.next();
 String fieldName = entry.getKey();
 JsonNode fieldNode = entry.getValue();
-if (fieldName.equals("compressionThreshold")) {
-builder.compressionThreshold(fieldNode.getNumberValue().intValue());
-} else if (fieldName.equals("compressionType")) {
-String compressionType = fieldNode.getTextValue();
-if ("zip".equals(compressionType)) {
-builder.compressor(new ZipCompressor());
-} else if ("gzip".equals(compressionType)) {
-builder.compressor(new GZIPCompressor());
-} else if ("lzf".equals(compressionType)) {
-builder.compressor(new LzfCompressor());
-} else {
-throw new MapperParsingException("No compressor registed under [" + compressionType + "]");
-}
-}
+// if (fieldName.equals("compressionThreshold")) {
+// builder.compressionThreshold(fieldNode.getNumberValue().intValue());
+// } else if (fieldName.equals("compressionType")) {
+// String compressionType = fieldNode.getTextValue();
+// if ("zip".equals(compressionType)) {
+// builder.compressor(new ZipCompressor());
+// } else if ("gzip".equals(compressionType)) {
+// builder.compressor(new GZIPCompressor());
+// } else if ("lzf".equals(compressionType)) {
+// builder.compressor(new LzfCompressor());
+// } else {
+// throw new MapperParsingException("No compressor registed under [" + compressionType + "]");
+// }
+// }
 }
 return builder;
 }
@@ -39,7 +39,7 @@ public class JsonParseContext {
 private String type;
-private String source;
+private byte[] source;
 private String id;
@@ -56,7 +56,7 @@ public class JsonParseContext {
 this.path = path;
 }
-public void reset(JsonParser jsonParser, Document document, String type, String source) {
+public void reset(JsonParser jsonParser, Document document, String type, byte[] source) {
 this.jsonParser = jsonParser;
 this.document = document;
 this.type = type;
@@ -78,7 +78,7 @@ public class JsonParseContext {
 return this.type;
 }
-public String source() {
+public byte[] source() {
 return this.source;
 }
@@ -20,10 +20,7 @@
 package org.elasticsearch.index.mapper.json;
 import org.apache.lucene.document.*;
-import org.elasticsearch.index.mapper.MapperCompressionException;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
-import org.elasticsearch.util.io.compression.Compressor;
-import org.elasticsearch.util.io.compression.ZipCompressor;
 import org.elasticsearch.util.json.JsonBuilder;
 import org.elasticsearch.util.lucene.Lucene;
@@ -32,7 +29,7 @@ import java.io.IOException;
 /**
 * @author kimchy (Shay Banon)
 */
-public class JsonSourceFieldMapper extends JsonFieldMapper<String> implements SourceFieldMapper {
+public class JsonSourceFieldMapper extends JsonFieldMapper<byte[]> implements SourceFieldMapper {
 public static final String JSON_TYPE = "sourceField";
@@ -43,18 +40,12 @@ public class JsonSourceFieldMapper extends JsonFieldMapper<String> implements So
 public static final Field.Store STORE = Field.Store.YES;
 public static final boolean OMIT_NORMS = true;
 public static final boolean OMIT_TERM_FREQ_AND_POSITIONS = true;
-public static final Compressor COMPRESSOR = new ZipCompressor();
-public static final int NO_COMPRESSION = -1;
 }
 public static class Builder extends JsonMapper.Builder<Builder, JsonSourceFieldMapper> {
 private boolean enabled = Defaults.ENABLED;
-private Compressor compressor = Defaults.COMPRESSOR;
-private int compressionThreshold = Defaults.NO_COMPRESSION;
 public Builder() {
 super(Defaults.NAME);
 }
@@ -65,28 +56,13 @@ public class JsonSourceFieldMapper extends JsonFieldMapper<String> implements So
 // return this;
 // }
-public Builder compressor(Compressor compressor) {
-this.compressor = compressor;
-return this;
-}
-public Builder compressionThreshold(int compressionThreshold) {
-this.compressionThreshold = compressionThreshold;
-return this;
-}
 @Override public JsonSourceFieldMapper build(BuilderContext context) {
-return new JsonSourceFieldMapper(name, enabled, compressionThreshold, compressor);
+return new JsonSourceFieldMapper(name, enabled);
 }
 }
 private final boolean enabled;
-private final Compressor compressor;
-// the size of the source file that we will perform compression for
-private final int compressionThreshold;
 private final SourceFieldSelector fieldSelector;
 protected JsonSourceFieldMapper() {
@@ -94,15 +70,9 @@ public class JsonSourceFieldMapper extends JsonFieldMapper<String> implements So
 }
 protected JsonSourceFieldMapper(String name, boolean enabled) {
-this(name, enabled, Defaults.NO_COMPRESSION, Defaults.COMPRESSOR);
-}
-protected JsonSourceFieldMapper(String name, boolean enabled, int compressionThreshold, Compressor compressor) {
 super(new Names(name, name, name, name), Defaults.INDEX, Defaults.STORE, Defaults.TERM_VECTOR, Defaults.BOOST,
 Defaults.OMIT_NORMS, Defaults.OMIT_TERM_FREQ_AND_POSITIONS, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER);
 this.enabled = enabled;
-this.compressionThreshold = compressionThreshold;
-this.compressor = compressor;
 this.fieldSelector = new SourceFieldSelector(names.indexName());
 }
@@ -118,41 +88,20 @@ public class JsonSourceFieldMapper extends JsonFieldMapper<String> implements So
 if (!enabled) {
 return null;
 }
-Field sourceField;
-if (compressionThreshold == Defaults.NO_COMPRESSION || jsonContext.source().length() < compressionThreshold) {
-sourceField = new Field(names.indexName(), jsonContext.source(), store, index);
-} else {
-try {
-sourceField = new Field(names.indexName(), compressor.compressString(jsonContext.source()), store);
-} catch (IOException e) {
-throw new MapperCompressionException("Failed to compress data", e);
-}
-}
-return sourceField;
+return new Field(names.indexName(), jsonContext.source(), store);
 }
-@Override public String value(Document document) {
+@Override public byte[] value(Document document) {
 Fieldable field = document.getFieldable(names.indexName());
 return field == null ? null : value(field);
 }
-@Override public String value(Fieldable field) {
-if (field.stringValue() != null) {
-return field.stringValue();
-}
-byte[] compressed = field.getBinaryValue();
-if (compressed == null) {
-return null;
-}
-try {
-return compressor.decompressString(compressed);
-} catch (IOException e) {
-throw new MapperCompressionException("Failed to decompress data", e);
-}
+@Override public byte[] value(Fieldable field) {
+return field.getBinaryValue();
 }
 @Override public String valueAsString(Fieldable field) {
-return value(field);
+throw new UnsupportedOperationException();
 }
 @Override public String indexedValue(String value) {
@@ -30,6 +30,8 @@ public interface IndexQueryParser extends IndexComponent {
 String name();
+Query parse(byte[] source) throws ElasticSearchException;
 Query parse(String source) throws ElasticSearchException;
 Query parse(QueryBuilder queryBuilder) throws ElasticSearchException;
@@ -19,10 +19,16 @@
 package org.elasticsearch.index.query;
+import org.elasticsearch.util.io.FastCharArrayWriter;
 /**
-* @author kimchy (Shay Banon)
+* @author kimchy (shay.banon)
 */
 public interface QueryBuilder {
-String build() throws QueryBuilderException;
+String buildAsString() throws QueryBuilderException;
+FastCharArrayWriter buildAsUnsafeChars() throws QueryBuilderException;
+byte[] buildAsBytes() throws QueryBuilderException;
 }
@@ -20,7 +20,10 @@
 package org.elasticsearch.index.query.json;
 import org.elasticsearch.index.query.QueryBuilderException;
+import org.elasticsearch.util.io.FastCharArrayWriter;
+import org.elasticsearch.util.json.BinaryJsonBuilder;
 import org.elasticsearch.util.json.JsonBuilder;
+import org.elasticsearch.util.json.StringJsonBuilder;
 import java.io.IOException;
@@ -29,9 +32,9 @@ import java.io.IOException;
 */
 public abstract class BaseJsonQueryBuilder implements JsonQueryBuilder {
-@Override public String build() throws QueryBuilderException {
+@Override public String buildAsString() throws QueryBuilderException {
 try {
-JsonBuilder builder = JsonBuilder.jsonBuilder();
+StringJsonBuilder builder = JsonBuilder.stringJsonBuilder();
 toJson(builder, EMPTY_PARAMS);
 return builder.string();
 } catch (Exception e) {
@@ -39,6 +42,26 @@ public abstract class BaseJsonQueryBuilder implements JsonQueryBuilder {
 }
 }
+@Override public FastCharArrayWriter buildAsUnsafeChars() throws QueryBuilderException {
+try {
+StringJsonBuilder builder = JsonBuilder.stringJsonBuilder();
+toJson(builder, EMPTY_PARAMS);
+return builder.unsafeChars();
+} catch (Exception e) {
+throw new QueryBuilderException("Failed to build query", e);
+}
+}
+@Override public byte[] buildAsBytes() throws QueryBuilderException {
+try {
+BinaryJsonBuilder builder = JsonBuilder.binaryJsonBuilder();
+toJson(builder, EMPTY_PARAMS);
+return builder.copiedBytes();
+} catch (Exception e) {
+throw new QueryBuilderException("Failed to build query", e);
+}
+}
 @Override public void toJson(JsonBuilder builder, Params params) throws IOException {
 builder.startObject();
 doJson(builder, params);
@@ -35,6 +35,8 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParsingException;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.util.Nullable;
+import org.elasticsearch.util.io.FastCharArrayReader;
+import org.elasticsearch.util.io.FastCharArrayWriter;
 import org.elasticsearch.util.io.FastStringReader;
 import org.elasticsearch.util.json.Jackson;
 import org.elasticsearch.util.settings.Settings;
@@ -119,14 +121,51 @@ public class JsonIndexQueryParser extends AbstractIndexComponent implements Inde
 }
 @Override public Query parse(QueryBuilder queryBuilder) throws ElasticSearchException {
-return parse(queryBuilder.build());
+JsonParser jp = null;
+try {
+FastCharArrayWriter unsafeChars = queryBuilder.buildAsUnsafeChars();
+jp = jsonFactory.createJsonParser(new FastCharArrayReader(unsafeChars.unsafeCharArray(), 0, unsafeChars.size()));
+return parse(cache.get(), jp);
+} catch (QueryParsingException e) {
+throw e;
+} catch (Exception e) {
+throw new QueryParsingException(index, "Failed to parse", e);
+} finally {
+if (jp != null) {
+try {
+jp.close();
+} catch (IOException e) {
+// ignore
+}
+}
+}
 }
+@Override public Query parse(byte[] source) throws ElasticSearchException {
+JsonParser jp = null;
+try {
+jp = jsonFactory.createJsonParser(source);
+return parse(cache.get(), jp);
+} catch (QueryParsingException e) {
+throw e;
+} catch (Exception e) {
+throw new QueryParsingException(index, "Failed to parse", e);
+} finally {
+if (jp != null) {
+try {
+jp.close();
+} catch (IOException e) {
+// ignore
+}
+}
+}
+}
 @Override public Query parse(String source) throws QueryParsingException {
 JsonParser jp = null;
 try {
 jp = jsonFactory.createJsonParser(new FastStringReader(source));
-return parse(cache.get(), source, jp);
+return parse(cache.get(), jp);
 } catch (QueryParsingException e) {
 throw e;
 } catch (Exception e) {
@@ -142,15 +181,15 @@ public class JsonIndexQueryParser extends AbstractIndexComponent implements Inde
 }
 }
-public Query parse(JsonParser jsonParser, String source) {
+public Query parse(JsonParser jsonParser) {
 try {
-return parse(cache.get(), source, jsonParser);
+return parse(cache.get(), jsonParser);
 } catch (IOException e) {
-throw new QueryParsingException(index, "Failed to parse [" + source + "]", e);
+throw new QueryParsingException(index, "Failed to parse", e);
 }
 }
-private Query parse(JsonQueryParseContext parseContext, String source, JsonParser jsonParser) throws IOException, QueryParsingException {
+private Query parse(JsonQueryParseContext parseContext, JsonParser jsonParser) throws IOException, QueryParsingException {
 parseContext.reset(jsonParser);
 return parseContext.parseInnerQuery();
 }
@@ -45,9 +45,9 @@ public interface IndexShard extends IndexShardComponent {
 */
 SizeValue estimateFlushableMemorySize() throws ElasticSearchException;
-ParsedDocument create(String type, String id, String source) throws ElasticSearchException;
+ParsedDocument create(String type, String id, byte[] source) throws ElasticSearchException;
-ParsedDocument index(String type, String id, String source) throws ElasticSearchException;
+ParsedDocument index(String type, String id, byte[] source) throws ElasticSearchException;
 void delete(String type, String id);
@@ -55,7 +55,7 @@ public interface IndexShard extends IndexShardComponent {
 void deleteByQuery(String querySource, @Nullable String queryParserName, String... types) throws ElasticSearchException;
-String get(String type, String id) throws ElasticSearchException;
+byte[] get(String type, String id) throws ElasticSearchException;
 long count(float minScore, String querySource, @Nullable String queryParserName, String... types) throws ElasticSearchException;
@@ -76,11 +76,6 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
 private final Translog translog;
-// the number of docs to sniff for mapping information in each type
-private final int mappingSnifferDocs;
 private final Object mutex = new Object();
 private volatile IndexShardState state;
@@ -100,8 +95,6 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
 this.queryParserService = queryParserService;
 this.filterCache = filterCache;
 state = IndexShardState.CREATED;
-this.mappingSnifferDocs = componentSettings.getAsInt("mappingSnifferDocs", 100);
 }
 public Store store() {
@@ -204,12 +197,12 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
 return engine.estimateFlushableMemorySize();
 }
-public ParsedDocument create(String type, String id, String source) throws ElasticSearchException {
+public ParsedDocument create(String type, String id, byte[] source) throws ElasticSearchException {
 writeAllowed();
 return innerCreate(type, id, source);
 }
-private ParsedDocument innerCreate(String type, String id, String source) {
+private ParsedDocument innerCreate(String type, String id, byte[] source) {
 DocumentMapper docMapper = mapperService.type(type);
 if (docMapper == null) {
 throw new DocumentMapperNotFoundException("No mapper found for type [" + type + "]");
@@ -222,12 +215,12 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
 return doc;
 }
-public ParsedDocument index(String type, String id, String source) throws ElasticSearchException {
+public ParsedDocument index(String type, String id, byte[] source) throws ElasticSearchException {
 writeAllowed();
 return innerIndex(type, id, source);
 }
-private ParsedDocument innerIndex(String type, String id, String source) {
+private ParsedDocument innerIndex(String type, String id, byte[] source) {
 DocumentMapper docMapper = mapperService.type(type);
 if (docMapper == null) {
 throw new DocumentMapperNotFoundException("No mapper found for type [" + type + "]");
@@ -287,7 +280,7 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
 engine.delete(new Engine.DeleteByQuery(query, querySource, queryParserName, types));
 }
-public String get(String type, String id) throws ElasticSearchException {
+public byte[] get(String type, String id) throws ElasticSearchException {
 readAllowed();
 DocumentMapper docMapper = mapperService.type(type);
 if (docMapper == null) {
@@ -151,7 +151,7 @@ public interface Translog extends IndexShardComponent {
 static class Create implements Operation {
 private String id;
 private String type;
-private String source;
+private byte[] source;
 public Create() {
 }
@@ -160,7 +160,7 @@ public interface Translog extends IndexShardComponent {
 this(create.type(), create.id(), create.source());
 }
-public Create(String type, String id, String source) {
+public Create(String type, String id, byte[] source) {
 this.id = id;
 this.type = type;
 this.source = source;
@@ -171,14 +171,14 @@ public interface Translog extends IndexShardComponent {
 }
 @Override public long estimateSize() {
-return ((id.length() + type.length() + source.length()) * 2) + 12;
+return ((id.length() + type.length()) * 2) + source.length + 12;
 }
 public String id() {
 return this.id;
 }
-public String source() {
+public byte[] source() {
 return this.source;
 }
@@ -193,20 +193,22 @@ public interface Translog extends IndexShardComponent {
 @Override public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
 id = in.readUTF();
 type = in.readUTF();
-source = in.readUTF();
+source = new byte[in.readInt()];
+in.readFully(source);
 }
 @Override public void writeTo(DataOutput out) throws IOException {
 out.writeUTF(id);
 out.writeUTF(type);
-out.writeUTF(source);
+out.writeInt(source.length);
+out.write(source);
 }
 }
 static class Index implements Operation {
 private String id;
 private String type;
-private String source;
+private byte[] source;
 public Index() {
 }
@@ -215,7 +217,7 @@ public interface Translog extends IndexShardComponent {
 this(index.type(), index.id(), index.source());
 }
-public Index(String type, String id, String source) {
+public Index(String type, String id, byte[] source) {
 this.type = type;
 this.id = id;
 this.source = source;
@@ -226,7 +228,7 @@ public interface Translog extends IndexShardComponent {
 }
 @Override public long estimateSize() {
-return ((id.length() + type.length() + source.length()) * 2) + 12;
+return ((id.length() + type.length()) * 2) + source.length + 12;
 }
 public String type() {
@@ -237,7 +239,7 @@ public interface Translog extends IndexShardComponent {
 return this.id;
 }
-public String source() {
+public byte[] source() {
 return this.source;
 }
@@ -248,13 +250,15 @@ public interface Translog extends IndexShardComponent {
 @Override public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
 id = in.readUTF();
 type = in.readUTF();
-source = in.readUTF();
+source = new byte[in.readInt()];
+in.readFully(source);
 }
 @Override public void writeTo(DataOutput out) throws IOException {
 out.writeUTF(id);
 out.writeUTF(type);
-out.writeUTF(source);
+out.writeInt(source.length);
+out.write(source);
 }
 }
@@ -25,20 +25,17 @@ import org.elasticsearch.util.json.JsonBuilder;
 import java.io.IOException;
 /**
-* @author kimchy (Shay Banon)
+* @author kimchy (shay.banon)
 */
-public class JsonRestResponse extends Utf8RestResponse {
+public class JsonRestResponse extends AbstractRestResponse {
-private static ThreadLocal<UnicodeUtil.UTF8Result> cache = new ThreadLocal<UnicodeUtil.UTF8Result>() {
-@Override protected UnicodeUtil.UTF8Result initialValue() {
-return new UnicodeUtil.UTF8Result();
-}
-};
-private static final UnicodeUtil.UTF8Result END_JSONP = new UnicodeUtil.UTF8Result();
+private static final byte[] END_JSONP;
 static {
-UnicodeUtil.UTF16toUTF8(");", 0, ");".length(), END_JSONP);
+UnicodeUtil.UTF8Result U_END_JSONP = new UnicodeUtil.UTF8Result();
+UnicodeUtil.UTF16toUTF8(");", 0, ");".length(), U_END_JSONP);
+END_JSONP = new byte[U_END_JSONP.length];
+System.arraycopy(U_END_JSONP.result, 0, END_JSONP, 0, U_END_JSONP.length);
 }
 private static ThreadLocal<UnicodeUtil.UTF8Result> prefixCache = new ThreadLocal<UnicodeUtil.UTF8Result>() {
@@ -47,26 +44,70 @@ public class JsonRestResponse extends Utf8RestResponse {
 }
 };
+private final UnicodeUtil.UTF8Result prefixUtf8Result;
+private final Status status;
+private final JsonBuilder jsonBuilder;
 public JsonRestResponse(RestRequest request, Status status) {
-super(status, EMPTY, startJsonp(request), endJsonp(request));
+this.jsonBuilder = null;
+this.status = status;
+this.prefixUtf8Result = startJsonp(request);
 }
 public JsonRestResponse(RestRequest request, Status status, JsonBuilder jsonBuilder) throws IOException {
-super(status, jsonBuilder.utf8(), startJsonp(request), endJsonp(request));
-}
-public JsonRestResponse(RestRequest request, Status status, String source) throws IOException {
-super(status, convert(source), startJsonp(request), endJsonp(request));
+this.jsonBuilder = jsonBuilder;
+this.status = status;
+this.prefixUtf8Result = startJsonp(request);
 }
 @Override public String contentType() {
 return "application/json; charset=UTF-8";
 }
-private static UnicodeUtil.UTF8Result convert(String content) {
-UnicodeUtil.UTF8Result result = cache.get();
-UnicodeUtil.UTF16toUTF8(content, 0, content.length(), result);
-return result;
+@Override public boolean contentThreadSafe() {
+return false;
 }
+@Override public byte[] content() throws IOException {
+return jsonBuilder.unsafeBytes();
+}
+@Override public int contentLength() throws IOException {
+return jsonBuilder.unsafeBytesLength();
+}
+@Override public Status status() {
+return this.status;
+}
+@Override public byte[] prefixContent() {
+if (prefixUtf8Result != null) {
+return prefixUtf8Result.result;
+}
+return null;
+}
+@Override public int prefixContentLength() {
+if (prefixUtf8Result != null) {
+return prefixUtf8Result.length;
+}
+return -1;
+}
+@Override public byte[] suffixContent() {
+if (prefixUtf8Result != null) {
+return END_JSONP;
+}
+return null;
+}
+@Override public int suffixContentLength() {
+if (prefixUtf8Result != null) {
+return END_JSONP.length;
+}
+return -1;
+}
 private static UnicodeUtil.UTF8Result startJsonp(RestRequest request) {
@@ -80,13 +121,4 @@ public class JsonRestResponse extends Utf8RestResponse {
 result.length++;
 return result;
 }
-private static UnicodeUtil.UTF8Result endJsonp(RestRequest request) {
-String callback = request.param("callback");
-if (callback == null) {
-return null;
-}
-return END_JSONP;
-}
 }
@@ -40,7 +40,7 @@ public class JsonThrowableRestResponse extends JsonRestResponse {
 }
 private static JsonBuilder convert(RestRequest request, Throwable t) throws IOException {
-JsonBuilder builder = jsonBuilder().prettyPrint()
+JsonBuilder builder = binaryJsonBuilder().prettyPrint()
 .startObject().field("error", detailedMessage(t));
 if (t != null && request.paramAsBoolean("errorTrace", false)) {
 builder.startObject("errorTrace");
@@ -23,6 +23,7 @@ import org.elasticsearch.util.SizeValue;
 import org.elasticsearch.util.TimeValue;
 import org.elasticsearch.util.json.ToJson;
+import java.io.InputStream;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -42,6 +43,10 @@ public interface RestRequest extends ToJson.Params {
 boolean hasContent();
+InputStream contentAsStream();
+byte[] contentAsBytes();
 String contentAsString();
 Set<String> headerNames();
@@ -52,6 +57,8 @@ public interface RestRequest extends ToJson.Params {
 String cookie();
+boolean hasParam(String key);
 String param(String key);
 float paramAsFloat(String key, float defaultValue);
@@ -19,6 +19,8 @@
 package org.elasticsearch.rest;
+import java.io.IOException;
 /**
 * @author kimchy (Shay Banon)
 */
@@ -495,12 +497,12 @@ public interface RestResponse {
 * Returns the actual content. Note, use {@link #contentLength()} in order to know the
 * content length of the byte array.
 */
-byte[] content();
+byte[] content() throws IOException;
 /**
 * The content length.
 */
-int contentLength();
+int contentLength() throws IOException;
 byte[] prefixContent();
@@ -54,7 +54,7 @@ public class RestNodesInfoAction extends BaseRestHandler {
 client.admin().cluster().execNodesInfo(nodesInfoRequest, new ActionListener<NodesInfoResponse>() {
 @Override public void onResponse(NodesInfoResponse result) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("clusterName", result.clusterName().value());
@@ -59,7 +59,7 @@ public class RestBroadcastPingAction extends BaseRestHandler {
 client.admin().cluster().execPing(broadcastPingRequest, new ActionListener<BroadcastPingResponse>() {
 @Override public void onResponse(BroadcastPingResponse response) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("ok", true);
 buildBroadcastShardsHeader(builder, response);
@@ -54,7 +54,7 @@ public class RestReplicationPingAction extends BaseRestHandler {
 client.admin().cluster().execPing(replicationPingRequest, new ActionListener<ReplicationPingResponse>() {
 @Override public void onResponse(ReplicationPingResponse result) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("ok", true);
 for (IndexReplicationPingResponse indexResponse : result.indices().values()) {
@@ -53,7 +53,7 @@ public class RestSinglePingAction extends BaseRestHandler {
 client.admin().cluster().execPing(singlePingRequest, new ActionListener<SinglePingResponse>() {
 @Override public void onResponse(SinglePingResponse result) {
 try {
-JsonBuilder generator = RestJsonBuilder.cached(request);
+JsonBuilder generator = RestJsonBuilder.restJsonBuilder(request);
 generator.startObject().field("ok", true).endObject();
 channel.sendResponse(new JsonRestResponse(request, OK, generator));
 } catch (Exception e) {
@@ -54,7 +54,7 @@ public class RestClusterStateAction extends BaseRestHandler {
 @Override public void onResponse(ClusterStateResponse response) {
 try {
 ClusterState state = response.state();
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 // meta data
@@ -70,7 +70,7 @@ public class RestCreateIndexAction extends BaseRestHandler {
 client.admin().indices().execCreate(createIndexRequest, new ActionListener<CreateIndexResponse>() {
 @Override public void onResponse(CreateIndexResponse result) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject()
 .field("ok", true)
 .endObject();
@@ -84,7 +84,8 @@ public class RestCreateIndexAction extends BaseRestHandler {
 try {
 Throwable t = unwrapCause(e);
 if (t instanceof IndexAlreadyExistsException || t instanceof InvalidIndexNameException) {
-channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, JsonBuilder.jsonBuilder().startObject().field("error", t.getMessage()).endObject()));
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
+channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", t.getMessage()).endObject()));
 } else {
 channel.sendResponse(new JsonThrowableRestResponse(request, e));
 }
@@ -35,7 +35,6 @@ import java.io.IOException;
 import static org.elasticsearch.rest.RestResponse.Status.*;
 import static org.elasticsearch.util.TimeValue.*;
-import static org.elasticsearch.util.json.JsonBuilder.*;
 /**
 * @author kimchy (Shay Banon)
@@ -53,7 +52,8 @@ public class RestDeleteIndexAction extends BaseRestHandler {
 client.admin().indices().execDelete(deleteIndexRequest, new ActionListener<DeleteIndexResponse>() {
 @Override public void onResponse(DeleteIndexResponse result) {
 try {
-channel.sendResponse(new JsonRestResponse(request, OK, jsonBuilder().startObject().field("ok", true).endObject()));
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
+channel.sendResponse(new JsonRestResponse(request, OK, builder.startObject().field("ok", true).endObject()));
 } catch (IOException e) {
 onFailure(e);
 }
@@ -62,7 +62,7 @@ public class RestDeleteIndexAction extends BaseRestHandler {
 @Override public void onFailure(Throwable e) {
 try {
 if (ExceptionsHelper.unwrapCause(e) instanceof IndexMissingException) {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject()
 .field("ok", true)
 .endObject();
@@ -62,7 +62,7 @@ public class RestFlushAction extends BaseRestHandler {
 client.admin().indices().execFlush(flushRequest, new ActionListener<FlushResponse>() {
 @Override public void onResponse(FlushResponse response) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("ok", true);
@@ -55,7 +55,7 @@ public class RestGatewaySnapshotAction extends BaseRestHandler {
 client.admin().indices().execGatewaySnapshot(gatewaySnapshotRequest, new ActionListener<GatewaySnapshotResponse>() {
 @Override public void onResponse(GatewaySnapshotResponse result) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("ok", true);
 builder.startObject("indices");
@@ -60,7 +60,7 @@ public class RestPutMappingAction extends BaseRestHandler {
 client.admin().indices().execPutMapping(putMappingRequest, new ActionListener<PutMappingResponse>() {
 @Override public void onResponse(PutMappingResponse response) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject()
 .field("ok", true)
 .field("acknowledged", response.acknowledged());
@@ -73,9 +73,10 @@ public class RestPutMappingAction extends BaseRestHandler {
 @Override public void onFailure(Throwable e) {
 try {
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 Throwable t = unwrapCause(e);
 if (t instanceof IndexMissingException || t instanceof InvalidTypeNameException) {
-channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, JsonBuilder.jsonBuilder().startObject().field("error", t.getMessage()).endObject()));
+channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", t.getMessage()).endObject()));
 } else {
 channel.sendResponse(new JsonThrowableRestResponse(request, e));
 }
@@ -67,7 +67,8 @@ public class RestOptimizeAction extends BaseRestHandler {
 optimizeRequest.operationThreading(operationThreading);
 } catch (Exception e) {
 try {
-channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, JsonBuilder.jsonBuilder().startObject().field("error", e.getMessage()).endObject()));
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
+channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject()));
 } catch (IOException e1) {
 logger.error("Failed to send failure response", e1);
 }
@@ -76,7 +77,7 @@ public class RestOptimizeAction extends BaseRestHandler {
 client.admin().indices().execOptimize(optimizeRequest, new ActionListener<OptimizeResponse>() {
 @Override public void onResponse(OptimizeResponse response) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("ok", true);
@@ -61,7 +61,7 @@ public class RestRefreshAction extends BaseRestHandler {
 client.admin().indices().execRefresh(refreshRequest, new ActionListener<RefreshResponse>() {
 @Override public void onResponse(RefreshResponse response) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("ok", true);
@@ -60,7 +60,7 @@ public class RestIndicesStatusAction extends BaseRestHandler {
 client.admin().indices().execStatus(indicesStatusRequest, new ActionListener<IndicesStatusResponse>() {
 @Override public void onResponse(IndicesStatusResponse response) {
 try {
-JsonBuilder builder = RestJsonBuilder.cached(request);
+JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
 builder.startObject();
 builder.field("ok", true);
@ -37,7 +37,6 @@ import static org.elasticsearch.action.count.CountRequest.*;
|
|||
import static org.elasticsearch.rest.RestRequest.Method.*;
|
||||
import static org.elasticsearch.rest.RestResponse.Status.*;
|
||||
import static org.elasticsearch.rest.action.support.RestActions.*;
|
||||
import static org.elasticsearch.util.json.JsonBuilder.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
|
@ -75,7 +74,8 @@ public class RestCountAction extends BaseRestHandler {
|
|||
}
|
||||
} catch (Exception e) {
|
||||
try {
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, jsonBuilder().startObject().field("error", e.getMessage()).endObject()));
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject()));
|
||||
} catch (IOException e1) {
|
||||
logger.error("Failed to send failure response", e1);
|
||||
}
|
||||
|
@@ -85,7 +85,7 @@ public class RestCountAction extends BaseRestHandler {
|
|||
client.execCount(countRequest, new ActionListener<CountResponse>() {
|
||||
@Override public void onResponse(CountResponse response) {
|
||||
try {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request);
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
builder.startObject();
|
||||
builder.field("count", response.count());
@@ -54,7 +54,7 @@ public class RestDeleteAction extends BaseRestHandler {
|
|||
client.execDelete(deleteRequest, new ActionListener<DeleteResponse>() {
|
||||
@Override public void onResponse(DeleteResponse result) {
|
||||
try {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request);
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
builder.startObject()
|
||||
.field("ok", true)
|
||||
.field("_index", result.index())
@@ -63,7 +63,8 @@ public class RestDeleteByQueryAction extends BaseRestHandler {
|
|||
deleteByQueryRequest.timeout(request.paramAsTime("timeout", ShardDeleteByQueryRequest.DEFAULT_TIMEOUT));
|
||||
} catch (Exception e) {
|
||||
try {
|
||||
channel.sendResponse(new JsonRestResponse(request, PRECONDITION_FAILED, JsonBuilder.jsonBuilder().startObject().field("error", e.getMessage()).endObject()));
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
channel.sendResponse(new JsonRestResponse(request, PRECONDITION_FAILED, builder.startObject().field("error", e.getMessage()).endObject()));
|
||||
} catch (IOException e1) {
|
||||
logger.error("Failed to send failure response", e1);
|
||||
}
|
||||
|
@@ -72,7 +73,7 @@ public class RestDeleteByQueryAction extends BaseRestHandler {
|
|||
client.execDeleteByQuery(deleteByQueryRequest, new ActionListener<DeleteByQueryResponse>() {
|
||||
@Override public void onResponse(DeleteByQueryResponse result) {
|
||||
try {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request);
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
builder.startObject().field("ok", true);
|
||||
|
||||
builder.startObject("_indices");
@@ -25,7 +25,6 @@ import org.elasticsearch.action.get.GetRequest;
|
|||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.rest.*;
|
||||
import org.elasticsearch.rest.action.support.RestJsonBuilder;
|
||||
import org.elasticsearch.util.json.JsonBuilder;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
|
||||
|
@@ -33,6 +32,7 @@ import java.io.IOException;
|
|||
|
||||
import static org.elasticsearch.rest.RestRequest.Method.*;
|
||||
import static org.elasticsearch.rest.RestResponse.Status.*;
|
||||
import static org.elasticsearch.rest.action.support.RestJsonBuilder.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
|
@@ -56,7 +56,7 @@ public class RestGetAction extends BaseRestHandler {
|
|||
if (result.empty()) {
|
||||
channel.sendResponse(new JsonRestResponse(request, NOT_FOUND));
|
||||
} else {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request);
|
||||
JsonBuilder builder = restJsonBuilder(request);
|
||||
builder.startObject();
|
||||
builder.field("_index", result.index());
|
||||
builder.field("_type", result.type());
@@ -46,7 +46,7 @@ public class RestIndexAction extends BaseRestHandler {
|
|||
}
|
||||
|
||||
@Override public void handleRequest(final RestRequest request, final RestChannel channel) {
|
||||
IndexRequest indexRequest = new IndexRequest(request.param("index"), request.param("type"), request.param("id"), request.contentAsString());
|
||||
IndexRequest indexRequest = new IndexRequest(request.param("index"), request.param("type"), request.param("id"), request.contentAsBytes());
|
||||
indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT));
|
||||
String sOpType = request.param("opType");
|
||||
if (sOpType != null) {
|
||||
|
@@ -56,7 +56,8 @@ public class RestIndexAction extends BaseRestHandler {
|
|||
indexRequest.opType(IndexRequest.OpType.CREATE);
|
||||
} else {
|
||||
try {
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, JsonBuilder.jsonBuilder().startObject().field("error", "opType [" + sOpType + "] not allowed, either [index] or [create] are allowed").endObject()));
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", "opType [" + sOpType + "] not allowed, either [index] or [create] are allowed").endObject()));
|
||||
} catch (IOException e1) {
|
||||
logger.warn("Failed to send response", e1);
|
||||
return;
|
||||
|
@@ -70,7 +71,7 @@ public class RestIndexAction extends BaseRestHandler {
|
|||
client.execIndex(indexRequest, new ActionListener<IndexResponse>() {
|
||||
@Override public void onResponse(IndexResponse result) {
|
||||
try {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request);
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
builder.startObject()
|
||||
.field("ok", true)
|
||||
.field("_index", result.index())
@@ -66,7 +66,7 @@ public class RestMainAction extends BaseRestHandler {
|
|||
|
||||
@Override public void handleRequest(RestRequest request, RestChannel channel) {
|
||||
try {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request).prettyPrint();
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request).prettyPrint();
|
||||
builder.startObject();
|
||||
builder.field("ok", true);
|
||||
if (settings.get("name") != null) {
@@ -82,7 +82,8 @@ public class RestSearchAction extends BaseRestHandler {
|
|||
searchRequest.operationThreading(operationThreading);
|
||||
} catch (Exception e) {
|
||||
try {
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, JsonBuilder.jsonBuilder().startObject().field("error", e.getMessage()).endObject()));
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject()));
|
||||
} catch (IOException e1) {
|
||||
logger.error("Failed to send failure response", e1);
|
||||
}
|
||||
|
@@ -91,7 +92,7 @@ public class RestSearchAction extends BaseRestHandler {
|
|||
client.execSearch(searchRequest, new ActionListener<SearchResponse>() {
|
||||
@Override public void onResponse(SearchResponse result) {
|
||||
try {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request);
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
builder.startObject();
|
||||
result.toJson(builder, request);
|
||||
builder.endObject();
@@ -91,7 +91,7 @@ public class RestActions {
|
|||
throw new ElasticSearchIllegalArgumentException("Unsupported defaultOperator [" + defaultOperator + "], can either be [OR] or [AND]");
|
||||
}
|
||||
}
|
||||
return queryBuilder.build();
|
||||
return queryBuilder.buildAsString();
|
||||
}
|
||||
|
||||
public static String[] splitIndices(String indices) {
@@ -20,17 +20,19 @@
|
|||
package org.elasticsearch.rest.action.support;
|
||||
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.util.json.JsonBuilder;
|
||||
import org.elasticsearch.util.json.BinaryJsonBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.util.json.JsonBuilder.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
*/
|
||||
public class RestJsonBuilder {
|
||||
|
||||
public static JsonBuilder cached(RestRequest request) throws IOException {
|
||||
JsonBuilder builder = JsonBuilder.jsonBuilder();
|
||||
public static BinaryJsonBuilder restJsonBuilder(RestRequest request) throws IOException {
|
||||
BinaryJsonBuilder builder = binaryJsonBuilder();
|
||||
if (request.paramAsBoolean("pretty", false)) {
|
||||
builder.prettyPrint();
|
||||
}
|
||||
|
|
|
@@ -100,7 +100,8 @@ public class RestTermsAction extends BaseRestHandler {
|
|||
termsRequest.sortType(TermsRequest.SortType.fromString(request.param("sort"), termsRequest.sortType()));
|
||||
} catch (Exception e) {
|
||||
try {
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, JsonBuilder.jsonBuilder().startObject().field("error", e.getMessage()).endObject()));
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject()));
|
||||
} catch (IOException e1) {
|
||||
logger.error("Failed to send failure response", e1);
|
||||
}
|
||||
|
@@ -111,7 +112,7 @@ public class RestTermsAction extends BaseRestHandler {
|
|||
client.execTerms(termsRequest, new ActionListener<TermsResponse>() {
|
||||
@Override public void onResponse(TermsResponse response) {
|
||||
try {
|
||||
JsonBuilder builder = RestJsonBuilder.cached(request);
|
||||
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
|
||||
builder.startObject();
|
||||
|
||||
buildBroadcastShardsHeader(builder, response);
@@ -36,7 +36,9 @@ public interface SearchHit extends Streamable, ToJson {
|
|||
|
||||
String type();
|
||||
|
||||
String source();
|
||||
byte[] source();
|
||||
|
||||
String sourceAsString();
|
||||
|
||||
Explanation explanation();
@@ -21,13 +21,14 @@ package org.elasticsearch.search.builder;
|
|||
|
||||
import org.elasticsearch.index.query.json.JsonQueryBuilder;
|
||||
import org.elasticsearch.search.SearchException;
|
||||
import org.elasticsearch.util.json.JsonBuilder;
|
||||
import org.elasticsearch.util.json.StringJsonBuilder;
|
||||
import org.elasticsearch.util.json.ToJson;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static com.google.common.collect.Lists.*;
|
||||
import static org.elasticsearch.util.json.JsonBuilder.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
|
@@ -126,7 +127,7 @@ public class SearchSourceBuilder {
|
|||
|
||||
public String build() throws SearchException {
|
||||
try {
|
||||
JsonBuilder builder = JsonBuilder.jsonBuilder();
|
||||
StringJsonBuilder builder = stringJsonBuilder();
|
||||
builder.startObject();
|
||||
|
||||
if (from != -1) {
|
||||
|
|
|
@@ -72,7 +72,7 @@ public class FacetsParseElement implements SearchParseElement {
|
|||
|
||||
if ("query".equals(facetType)) {
|
||||
JsonIndexQueryParser indexQueryParser = (JsonIndexQueryParser) context.queryParser();
|
||||
Query facetQuery = indexQueryParser.parse(jp, context.source());
|
||||
Query facetQuery = indexQueryParser.parse(jp);
|
||||
|
||||
if (queryFacets == null) {
|
||||
queryFacets = Lists.newArrayListWithCapacity(2);
|
||||
@@ -58,7 +58,7 @@ public class FetchPhase implements SearchPhase {
|
|||
|
||||
DocumentMapper documentMapper = context.mapperService().type(uid.type());
|
||||
|
||||
String source = extractSource(doc, documentMapper);
|
||||
byte[] source = extractSource(doc, documentMapper);
|
||||
|
||||
InternalSearchHit searchHit = new InternalSearchHit(uid.id(), uid.type(), source, null);
|
||||
hits[index] = searchHit;
|
||||
|
@@ -111,11 +111,11 @@ public class FetchPhase implements SearchPhase {
|
|||
}
|
||||
}
|
||||
|
||||
private String extractSource(Document doc, DocumentMapper documentMapper) {
|
||||
String source = null;
|
||||
private byte[] extractSource(Document doc, DocumentMapper documentMapper) {
|
||||
byte[] source = null;
|
||||
Fieldable sourceField = doc.getFieldable(documentMapper.sourceMapper().names().indexName());
|
||||
if (sourceField != null) {
|
||||
source = documentMapper.sourceMapper().valueAsString(sourceField);
|
||||
source = documentMapper.sourceMapper().value(sourceField);
|
||||
doc.removeField(documentMapper.sourceMapper().names().indexName());
|
||||
}
|
||||
return source;
@@ -25,6 +25,7 @@ import org.elasticsearch.search.SearchHit;
|
|||
import org.elasticsearch.search.SearchHitField;
|
||||
import org.elasticsearch.search.SearchShardTarget;
|
||||
import org.elasticsearch.util.Nullable;
|
||||
import org.elasticsearch.util.Unicode;
|
||||
import org.elasticsearch.util.json.JsonBuilder;
|
||||
|
||||
import java.io.DataInput;
|
||||
|
@@ -45,7 +46,7 @@ public class InternalSearchHit implements SearchHit {
|
|||
|
||||
private String type;
|
||||
|
||||
private String source;
|
||||
private byte[] source;
|
||||
|
||||
private Map<String, SearchHitField> fields;
|
||||
|
||||
|
@@ -57,7 +58,7 @@ public class InternalSearchHit implements SearchHit {
|
|||
|
||||
}
|
||||
|
||||
public InternalSearchHit(String id, String type, String source, Map<String, SearchHitField> fields) {
|
||||
public InternalSearchHit(String id, String type, byte[] source, Map<String, SearchHitField> fields) {
|
||||
this.id = id;
|
||||
this.type = type;
|
||||
this.source = source;
|
||||
|
@@ -68,19 +69,26 @@ public class InternalSearchHit implements SearchHit {
|
|||
return shard.index();
|
||||
}
|
||||
|
||||
public String id() {
|
||||
@Override public String id() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String type() {
|
||||
@Override public String type() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public String source() {
|
||||
@Override public byte[] source() {
|
||||
return source;
|
||||
}
|
||||
|
||||
public Map<String, SearchHitField> fields() {
|
||||
@Override public String sourceAsString() {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
return Unicode.fromBytes(source);
|
||||
}
|
||||
|
||||
@Override public Map<String, SearchHitField> fields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
|
@@ -88,7 +96,7 @@ public class InternalSearchHit implements SearchHit {
|
|||
this.fields = fields;
|
||||
}
|
||||
|
||||
public Explanation explanation() {
|
||||
@Override public Explanation explanation() {
|
||||
return explanation;
|
||||
}
@@ -169,13 +177,15 @@ public class InternalSearchHit implements SearchHit {
|
|||
@Override public void readFrom(DataInput in) throws IOException, ClassNotFoundException {
|
||||
id = in.readUTF();
|
||||
type = in.readUTF();
|
||||
if (in.readBoolean()) {
|
||||
source = in.readUTF();
|
||||
int size = in.readInt();
|
||||
if (size > 0) {
|
||||
source = new byte[size];
|
||||
in.readFully(source);
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
explanation = readExplanation(in);
|
||||
}
|
||||
int size = in.readInt();
|
||||
size = in.readInt();
|
||||
if (size == 0) {
|
||||
fields = ImmutableMap.of();
|
||||
} else if (size == 1) {
|
||||
|
@@ -220,10 +230,10 @@ public class InternalSearchHit implements SearchHit {
|
|||
out.writeUTF(id);
|
||||
out.writeUTF(type);
|
||||
if (source == null) {
|
||||
out.writeBoolean(false);
|
||||
out.writeInt(0);
|
||||
} else {
|
||||
out.writeBoolean(true);
|
||||
out.writeUTF(source);
|
||||
out.writeInt(source.length);
|
||||
out.write(source);
|
||||
}
|
||||
if (explanation == null) {
|
||||
out.writeBoolean(false);
|
||||
|
|
|
@@ -32,7 +32,7 @@ public class QueryParseElement implements SearchParseElement {
|
|||
|
||||
@Override public void parse(JsonParser jp, SearchContext context) throws Exception {
|
||||
JsonIndexQueryParser indexQueryParser = (JsonIndexQueryParser) context.queryParser();
|
||||
Query query = indexQueryParser.parse(jp, context.source());
|
||||
Query query = indexQueryParser.parse(jp);
|
||||
query.setBoost(query.getBoost() * context.queryBoost());
|
||||
context.query(query);
|
||||
}
@@ -0,0 +1,95 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util;
|
||||
|
||||
import org.apache.lucene.util.UnicodeUtil;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class Unicode {
|
||||
|
||||
private static ThreadLocal<UnicodeUtil.UTF8Result> cachedUtf8Result = new ThreadLocal<UnicodeUtil.UTF8Result>() {
|
||||
@Override protected UnicodeUtil.UTF8Result initialValue() {
|
||||
return new UnicodeUtil.UTF8Result();
|
||||
}
|
||||
};
|
||||
|
||||
private static ThreadLocal<UnicodeUtil.UTF16Result> cachedUtf16Result = new ThreadLocal<UnicodeUtil.UTF16Result>() {
|
||||
@Override protected UnicodeUtil.UTF16Result initialValue() {
|
||||
return new UnicodeUtil.UTF16Result();
|
||||
}
|
||||
};
|
||||
|
||||
public static byte[] fromStringAsBytes(String source) {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
UnicodeUtil.UTF8Result result = unsafeFromStringAsUtf8(source);
|
||||
return Arrays.copyOfRange(result.result, 0, result.length);
|
||||
}
|
||||
|
||||
public static UnicodeUtil.UTF8Result fromStringAsUtf8(String source) {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
UnicodeUtil.UTF8Result result = new UnicodeUtil.UTF8Result();
|
||||
UnicodeUtil.UTF16toUTF8(source, 0, source.length(), result);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static UnicodeUtil.UTF8Result unsafeFromStringAsUtf8(String source) {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
UnicodeUtil.UTF8Result result = cachedUtf8Result.get();
|
||||
UnicodeUtil.UTF16toUTF8(source, 0, source.length(), result);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static String fromBytes(byte[] source) {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
UnicodeUtil.UTF16Result result = unsafeFromBytesAsUtf16(source);
|
||||
return new String(result.result, 0, result.length);
|
||||
}
|
||||
|
||||
public static UnicodeUtil.UTF16Result fromBytesAsUtf16(byte[] source) {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
UnicodeUtil.UTF16Result result = new UnicodeUtil.UTF16Result();
|
||||
UnicodeUtil.UTF8toUTF16(source, 0, source.length, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
public static UnicodeUtil.UTF16Result unsafeFromBytesAsUtf16(byte[] source) {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
UnicodeUtil.UTF16Result result = cachedUtf16Result.get();
|
||||
UnicodeUtil.UTF8toUTF16(source, 0, source.length, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
}
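
Editor's note: a minimal, illustrative round-trip of the new Unicode helper defined above. This snippet is not part of the commit; the wrapper class name and the sample JSON value are invented for the example.

import org.elasticsearch.util.Unicode;

public class UnicodeRoundTripExample {
    public static void main(String[] args) {
        // encode a JSON string into the UTF-8 byte[] form now used for _source / translog payloads
        byte[] bytes = Unicode.fromStringAsBytes("{\"user\":\"kimchy\"}");
        // decode it back when a String view is needed (as InternalSearchHit.sourceAsString() does)
        String json = Unicode.fromBytes(bytes);
        System.out.println(json); // {"user":"kimchy"}
    }
}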
@@ -0,0 +1,220 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.io;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class FastCharArrayReader extends Reader {
|
||||
|
||||
/**
|
||||
* The character buffer.
|
||||
*/
|
||||
protected char buf[];
|
||||
|
||||
/**
|
||||
* The current buffer position.
|
||||
*/
|
||||
protected int pos;
|
||||
|
||||
/**
|
||||
* The position of mark in buffer.
|
||||
*/
|
||||
protected int markedPos = 0;
|
||||
|
||||
/**
|
||||
* The index of the end of this buffer. There is no valid
|
||||
* data at or beyond this index.
|
||||
*/
|
||||
protected int count;
|
||||
|
||||
/**
|
||||
* Creates a CharArrayReader from the specified array of chars.
|
||||
*
|
||||
* @param buf Input buffer (not copied)
|
||||
*/
|
||||
public FastCharArrayReader(char buf[]) {
|
||||
this.buf = buf;
|
||||
this.pos = 0;
|
||||
this.count = buf.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CharArrayReader from the specified array of chars.
|
||||
*
|
||||
* <p> The resulting reader will start reading at the given
|
||||
* <tt>offset</tt>. The total number of <tt>char</tt> values that can be
|
||||
* read from this reader will be either <tt>length</tt> or
|
||||
* <tt>buf.length-offset</tt>, whichever is smaller.
|
||||
*
|
||||
* @param buf Input buffer (not copied)
|
||||
* @param offset Offset of the first char to read
|
||||
* @param length Number of chars to read
|
||||
* @throws IllegalArgumentException If <tt>offset</tt> is negative or greater than
|
||||
* <tt>buf.length</tt>, or if <tt>length</tt> is negative, or if
|
||||
* the sum of these two values is negative.
|
||||
*/
|
||||
public FastCharArrayReader(char buf[], int offset, int length) {
|
||||
if ((offset < 0) || (offset > buf.length) || (length < 0) ||
|
||||
((offset + length) < 0)) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
this.buf = buf;
|
||||
this.pos = offset;
|
||||
this.count = Math.min(offset + length, buf.length);
|
||||
this.markedPos = offset;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks to make sure that the stream has not been closed
|
||||
*/
|
||||
private void ensureOpen() throws IOException {
|
||||
if (buf == null)
|
||||
throw new IOException("Stream closed");
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a single character.
|
||||
*
|
||||
* @throws IOException If an I/O error occurs
|
||||
*/
|
||||
public int read() throws IOException {
|
||||
ensureOpen();
|
||||
if (pos >= count)
|
||||
return -1;
|
||||
else
|
||||
return buf[pos++];
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads characters into a portion of an array.
|
||||
*
|
||||
* @param b Destination buffer
|
||||
* @param off Offset at which to start storing characters
|
||||
* @param len Maximum number of characters to read
|
||||
* @return The actual number of characters read, or -1 if
|
||||
* the end of the stream has been reached
|
||||
* @throws IOException If an I/O error occurs
|
||||
*/
|
||||
public int read(char b[], int off, int len) throws IOException {
|
||||
ensureOpen();
|
||||
if ((off < 0) || (off > b.length) || (len < 0) ||
|
||||
((off + len) > b.length) || ((off + len) < 0)) {
|
||||
throw new IndexOutOfBoundsException();
|
||||
} else if (len == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (pos >= count) {
|
||||
return -1;
|
||||
}
|
||||
if (pos + len > count) {
|
||||
len = count - pos;
|
||||
}
|
||||
if (len <= 0) {
|
||||
return 0;
|
||||
}
|
||||
System.arraycopy(buf, pos, b, off, len);
|
||||
pos += len;
|
||||
return len;
|
||||
}
|
||||
|
||||
/**
|
||||
* Skips characters. Returns the number of characters that were skipped.
|
||||
*
|
||||
* <p>The <code>n</code> parameter may be negative, even though the
|
||||
* <code>skip</code> method of the {@link Reader} superclass throws
|
||||
* an exception in this case. If <code>n</code> is negative, then
|
||||
* this method does nothing and returns <code>0</code>.
|
||||
*
|
||||
* @param n The number of characters to skip
|
||||
* @return The number of characters actually skipped
|
||||
* @throws IOException If the stream is closed, or an I/O error occurs
|
||||
*/
|
||||
public long skip(long n) throws IOException {
|
||||
ensureOpen();
|
||||
if (pos + n > count) {
|
||||
n = count - pos;
|
||||
}
|
||||
if (n < 0) {
|
||||
return 0;
|
||||
}
|
||||
pos += n;
|
||||
return n;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tells whether this stream is ready to be read. Character-array readers
|
||||
* are always ready to be read.
|
||||
*
|
||||
* @throws IOException If an I/O error occurs
|
||||
*/
|
||||
public boolean ready() throws IOException {
|
||||
ensureOpen();
|
||||
return (count - pos) > 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tells whether this stream supports the mark() operation, which it does.
|
||||
*/
|
||||
public boolean markSupported() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks the present position in the stream. Subsequent calls to reset()
|
||||
* will reposition the stream to this point.
|
||||
*
|
||||
* @param readAheadLimit Limit on the number of characters that may be
|
||||
* read while still preserving the mark. Because
|
||||
* the stream's input comes from a character array,
|
||||
* there is no actual limit; hence this argument is
|
||||
* ignored.
|
||||
* @throws IOException If an I/O error occurs
|
||||
*/
|
||||
public void mark(int readAheadLimit) throws IOException {
|
||||
ensureOpen();
|
||||
markedPos = pos;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the stream to the most recent mark, or to the beginning if it has
|
||||
* never been marked.
|
||||
*
|
||||
* @throws IOException If an I/O error occurs
|
||||
*/
|
||||
public void reset() throws IOException {
|
||||
ensureOpen();
|
||||
pos = markedPos;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the stream and releases any system resources associated with
|
||||
* it. Once the stream has been closed, further read(), ready(),
|
||||
* mark(), reset(), or skip() invocations will throw an IOException.
|
||||
* Closing a previously closed stream has no effect.
|
||||
*/
|
||||
public void close() {
|
||||
buf = null;
|
||||
}
|
||||
}
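
Editor's note: a short usage sketch of FastCharArrayReader added above (not part of the commit; the class name and sample data are invented). The main difference from java.io.CharArrayReader appears to be the absence of per-call synchronization.

import org.elasticsearch.util.io.FastCharArrayReader;

public class FastCharArrayReaderExample {
    public static void main(String[] args) throws Exception {
        char[] chars = "{\"ok\":true}".toCharArray();
        // wrap the array directly; the buffer is not copied
        FastCharArrayReader reader = new FastCharArrayReader(chars);
        char[] buf = new char[chars.length];
        int read = reader.read(buf, 0, buf.length);
        System.out.println(new String(buf, 0, read)); // {"ok":true}
        reader.close();
    }
}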
@@ -263,4 +263,12 @@ public abstract class Streams {
|
|||
}
|
||||
return copyToString(new InputStreamReader(is));
|
||||
}
|
||||
|
||||
public static byte[] copyToBytesFromClasspath(String path) throws IOException {
|
||||
InputStream is = Streams.class.getResourceAsStream(path);
|
||||
if (is == null) {
|
||||
throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
|
||||
}
|
||||
return copyToByteArray(is);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.json;
|
||||
|
||||
import org.codehaus.jackson.JsonEncoding;
|
||||
import org.codehaus.jackson.JsonFactory;
|
||||
import org.codehaus.jackson.JsonGenerator;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.util.io.FastByteArrayOutputStream;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class BinaryJsonBuilder extends JsonBuilder<BinaryJsonBuilder> {
|
||||
|
||||
/**
|
||||
* A thread local based cache of {@link BinaryJsonBuilder}.
|
||||
*/
|
||||
public static class Cached {
|
||||
|
||||
private BinaryJsonBuilder builder;
|
||||
|
||||
public Cached(BinaryJsonBuilder builder) {
|
||||
this.builder = builder;
|
||||
}
|
||||
|
||||
private static final ThreadLocal<Cached> cache = new ThreadLocal<Cached>() {
|
||||
@Override protected Cached initialValue() {
|
||||
try {
|
||||
return new Cached(new BinaryJsonBuilder());
|
||||
} catch (IOException e) {
|
||||
throw new ElasticSearchException("Failed to create json generator", e);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the cached thread local builder, with its internal {@link FastByteArrayOutputStream} cleared.
|
||||
*/
|
||||
static BinaryJsonBuilder cached() throws IOException {
|
||||
Cached cached = cache.get();
|
||||
cached.builder.reset();
|
||||
return cached.builder;
|
||||
}
|
||||
}
|
||||
|
||||
private final FastByteArrayOutputStream bos;
|
||||
|
||||
private final JsonFactory factory;
|
||||
|
||||
public BinaryJsonBuilder() throws IOException {
|
||||
this(Jackson.defaultJsonFactory());
|
||||
}
|
||||
|
||||
public BinaryJsonBuilder(JsonFactory factory) throws IOException {
|
||||
this.bos = new FastByteArrayOutputStream();
|
||||
this.factory = factory;
|
||||
this.generator = factory.createJsonGenerator(bos, JsonEncoding.UTF8);
|
||||
this.builder = this;
|
||||
}
|
||||
|
||||
public BinaryJsonBuilder(JsonGenerator generator) throws IOException {
|
||||
this.bos = null;
|
||||
this.generator = generator;
|
||||
this.factory = null;
|
||||
this.builder = this;
|
||||
}
|
||||
|
||||
@Override public BinaryJsonBuilder raw(byte[] json) throws IOException {
|
||||
flush();
|
||||
bos.write(json);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override public BinaryJsonBuilder reset() throws IOException {
|
||||
bos.reset();
|
||||
generator = factory.createJsonGenerator(bos, JsonEncoding.UTF8);
|
||||
return this;
|
||||
}
|
||||
|
||||
public FastByteArrayOutputStream unsafeStream() throws IOException {
|
||||
flush();
|
||||
return bos;
|
||||
}
|
||||
|
||||
@Override public byte[] unsafeBytes() throws IOException {
|
||||
flush();
|
||||
return bos.unsafeByteArray();
|
||||
}
|
||||
|
||||
@Override public int unsafeBytesLength() throws IOException {
|
||||
flush();
|
||||
return bos.size();
|
||||
}
|
||||
|
||||
@Override public byte[] copiedBytes() throws IOException {
|
||||
flush();
|
||||
return bos.copiedByteArray();
|
||||
}
|
||||
}
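
Editor's note: an illustrative sketch of the new BinaryJsonBuilder (not part of the commit; the class name and field values are invented), showing a response built directly into UTF-8 bytes without an intermediate String.

import org.elasticsearch.util.json.BinaryJsonBuilder;

public class BinaryJsonBuilderExample {
    public static void main(String[] args) throws Exception {
        BinaryJsonBuilder builder = new BinaryJsonBuilder();
        // JSON is written straight into a byte buffer, never materialized as a String
        builder.startObject().field("ok", true).field("count", 42).endObject();
        byte[] copied = builder.copiedBytes();   // independent copy, safe to hand off
        byte[] shared = builder.unsafeBytes();   // shared buffer; valid up to unsafeBytesLength()
        System.out.println(copied.length + " bytes (shared length " + builder.unsafeBytesLength() + ")");
    }
}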
@@ -19,12 +19,7 @@
|
|||
|
||||
package org.elasticsearch.util.json;
|
||||
|
||||
import org.apache.lucene.util.UnicodeUtil;
|
||||
import org.codehaus.jackson.JsonFactory;
|
||||
import org.codehaus.jackson.JsonGenerator;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.util.concurrent.NotThreadSafe;
|
||||
import org.elasticsearch.util.io.FastCharArrayWriter;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.ReadableInstant;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
@@ -37,172 +32,112 @@ import java.util.Date;
|
|||
* @author kimchy (Shay Banon)
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class JsonBuilder {
|
||||
public abstract class JsonBuilder<T extends JsonBuilder> {
|
||||
|
||||
private final static DateTimeFormatter defaultDatePrinter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
|
||||
|
||||
/**
|
||||
* A thread local based cache of {@link JsonBuilder}.
|
||||
*/
|
||||
public static class Cached {
|
||||
protected org.codehaus.jackson.JsonGenerator generator;
|
||||
|
||||
private JsonBuilder generator;
|
||||
protected T builder;
|
||||
|
||||
public Cached(JsonBuilder generator) {
|
||||
this.generator = generator;
|
||||
}
|
||||
|
||||
private static final ThreadLocal<Cached> cache = new ThreadLocal<Cached>() {
|
||||
@Override protected Cached initialValue() {
|
||||
try {
|
||||
return new Cached(new JsonBuilder());
|
||||
} catch (IOException e) {
|
||||
throw new ElasticSearchException("Failed to create json generator", e);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the cached thread local generator, with its internal {@link StringBuilder} cleared.
|
||||
*/
|
||||
static JsonBuilder cached() throws IOException {
|
||||
Cached cached = cache.get();
|
||||
cached.generator.reset();
|
||||
return cached.generator;
|
||||
}
|
||||
public static StringJsonBuilder stringJsonBuilder() throws IOException {
|
||||
return StringJsonBuilder.Cached.cached();
|
||||
}
|
||||
|
||||
public static JsonBuilder jsonBuilder() throws IOException {
|
||||
return Cached.cached();
|
||||
public static BinaryJsonBuilder binaryJsonBuilder() throws IOException {
|
||||
return BinaryJsonBuilder.Cached.cached();
|
||||
}
|
||||
|
||||
|
||||
private final FastCharArrayWriter writer;
|
||||
|
||||
private final JsonFactory factory;
|
||||
|
||||
private org.codehaus.jackson.JsonGenerator generator;
|
||||
|
||||
final UnicodeUtil.UTF8Result utf8Result = new UnicodeUtil.UTF8Result();
|
||||
|
||||
public JsonBuilder() throws IOException {
|
||||
this(Jackson.defaultJsonFactory());
|
||||
}
|
||||
|
||||
public JsonBuilder(JsonFactory factory) throws IOException {
|
||||
this.writer = new FastCharArrayWriter();
|
||||
this.factory = factory;
|
||||
this.generator = factory.createJsonGenerator(writer);
|
||||
}
|
||||
|
||||
public JsonBuilder(JsonGenerator generator) throws IOException {
|
||||
this.writer = new FastCharArrayWriter();
|
||||
this.generator = generator;
|
||||
this.factory = null;
|
||||
}
|
||||
|
||||
public JsonBuilder prettyPrint() {
|
||||
public T prettyPrint() {
|
||||
generator.useDefaultPrettyPrinter();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder startJsonp(String callback) throws IOException {
|
||||
flush();
|
||||
writer.append(callback).append('(');
|
||||
return this;
|
||||
}
|
||||
|
||||
public JsonBuilder endJsonp() throws IOException {
|
||||
flush();
|
||||
writer.append(");");
|
||||
return this;
|
||||
}
|
||||
|
||||
public JsonBuilder startObject(String name) throws IOException {
|
||||
public T startObject(String name) throws IOException {
|
||||
field(name);
|
||||
startObject();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder startObject() throws IOException {
|
||||
public T startObject() throws IOException {
|
||||
generator.writeStartObject();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder endObject() throws IOException {
|
||||
public T endObject() throws IOException {
|
||||
generator.writeEndObject();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder startArray(String name) throws IOException {
|
||||
public T startArray(String name) throws IOException {
|
||||
field(name);
|
||||
startArray();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder startArray() throws IOException {
|
||||
public T startArray() throws IOException {
|
||||
generator.writeStartArray();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder endArray() throws IOException {
|
||||
public T endArray() throws IOException {
|
||||
generator.writeEndArray();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name) throws IOException {
|
||||
public T field(String name) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, char[] value, int offset, int length) throws IOException {
|
||||
public T field(String name, char[] value, int offset, int length) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeString(value, offset, length);
|
||||
}
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, String value) throws IOException {
|
||||
public T field(String name, String value) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeString(value);
|
||||
}
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, int value) throws IOException {
|
||||
public T field(String name, int value) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, long value) throws IOException {
|
||||
public T field(String name, long value) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, float value) throws IOException {
|
||||
public T field(String name, float value) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, double value) throws IOException {
|
||||
public T field(String name, double value) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, Object value) throws IOException {
|
||||
public T field(String name, Object value) throws IOException {
|
||||
if (value == null) {
|
||||
nullField(name);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
Class type = value.getClass();
|
||||
if (type == String.class) {
|
||||
|
@@ -220,120 +155,122 @@ public class JsonBuilder {
|
|||
} else {
|
||||
field(name, value.toString());
|
||||
}
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, boolean value) throws IOException {
|
||||
public T field(String name, boolean value) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
generator.writeBoolean(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, byte[] value) throws IOException {
|
||||
public T field(String name, byte[] value) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
generator.writeBinary(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, ReadableInstant date) throws IOException {
|
||||
public T field(String name, ReadableInstant date) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
return date(date);
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, ReadableInstant date, DateTimeFormatter formatter) throws IOException {
|
||||
public T field(String name, ReadableInstant date, DateTimeFormatter formatter) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
return date(date, formatter);
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, Date date) throws IOException {
|
||||
public T field(String name, Date date) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
return date(date);
|
||||
}
|
||||
|
||||
public JsonBuilder field(String name, Date date, DateTimeFormatter formatter) throws IOException {
|
||||
public T field(String name, Date date, DateTimeFormatter formatter) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
return date(date, formatter);
|
||||
}
|
||||
|
||||
public JsonBuilder nullField(String name) throws IOException {
|
||||
public T nullField(String name) throws IOException {
|
||||
generator.writeNullField(name);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder binary(byte[] bytes) throws IOException {
|
||||
public T binary(byte[] bytes) throws IOException {
|
||||
generator.writeBinary(bytes);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder raw(String json) throws IOException {
|
||||
public T raw(String json) throws IOException {
|
||||
generator.writeRaw(json);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder string(String value) throws IOException {
|
||||
public abstract T raw(byte[] json) throws IOException;
|
||||
|
||||
public T string(String value) throws IOException {
|
||||
generator.writeString(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder number(int value) throws IOException {
|
||||
public T number(int value) throws IOException {
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder number(long value) throws IOException {
|
||||
public T number(long value) throws IOException {
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder number(double value) throws IOException {
|
||||
public T number(double value) throws IOException {
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder number(Integer value) throws IOException {
|
||||
public T number(Integer value) throws IOException {
|
||||
generator.writeNumber(value.intValue());
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder number(Long value) throws IOException {
|
||||
public T number(Long value) throws IOException {
|
||||
generator.writeNumber(value.longValue());
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder number(Float value) throws IOException {
|
||||
public T number(Float value) throws IOException {
|
||||
generator.writeNumber(value.floatValue());
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder number(Double value) throws IOException {
|
||||
public T number(Double value) throws IOException {
|
||||
generator.writeNumber(value.doubleValue());
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder bool(boolean value) throws IOException {
|
||||
public T bool(boolean value) throws IOException {
|
||||
generator.writeBoolean(value);
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder date(ReadableInstant date) throws IOException {
|
||||
public T date(ReadableInstant date) throws IOException {
|
||||
return date(date, defaultDatePrinter);
|
||||
}
|
||||
|
||||
public JsonBuilder date(ReadableInstant date, DateTimeFormatter dateTimeFormatter) throws IOException {
|
||||
public T date(ReadableInstant date, DateTimeFormatter dateTimeFormatter) throws IOException {
|
||||
string(dateTimeFormatter.print(date));
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder date(Date date) throws IOException {
|
||||
public T date(Date date) throws IOException {
|
||||
return date(date, defaultDatePrinter);
|
||||
}
|
||||
|
||||
public JsonBuilder date(Date date, DateTimeFormatter dateTimeFormatter) throws IOException {
|
||||
public T date(Date date, DateTimeFormatter dateTimeFormatter) throws IOException {
|
||||
string(dateTimeFormatter.print(date.getTime()));
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder value(Object value) throws IOException {
|
||||
public T value(Object value) throws IOException {
|
||||
Class type = value.getClass();
|
||||
if (type == String.class) {
|
||||
string((String) value);
|
||||
|
@@ -352,60 +289,21 @@ public class JsonBuilder {
|
|||
} else {
|
||||
throw new IOException("Type not allowed [" + type + "]");
|
||||
}
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder flush() throws IOException {
|
||||
public T flush() throws IOException {
|
||||
generator.flush();
|
||||
return this;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public JsonBuilder reset() throws IOException {
|
||||
writer.reset();
|
||||
generator = factory.createJsonGenerator(writer);
|
||||
return this;
|
||||
}
|
||||
public abstract T reset() throws IOException;
|
||||
|
||||
public String string() throws IOException {
|
||||
flush();
|
||||
return writer.toStringTrim();
|
||||
}
|
||||
public abstract byte[] unsafeBytes() throws IOException;
|
||||
|
||||
/**
|
||||
* Returns the byte[] that represents the utf8 of the json written up until now.
|
||||
* Note, the result is shared within this instance, so copy the byte array if needed
|
||||
* or use {@link #utf8copied()}.
|
||||
*/
|
||||
public UnicodeUtil.UTF8Result utf8() throws IOException {
|
||||
flush();
|
||||
public abstract int unsafeBytesLength() throws IOException;
|
||||
|
||||
// ignore whitepsaces
|
||||
int st = 0;
|
||||
int len = writer.size();
|
||||
char[] val = writer.unsafeCharArray();
|
||||
|
||||
while ((st < len) && (val[st] <= ' ')) {
|
||||
st++;
|
||||
len--;
|
||||
}
|
||||
while ((st < len) && (val[len - 1] <= ' ')) {
|
||||
len--;
|
||||
}
|
||||
|
||||
UnicodeUtil.UTF16toUTF8(val, st, len, utf8Result);
|
||||
|
||||
return utf8Result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copied byte[] that represnts the utf8 o fthe json written up until now.
|
||||
*/
|
||||
public byte[] utf8copied() throws IOException {
|
||||
utf8();
|
||||
byte[] result = new byte[utf8Result.length];
|
||||
System.arraycopy(utf8Result.result, 0, result, 0, utf8Result.length);
|
||||
return result;
|
||||
}
|
||||
public abstract byte[] copiedBytes() throws IOException;
|
||||
|
||||
public void close() {
|
||||
try {
|
||||
@@ -0,0 +1,169 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.json;
|
||||
|
||||
import org.apache.lucene.util.UnicodeUtil;
|
||||
import org.codehaus.jackson.JsonFactory;
|
||||
import org.codehaus.jackson.JsonGenerator;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.util.Unicode;
|
||||
import org.elasticsearch.util.concurrent.NotThreadSafe;
|
||||
import org.elasticsearch.util.io.FastCharArrayWriter;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
*/
|
||||
@NotThreadSafe
|
||||
public class StringJsonBuilder extends JsonBuilder<StringJsonBuilder> {
|
||||
|
||||
/**
|
||||
* A thread local based cache of {@link StringJsonBuilder}.
|
||||
*/
|
||||
public static class Cached {
|
||||
|
||||
private StringJsonBuilder builder;
|
||||
|
||||
public Cached(StringJsonBuilder builder) {
|
||||
this.builder = builder;
|
||||
}
|
||||
|
||||
private static final ThreadLocal<Cached> cache = new ThreadLocal<Cached>() {
|
||||
@Override protected Cached initialValue() {
|
||||
try {
|
||||
return new Cached(new StringJsonBuilder());
|
||||
} catch (IOException e) {
|
||||
throw new ElasticSearchException("Failed to create json generator", e);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the cached thread local builder, with its internal {@link FastCharArrayWriter} cleared.
|
||||
*/
|
||||
static StringJsonBuilder cached() throws IOException {
|
||||
Cached cached = cache.get();
|
||||
cached.builder.reset();
|
||||
return cached.builder;
|
||||
}
|
||||
}
|
||||
|
||||
private final FastCharArrayWriter writer;
|
||||
|
||||
private final JsonFactory factory;
|
||||
|
||||
final UnicodeUtil.UTF8Result utf8Result = new UnicodeUtil.UTF8Result();
|
||||
|
||||
public StringJsonBuilder() throws IOException {
|
||||
this(Jackson.defaultJsonFactory());
|
||||
}
|
||||
|
||||
public StringJsonBuilder(JsonFactory factory) throws IOException {
|
||||
this.writer = new FastCharArrayWriter();
|
||||
this.factory = factory;
|
||||
this.generator = factory.createJsonGenerator(writer);
|
||||
this.builder = this;
|
||||
}
|
||||
|
||||
public StringJsonBuilder(JsonGenerator generator) throws IOException {
|
||||
this.writer = new FastCharArrayWriter();
|
||||
this.generator = generator;
|
||||
this.factory = null;
|
||||
this.builder = this;
|
||||
}
|
||||
|
||||
@Override public StringJsonBuilder raw(byte[] json) throws IOException {
|
||||
flush();
|
||||
UnicodeUtil.UTF16Result result = Unicode.unsafeFromBytesAsUtf16(json);
|
||||
writer.write(result.result, 0, result.length);
|
||||
return this;
|
||||
}
|
||||
|
||||
public StringJsonBuilder reset() throws IOException {
|
||||
writer.reset();
|
||||
generator = factory.createJsonGenerator(writer);
|
||||
return this;
|
||||
}
|
||||
|
||||
public String string() throws IOException {
|
||||
flush();
|
||||
return writer.toStringTrim();
|
||||
}
|
||||
|
||||
public FastCharArrayWriter unsafeChars() throws IOException {
|
||||
flush();
|
||||
return writer;
|
||||
}
|
||||
|
||||
@Override public byte[] unsafeBytes() throws IOException {
|
||||
return utf8().result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call this AFTER {@link #unsafeBytes()}.
|
||||
*/
|
||||
@Override public int unsafeBytesLength() {
|
||||
return utf8Result.length;
|
||||
}
|
||||
|
||||
@Override public byte[] copiedBytes() throws IOException {
|
||||
utf8(); // recompute the shared utf8Result before copying; flush() alone would leave it stale
|
||||
byte[] ret = new byte[utf8Result.length];
|
||||
System.arraycopy(utf8Result.result, 0, ret, 0, ret.length);
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link UnicodeUtil.UTF8Result} that represents the utf8 of the json written up until now.
|
||||
* Note, the result is shared within this instance, so copy the byte array if needed
|
||||
* or use {@link #utf8copied()}.
|
||||
*/
|
||||
public UnicodeUtil.UTF8Result utf8() throws IOException {
|
||||
flush();
|
||||
|
||||
// ignore whitespaces
|
||||
int st = 0;
|
||||
int len = writer.size();
|
||||
char[] val = writer.unsafeCharArray();
|
||||
|
||||
while ((st < len) && (val[st] <= ' ')) {
|
||||
st++;
|
||||
len--;
|
||||
}
|
||||
while ((st < len) && (val[len - 1] <= ' ')) {
|
||||
len--;
|
||||
}
|
||||
|
||||
UnicodeUtil.UTF16toUTF8(val, st, len, utf8Result);
|
||||
|
||||
return utf8Result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copied byte[] that represents the utf8 of the json written up until now.
|
||||
*/
|
||||
public byte[] utf8copied() throws IOException {
|
||||
utf8();
|
||||
byte[] result = new byte[utf8Result.length];
|
||||
System.arraycopy(utf8Result.result, 0, result, 0, utf8Result.length);
|
||||
return result;
|
||||
}
|
||||
}
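
Editor's note: an illustrative sketch of StringJsonBuilder (not part of the commit; the class name and sample values are invented), which remains for call sites such as SearchSourceBuilder.build() that still need a String result.

import org.elasticsearch.util.json.StringJsonBuilder;

public class StringJsonBuilderExample {
    public static void main(String[] args) throws Exception {
        StringJsonBuilder builder = new StringJsonBuilder();
        builder.startObject().field("query", "test").endObject();
        String json = builder.string();       // char-based result
        byte[] utf8 = builder.utf8copied();   // trimmed UTF-8 copy of the same content
        System.out.println(json + " -> " + utf8.length + " bytes");
    }
}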
@@ -102,6 +102,10 @@ public abstract class AbstractSimpleEngineTests {
|
|||
|
||||
protected abstract Engine createEngine(Store store);
|
||||
|
||||
private static final byte[] B_1 = new byte[]{1};
|
||||
private static final byte[] B_2 = new byte[]{2};
|
||||
private static final byte[] B_3 = new byte[]{3};
|
||||
|
||||
@Test public void testSimpleOperations() throws Exception {
|
||||
Engine.Searcher searchResult = engine.searcher();
|
||||
@@ -109,7 +113,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
searchResult.release();
|
||||
|
||||
// create a document
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
|
||||
// its not there...
|
||||
searchResult = engine.searcher();
|
||||
|
@@ -127,7 +131,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
searchResult.release();
|
||||
|
||||
// now do an update
|
||||
engine.index(new Engine.Index(newUid("1"), doc().add(field("_uid", "1")).add(field("value", "test1")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.index(new Engine.Index(newUid("1"), doc().add(field("_uid", "1")).add(field("value", "test1")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
|
||||
// its not updated yet...
|
||||
searchResult = engine.searcher();
|
||||
|
@@ -165,7 +169,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
searchResult.release();
|
||||
|
||||
// add it back
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
|
||||
// its not there...
|
||||
searchResult = engine.searcher();
|
||||
|
@@ -189,7 +193,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
|
||||
// make sure we can still work with the engine
|
||||
// now do an update
|
||||
engine.index(new Engine.Index(newUid("1"), doc().add(field("_uid", "1")).add(field("value", "test1")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.index(new Engine.Index(newUid("1"), doc().add(field("_uid", "1")).add(field("value", "test1")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
|
||||
// its not updated yet...
|
||||
searchResult = engine.searcher();
|
||||
|
@@ -228,7 +232,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
searchResult.release();
|
||||
|
||||
// create a document
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
|
||||
// its not there...
|
||||
searchResult = engine.searcher();
|
||||
|
@@ -260,7 +264,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
|
||||
@Test public void testSimpleSnapshot() throws Exception {
|
||||
// create a document
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
|
||||
final ExecutorService executorService = Executors.newCachedThreadPool();
|
||||
|
||||
|
@@ -269,14 +273,14 @@ public abstract class AbstractSimpleEngineTests {
|
|||
assertThat(snapshotIndexCommit1, snapshotIndexCommitExists());
|
||||
assertThat(translogSnapshot1, translogSize(1));
|
||||
Translog.Create create1 = (Translog.Create) translogSnapshot1.iterator().next();
|
||||
assertThat(create1.source(), equalTo("{1}"));
|
||||
assertThat(create1.source(), equalTo(B_1));
|
||||
|
||||
Future<Object> future = executorService.submit(new Callable<Object>() {
|
||||
@Override public Object call() throws Exception {
|
||||
engine.flush(new Engine.Flush());
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "2", "{2}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "2", B_2));
|
||||
engine.flush(new Engine.Flush());
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "3", "{3}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "3", B_3));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
@ -297,7 +301,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
assertThat(snapshotIndexCommit2.getSegmentsFileName(), not(equalTo(snapshotIndexCommit1.getSegmentsFileName())));
|
||||
assertThat(translogSnapshot2, translogSize(1));
|
||||
Translog.Create create3 = (Translog.Create) translogSnapshot2.iterator().next();
|
||||
assertThat(create3.source(), equalTo("{3}"));
|
||||
assertThat(create3.source(), equalTo(B_3));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -307,7 +311,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
}
|
||||
|
||||
@Test public void testSimpleRecover() throws Exception {
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
engine.flush(new Engine.Flush());
|
||||
|
||||
engine.recover(new Engine.RecoveryHandler() {
|
||||
|
@ -347,9 +351,9 @@ public abstract class AbstractSimpleEngineTests {
|
|||
}
|
||||
|
||||
@Test public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception {
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
engine.flush(new Engine.Flush());
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "2", "{2}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "2", B_2));
|
||||
|
||||
engine.recover(new Engine.RecoveryHandler() {
|
||||
@Override public void phase1(SnapshotIndexCommit snapshot) throws EngineException {
|
||||
|
@ -358,7 +362,7 @@ public abstract class AbstractSimpleEngineTests {
|
|||
@Override public void phase2(Translog.Snapshot snapshot) throws EngineException {
|
||||
assertThat(snapshot, translogSize(1));
|
||||
Translog.Create create = (Translog.Create) snapshot.iterator().next();
|
||||
assertThat(create.source(), equalTo("{2}"));
|
||||
assertThat(create.source(), equalTo(B_2));
|
||||
}
|
||||
|
||||
@Override public void phase3(Translog.Snapshot snapshot) throws EngineException {
|
||||
|
@ -371,9 +375,9 @@ public abstract class AbstractSimpleEngineTests {
|
|||
}
|
||||
|
||||
@Test public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception {
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", "{1}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "1", B_1));
|
||||
engine.flush(new Engine.Flush());
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "2", "{2}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "2", B_2));
|
||||
|
||||
engine.recover(new Engine.RecoveryHandler() {
|
||||
@Override public void phase1(SnapshotIndexCommit snapshot) throws EngineException {
|
||||
|
@ -382,16 +386,16 @@ public abstract class AbstractSimpleEngineTests {
|
|||
@Override public void phase2(Translog.Snapshot snapshot) throws EngineException {
|
||||
assertThat(snapshot, translogSize(1));
|
||||
Translog.Create create = (Translog.Create) snapshot.iterator().next();
|
||||
assertThat(create.source(), equalTo("{2}"));
|
||||
assertThat(create.source(), equalTo(B_2));
|
||||
|
||||
// add for phase3
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "3", "{3}"));
|
||||
engine.create(new Engine.Create(doc().add(field("_uid", "3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, "test", "3", B_3));
|
||||
}
|
||||
|
||||
@Override public void phase3(Translog.Snapshot snapshot) throws EngineException {
|
||||
assertThat(snapshot, translogSize(1));
|
||||
Translog.Create create = (Translog.Create) snapshot.iterator().next();
|
||||
assertThat(create.source(), equalTo("{3}"));
|
||||
assertThat(create.source(), equalTo(B_3));
|
||||
}
|
||||
});
|
||||
|
||||
|
|
|
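Note: the B_1, B_2 and B_3 constants compared against above are declared outside the hunks shown here; a minimal sketch of how such byte[] fixtures might be declared follows (the exact payload values are an assumption, only the switch from String to byte[] is what the commit shows):

// Hypothetical fixtures for the engine tests above; the real values are not visible in these hunks.
private static final byte[] B_1 = new byte[]{1};
private static final byte[] B_2 = new byte[]{2};
private static final byte[] B_3 = new byte[]{3};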
@@ -43,9 +43,9 @@ public class SimpleJsonMapperTests {
JsonDocumentMapper docMapper = doc(
object("person")
.add(object("name").add(stringField("first").store(YES).index(Field.Index.NO)))
).sourceField(source().compressionThreshold(0)).build();
).sourceField(source()).build();

String json = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
Document doc = docMapper.parse("person", "1", json).doc();

assertThat((double) doc.getBoost(), closeTo(3.7, 0.01));
@@ -65,7 +65,7 @@ public class SimpleJsonMapperTests {
// System.out.println(builtMapping);
// reparse it
JsonDocumentMapper builtDocMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(builtMapping);
String json = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
Document doc = builtDocMapper.parse(json).doc();
assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1")));
assertThat((double) doc.getBoost(), closeTo(3.7, 0.01));
@@ -78,7 +78,7 @@ public class SimpleJsonMapperTests {
@Test public void testSimpleParser() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test-mapping.json");
JsonDocumentMapper docMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(mapping);
String json = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
Document doc = docMapper.parse(json).doc();
assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1")));
assertThat((double) doc.getBoost(), closeTo(3.7, 0.01));
@@ -91,7 +91,7 @@ public class SimpleJsonMapperTests {
@Test public void testSimpleParserMappingWithNoType() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test-mapping-notype.json");
JsonDocumentMapper docMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse("person", mapping);
String json = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1.json");
Document doc = docMapper.parse(json).doc();
assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1")));
assertThat((double) doc.getBoost(), closeTo(3.7, 0.01));
@@ -104,7 +104,7 @@ public class SimpleJsonMapperTests {
@Test public void testSimpleParserNoTypeNoId() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test-mapping.json");
JsonDocumentMapper docMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(mapping);
String json = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1-notype-noid.json");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/simple/test1-notype-noid.json");
Document doc = docMapper.parse("person", "1", json).doc();
assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1")));
assertThat((double) doc.getBoost(), closeTo(3.7, 0.01));

@@ -54,7 +54,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testQueryStringBuilder() throws Exception {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(queryString("test").defaultField("content").phraseSlop(1).build());
Query parsedQuery = queryParser.parse(queryString("test").defaultField("content").phraseSlop(1));

assertThat(parsedQuery, instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) parsedQuery;
@@ -72,7 +72,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testMatchAllBuilder() throws Exception {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(matchAllQuery().boost(1.2f).build());
Query parsedQuery = queryParser.parse(matchAllQuery().boost(1.2f).buildAsString());
assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class));
MatchAllDocsQuery matchAllDocsQuery = (MatchAllDocsQuery) parsedQuery;
assertThat((double) matchAllDocsQuery.getBoost(), closeTo(1.2, 0.01));
@@ -132,7 +132,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testTermQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(termQuery("age", 34).build());
Query parsedQuery = queryParser.parse(termQuery("age", 34).buildAsBytes());
assertThat(parsedQuery, instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) parsedQuery;
// since age is automatically registered in data, we encode it as numeric
@@ -151,7 +151,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testTermWithBoostQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(termQuery("age", 34).boost(2.0f).build());
Query parsedQuery = queryParser.parse(termQuery("age", 34).boost(2.0f));
assertThat(parsedQuery, instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) parsedQuery;
// since age is automatically registered in data, we encode it as numeric
@@ -172,7 +172,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testPrefixQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh").build());
Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh"));
assertThat(parsedQuery, instanceOf(PrefixQuery.class));
PrefixQuery prefixQuery = (PrefixQuery) parsedQuery;
// since age is automatically registered in data, we encode it as numeric
@@ -191,7 +191,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testPrefixFilteredQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), prefixFilter("name.first", "sh")).build());
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), prefixFilter("name.first", "sh")));
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
PrefixFilter prefixFilter = (PrefixFilter) filteredQuery.getFilter();
@@ -210,7 +210,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testPrefixQueryBoostQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh").boost(2.0f).build());
Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh").boost(2.0f));
assertThat(parsedQuery, instanceOf(PrefixQuery.class));
PrefixQuery prefixQuery = (PrefixQuery) parsedQuery;
assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh")));
@@ -229,7 +229,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testWildcardQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(wildcardQuery("name.first", "sh*").build());
Query parsedQuery = queryParser.parse(wildcardQuery("name.first", "sh*"));
assertThat(parsedQuery, instanceOf(WildcardQuery.class));
WildcardQuery wildcardQuery = (WildcardQuery) parsedQuery;
assertThat(wildcardQuery.getTerm(), equalTo(new Term("name.first", "sh*")));
@@ -246,7 +246,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testRangeQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(rangeQuery("age").from(23).to(54).includeLower(true).includeUpper(false).build());
Query parsedQuery = queryParser.parse(rangeQuery("age").from(23).to(54).includeLower(true).includeUpper(false));
// since age is automatically registered in data, we encode it as numeric
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;
@@ -273,7 +273,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testRangeFilteredQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), rangeFilter("age").from(23).to(54).includeLower(true).includeUpper(false)).build());
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), rangeFilter("age").from(23).to(54).includeLower(true).includeUpper(false)));
// since age is automatically registered in data, we encode it as numeric
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
Filter filter = ((FilteredQuery) parsedQuery).getFilter();
@@ -315,7 +315,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testBoolQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(boolQuery().must(termQuery("content", "test1")).must(termQuery("content", "test4")).mustNot(termQuery("content", "test2")).should(termQuery("content", "test3")).build());
Query parsedQuery = queryParser.parse(boolQuery().must(termQuery("content", "test1")).must(termQuery("content", "test4")).mustNot(termQuery("content", "test2")).should(termQuery("content", "test3")));
assertThat(parsedQuery, instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
BooleanClause[] clauses = booleanQuery.getClauses();
@@ -361,7 +361,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testFilteredQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), termFilter("name.last", "banon")).build());
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), termFilter("name.last", "banon")));
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
@@ -419,7 +419,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testTermsFilterQueryBuilder() throws Exception {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), termsFilter("name.last", "banon", "kimchy")).build());
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), termsFilter("name.last", "banon", "kimchy")));
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
assertThat(filteredQuery.getFilter(), instanceOf(TermsFilter.class));
@@ -466,7 +466,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testSpanTermQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(spanTermQuery("age", 34).build());
Query parsedQuery = queryParser.parse(spanTermQuery("age", 34));
assertThat(parsedQuery, instanceOf(SpanTermQuery.class));
SpanTermQuery termQuery = (SpanTermQuery) parsedQuery;
// since age is automatically registered in data, we encode it as numeric
@@ -485,7 +485,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testSpanNotQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(spanNotQuery().include(spanTermQuery("age", 34)).exclude(spanTermQuery("age", 35)).build());
Query parsedQuery = queryParser.parse(spanNotQuery().include(spanTermQuery("age", 34)).exclude(spanTermQuery("age", 35)));
assertThat(parsedQuery, instanceOf(SpanNotQuery.class));
SpanNotQuery spanNotQuery = (SpanNotQuery) parsedQuery;
// since age is automatically registered in data, we encode it as numeric
@@ -506,7 +506,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testSpanFirstQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(spanFirstQuery(spanTermQuery("age", 34), 12).build());
Query parsedQuery = queryParser.parse(spanFirstQuery(spanTermQuery("age", 34), 12));
assertThat(parsedQuery, instanceOf(SpanFirstQuery.class));
SpanFirstQuery spanFirstQuery = (SpanFirstQuery) parsedQuery;
// since age is automatically registered in data, we encode it as numeric
@@ -527,7 +527,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testSpanNearQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(spanNearQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36)).slop(12).inOrder(false).collectPayloads(false).build());
Query parsedQuery = queryParser.parse(spanNearQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36)).slop(12).inOrder(false).collectPayloads(false));
assertThat(parsedQuery, instanceOf(SpanNearQuery.class));
SpanNearQuery spanNearQuery = (SpanNearQuery) parsedQuery;
assertThat(spanNearQuery.getClauses().length, equalTo(3));
@@ -552,7 +552,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testSpanOrQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(spanOrQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36)).build());
Query parsedQuery = queryParser.parse(spanOrQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36)));
assertThat(parsedQuery, instanceOf(SpanOrQuery.class));
SpanOrQuery spanOrQuery = (SpanOrQuery) parsedQuery;
assertThat(spanOrQuery.getClauses().length, equalTo(3));
@@ -575,7 +575,7 @@ public class SimpleJsonIndexQueryParserTests {

@Test public void testQueryFilterBuilder() throws Exception {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), queryFilter(termQuery("name.last", "banon"))).build());
Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), queryFilter(termQuery("name.last", "banon"))));
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
QueryWrapperFilter queryWrapperFilter = (QueryWrapperFilter) filteredQuery.getFilter();
@@ -609,7 +609,7 @@ public class SimpleJsonIndexQueryParserTests {
Environment environment = new Environment();
MapperService mapperService = new MapperService(index, EMPTY_SETTINGS, environment, new AnalysisService(index));
// init a mapping with data
mapperService.type("person").parse(copyToStringFromClasspath("/org/elasticsearch/index/query/json/data.json"));
mapperService.type("person").parse(copyToBytesFromClasspath("/org/elasticsearch/index/query/json/data.json"));
return mapperService;
}
}

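The builder calls above now either hand the QueryBuilder straight to the parser or call buildAsString()/buildAsBytes() instead of build(). A minimal usage sketch, assuming buildAsString() returns the JSON source as a String and buildAsBytes() returns the same source as a byte[] (the method names come from the hunks above; the exact return types are an assumption):

// Sketch only: builder and parser names are taken from the diff, return types are assumed.
QueryBuilder builder = termQuery("age", 34).boost(2.0f);
String jsonSource = builder.buildAsString();    // for callers that still need a String
byte[] byteSource = builder.buildAsBytes();     // skips the intermediate String copy
Query parsedQuery = queryParser.parse(builder); // the parser also accepts the builder directly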
@@ -37,6 +37,7 @@ import org.elasticsearch.index.store.ram.RamStore;
import org.elasticsearch.index.translog.memory.MemoryTranslog;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.dynamic.DynamicThreadPool;
import org.elasticsearch.util.Unicode;
import org.elasticsearch.util.settings.Settings;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
@@ -83,10 +84,10 @@ public class SimpleIndexShardTests {

@Test public void testSimpleIndexGetDelete() {
String source1 = "{ type1 : { _id : \"1\", name : \"test\", age : 35 } }";
indexShard.index("type1", "1", source1);
indexShard.index("type1", "1", Unicode.fromStringAsBytes(source1));
indexShard.refresh(new Engine.Refresh(true));

String sourceFetched = indexShard.get("type1", "1");
String sourceFetched = Unicode.fromBytes(indexShard.get("type1", "1"));

assertThat(sourceFetched, equalTo(source1));

@@ -99,9 +100,9 @@ public class SimpleIndexShardTests {

assertThat(indexShard.get("type1", "1"), nullValue());

indexShard.index("type1", "1", source1);
indexShard.index("type1", "1", Unicode.fromStringAsBytes(source1));
indexShard.refresh(new Engine.Refresh(true));
sourceFetched = indexShard.get("type1", "1");
sourceFetched = Unicode.fromBytes(indexShard.get("type1", "1"));
assertThat(sourceFetched, equalTo(source1));
indexShard.deleteByQuery("{ term : { name : \"test\" } }", null);
indexShard.refresh(new Engine.Refresh(true));

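The shard API above now takes and returns byte[] sources, with Unicode.fromStringAsBytes/fromBytes bridging back to Strings in the test. A self-contained sketch of that round trip, under the assumption that the Unicode helpers are plain UTF-8 conversions:

import java.nio.charset.StandardCharsets;

// Stand-alone sketch; assumes Unicode.fromStringAsBytes/fromBytes are UTF-8 conversions.
public class SourceRoundTripSketch {
    static byte[] fromStringAsBytes(String source) {
        return source.getBytes(StandardCharsets.UTF_8); // what indexShard.index(...) now receives
    }

    static String fromBytes(byte[] source) {
        return new String(source, StandardCharsets.UTF_8); // what the test rebuilds from indexShard.get(...)
    }

    public static void main(String[] args) {
        String source1 = "{ type1 : { _id : \"1\", name : \"test\", age : 35 } }";
        byte[] stored = fromStringAsBytes(source1);
        String fetched = fromBytes(stored);
        System.out.println(source1.equals(fetched)); // true: UTF-8 round-trips the source losslessly
    }
}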
@@ -59,12 +59,12 @@ public abstract class AbstractSimpleTranslogTests {
assertThat(snapshot, translogSize(0));
snapshot.release();

translog.add(new Translog.Create("test", "1", "{1}"));
translog.add(new Translog.Create("test", "1", new byte[]{1}));
snapshot = translog.snapshot();
assertThat(snapshot, translogSize(1));
snapshot.release();

translog.add(new Translog.Index("test", "2", "{2}"));
translog.add(new Translog.Index("test", "2", new byte[]{2}));
snapshot = translog.snapshot();
assertThat(snapshot, translogSize(2));
snapshot.release();
@@ -82,9 +82,9 @@ public abstract class AbstractSimpleTranslogTests {
snapshot = translog.snapshot();
Iterator<Translog.Operation> it = snapshot.iterator();
Translog.Create create = (Translog.Create) it.next();
assertThat(create.source(), equalTo("{1}"));
assertThat(create.source(), equalTo(new byte[]{1}));
Translog.Index index = (Translog.Index) it.next();
assertThat(index.source(), equalTo("{2}"));
assertThat(index.source(), equalTo(new byte[]{2}));
Translog.Delete delete = (Translog.Delete) it.next();
assertThat(delete.uid(), equalTo(newUid("3")));
Translog.DeleteByQuery deleteByQuery = (Translog.DeleteByQuery) it.next();
@@ -105,18 +105,18 @@ public abstract class AbstractSimpleTranslogTests {
assertThat(snapshot, translogSize(0));
snapshot.release();

translog.add(new Translog.Create("test", "1", "{1}"));
translog.add(new Translog.Create("test", "1", new byte[]{1}));
snapshot = translog.snapshot();
assertThat(snapshot, translogSize(1));
Translog.Create create = (Translog.Create) snapshot.iterator().next();
assertThat(create.source(), equalTo("{1}"));
assertThat(create.source(), equalTo(new byte[]{1}));
snapshot.release();

translog.add(new Translog.Index("test", "2", "{2}"));
translog.add(new Translog.Index("test", "2", new byte[]{2}));
snapshot = translog.snapshot(snapshot);
assertThat(snapshot, translogSize(1));
Translog.Index index = (Translog.Index) snapshot.iterator().next();
assertThat(index.source(), equalTo("{2}"));
assertThat(index.source(), equalTo(new byte[]{2}));
snapshot.release();
}

@@ -125,19 +125,19 @@ public abstract class AbstractSimpleTranslogTests {
assertThat(snapshot, translogSize(0));
snapshot.release();

translog.add(new Translog.Create("test", "1", "{1}"));
translog.add(new Translog.Create("test", "1", new byte[]{1}));
Translog.Snapshot actualSnapshot = translog.snapshot();

translog.add(new Translog.Index("test", "2", "{2}"));
translog.add(new Translog.Index("test", "2", new byte[]{2}));

translog.newTranslog();

translog.add(new Translog.Index("test", "3", "{3}"));
translog.add(new Translog.Index("test", "3", new byte[]{3}));

snapshot = translog.snapshot(actualSnapshot);
assertThat(snapshot, translogSize(1));
Translog.Index index = (Translog.Index) snapshot.iterator().next();
assertThat(index.source(), equalTo("{3}"));
assertThat(index.source(), equalTo(new byte[]{3}));

actualSnapshot.release();
snapshot.release();

@@ -56,10 +56,12 @@ public class JsonBuilderTests {
}

@Test public void testSimpleJacksonGenerator() throws Exception {
JsonBuilder builder = new JsonBuilder();
assertThat(builder.startObject().field("test", "value").endObject().string(), equalTo("{\"test\":\"value\"}"));
StringJsonBuilder builder = JsonBuilder.stringJsonBuilder();
builder.startObject().field("test", "value").endObject();
assertThat(builder.string(), equalTo("{\"test\":\"value\"}"));
builder.reset();
assertThat(builder.startObject().field("test", "value").endObject().string(), equalTo("{\"test\":\"value\"}"));
builder.startObject().field("test", "value").endObject();
assertThat(builder.string(), equalTo("{\"test\":\"value\"}"));
}

@Test public void testWritingBinaryToStream() throws Exception {

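The test above switches from a single JsonBuilder to the string-oriented StringJsonBuilder obtained via JsonBuilder.stringJsonBuilder(). A short usage sketch, assuming binaryJsonBuilder() (used by the index requests further down in this commit) is the byte[]-producing counterpart:

// Sketch only: both builder factories appear in this commit's tests.
StringJsonBuilder stringBuilder = JsonBuilder.stringJsonBuilder();
stringBuilder.startObject().field("test", "value").endObject();
String jsonString = stringBuilder.string(); // "{\"test\":\"value\"}"
// binaryJsonBuilder() follows the same fluent API but keeps the source as bytes for indexing.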
@@ -107,9 +107,9 @@ public class SimpleSingleTransportClientTests extends AbstractServersTests {

for (int i = 0; i < 5; i++) {
getResult = client.get(getRequest("test").type("type1").id("1").threadedOperation(false)).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client.get(getRequest("test").type("type1").id("1").threadedOperation(true)).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
}

for (int i = 0; i < 5; i++) {
@@ -137,9 +137,9 @@ public class SimpleSingleTransportClientTests extends AbstractServersTests {

for (int i = 0; i < 5; i++) {
getResult = client.get(getRequest("test").type("type1").id("1")).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client.get(getRequest("test").type("type1").id("2")).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("2", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("2", "test")));
}

// check count
@@ -164,7 +164,7 @@ public class SimpleSingleTransportClientTests extends AbstractServersTests {

for (int i = 0; i < 5; i++) {
getResult = client.get(getRequest("test").type("type1").id("1")).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client.get(getRequest("test").type("type1").id("2")).actionGet();
assertThat("cycle #" + i, getResult.empty(), equalTo(false));
}

@@ -38,7 +38,7 @@ import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
* @author kimchy (Shay Banon)
* @author kimchy (shay.banon)
*/
public class DocumentActionsTests extends AbstractServersTests {

@@ -73,9 +73,9 @@ public class DocumentActionsTests extends AbstractServersTests {
logger.info("Get [type1/1]");
for (int i = 0; i < 5; i++) {
getResult = client("server1").get(getRequest("test").type("type1").id("1").threadedOperation(false)).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server1").get(getRequest("test").type("type1").id("1").threadedOperation(true)).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
}

logger.info("Get [type1/2] (should be empty)");
@@ -112,9 +112,9 @@ public class DocumentActionsTests extends AbstractServersTests {
logger.info("Get [type1/1] and [type1/2]");
for (int i = 0; i < 5; i++) {
getResult = client("server1").get(getRequest("test").type("type1").id("1")).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server1").get(getRequest("test").type("type1").id("2")).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("2", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("2", "test")));
}

logger.info("Count");
@@ -153,7 +153,7 @@ public class DocumentActionsTests extends AbstractServersTests {
logger.info("Get [type1/1] and [type1/2], should be empty");
for (int i = 0; i < 5; i++) {
getResult = client("server1").get(getRequest("test").type("type1").id("1")).actionGet();
assertThat("cycle #" + i, getResult.source(), equalTo(source("1", "test")));
assertThat("cycle #" + i, getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server1").get(getRequest("test").type("type1").id("2")).actionGet();
assertThat("cycle #" + i, getResult.empty(), equalTo(false));
}

@@ -84,7 +84,7 @@ public abstract class AbstractSimpleIndexGatewayTests extends AbstractServersTests {
assertThat(getResponse.empty(), equalTo(true));
logger.info("Getting #2");
getResponse = client("server1").get(getRequest("test").type("type1").id("2")).actionGet();
assertThat(getResponse.source(), equalTo(source("2", "test")));
assertThat(getResponse.sourceAsString(), equalTo(source("2", "test")));

// Now flush and add some data (so we have index recovery as well)
logger.info("Flushing, so we have actual content in the index files (#2 should be in the index)");
@@ -109,10 +109,10 @@ public abstract class AbstractSimpleIndexGatewayTests extends AbstractServersTests {
assertThat(getResponse.empty(), equalTo(true));
logger.info("Getting #2 (not from the translog, but from the index)");
getResponse = client("server1").get(getRequest("test").type("type1").id("2")).actionGet();
assertThat(getResponse.source(), equalTo(source("2", "test")));
assertThat(getResponse.sourceAsString(), equalTo(source("2", "test")));
logger.info("Getting #3 (from the translog)");
getResponse = client("server1").get(getRequest("test").type("type1").id("3")).actionGet();
assertThat(getResponse.source(), equalTo(source("3", "test")));
assertThat(getResponse.sourceAsString(), equalTo(source("3", "test")));

logger.info("Flushing, so we have actual content in the index files (#3 should be in the index now as well)");
client("server1").admin().indices().flush(flushRequest("test")).actionGet();
@@ -134,10 +134,10 @@ public abstract class AbstractSimpleIndexGatewayTests extends AbstractServersTests {
assertThat(getResponse.empty(), equalTo(true));
logger.info("Getting #2 (not from the translog, but from the index)");
getResponse = client("server1").get(getRequest("test").type("type1").id("2")).actionGet();
assertThat(getResponse.source(), equalTo(source("2", "test")));
assertThat(getResponse.sourceAsString(), equalTo(source("2", "test")));
logger.info("Getting #3 (not from the translog, but from the index)");
getResponse = client("server1").get(getRequest("test").type("type1").id("3")).actionGet();
assertThat(getResponse.source(), equalTo(source("3", "test")));
assertThat(getResponse.sourceAsString(), equalTo(source("3", "test")));
}

private String source(String id, String nameValue) {

@@ -55,13 +55,13 @@ public class SimpleRecoveryTests extends AbstractServersTests {

for (int i = 0; i < 5; i++) {
getResult = client("server1").get(getRequest("test").type("type1").id("1").threadedOperation(false)).actionGet(1000);
assertThat(getResult.source(), equalTo(source("1", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server2").get(getRequest("test").type("type1").id("1").threadedOperation(false)).actionGet(1000);
assertThat(getResult.source(), equalTo(source("1", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server1").get(getRequest("test").type("type1").id("2").threadedOperation(true)).actionGet(1000);
assertThat(getResult.source(), equalTo(source("2", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("2", "test")));
getResult = client("server2").get(getRequest("test").type("type1").id("2").threadedOperation(true)).actionGet(1000);
assertThat(getResult.source(), equalTo(source("2", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("2", "test")));
}

// now start another one so we move some primaries
@@ -70,17 +70,17 @@ public class SimpleRecoveryTests extends AbstractServersTests {

for (int i = 0; i < 5; i++) {
getResult = client("server1").get(getRequest("test").type("type1").id("1")).actionGet(1000);
assertThat(getResult.source(), equalTo(source("1", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server2").get(getRequest("test").type("type1").id("1")).actionGet(1000);
assertThat(getResult.source(), equalTo(source("1", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server3").get(getRequest("test").type("type1").id("1")).actionGet(1000);
assertThat(getResult.source(), equalTo(source("1", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("1", "test")));
getResult = client("server1").get(getRequest("test").type("type1").id("2").threadedOperation(true)).actionGet(1000);
assertThat(getResult.source(), equalTo(source("2", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("2", "test")));
getResult = client("server2").get(getRequest("test").type("type1").id("2").threadedOperation(true)).actionGet(1000);
assertThat(getResult.source(), equalTo(source("2", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("2", "test")));
getResult = client("server3").get(getRequest("test").type("type1").id("2").threadedOperation(true)).actionGet(1000);
assertThat(getResult.source(), equalTo(source("2", "test")));
assertThat(getResult.sourceAsString(), equalTo(source("2", "test")));
}
}

@@ -99,7 +99,7 @@ public class SingleInstanceEmbeddedSearchTests extends AbstractServersTests {
QueryFetchSearchResult queryFetchResult = searchService.executeFetchPhase(searchRequest(searchSource().query(termQuery("name", "test1"))));
assertThat(queryFetchResult.queryResult().topDocs().totalHits, equalTo(1));
assertThat(queryFetchResult.fetchResult().hits().hits().length, equalTo(1));
assertThat(queryFetchResult.fetchResult().hits().hits()[0].source(), equalTo(source("1", "test1", 1)));
assertThat(queryFetchResult.fetchResult().hits().hits()[0].sourceAsString(), equalTo(source("1", "test1", 1)));
assertThat(queryFetchResult.fetchResult().hits().hits()[0].id(), equalTo("1"));
assertThat(queryFetchResult.fetchResult().hits().hits()[0].type(), equalTo("type1"));
}
@@ -114,7 +114,7 @@ public class SingleInstanceEmbeddedSearchTests extends AbstractServersTests {
assertThat(docIdsToLoad.values().iterator().next().size(), equalTo(1));

FetchSearchResult fetchResult = searchService.executeFetchPhase(new FetchSearchRequest(queryResult.id(), docIdsToLoad.values().iterator().next()));
assertThat(fetchResult.hits().hits()[0].source(), equalTo(source("1", "test1", 1)));
assertThat(fetchResult.hits().hits()[0].sourceAsString(), equalTo(source("1", "test1", 1)));
assertThat(fetchResult.hits().hits()[0].id(), equalTo("1"));
assertThat(fetchResult.hits().hits()[0].type(), equalTo("type1"));
}
@@ -122,7 +122,7 @@ public class SingleInstanceEmbeddedSearchTests extends AbstractServersTests {
@Test public void testQueryFetchInOneGo() throws Exception {
QueryFetchSearchResult result = searchService.executeFetchPhase(searchRequest(searchSource().query(termQuery("name", "test1"))));
FetchSearchResult fetchResult = result.fetchResult();
assertThat(fetchResult.hits().hits()[0].source(), equalTo(source("1", "test1", 1)));
assertThat(fetchResult.hits().hits()[0].sourceAsString(), equalTo(source("1", "test1", 1)));
assertThat(fetchResult.hits().hits()[0].id(), equalTo("1"));
assertThat(fetchResult.hits().hits()[0].type(), equalTo("type1"));
}
@@ -140,7 +140,7 @@ public class SingleInstanceEmbeddedSearchTests extends AbstractServersTests {
assertThat(docIdsToLoad.values().iterator().next().size(), equalTo(1));

FetchSearchResult fetchResult = searchService.executeFetchPhase(new FetchSearchRequest(queryResult.id(), docIdsToLoad.values().iterator().next()));
assertThat(fetchResult.hits().hits()[0].source(), equalTo(source("1", "test1", 1)));
assertThat(fetchResult.hits().hits()[0].sourceAsString(), equalTo(source("1", "test1", 1)));
assertThat(fetchResult.hits().hits()[0].id(), equalTo("1"));
assertThat(fetchResult.hits().hits()[0].type(), equalTo("type1"));
}

@@ -74,7 +74,7 @@ public class TermsActionTests extends AbstractServersTests {
assertThat("no term freqs for the 'value' since nothing is indexed", termsResponse.field("value").iterator().hasNext(), equalTo(false));

logger.info("Index [1]");
client.index(indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("value", "aaa").endObject())).actionGet();
client.index(indexRequest("test").type("type1").id("1").source(binaryJsonBuilder().startObject().field("value", "aaa").endObject())).actionGet();
logger.info("Refresh");
client.admin().indices().refresh(refreshRequest()).actionGet();

@@ -90,7 +90,7 @@ public class TermsActionTests extends AbstractServersTests {
assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(-1));

logger.info("Index [2]");
client.index(indexRequest("test").type("type1").id("2").source(jsonBuilder().startObject().field("value", "bbb bbb").endObject())).actionGet();
client.index(indexRequest("test").type("type1").id("2").source(binaryJsonBuilder().startObject().field("value", "bbb bbb").endObject())).actionGet();
logger.info("Refresh");
client.admin().indices().refresh(refreshRequest()).actionGet();

@@ -106,7 +106,7 @@ public class TermsActionTests extends AbstractServersTests {
assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(1));

logger.info("Delete 3");
client.index(indexRequest("test").type("type1").id("3").source(jsonBuilder().startObject().field("value", "bbb").endObject())).actionGet();
client.index(indexRequest("test").type("type1").id("3").source(binaryJsonBuilder().startObject().field("value", "bbb").endObject())).actionGet();
logger.info("Refresh");
client.admin().indices().refresh(refreshRequest()).actionGet();
