buffer management refactoring
First phase of improving buffer management and further reducing buffer copies. Introduces a BytesReference abstraction that makes it easier to slice buffers and to read/write references from streams. This is the foundation for later work: building smarter buffers on top of composite Netty channels, for example (which HTTP now produces), and reducing buffer copies when sending transport/REST responses.
Parent: 5d93b2bfe9
Commit: 35233564fd
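To make the diff below easier to follow, here is a minimal sketch of the idea behind the BytesReference abstraction this commit introduces. The names (BytesRef0, ByteArrayRef) are hypothetical, simplified stand-ins, not the actual interface from the commit; the point is the pattern: one reference replaces the (byte[], offset, length) triplets and BytesHolder objects the old code passed around, and slicing becomes a cheap view instead of a copy.

    // Hedged sketch, not the real BytesReference API.
    interface BytesRef0 {
        byte get(int index);                    // random access, offset already applied
        int length();
        BytesRef0 slice(int from, int length);  // view into the same buffer, no copy
        BytesRef0 copyBytesArray();             // materialize "unsafe" (reused) buffers
        byte[] toBytes();                       // copy out only when a bare array is required
    }

    final class ByteArrayRef implements BytesRef0 {
        private final byte[] bytes;
        private final int offset;
        private final int length;

        ByteArrayRef(byte[] bytes, int offset, int length) {
            this.bytes = bytes;
            this.offset = offset;
            this.length = length;
        }

        public byte get(int index) { return bytes[offset + index]; }
        public int length() { return length; }

        public BytesRef0 slice(int from, int len) {
            return new ByteArrayRef(bytes, offset + from, len);  // shares the array
        }

        public BytesRef0 copyBytesArray() {
            byte[] copy = new byte[length];
            System.arraycopy(bytes, offset, copy, 0, length);
            return new ByteArrayRef(copy, 0, length);
        }

        public byte[] toBytes() {
            if (offset == 0 && length == bytes.length) {
                return bytes;                    // already exact, no copy needed
            }
            byte[] copy = new byte[length];
            System.arraycopy(bytes, offset, copy, 0, length);
            return copy;
        }
    }

The recurring edits below — querySource/querySourceOffset/querySourceLength collapsing into a single querySource field, Arrays.copyOfRange becoming copyBytesArray() on unsafe buffers, and createParser(source, offset, length) becoming createParser(source) — are all instances of this substitution.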
CreateIndexRequest.java
@@ -26,6 +26,8 @@ import org.elasticsearch.ElasticSearchParseException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -238,11 +240,7 @@ public class CreateIndexRequest extends MasterNodeOperationRequest {
      * Sets the settings and mappings as a single source.
      */
     public CreateIndexRequest source(XContentBuilder source) {
-        try {
-            return source(source.underlyingBytes(), 0, source.underlyingBytesLength());
-        } catch (IOException e) {
-            throw new ElasticSearchParseException("failed to parse source to create index", e);
-        }
+        return source(source.bytes());
     }
 
     /**
@@ -252,19 +250,23 @@ public class CreateIndexRequest extends MasterNodeOperationRequest {
         return source(source, 0, source.length);
     }
 
+    public CreateIndexRequest source(byte[] source, int offset, int length) {
+        return source(new BytesArray(source, offset, length));
+    }
+
     /**
      * Sets the settings and mappings as a single source.
      */
-    public CreateIndexRequest source(byte[] source, int offset, int length) {
-        XContentType xContentType = XContentFactory.xContentType(source, offset, length);
+    public CreateIndexRequest source(BytesReference source) {
+        XContentType xContentType = XContentFactory.xContentType(source);
         if (xContentType != null) {
             try {
-                source(XContentFactory.xContent(xContentType).createParser(source, offset, length).mapAndClose());
+                source(XContentFactory.xContent(xContentType).createParser(source).mapAndClose());
             } catch (IOException e) {
                 throw new ElasticSearchParseException("failed to parse source for create index", e);
             }
         } else {
-            settings(new String(source, offset, length, Charsets.UTF_8));
+            settings(new String(source.toBytes(), Charsets.UTF_8));
         }
         return this;
     }
CreateIndexRequestBuilder.java
@@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.support.BaseIndicesRequestBuilder;
 import org.elasticsearch.client.IndicesAdminClient;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -136,6 +137,14 @@ public class CreateIndexRequestBuilder extends BaseIndicesRequestBuilder<CreateI
         return this;
     }
 
+    /**
+     * Sets the settings and mappings as a single source.
+     */
+    public CreateIndexRequestBuilder setSource(BytesReference source) {
+        request.source(source);
+        return this;
+    }
+
     /**
      * Sets the settings and mappings as a single source.
     */
PutIndexTemplateRequest.java
@@ -25,6 +25,7 @@ import org.elasticsearch.ElasticSearchParseException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -247,7 +248,7 @@ public class PutIndexTemplateRequest extends MasterNodeOperationRequest {
      */
     public PutIndexTemplateRequest source(XContentBuilder templateBuilder) {
         try {
-            return source(templateBuilder.underlyingBytes(), 0, templateBuilder.underlyingBytesLength());
+            return source(templateBuilder.bytes());
         } catch (Exception e) {
             throw new ElasticSearchIllegalArgumentException("Failed to build json for template request", e);
         }
@@ -321,6 +322,16 @@ public class PutIndexTemplateRequest extends MasterNodeOperationRequest {
         }
     }
 
+    /**
+     * The template source definition.
+     */
+    public PutIndexTemplateRequest source(BytesReference source) {
+        try {
+            return source(XContentFactory.xContent(source).createParser(source).mapOrderedAndClose());
+        } catch (IOException e) {
+            throw new ElasticSearchIllegalArgumentException("failed to parse template source", e);
+        }
+    }
+
     public PutIndexTemplateRequest custom(IndexMetaData.Custom custom) {
         customs.put(custom.type(), custom);
PutIndexTemplateRequestBuilder.java
@@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.template.put;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.support.BaseIndicesRequestBuilder;
 import org.elasticsearch.client.IndicesAdminClient;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -163,6 +164,14 @@ public class PutIndexTemplateRequestBuilder extends BaseIndicesRequestBuilder<Pu
         return this;
     }
 
+    /**
+     * The template source definition.
+     */
+    public PutIndexTemplateRequestBuilder setSource(BytesReference templateSource) {
+        request.source(templateSource);
+        return this;
+    }
+
     /**
      * The template source definition.
     */
ShardValidateQueryRequest.java
@@ -20,9 +20,9 @@
 package org.elasticsearch.action.admin.indices.validate.query;
 
 import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest;
-import org.elasticsearch.common.BytesHolder;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 
@@ -33,10 +33,10 @@ import java.io.IOException;
  */
 class ShardValidateQueryRequest extends BroadcastShardOperationRequest {
 
-    private BytesHolder querySource;
+    private BytesReference querySource;
 
     private String[] types = Strings.EMPTY_ARRAY;
 
     private boolean explain;
 
     @Nullable
@@ -54,14 +54,14 @@ class ShardValidateQueryRequest extends BroadcastShardOperationRequest {
         this.filteringAliases = filteringAliases;
     }
 
-    public BytesHolder querySource() {
+    public BytesReference querySource() {
         return querySource;
     }
 
     public String[] types() {
         return this.types;
     }
 
     public boolean explain() {
         return this.explain;
     }
@@ -96,7 +96,7 @@ class ShardValidateQueryRequest extends BroadcastShardOperationRequest {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeBytesHolder(querySource);
+        out.writeBytesReference(querySource, true);
 
         out.writeVInt(types.length);
         for (String type : types) {
TransportValidateQueryAction.java
@@ -136,8 +136,8 @@ public class TransportValidateQueryAction extends TransportBroadcastOperationAct
             } else {
                 ShardValidateQueryResponse validateQueryResponse = (ShardValidateQueryResponse) shardResponse;
                 valid = valid && validateQueryResponse.valid();
-                if(request.explain()) {
-                    if(queryExplanations == null) {
+                if (request.explain()) {
+                    if (queryExplanations == null) {
                         queryExplanations = newArrayList();
                     }
                     queryExplanations.add(new QueryExplanation(
@@ -170,9 +170,9 @@ public class TransportValidateQueryAction extends TransportBroadcastOperationAct
                     null, indexShard.searcher(), indexService, indexShard,
                     scriptService));
             try {
-                ParsedQuery parsedQuery = queryParserService.parse(request.querySource().bytes(), request.querySource().offset(), request.querySource().length());
+                ParsedQuery parsedQuery = queryParserService.parse(request.querySource());
                 valid = true;
-                if(request.explain()) {
+                if (request.explain()) {
                     explanation = parsedQuery.query().toString();
                 }
             } catch (QueryParsingException e) {
ValidateQueryRequest.java
@@ -19,21 +19,20 @@
 
 package org.elasticsearch.action.admin.indices.validate.query;
 
-import org.apache.lucene.util.UnicodeUtil;
 import org.elasticsearch.ElasticSearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest;
 import org.elasticsearch.action.support.broadcast.BroadcastOperationThreading;
 import org.elasticsearch.client.Requests;
-import org.elasticsearch.common.BytesHolder;
 import org.elasticsearch.common.Required;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.Unicode;
-import org.elasticsearch.common.io.BytesStream;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
 
@@ -51,11 +50,9 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
 
     private static final XContentType contentType = Requests.CONTENT_TYPE;
 
-    private byte[] querySource;
-    private int querySourceOffset;
-    private int querySourceLength;
+    private BytesReference querySource;
     private boolean querySourceUnsafe;
 
     private boolean explain;
 
     private String[] types = Strings.EMPTY_ARRAY;
@@ -89,8 +86,7 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
     @Override
     protected void beforeStart() {
         if (querySourceUnsafe) {
-            querySource = Arrays.copyOfRange(querySource, querySourceOffset, querySourceOffset + querySourceLength);
-            querySourceOffset = 0;
+            querySource = querySource.copyBytesArray();
             querySourceUnsafe = false;
         }
     }
@@ -112,8 +108,8 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
     /**
      * The query source to execute.
      */
-    BytesHolder querySource() {
-        return new BytesHolder(querySource, querySourceOffset, querySourceLength);
+    BytesReference querySource() {
+        return querySource;
     }
 
     /**
@@ -123,10 +119,7 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
      */
     @Required
     public ValidateQueryRequest query(QueryBuilder queryBuilder) {
-        BytesStream bos = queryBuilder.buildAsBytes();
-        this.querySource = bos.underlyingBytes();
-        this.querySourceOffset = 0;
-        this.querySourceLength = bos.size();
+        this.querySource = queryBuilder.buildAsBytes();
         this.querySourceUnsafe = false;
         return this;
     }
@@ -147,15 +140,9 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
 
     @Required
     public ValidateQueryRequest query(XContentBuilder builder) {
-        try {
-            this.querySource = builder.underlyingBytes();
-            this.querySourceOffset = 0;
-            this.querySourceLength = builder.underlyingBytesLength();
-            this.querySourceUnsafe = false;
-            return this;
-        } catch (IOException e) {
-            throw new ElasticSearchGenerationException("Failed to generate [" + builder + "]", e);
-        }
+        this.querySource = builder.bytes();
+        this.querySourceUnsafe = false;
+        return this;
     }
 
     /**
@@ -164,11 +151,9 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
      */
     @Required
     public ValidateQueryRequest query(String querySource) {
-        UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(querySource);
-        this.querySource = result.result;
-        this.querySourceOffset = 0;
-        this.querySourceLength = result.length;
-        this.querySourceUnsafe = true;
+        this.querySource = new BytesArray(querySource);
+        ;
+        this.querySourceUnsafe = false;
         return this;
     }
 
@@ -185,9 +170,15 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
      */
     @Required
     public ValidateQueryRequest query(byte[] querySource, int offset, int length, boolean unsafe) {
+        return query(new BytesArray(querySource, offset, length), unsafe);
+    }
+
+    /**
+     * The query source to validate.
+     */
+    @Required
+    public ValidateQueryRequest query(BytesReference querySource, boolean unsafe) {
         this.querySource = querySource;
-        this.querySourceOffset = offset;
-        this.querySourceLength = length;
         this.querySourceUnsafe = unsafe;
         return this;
     }
@@ -225,11 +216,8 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
 
-        BytesHolder bytes = in.readBytesReference();
         querySourceUnsafe = false;
-        querySource = bytes.bytes();
-        querySourceOffset = bytes.offset();
-        querySourceLength = bytes.length();
+        querySource = in.readBytesReference();
 
         int typesSize = in.readVInt();
         if (typesSize > 0) {
@@ -240,25 +228,31 @@ public class ValidateQueryRequest extends BroadcastOperationRequest {
         }
 
         explain = in.readBoolean();
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
 
-        out.writeBytesHolder(querySource, querySourceOffset, querySourceLength);
+        out.writeBytesReference(querySource, true);
 
         out.writeVInt(types.length);
         for (String type : types) {
             out.writeUTF(type);
         }
 
         out.writeBoolean(explain);
     }
 
     @Override
     public String toString() {
-        return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", querySource[" + Unicode.fromBytes(querySource, querySourceOffset, querySourceLength) + "], explain:" + explain;
+        String sSource = "_na_";
+        try {
+            sSource = XContentHelper.convertToJson(querySource, false);
+        } catch (Exception e) {
+            // ignore
+        }
+        return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", querySource[" + sSource + "], explain:" + explain;
     }
 }
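A pattern worth calling out in ValidateQueryRequest above (it repeats in CountRequest, DeleteByQueryRequest, and IndexRequest below): the querySourceUnsafe flag marks a reference whose underlying buffer may be reused by the caller, for example a network buffer, and beforeStart()/beforeLocalFork() materialize a private copy before the request crosses a thread boundary. A rough sketch of the convention, using the hypothetical BytesRef0 type from the sketch above:

    // Sketch of the unsafe-buffer convention used throughout these request classes.
    class QueryHolder {
        private BytesRef0 source;
        private boolean sourceUnsafe;

        void source(BytesRef0 source, boolean unsafe) {
            this.source = source;
            this.sourceUnsafe = unsafe;
        }

        // called before the request is handed to another thread
        void beforeStart() {
            if (sourceUnsafe) {
                source = source.copyBytesArray();  // own the bytes from here on
                sourceUnsafe = false;
            }
        }
    }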
ValidateQueryRequestBuilder.java
@@ -4,6 +4,7 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.support.BaseIndicesRequestBuilder;
 import org.elasticsearch.action.support.broadcast.BroadcastOperationThreading;
 import org.elasticsearch.client.IndicesAdminClient;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.query.QueryBuilder;
 
 /**
@@ -40,6 +41,26 @@ public class ValidateQueryRequestBuilder extends BaseIndicesRequestBuilder<Valid
         return this;
     }
 
+    /**
+     * The query source to validate.
+     *
+     * @see org.elasticsearch.index.query.QueryBuilders
+     */
+    public ValidateQueryRequestBuilder setQuery(BytesReference querySource) {
+        request.query(querySource, false);
+        return this;
+    }
+
+    /**
+     * The query source to validate.
+     *
+     * @see org.elasticsearch.index.query.QueryBuilders
+     */
+    public ValidateQueryRequestBuilder setQuery(BytesReference querySource, boolean unsafe) {
+        request.query(querySource, unsafe);
+        return this;
+    }
+
     /**
      * The query source to validate.
     *
TransportPutWarmerAction.java
@@ -30,7 +30,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.common.BytesHolder;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
@@ -108,11 +108,11 @@ public class TransportPutWarmerAction extends TransportMasterNodeOperationAction
         String[] concreteIndices = metaData.concreteIndices(request.searchRequest().indices());
 
-        BytesHolder source = null;
-        if (request.searchRequest().source() != null && request.searchRequest().source().length > 0) {
-            source = new BytesHolder(request.searchRequest().source(), request.searchRequest().sourceOffset(), request.searchRequest().sourceLength());
-        } else if (request.searchRequest().extraSource() != null && request.searchRequest().extraSource().length > 0) {
-            source = new BytesHolder(request.searchRequest().extraSource(), request.searchRequest().extraSourceOffset(), request.searchRequest().extraSourceLength());
+        BytesReference source = null;
+        if (request.searchRequest().source() != null && request.searchRequest().source().length() > 0) {
+            source = request.searchRequest().source();
+        } else if (request.searchRequest().extraSource() != null && request.searchRequest().extraSource().length() > 0) {
+            source = request.searchRequest().extraSource();
         }
 
         // now replace it on the metadata
BulkRequest.java
@@ -27,6 +27,8 @@ import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.replication.ReplicationType;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
@@ -78,6 +80,10 @@ public class BulkRequest implements ActionRequest {
         return this;
     }
 
+    public List<ActionRequest> requests() {
+        return this.requests;
+    }
+
     /**
      * Adds a framed data in binary format
      */
@@ -89,7 +95,16 @@ public class BulkRequest implements ActionRequest {
      * Adds a framed data in binary format
      */
     public BulkRequest add(byte[] data, int from, int length, boolean contentUnsafe, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception {
-        XContent xContent = XContentFactory.xContent(data, from, length);
+        return add(new BytesArray(data, from, length), contentUnsafe, defaultIndex, defaultType);
+    }
+
+    /**
+     * Adds a framed data in binary format
+     */
+    public BulkRequest add(BytesReference data, boolean contentUnsafe, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception {
+        XContent xContent = XContentFactory.xContent(data);
+        int from = 0;
+        int length = data.length();
         byte marker = xContent.streamSeparator();
         while (true) {
             int nextMarker = findNextMarker(marker, from, data, length);
@@ -97,7 +112,7 @@ public class BulkRequest implements ActionRequest {
                 break;
             }
             // now parse the action
-            XContentParser parser = xContent.createParser(data, from, nextMarker - from);
+            XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));
 
             try {
                 // move pointers
@@ -177,18 +192,18 @@ public class BulkRequest implements ActionRequest {
                 if ("index".equals(action)) {
                     if (opType == null) {
                         internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
-                                .source(data, from, nextMarker - from, contentUnsafe)
+                                .source(data.slice(from, nextMarker - from), contentUnsafe)
                                 .percolate(percolate));
                     } else {
                         internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
                                 .create("create".equals(opType))
-                                .source(data, from, nextMarker - from, contentUnsafe)
+                                .source(data.slice(from, nextMarker - from), contentUnsafe)
                                 .percolate(percolate));
                     }
                 } else if ("create".equals(action)) {
                     internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
                             .create(true)
                             .source(data.slice(from, nextMarker - from), contentUnsafe)
                             .percolate(percolate));
                 }
                 // move pointers
@@ -239,9 +254,9 @@ public class BulkRequest implements ActionRequest {
         return this.replicationType;
     }
 
-    private int findNextMarker(byte marker, int from, byte[] data, int length) {
+    private int findNextMarker(byte marker, int from, BytesReference data, int length) {
         for (int i = from; i < length; i++) {
-            if (data[i] == marker) {
+            if (data.get(i) == marker) {
                 return i;
             }
         }
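The BulkRequest change above is where the slicing pays off: findNextMarker() scans the reference directly via data.get(i), and each framed action/source is handed down as data.slice(from, nextMarker - from) — a view, not a copy. A hedged sketch of that frame-splitting pattern (hypothetical helper, not the BulkRequest code itself, again using the BytesRef0 type sketched earlier):

    // Sketch: splitting separator-framed payloads into zero-copy views.
    static java.util.List<BytesRef0> splitFrames(BytesRef0 data, byte marker) {
        java.util.List<BytesRef0> frames = new java.util.ArrayList<>();
        int from = 0;
        int length = data.length();
        while (from < length) {
            int next = from;
            while (next < length && data.get(next) != marker) {
                next++;                                     // scan without copying bytes out
            }
            if (next > from) {
                frames.add(data.slice(from, next - from));  // view into the shared buffer
            }
            from = next + 1;                                // skip the separator
        }
        return frames;
    }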
TransportShardBulkAction.java
@@ -148,7 +148,7 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
                     }
                 }
 
-                SourceToParse sourceToParse = SourceToParse.source(indexRequest.underlyingSource(), indexRequest.underlyingSourceOffset(), indexRequest.underlyingSourceLength()).type(indexRequest.type()).id(indexRequest.id())
+                SourceToParse sourceToParse = SourceToParse.source(indexRequest.source()).type(indexRequest.type()).id(indexRequest.id())
                         .routing(indexRequest.routing()).parent(indexRequest.parent()).timestamp(indexRequest.timestamp()).ttl(indexRequest.ttl());
 
                 long version;
@@ -292,7 +292,7 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
                 if (item.request() instanceof IndexRequest) {
                     IndexRequest indexRequest = (IndexRequest) item.request();
                     try {
-                        SourceToParse sourceToParse = SourceToParse.source(indexRequest.underlyingSource(), indexRequest.underlyingSourceOffset(), indexRequest.underlyingSourceLength()).type(indexRequest.type()).id(indexRequest.id())
+                        SourceToParse sourceToParse = SourceToParse.source(indexRequest.source()).type(indexRequest.type()).id(indexRequest.id())
                                 .routing(indexRequest.routing()).parent(indexRequest.parent()).timestamp(indexRequest.timestamp()).ttl(indexRequest.ttl());
 
                         if (indexRequest.opType() == IndexRequest.OpType.INDEX) {
CountRequest.java
@@ -19,18 +19,21 @@
 
 package org.elasticsearch.action.count;
 
-import org.apache.lucene.util.UnicodeUtil;
 import org.elasticsearch.ElasticSearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest;
 import org.elasticsearch.action.support.broadcast.BroadcastOperationThreading;
 import org.elasticsearch.client.Requests;
-import org.elasticsearch.common.*;
-import org.elasticsearch.common.io.BytesStream;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Required;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
 
@@ -62,9 +65,7 @@ public class CountRequest extends BroadcastOperationRequest {
     @Nullable
     protected String routing;
 
-    private byte[] querySource;
-    private int querySourceOffset;
-    private int querySourceLength;
+    private BytesReference querySource;
     private boolean querySourceUnsafe;
 
     private String[] types = Strings.EMPTY_ARRAY;
@@ -103,8 +104,7 @@ public class CountRequest extends BroadcastOperationRequest {
     @Override
     protected void beforeStart() {
         if (querySourceUnsafe) {
-            querySource = Arrays.copyOfRange(querySource, querySourceOffset, querySourceOffset + querySourceLength);
-            querySourceOffset = 0;
+            querySource = querySource.copyBytesArray();
            querySourceUnsafe = false;
         }
     }
@@ -150,18 +150,10 @@ public class CountRequest extends BroadcastOperationRequest {
     /**
      * The query source to execute.
      */
-    byte[] querySource() {
+    BytesReference querySource() {
         return querySource;
     }
 
-    int querySourceOffset() {
-        return querySourceOffset;
-    }
-
-    int querySourceLength() {
-        return querySourceLength;
-    }
-
     /**
      * The query source to execute.
      *
@@ -169,10 +161,7 @@ public class CountRequest extends BroadcastOperationRequest {
      */
     @Required
     public CountRequest query(QueryBuilder queryBuilder) {
-        BytesStream bos = queryBuilder.buildAsBytes();
-        this.querySource = bos.underlyingBytes();
-        this.querySourceOffset = 0;
-        this.querySourceLength = bos.size();
+        this.querySource = queryBuilder.buildAsBytes();
         this.querySourceUnsafe = false;
         return this;
     }
@@ -193,15 +182,9 @@ public class CountRequest extends BroadcastOperationRequest {
 
     @Required
     public CountRequest query(XContentBuilder builder) {
-        try {
-            this.querySource = builder.underlyingBytes();
-            this.querySourceOffset = 0;
-            this.querySourceLength = builder.underlyingBytesLength();
-            this.querySourceUnsafe = false;
-            return this;
-        } catch (IOException e) {
-            throw new ElasticSearchGenerationException("Failed to generate [" + builder + "]", e);
-        }
+        this.querySource = builder.bytes();
+        this.querySourceUnsafe = false;
+        return this;
     }
 
     /**
@@ -210,11 +193,8 @@ public class CountRequest extends BroadcastOperationRequest {
      */
     @Required
     public CountRequest query(String querySource) {
-        UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(querySource);
-        this.querySource = result.result;
-        this.querySourceOffset = 0;
-        this.querySourceLength = result.length;
-        this.querySourceUnsafe = true;
+        this.querySource = new BytesArray(querySource);
+        this.querySourceUnsafe = false;
         return this;
     }
 
@@ -231,9 +211,12 @@ public class CountRequest extends BroadcastOperationRequest {
      */
     @Required
     public CountRequest query(byte[] querySource, int offset, int length, boolean unsafe) {
+        return query(new BytesArray(querySource, offset, length), unsafe);
+    }
+
+    @Required
+    public CountRequest query(BytesReference querySource, boolean unsafe) {
         this.querySource = querySource;
-        this.querySourceOffset = offset;
-        this.querySourceLength = length;
         this.querySourceUnsafe = unsafe;
         return this;
     }
@@ -288,11 +271,8 @@ public class CountRequest extends BroadcastOperationRequest {
             routing = in.readUTF();
         }
 
-        BytesHolder bytes = in.readBytesReference();
         querySourceUnsafe = false;
-        querySource = bytes.bytes();
-        querySourceOffset = bytes.offset();
-        querySourceLength = bytes.length();
+        querySource = in.readBytesReference();
 
         int typesSize = in.readVInt();
         if (typesSize > 0) {
@@ -321,7 +301,7 @@ public class CountRequest extends BroadcastOperationRequest {
             out.writeUTF(routing);
         }
 
-        out.writeBytesHolder(querySource, querySourceOffset, querySourceLength());
+        out.writeBytesReference(querySource, true);
 
         out.writeVInt(types.length);
         for (String type : types) {
@@ -331,6 +311,12 @@ public class CountRequest extends BroadcastOperationRequest {
 
     @Override
     public String toString() {
-        return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", querySource[" + Unicode.fromBytes(querySource) + "]";
+        String sSource = "_na_";
+        try {
+            sSource = XContentHelper.convertToJson(querySource, false);
+        } catch (Exception e) {
+            // ignore
+        }
+        return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", querySource[" + sSource + "]";
     }
 }
CountRequestBuilder.java
@@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.BaseRequestBuilder;
 import org.elasticsearch.action.support.broadcast.BroadcastOperationThreading;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.query.QueryBuilder;
 
 /**
@@ -93,6 +94,26 @@ public class CountRequestBuilder extends BaseRequestBuilder<CountRequest, CountR
         return this;
     }
 
+    /**
+     * The query source to execute.
+     *
+     * @see org.elasticsearch.index.query.QueryBuilders
+     */
+    public CountRequestBuilder setQuery(BytesReference querySource) {
+        request.query(querySource, false);
+        return this;
+    }
+
+    /**
+     * The query source to execute.
+     *
+     * @see org.elasticsearch.index.query.QueryBuilders
+     */
+    public CountRequestBuilder setQuery(BytesReference querySource, boolean unsafe) {
+        request.query(querySource, unsafe);
+        return this;
+    }
+
     /**
      * The query source to execute.
     *
ShardCountRequest.java
@@ -20,9 +20,9 @@
 package org.elasticsearch.action.count;
 
 import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest;
-import org.elasticsearch.common.BytesHolder;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 
@@ -35,7 +35,7 @@ class ShardCountRequest extends BroadcastShardOperationRequest {
 
     private float minScore;
 
-    private byte[] querySource;
+    private BytesReference querySource;
     private int querySourceOffset;
     private int querySourceLength;
 
@@ -52,8 +52,6 @@ class ShardCountRequest extends BroadcastShardOperationRequest {
         super(index, shardId);
         this.minScore = request.minScore();
         this.querySource = request.querySource();
-        this.querySourceOffset = request.querySourceOffset();
-        this.querySourceLength = request.querySourceLength();
         this.types = request.types();
         this.filteringAliases = filteringAliases;
     }
@@ -62,18 +60,10 @@ class ShardCountRequest extends BroadcastShardOperationRequest {
         return minScore;
     }
 
-    public byte[] querySource() {
+    public BytesReference querySource() {
         return querySource;
     }
 
-    public int querySourceOffset() {
-        return querySourceOffset;
-    }
-
-    public int querySourceLength() {
-        return querySourceLength;
-    }
-
     public String[] types() {
         return this.types;
     }
@@ -87,10 +77,7 @@ class ShardCountRequest extends BroadcastShardOperationRequest {
         super.readFrom(in);
         minScore = in.readFloat();
 
-        BytesHolder bytes = in.readBytesReference();
-        querySource = bytes.bytes();
-        querySourceOffset = bytes.offset();
-        querySourceLength = bytes.length();
+        querySource = in.readBytesReference();
 
         int typesSize = in.readVInt();
         if (typesSize > 0) {
@@ -113,7 +100,7 @@ class ShardCountRequest extends BroadcastShardOperationRequest {
         super.writeTo(out);
         out.writeFloat(minScore);
 
-        out.writeBytesHolder(querySource, querySourceOffset, querySourceLength);
+        out.writeBytesReference(querySource, true);
 
         out.writeVInt(types.length);
         for (String type : types) {
TransportCountAction.java
@@ -131,7 +131,7 @@ public class TransportCountAction extends TransportBroadcastOperationAction<Coun
     @Override
     protected ShardCountResponse shardOperation(ShardCountRequest request) throws ElasticSearchException {
         IndexShard indexShard = indicesService.indexServiceSafe(request.index()).shardSafe(request.shardId());
-        long count = indexShard.count(request.minScore(), request.querySource(), request.querySourceOffset(), request.querySourceLength(),
+        long count = indexShard.count(request.minScore(), request.querySource(),
                 request.filteringAliases(), request.types());
         return new ShardCountResponse(request.index(), request.shardId(), count);
     }
DeleteByQueryRequest.java
@@ -19,20 +19,24 @@
 
 package org.elasticsearch.action.deletebyquery;
 
-import org.apache.lucene.util.UnicodeUtil;
+import com.google.common.base.Charsets;
 import org.elasticsearch.ElasticSearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.WriteConsistencyLevel;
 import org.elasticsearch.action.support.replication.IndicesReplicationOperationRequest;
 import org.elasticsearch.action.support.replication.ReplicationType;
 import org.elasticsearch.client.Requests;
-import org.elasticsearch.common.*;
-import org.elasticsearch.common.io.BytesStream;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Required;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
 
@@ -57,9 +61,7 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
 
     private static final XContentType contentType = Requests.CONTENT_TYPE;
 
-    private byte[] querySource;
-    private int querySourceOffset;
-    private int querySourceLength;
+    private BytesReference querySource;
     private boolean querySourceUnsafe;
 
     private String[] types = Strings.EMPTY_ARRAY;
@@ -106,13 +108,11 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
     /**
      * The query source to execute.
      */
-    BytesHolder querySource() {
+    BytesReference querySource() {
         if (querySourceUnsafe) {
-            querySource = Arrays.copyOfRange(querySource, querySourceOffset, querySourceOffset + querySourceLength);
-            querySourceOffset = 0;
             querySourceUnsafe = false;
+            querySource = querySource.copyBytesArray();
         }
-        return new BytesHolder(querySource, querySourceOffset, querySourceLength);
+        return querySource;
     }
 
     /**
@@ -122,10 +122,7 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
      */
     @Required
     public DeleteByQueryRequest query(QueryBuilder queryBuilder) {
-        BytesStream bos = queryBuilder.buildAsBytes();
-        this.querySource = bos.underlyingBytes();
-        this.querySourceOffset = 0;
-        this.querySourceLength = bos.size();
+        this.querySource = queryBuilder.buildAsBytes();
         this.querySourceUnsafe = false;
         return this;
     }
@@ -136,11 +133,8 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
      */
     @Required
     public DeleteByQueryRequest query(String querySource) {
-        UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(querySource);
-        this.querySource = result.result;
-        this.querySourceOffset = 0;
-        this.querySourceLength = result.length;
-        this.querySourceUnsafe = true;
+        this.querySource = new BytesArray(querySource.getBytes(Charsets.UTF_8));
+        this.querySourceUnsafe = false;
        return this;
     }
 
@@ -160,15 +154,9 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
 
     @Required
     public DeleteByQueryRequest query(XContentBuilder builder) {
-        try {
-            this.querySource = builder.underlyingBytes();
-            this.querySourceOffset = 0;
-            this.querySourceLength = builder.underlyingBytesLength();
-            this.querySourceUnsafe = false;
-            return this;
-        } catch (IOException e) {
-            throw new ElasticSearchGenerationException("Failed to generate [" + builder + "]", e);
-        }
+        this.querySource = builder.bytes();
+        this.querySourceUnsafe = false;
+        return this;
     }
 
     /**
@@ -184,9 +172,13 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
      */
     @Required
     public DeleteByQueryRequest query(byte[] querySource, int offset, int length, boolean unsafe) {
-        this.querySource = querySource;
-        this.querySourceOffset = offset;
-        this.querySourceLength = length;
+        this.querySource = new BytesArray(querySource, offset, length);
         this.querySourceUnsafe = unsafe;
         return this;
     }
+
+    public DeleteByQueryRequest query(BytesReference source, boolean unsafe) {
+        this.querySource = source;
+        this.querySourceUnsafe = unsafe;
+        return this;
+    }
@@ -270,11 +262,8 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
 
-        BytesHolder bytes = in.readBytesReference();
         querySourceUnsafe = false;
-        querySource = bytes.bytes();
-        querySourceOffset = bytes.offset();
-        querySourceLength = bytes.length();
+        querySource = in.readBytesReference();
 
         if (in.readBoolean()) {
             routing = in.readUTF();
@@ -294,7 +283,7 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
 
-        out.writeBytesHolder(querySource, querySourceOffset, querySourceLength);
+        out.writeBytesReference(querySource, true);
 
         if (routing == null) {
             out.writeBoolean(false);
@@ -311,6 +300,12 @@ public class DeleteByQueryRequest extends IndicesReplicationOperationRequest {
 
     @Override
     public String toString() {
-        return "[" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "], querySource[" + Unicode.fromBytes(querySource) + "]";
+        String sSource = "_na_";
+        try {
+            sSource = XContentHelper.convertToJson(querySource, false);
+        } catch (Exception e) {
+            // ignore
+        }
+        return "[" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "], querySource[" + sSource + "]";
     }
 }
DeleteByQueryRequestBuilder.java
@@ -24,6 +24,7 @@ import org.elasticsearch.action.WriteConsistencyLevel;
 import org.elasticsearch.action.support.BaseRequestBuilder;
 import org.elasticsearch.action.support.replication.ReplicationType;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -115,6 +116,22 @@ public class DeleteByQueryRequestBuilder extends BaseRequestBuilder<DeleteByQuer
         return this;
     }
 
+    /**
+     * The query source to execute.
+     */
+    public DeleteByQueryRequestBuilder setQuery(BytesReference querySource) {
+        request.query(querySource, false);
+        return this;
+    }
+
+    /**
+     * The query source to execute.
+     */
+    public DeleteByQueryRequestBuilder setQuery(BytesReference querySource, boolean unsafe) {
+        request.query(querySource, unsafe);
+        return this;
+    }
+
     /**
      * The query source to execute.
     */
IndexDeleteByQueryRequest.java
@@ -22,9 +22,9 @@ package org.elasticsearch.action.deletebyquery;
 import gnu.trove.set.hash.THashSet;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.replication.IndexReplicationOperationRequest;
-import org.elasticsearch.common.BytesHolder;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
@@ -39,7 +39,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  */
 public class IndexDeleteByQueryRequest extends IndexReplicationOperationRequest {
 
-    private BytesHolder querySource;
+    private BytesReference querySource;
     private String[] types = Strings.EMPTY_ARRAY;
     @Nullable
     private Set<String> routing;
@@ -60,7 +60,7 @@ public class IndexDeleteByQueryRequest extends IndexReplicationOperationRequest
     IndexDeleteByQueryRequest() {
     }
 
-    BytesHolder querySource() {
+    BytesReference querySource() {
         return querySource;
     }
 
@@ -118,7 +118,7 @@ public class IndexDeleteByQueryRequest extends IndexReplicationOperationRequest
 
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeBytesHolder(querySource);
+        out.writeBytesReference(querySource, true);
         out.writeVInt(types.length);
         for (String type : types) {
             out.writeUTF(type);
ShardDeleteByQueryRequest.java
@@ -22,12 +22,12 @@ package org.elasticsearch.action.deletebyquery;
 import gnu.trove.set.hash.THashSet;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.replication.ShardReplicationOperationRequest;
-import org.elasticsearch.common.BytesHolder;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.Unicode;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.XContentHelper;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -41,7 +41,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
 public class ShardDeleteByQueryRequest extends ShardReplicationOperationRequest {
 
     private int shardId;
-    private BytesHolder querySource;
+    private BytesReference querySource;
     private String[] types = Strings.EMPTY_ARRAY;
     @Nullable
     private Set<String> routing;
@@ -76,7 +76,7 @@ public class ShardDeleteByQueryRequest extends ShardReplicationOperationRequest
         return this.shardId;
     }
 
-    BytesHolder querySource() {
+    BytesReference querySource() {
         return querySource;
     }
 
@@ -123,7 +123,7 @@ public class ShardDeleteByQueryRequest extends ShardReplicationOperationRequest
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeBytesHolder(querySource);
+        out.writeBytesReference(querySource, true);
         out.writeVInt(shardId);
         out.writeVInt(types.length);
         for (String type : types) {
@@ -151,7 +151,7 @@ public class ShardDeleteByQueryRequest extends ShardReplicationOperationRequest
     public String toString() {
         String sSource = "_na_";
         try {
-            sSource = Unicode.fromBytes(querySource.bytes(), querySource.offset(), querySource.length());
+            sSource = XContentHelper.convertToJson(querySource, false);
         } catch (Exception e) {
             // ignore
         }
GetResponse.java
@@ -21,7 +21,7 @@
 package org.elasticsearch.action.get;
 
 import org.elasticsearch.ElasticSearchParseException;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.BytesHolder;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
@@ -138,14 +138,14 @@ public class GetResponse implements ActionResponse, Streamable, Iterable<GetFiel
     /**
      * Returns bytes reference, also un compress the source if needed.
      */
-    public BytesHolder sourceRef() {
+    public BytesReference sourceRef() {
         return getResult.sourceRef();
     }
 
     /**
      * Returns bytes reference, also un compress the source if needed.
     */
-    public BytesHolder getSourceAsBytesRef() {
+    public BytesReference getSourceAsBytesRef() {
         return sourceRef();
     }
 
IndexRequest.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.index;
 
-import org.apache.lucene.util.UnicodeUtil;
+import com.google.common.base.Charsets;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.ElasticSearchGenerationException;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
@@ -32,20 +32,19 @@ import org.elasticsearch.action.support.replication.ShardReplicationOperationReq
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.common.*;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Required;
+import org.elasticsearch.common.UUID;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.*;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Map;
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
@@ -121,9 +120,7 @@ public class IndexRequest extends ShardReplicationOperationRequest {
     private String timestamp;
     private long ttl = -1;
 
-    private byte[] source;
-    private int sourceOffset;
-    private int sourceLength;
+    private BytesReference source;
     private boolean sourceUnsafe;
 
     private OpType opType = OpType.INDEX;
@@ -177,9 +174,7 @@ public class IndexRequest extends ShardReplicationOperationRequest {
     @Override
     public void beforeLocalFork() {
         // only fork if copy over if source is unsafe
-        if (sourceUnsafe) {
-            source();
-        }
+        safeSource();
     }
 
     /**
@@ -319,44 +314,21 @@ public class IndexRequest extends ShardReplicationOperationRequest {
     }
 
     /**
-     * The source of the document to index, recopied to a new array if it has an offset or unsafe.
+     * The source of the document to index, recopied to a new array if it is unsage.
      */
-    public byte[] source() {
-        if (sourceUnsafe || sourceOffset > 0 || source.length != sourceLength) {
-            source = Arrays.copyOfRange(source, sourceOffset, sourceOffset + sourceLength);
-            sourceOffset = 0;
-            sourceUnsafe = false;
-        }
+    public BytesReference source() {
         return source;
     }
 
-    public BytesHolder underlyingSourceBytes() {
-        return new BytesHolder(underlyingSource(), underlyingSourceOffset(), underlyingSourceLength());
-    }
-
-    public Map<String, Object> underlyingSourceAsMap() {
-        return XContentHelper.convertToMap(underlyingSource(), underlyingSourceOffset(), underlyingSourceLength(), false).v2();
-    }
-
-    public byte[] underlyingSource() {
-        if (sourceUnsafe) {
-            source();
-        }
-        return this.source;
-    }
-
-    public int underlyingSourceOffset() {
-        if (sourceUnsafe) {
-            source();
-        }
-        return this.sourceOffset;
-    }
-
-    public int underlyingSourceLength() {
-        if (sourceUnsafe) {
-            source();
-        }
-        return this.sourceLength;
+    public BytesReference safeSource() {
+        if (sourceUnsafe) {
+            source = source.copyBytesArray();
+        }
+        return source;
+    }
+
+    public Map<String, Object> sourceAsMap() {
+        return XContentHelper.convertToMap(source, false).v2();
     }
 
     /**
@@ -393,11 +365,8 @@ public class IndexRequest extends ShardReplicationOperationRequest {
      */
     @Required
     public IndexRequest source(String source) {
-        UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(source);
-        this.source = result.result;
-        this.sourceOffset = 0;
-        this.sourceLength = result.length;
-        this.sourceUnsafe = true;
+        this.source = new BytesArray(source.getBytes(Charsets.UTF_8));
+        this.sourceUnsafe = false;
         return this;
     }
 
@@ -406,14 +375,8 @@ public class IndexRequest extends ShardReplicationOperationRequest {
      */
     @Required
     public IndexRequest source(XContentBuilder sourceBuilder) {
-        try {
-            source = sourceBuilder.underlyingBytes();
-            sourceOffset = 0;
-            sourceLength = sourceBuilder.underlyingBytesLength();
-            sourceUnsafe = false;
-        } catch (IOException e) {
-            throw new ElasticSearchGenerationException("Failed to generate [" + sourceBuilder + "]", e);
-        }
+        source = sourceBuilder.bytes();
+        sourceUnsafe = false;
         return this;
     }
 
@@ -461,6 +424,15 @@ public class IndexRequest extends ShardReplicationOperationRequest {
         }
     }
 
+    /**
+     * Sets the document to index in bytes form.
+     */
+    public IndexRequest source(BytesReference source, boolean unsafe) {
+        this.source = source;
+        this.sourceUnsafe = unsafe;
+        return this;
+    }
+
     /**
      * Sets the document to index in bytes form.
      */
@@ -491,9 +463,7 @@ public class IndexRequest extends ShardReplicationOperationRequest {
      */
     @Required
     public IndexRequest source(byte[] source, int offset, int length, boolean unsafe) {
-        this.source = source;
-        this.sourceOffset = offset;
-        this.sourceLength = length;
+        this.source = new BytesArray(source, offset, length);
         this.sourceUnsafe = unsafe;
         return this;
     }
@@ -647,7 +617,7 @@ public class IndexRequest extends ShardReplicationOperationRequest {
         if (parseContext.shouldParse()) {
             XContentParser parser = null;
             try {
-                parser = XContentFactory.xContent(source, sourceOffset, sourceLength).createParser(source, sourceOffset, sourceLength);
+                parser = XContentHelper.createParser(source);
                 mappingMd.parse(parser, parseContext);
                 if (parseContext.shouldParseId()) {
                     id = parseContext.id();
@@ -706,11 +676,8 @@ public class IndexRequest extends ShardReplicationOperationRequest {
             timestamp = in.readUTF();
         }
         ttl = in.readLong();
-        BytesHolder bytes = in.readBytesReference();
+        source = in.readBytesReference();
         sourceUnsafe = false;
-        source = bytes.bytes();
-        sourceOffset = bytes.offset();
-        sourceLength = bytes.length();
 
         opType = OpType.fromId(in.readByte());
         refresh = in.readBoolean();
@@ -750,7 +717,7 @@ public class IndexRequest extends ShardReplicationOperationRequest {
             out.writeUTF(timestamp);
         }
         out.writeLong(ttl);
-        out.writeBytesHolder(source, sourceOffset, sourceLength);
+        out.writeBytesReference(source, true);
         out.writeByte(opType.id());
         out.writeBoolean(refresh);
         out.writeLong(version);
@@ -767,7 +734,7 @@ public class IndexRequest extends ShardReplicationOperationRequest {
     public String toString() {
         String sSource = "_na_";
         try {
-            sSource = Unicode.fromBytes(source, sourceOffset, sourceLength);
+            sSource = XContentHelper.convertToJson(source, false);
         } catch (Exception e) {
             // ignore
         }
IndexRequestBuilder.java
@@ -25,6 +25,7 @@ import org.elasticsearch.action.support.BaseRequestBuilder;
 import org.elasticsearch.action.support.replication.ReplicationType;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -88,6 +89,22 @@ public class IndexRequestBuilder extends BaseRequestBuilder<IndexRequest, IndexR
         return this;
     }
 
+    /**
+     * Sets the source.
+     */
+    public IndexRequestBuilder setSource(BytesReference source, boolean unsafe) {
+        request.source(source, unsafe);
+        return this;
+    }
+
+    /**
+     * Sets the source.
+     */
+    public IndexRequestBuilder setSource(BytesReference source) {
+        request.source(source, false);
+        return this;
+    }
+
     /**
      * Index the Map as a JSON.
     *
@ -194,7 +194,7 @@ public class TransportIndexAction extends TransportShardReplicationOperationActi
}

IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
SourceToParse sourceToParse = SourceToParse.source(request.underlyingSource(), request.underlyingSourceOffset(), request.underlyingSourceLength()).type(request.type()).id(request.id())
SourceToParse sourceToParse = SourceToParse.source(request.source()).type(request.type()).id(request.id())
.routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
long version;
Engine.IndexingOperation op;
@ -251,7 +251,7 @@ public class TransportIndexAction extends TransportShardReplicationOperationActi
protected void shardOperationOnReplica(ReplicaOperationRequest shardRequest) {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
IndexRequest request = shardRequest.request;
SourceToParse sourceToParse = SourceToParse.source(request.underlyingSource(), request.underlyingSourceOffset(), request.underlyingSourceLength()).type(request.type()).id(request.id())
SourceToParse sourceToParse = SourceToParse.source(request.source()).type(request.type()).id(request.id())
.routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
if (request.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse)
@ -19,7 +19,6 @@

package org.elasticsearch.action.mlt;

import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.ElasticSearchGenerationException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.action.ActionRequest;
@ -27,11 +26,10 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Bytes;
import org.elasticsearch.common.Required;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -41,7 +39,6 @@ import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

import static org.elasticsearch.search.Scroll.readScroll;
@ -86,9 +83,7 @@ public class MoreLikeThisRequest implements ActionRequest {
private String[] searchTypes;
private Scroll searchScroll;

private byte[] searchSource;
private int searchSourceOffset;
private int searchSourceLength;
private BytesReference searchSource;
private boolean searchSourceUnsafe;

private boolean threadedListener = false;
@ -315,8 +310,7 @@ public class MoreLikeThisRequest implements ActionRequest {

void beforeLocalFork() {
if (searchSourceUnsafe) {
searchSource = Arrays.copyOfRange(searchSource, searchSourceOffset, searchSourceOffset + searchSourceLength);
searchSourceOffset = 0;
searchSource = searchSource.copyBytesArray();
searchSourceUnsafe = false;
}
}
@ -326,11 +320,8 @@ public class MoreLikeThisRequest implements ActionRequest {
* more like this documents.
*/
public MoreLikeThisRequest searchSource(SearchSourceBuilder sourceBuilder) {
BytesStream bos = sourceBuilder.buildAsBytesStream(Requests.CONTENT_TYPE);
this.searchSource = bos.underlyingBytes();
this.searchSourceOffset = 0;
this.searchSourceLength = bos.size();
this.searchSourceUnsafe = true;
this.searchSource = sourceBuilder.buildAsBytes(Requests.CONTENT_TYPE);
this.searchSourceUnsafe = false;
return this;
}

@ -339,11 +330,8 @@ public class MoreLikeThisRequest implements ActionRequest {
* more like this documents.
*/
public MoreLikeThisRequest searchSource(String searchSource) {
UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(searchSource);
this.searchSource = result.result;
this.searchSourceOffset = 0;
this.searchSourceLength = result.length;
this.searchSourceUnsafe = true;
this.searchSource = new BytesArray(searchSource);
this.searchSourceUnsafe = false;
return this;
}

@ -358,15 +346,9 @@ public class MoreLikeThisRequest implements ActionRequest {
}

public MoreLikeThisRequest searchSource(XContentBuilder builder) {
try {
this.searchSource = builder.underlyingBytes();
this.searchSourceOffset = 0;
this.searchSourceLength = builder.underlyingBytesLength();
this.searchSourceUnsafe = false;
return this;
} catch (IOException e) {
throw new ElasticSearchGenerationException("Failed to generate [" + builder + "]", e);
}
this.searchSource = builder.bytes();
this.searchSourceUnsafe = false;
return this;
}

/**
@ -382,9 +364,15 @@ public class MoreLikeThisRequest implements ActionRequest {
* more like this documents.
*/
public MoreLikeThisRequest searchSource(byte[] searchSource, int offset, int length, boolean unsafe) {
return searchSource(new BytesArray(searchSource, offset, length), unsafe);
}

/**
* An optional search source request allowing to control the search request for the
* more like this documents.
*/
public MoreLikeThisRequest searchSource(BytesReference searchSource, boolean unsafe) {
this.searchSource = searchSource;
this.searchSourceOffset = offset;
this.searchSourceLength = length;
this.searchSourceUnsafe = unsafe;
return this;
}
@ -393,18 +381,10 @@ public class MoreLikeThisRequest implements ActionRequest {
* An optional search source request allowing to control the search request for the
* more like this documents.
*/
public byte[] searchSource() {
public BytesReference searchSource() {
return this.searchSource;
}

public int searchSourceOffset() {
return searchSourceOffset;
}

public int searchSourceLength() {
return searchSourceLength;
}

public boolean searchSourceUnsafe() {
return searchSourceUnsafe;
}
@ -615,14 +595,7 @@ public class MoreLikeThisRequest implements ActionRequest {
}

searchSourceUnsafe = false;
searchSourceOffset = 0;
searchSourceLength = in.readVInt();
if (searchSourceLength == 0) {
searchSource = Bytes.EMPTY_ARRAY;
} else {
searchSource = new byte[searchSourceLength];
in.readFully(searchSource);
}
searchSource = in.readBytesReference();

searchSize = in.readVInt();
searchFrom = in.readVInt();
@ -688,12 +661,7 @@ public class MoreLikeThisRequest implements ActionRequest {
out.writeBoolean(true);
searchScroll.writeTo(out);
}
if (searchSource == null) {
out.writeVInt(0);
} else {
out.writeVInt(searchSourceLength);
out.writeBytes(searchSource, searchSourceOffset, searchSourceLength);
}
out.writeBytesReference(searchSource, true);

out.writeVInt(searchSize);
out.writeVInt(searchFrom);
@ -181,7 +181,7 @@ public class TransportMoreLikeThisAction extends TransportAction<MoreLikeThisReq
.listenerThreaded(request.listenerThreaded());

if (request.searchSource() != null) {
searchRequest.source(request.searchSource(), request.searchSourceOffset(), request.searchSourceLength(), request.searchSourceUnsafe());
searchRequest.source(request.searchSource(), request.searchSourceUnsafe());
}
searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
@Override
@ -208,7 +208,7 @@ public class TransportMoreLikeThisAction extends TransportAction<MoreLikeThisReq
if (getResponse.source() == null) {
return;
}
docMapper.parse(SourceToParse.source(getResponse.sourceRef().bytes(), getResponse.sourceRef().offset(), getResponse.sourceRef().length()).type(request.type()).id(request.id()), new DocumentMapper.ParseListenerAdapter() {
docMapper.parse(SourceToParse.source(getResponse.sourceRef()).type(request.type()).id(request.id()), new DocumentMapper.ParseListenerAdapter() {
@Override
public boolean beforeFieldAdded(FieldMapper fieldMapper, Fieldable field, Object parseContext) {
if (fieldMapper instanceof InternalMapper) {
@ -19,13 +19,12 @@

package org.elasticsearch.action.percolate;

import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.ElasticSearchGenerationException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.single.custom.SingleCustomOperationRequest;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Required;
import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -33,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

import static org.elasticsearch.action.ValidateActions.addValidationError;
@ -46,9 +44,7 @@ public class PercolateRequest extends SingleCustomOperationRequest {
private String index;
private String type;

private byte[] source;
private int sourceOffset;
private int sourceLength;
private BytesReference source;
private boolean sourceUnsafe;

public PercolateRequest() {
@ -90,40 +86,15 @@ public class PercolateRequest extends SingleCustomOperationRequest {
@Override
public void beforeLocalFork() {
if (sourceUnsafe) {
source();
}
}

public byte[] source() {
if (sourceUnsafe || sourceOffset > 0 || source.length != sourceLength) {
source = Arrays.copyOfRange(source, sourceOffset, sourceOffset + sourceLength);
sourceOffset = 0;
source = source.copyBytesArray();
sourceUnsafe = false;
}
}

public BytesReference source() {
return source;
}

public byte[] underlyingSource() {
if (sourceUnsafe) {
source();
}
return this.source;
}

public int underlyingSourceOffset() {
if (sourceUnsafe) {
source();
}
return this.sourceOffset;
}

public int underlyingSourceLength() {
if (sourceUnsafe) {
source();
}
return this.sourceLength;
}

@Required
public PercolateRequest source(Map source) throws ElasticSearchGenerationException {
return source(source, XContentType.SMILE);
@ -142,24 +113,15 @@ public class PercolateRequest extends SingleCustomOperationRequest {

@Required
public PercolateRequest source(String source) {
UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(source);
this.source = result.result;
this.sourceOffset = 0;
this.sourceLength = result.length;
this.sourceUnsafe = true;
this.source = new BytesArray(source);
this.sourceUnsafe = false;
return this;
}

@Required
public PercolateRequest source(XContentBuilder sourceBuilder) {
try {
source = sourceBuilder.underlyingBytes();
sourceOffset = 0;
sourceLength = sourceBuilder.underlyingBytesLength();
sourceUnsafe = false;
} catch (IOException e) {
throw new ElasticSearchGenerationException("Failed to generate [" + sourceBuilder + "]", e);
}
source = sourceBuilder.bytes();
sourceUnsafe = false;
return this;
}

@ -174,9 +136,12 @@ public class PercolateRequest extends SingleCustomOperationRequest {

@Required
public PercolateRequest source(byte[] source, int offset, int length, boolean unsafe) {
return source(new BytesArray(source, offset, length), unsafe);
}

@Required
public PercolateRequest source(BytesReference source, boolean unsafe) {
this.source = source;
this.sourceOffset = offset;
this.sourceLength = length;
this.sourceUnsafe = unsafe;
return this;
}
@ -212,11 +177,8 @@ public class PercolateRequest extends SingleCustomOperationRequest {
index = in.readUTF();
type = in.readUTF();

BytesHolder bytes = in.readBytesReference();
sourceUnsafe = false;
source = bytes.bytes();
sourceOffset = bytes.offset();
sourceLength = bytes.length();
source = in.readBytesReference();
}

@Override
@ -224,6 +186,6 @@ public class PercolateRequest extends SingleCustomOperationRequest {
super.writeTo(out);
out.writeUTF(index);
out.writeUTF(type);
out.writeBytesHolder(source, sourceOffset, sourceLength);
out.writeBytesReference(source, true);
}
}
@ -22,6 +22,7 @@ package org.elasticsearch.action.percolate;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.BaseRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;

@ -95,6 +96,22 @@ public class PercolateRequestBuilder extends BaseRequestBuilder<PercolateRequest
return this;
}

/**
* Sets the document to index in bytes form.
*/
public PercolateRequestBuilder setSource(BytesReference source) {
request.source(source, false);
return this;
}

/**
* Sets the document to index in bytes form.
*/
public PercolateRequestBuilder setSource(BytesReference source, boolean unsafe) {
request.source(source, unsafe);
return this;
}

/**
* Sets the document to index in bytes form.
*/
@ -90,7 +90,7 @@ public class TransportPercolateAction extends TransportSingleCustomOperationActi
IndexService indexService = indicesService.indexServiceSafe(request.index());
PercolatorService percolatorService = indexService.percolateService();

PercolatorExecutor.Response percolate = percolatorService.percolate(new PercolatorExecutor.SourceRequest(request.type(), request.underlyingSource(), request.underlyingSourceOffset(), request.underlyingSourceLength()));
PercolatorExecutor.Response percolate = percolatorService.percolate(new PercolatorExecutor.SourceRequest(request.type(), request.source()));
return new PercolateResponse(percolate.matches());
}
}
@ -24,6 +24,8 @@ import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContent;
@ -64,7 +66,14 @@ public class MultiSearchRequest implements ActionRequest {

public MultiSearchRequest add(byte[] data, int from, int length, boolean contentUnsafe,
@Nullable String[] indices, @Nullable String[] types, @Nullable String searchType) throws Exception {
XContent xContent = XContentFactory.xContent(data, from, length);
return add(new BytesArray(data, from, length), contentUnsafe, indices, types, searchType);
}

public MultiSearchRequest add(BytesReference data, boolean contentUnsafe,
@Nullable String[] indices, @Nullable String[] types, @Nullable String searchType) throws Exception {
XContent xContent = XContentFactory.xContent(data);
int from = 0;
int length = data.length();
byte marker = xContent.streamSeparator();
while (true) {
int nextMarker = findNextMarker(marker, from, data, length);
@ -85,7 +94,7 @@ public class MultiSearchRequest implements ActionRequest {

// now parse the action
if (nextMarker - from > 0) {
XContentParser parser = xContent.createParser(data, from, nextMarker - from);
XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));
try {
// Move to START_OBJECT, if token is null, it's empty data
XContentParser.Token token = parser.nextToken();
@ -125,7 +134,7 @@ public class MultiSearchRequest implements ActionRequest {
break;
}

searchRequest.source(data, from, nextMarker - from, contentUnsafe);
searchRequest.source(data.slice(from, nextMarker - from), contentUnsafe);
// move pointers
from = nextMarker + 1;

@ -135,9 +144,9 @@ public class MultiSearchRequest implements ActionRequest {
return this;
}

private int findNextMarker(byte marker, int from, byte[] data, int length) {
private int findNextMarker(byte marker, int from, BytesReference data, int length) {
for (int i = from; i < length; i++) {
if (data[i] == marker) {
if (data.get(i) == marker) {
return i;
}
}
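A hedged sketch of what the slice-based scan above buys: each request is cut out of the incoming buffer as a zero-copy slice instead of a copied sub-array (SliceScanSketch is an illustrative name; only the BytesReference API from this commit is assumed):

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

public class SliceScanSketch {
    public static void main(String[] args) {
        BytesReference data = new BytesArray("{\"a\":1}\n{\"b\":2}\n");
        byte marker = '\n';
        int from = 0;
        while (from < data.length()) {
            // same scan as findNextMarker: byte-at-a-time via get(int)
            int next = from;
            while (next < data.length() && data.get(next) != marker) {
                next++;
            }
            // slice() shares the underlying buffer, no copy is made
            BytesReference doc = data.slice(from, next - from);
            System.out.println(new String(doc.toBytes()));
            from = next + 1;
        }
    }
}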
@ -19,17 +19,15 @@

package org.elasticsearch.action.search;

import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.ElasticSearchGenerationException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
@ -40,7 +38,6 @@ import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

import static org.elasticsearch.search.Scroll.readScroll;
@ -73,14 +70,10 @@ public class SearchRequest implements ActionRequest {
@Nullable
private String preference;

private byte[] source;
private int sourceOffset;
private int sourceLength;
private BytesReference source;
private boolean sourceUnsafe;

private byte[] extraSource;
private int extraSourceOffset;
private int extraSourceLength;
private BytesReference extraSource;
private boolean extraSourceUnsafe;

private Scroll scroll;
@ -106,7 +99,7 @@ public class SearchRequest implements ActionRequest {
*/
public SearchRequest(String[] indices, byte[] source) {
this.indices = indices;
this.source = source;
this.source = new BytesArray(source);
}

@Override
@ -123,13 +116,11 @@ public class SearchRequest implements ActionRequest {
// we always copy over if needed, the reason is that a request might fail while being searched remotely
// and then we need to keep the buffer around
if (source != null && sourceUnsafe) {
source = Arrays.copyOfRange(source, sourceOffset, sourceOffset + sourceLength);
sourceOffset = 0;
source = source.copyBytesArray();
sourceUnsafe = false;
}
if (extraSource != null && extraSourceUnsafe) {
extraSource = Arrays.copyOfRange(extraSource, extraSourceOffset, extraSourceOffset + extraSourceLength);
extraSourceOffset = 0;
extraSource = extraSource.copyBytesArray();
extraSourceUnsafe = false;
}
}
@ -263,10 +254,7 @@ public class SearchRequest implements ActionRequest {
* The source of the search request.
*/
public SearchRequest source(SearchSourceBuilder sourceBuilder) {
BytesStream bos = sourceBuilder.buildAsBytesStream(Requests.CONTENT_TYPE);
this.source = bos.underlyingBytes();
this.sourceOffset = 0;
this.sourceLength = bos.size();
this.source = sourceBuilder.buildAsBytes(contentType);
this.sourceUnsafe = false;
return this;
}
@ -276,11 +264,8 @@ public class SearchRequest implements ActionRequest {
* {@link #source(org.elasticsearch.search.builder.SearchSourceBuilder)}.
*/
public SearchRequest source(String source) {
UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(source);
this.source = result.result;
this.sourceOffset = 0;
this.sourceLength = result.length;
this.sourceUnsafe = true;
this.source = new BytesArray(source);
this.sourceUnsafe = false;
return this;
}

@ -298,15 +283,9 @@ public class SearchRequest implements ActionRequest {
}

public SearchRequest source(XContentBuilder builder) {
try {
this.source = builder.underlyingBytes();
this.sourceOffset = 0;
this.sourceLength = builder.underlyingBytesLength();
this.sourceUnsafe = false;
return this;
} catch (IOException e) {
throw new ElasticSearchGenerationException("Failed to generate [" + builder + "]", e);
}
this.source = builder.bytes();
this.sourceUnsafe = false;
return this;
}

/**
@ -328,9 +307,14 @@ public class SearchRequest implements ActionRequest {
* The search source to execute.
*/
public SearchRequest source(byte[] source, int offset, int length, boolean unsafe) {
return source(new BytesArray(source, offset, length), unsafe);
}

/**
* The search source to execute.
*/
public SearchRequest source(BytesReference source, boolean unsafe) {
this.source = source;
this.sourceOffset = offset;
this.sourceLength = length;
this.sourceUnsafe = unsafe;
return this;
}
@ -338,18 +322,10 @@ public class SearchRequest implements ActionRequest {
/**
* The search source to execute.
*/
public byte[] source() {
public BytesReference source() {
return source;
}

public int sourceOffset() {
return sourceOffset;
}

public int sourceLength() {
return sourceLength;
}

/**
* Allows to provide additional source that will be used as well.
*/
@ -358,10 +334,7 @@ public class SearchRequest implements ActionRequest {
extraSource = null;
return this;
}
BytesStream bos = sourceBuilder.buildAsBytesStream(Requests.CONTENT_TYPE);
this.extraSource = bos.underlyingBytes();
this.extraSourceOffset = 0;
this.extraSourceLength = bos.size();
this.extraSource = sourceBuilder.buildAsBytes(contentType);
this.extraSourceUnsafe = false;
return this;
}
@ -377,26 +350,17 @@ public class SearchRequest implements ActionRequest {
}

public SearchRequest extraSource(XContentBuilder builder) {
try {
this.extraSource = builder.underlyingBytes();
this.extraSourceOffset = 0;
this.extraSourceLength = builder.underlyingBytesLength();
this.extraSourceUnsafe = false;
return this;
} catch (IOException e) {
throw new ElasticSearchGenerationException("Failed to generate [" + builder + "]", e);
}
this.extraSource = builder.bytes();
this.extraSourceUnsafe = false;
return this;
}

/**
* Allows to provide additional source that will be used as well.
*/
public SearchRequest extraSource(String source) {
UnicodeUtil.UTF8Result result = Unicode.fromStringAsUtf8(source);
this.extraSource = result.result;
this.extraSourceOffset = 0;
this.extraSourceLength = result.length;
this.extraSourceUnsafe = true;
this.extraSource = new BytesArray(source);
this.extraSourceUnsafe = false;
return this;
}

@ -418,9 +382,14 @@ public class SearchRequest implements ActionRequest {
* Allows to provide additional source that will be used as well.
*/
public SearchRequest extraSource(byte[] source, int offset, int length, boolean unsafe) {
return extraSource(new BytesArray(source, offset, length), unsafe);
}

/**
* Allows to provide additional source that will be used as well.
*/
public SearchRequest extraSource(BytesReference source, boolean unsafe) {
this.extraSource = source;
this.extraSourceOffset = offset;
this.extraSourceLength = length;
this.extraSourceUnsafe = unsafe;
return this;
}
@ -428,18 +397,10 @@ public class SearchRequest implements ActionRequest {
/**
* Additional search source to execute.
*/
public byte[] extraSource() {
public BytesReference extraSource() {
return this.extraSource;
}

public int extraSourceOffset() {
return extraSourceOffset;
}

public int extraSourceLength() {
return extraSourceLength;
}

/**
* The type of search to execute.
*/
@ -522,17 +483,11 @@ public class SearchRequest implements ActionRequest {
scroll = readScroll(in);
}

BytesHolder bytes = in.readBytesReference();
sourceUnsafe = false;
source = bytes.bytes();
sourceOffset = bytes.offset();
sourceLength = bytes.length();
source = in.readBytesReference();

bytes = in.readBytesReference();
extraSourceUnsafe = false;
extraSource = bytes.bytes();
extraSourceOffset = bytes.offset();
extraSourceLength = bytes.length();
extraSource = in.readBytesReference();

int typesSize = in.readVInt();
if (typesSize > 0) {
@ -578,8 +533,8 @@ public class SearchRequest implements ActionRequest {
out.writeBoolean(true);
scroll.writeTo(out);
}
out.writeBytesHolder(source, sourceOffset, sourceLength);
out.writeBytesHolder(extraSource, extraSourceOffset, extraSourceLength);
out.writeBytesReference(source, true);
out.writeBytesReference(extraSource, true);
out.writeVInt(types.length);
for (String type : types) {
out.writeUTF(type);
@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.BaseRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
@ -200,6 +201,14 @@ public class SearchRequestBuilder extends BaseRequestBuilder<SearchRequest, Sear
return this;
}

/**
* Constructs a new search source builder with a raw search query.
*/
public SearchRequestBuilder setQuery(BytesReference queryBinary) {
sourceBuilder().query(queryBinary);
return this;
}

/**
* Constructs a new search source builder with a raw search query.
*/
@ -250,6 +259,15 @@ public class SearchRequestBuilder extends BaseRequestBuilder<SearchRequest, Sear
return this;
}

/**
* Sets a filter on the query executed that only applies to the search query
* (and not facets for example).
*/
public SearchRequestBuilder setFilter(BytesReference filter) {
sourceBuilder().filter(filter);
return this;
}

/**
* Sets a filter on the query executed that only applies to the search query
* (and not facets for example).
@ -476,6 +494,14 @@ public class SearchRequestBuilder extends BaseRequestBuilder<SearchRequest, Sear
return this;
}

/**
* Sets a raw (xcontent) binary representation of facets to use.
*/
public SearchRequestBuilder setFacets(BytesReference facets) {
sourceBuilder().facets(facets);
return this;
}

/**
* Sets a raw (xcontent) binary representation of facets to use.
*/
@ -636,6 +662,27 @@ public class SearchRequestBuilder extends BaseRequestBuilder<SearchRequest, Sear
return this;
}

/**
* Sets the source of the request as a json string. Note, setting anything other
* than the search type will cause this source to be overridden, consider using
* {@link #setExtraSource(BytesReference)}.
*/
public SearchRequestBuilder setSource(BytesReference source) {
request.source(source, false);
return this;
}

/**
* Sets the source of the request as a json string. Note, setting anything other
* than the search type will cause this source to be overridden, consider using
* {@link #setExtraSource(BytesReference)}.
*/
public SearchRequestBuilder setSource(BytesReference source, boolean unsafe) {
request.source(source, unsafe);
return this;
}

/**
* Sets the source of the request as a json string. Note, setting anything other
* than the search type will cause this source to be overridden, consider using
@ -646,6 +693,22 @@ public class SearchRequestBuilder extends BaseRequestBuilder<SearchRequest, Sear
return this;
}

/**
* Sets the source of the request as a json string. Allows to set other parameters.
*/
public SearchRequestBuilder setExtraSource(BytesReference source) {
request.extraSource(source, false);
return this;
}

/**
* Sets the source of the request as a json string. Allows to set other parameters.
*/
public SearchRequestBuilder setExtraSource(BytesReference source, boolean unsafe) {
request.extraSource(source, unsafe);
return this;
}

/**
* Sets the source of the request as a json string. Allows to set other parameters.
*/
@ -48,8 +48,8 @@ public abstract class TransportSearchHelper {

public static InternalSearchRequest internalSearchRequest(ShardRouting shardRouting, int numberOfShards, SearchRequest request, String[] filteringAliases, long nowInMillis) {
InternalSearchRequest internalRequest = new InternalSearchRequest(shardRouting, numberOfShards, request.searchType());
internalRequest.source(request.source(), request.sourceOffset(), request.sourceLength());
internalRequest.extraSource(request.extraSource(), request.extraSourceOffset(), request.extraSourceLength());
internalRequest.source(request.source());
internalRequest.extraSource(request.extraSource());
internalRequest.scroll(request.scroll());
internalRequest.filteringAliases(filteringAliases);
internalRequest.types(request.types());
@ -39,8 +39,8 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.routing.PlainShardIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -179,7 +179,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
.replicationType(request.replicationType()).consistencyLevel(request.consistencyLevel());
indexRequest.operationThreaded(false);
// we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request
final BytesHolder updateSourceBytes = indexRequest.underlyingSourceBytes();
final BytesReference updateSourceBytes = indexRequest.source();
indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
@Override
public void onResponse(IndexResponse response) {
@ -216,7 +216,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
return;
}

Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef().bytes(), getResult.internalSourceRef().offset(), getResult.internalSourceRef().length(), true);
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true);
String operation = null;
String timestamp = null;
Long ttl = null;
@ -239,7 +239,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
if (indexRequest.parent() != null) {
parent = indexRequest.parent();
}
XContentHelper.update(updatedSourceAsMap, indexRequest.underlyingSourceAsMap());
XContentHelper.update(updatedSourceAsMap, indexRequest.sourceAsMap());
} else {
Map<String, Object> ctx = new HashMap<String, Object>(2);
ctx.put("_source", sourceAndContent.v2());
@ -288,7 +288,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
.refresh(request.refresh());
indexRequest.operationThreaded(false);
// we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request
final BytesHolder updateSourceBytes = indexRequest.underlyingSourceBytes();
final BytesReference updateSourceBytes = indexRequest.source();
indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
@Override
public void onResponse(IndexResponse response) {
@ -355,7 +355,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
}

@Nullable
protected GetResult extractGetResult(final UpdateRequest request, long version, final Map<String, Object> source, XContentType sourceContentType, @Nullable final BytesHolder sourceAsBytes) {
protected GetResult extractGetResult(final UpdateRequest request, long version, final Map<String, Object> source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) {
if (request.fields() == null || request.fields().length == 0) {
return null;
}
@ -26,6 +26,8 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.replication.ReplicationType;
import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
@ -485,7 +487,7 @@ public class UpdateRequest extends InstanceShardOperationRequest {
}

public UpdateRequest source(XContentBuilder source) throws Exception {
return source(source.underlyingBytes(), 0, source.underlyingBytesLength());
return source(source.bytes());
}

public UpdateRequest source(byte[] source) throws Exception {
@ -493,8 +495,12 @@ public class UpdateRequest extends InstanceShardOperationRequest {
}

public UpdateRequest source(byte[] source, int offset, int length) throws Exception {
XContentType xContentType = XContentFactory.xContentType(source, offset, length);
XContentParser parser = XContentFactory.xContent(xContentType).createParser(source, offset, length);
return source(new BytesArray(source, offset, length));
}

public UpdateRequest source(BytesReference source) throws Exception {
XContentType xContentType = XContentFactory.xContentType(source);
XContentParser parser = XContentFactory.xContent(xContentType).createParser(source);
XContentParser.Token t = parser.nextToken();
if (t == null) {
return this;
@ -25,6 +25,7 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.BaseRequestBuilder;
import org.elasticsearch.action.support.replication.ReplicationType;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
@ -238,7 +239,7 @@ public class UpdateRequestBuilder extends BaseRequestBuilder<UpdateRequest, Upda
request.doc(source, offset, length);
return this;
}

/**
* Sets the index request to be used if the document does not exist. Otherwise, a {@link org.elasticsearch.index.engine.DocumentMissingException}
* is thrown.
@ -311,6 +312,11 @@ public class UpdateRequestBuilder extends BaseRequestBuilder<UpdateRequest, Upda
return this;
}

public UpdateRequestBuilder setSource(BytesReference source) throws Exception {
request.source(source);
return this;
}

@Override
protected void doExecute(ActionListener<UpdateResponse> listener) {
client.update(request, listener);
@ -226,7 +226,7 @@ public class ClusterState {
try {
BytesStreamOutput os = cachedEntry.bytes();
writeTo(state, os);
return os.copiedByteArray();
return os.bytes().copyBytesArray().toBytes();
} finally {
CachedStreamOutput.pushEntry(cachedEntry);
}
@ -173,7 +173,7 @@ public class AliasMetaData {
}
try {
XContentBuilder builder = XContentFactory.jsonBuilder().map(filter);
this.filter = new CompressedString(builder.underlyingBytes(), 0, builder.underlyingBytesLength());
this.filter = new CompressedString(builder.bytes());
return this;
} catch (IOException e) {
throw new ElasticSearchGenerationException("Failed to build json for alias request", e);
@ -281,7 +281,7 @@ public class MappingMetaData {
public MappingMetaData(String type, Map<String, Object> mapping) throws IOException {
this.type = type;
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping);
this.source = new CompressedString(mappingBuilder.underlyingBytes(), 0, mappingBuilder.underlyingBytesLength());
this.source = new CompressedString(mappingBuilder.bytes());
Map<String, Object> withoutType = mapping;
if (mapping.size() == 1 && mapping.containsKey(type)) {
withoutType = (Map<String, Object>) mapping.get(type);
@ -1,94 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common;

import java.util.Arrays;

public class BytesHolder {

public static final BytesHolder EMPTY = new BytesHolder(Bytes.EMPTY_ARRAY, 0, 0);

private byte[] bytes;
private int offset;
private int length;

BytesHolder() {

}

public BytesHolder(byte[] bytes) {
this.bytes = bytes;
this.offset = 0;
this.length = bytes.length;
}

public BytesHolder(byte[] bytes, int offset, int length) {
this.bytes = bytes;
this.offset = offset;
this.length = length;
}

public byte[] copyBytes() {
return Arrays.copyOfRange(bytes, offset, offset + length);
}

public byte[] bytes() {
return bytes;
}

public int offset() {
return offset;
}

public int length() {
return length;
}

@Override
public boolean equals(Object obj) {
return bytesEquals((BytesHolder) obj);
}

public boolean bytesEquals(BytesHolder other) {
if (length == other.length) {
int otherUpto = other.offset;
final byte[] otherBytes = other.bytes;
final int end = offset + length;
for (int upto = offset; upto < end; upto++, otherUpto++) {
if (bytes[upto] != otherBytes[otherUpto]) {
return false;
}
}
return true;
} else {
return false;
}
}

@Override
public int hashCode() {
int result = 0;
final int end = offset + length;
for (int i = offset; i < end; i++) {
result = 31 * result + bytes[i];
}
return result;
}
}
@ -1,62 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common;

import java.util.Arrays;

/**
*
*/
public class BytesWrap {

private final byte[] bytes;

// we pre-compute the hashCode for better performance (especially in IdCache)
private final int hashCode;

public BytesWrap(byte[] bytes) {
this.bytes = bytes;
this.hashCode = Arrays.hashCode(bytes);
}

public BytesWrap(String str) {
this(Unicode.fromStringAsBytes(str));
}

public byte[] bytes() {
return this.bytes;
}

public String utf8ToString() {
return Unicode.fromBytes(bytes);
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
BytesWrap bytesWrap = (BytesWrap) o;
return Arrays.equals(bytes, bytesWrap.bytes);
}

@Override
public int hashCode() {
return hashCode;
}
}
src/main/java/org/elasticsearch/common/bytes/BytesArray.java (new file, 156 lines)
@ -0,0 +1,156 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.bytes;

import com.google.common.base.Charsets;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Bytes;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;

public class BytesArray implements BytesReference {

public static final BytesArray EMPTY = new BytesArray(Bytes.EMPTY_ARRAY, 0, 0);

private byte[] bytes;
private int offset;
private int length;

public BytesArray(String bytes) {
this(bytes.getBytes(Charsets.UTF_8));
}

public BytesArray(byte[] bytes) {
this.bytes = bytes;
this.offset = 0;
this.length = bytes.length;
}

public BytesArray(byte[] bytes, int offset, int length) {
this.bytes = bytes;
this.offset = offset;
this.length = length;
}

@Override
public byte get(int index) {
return bytes[offset + index];
}

@Override
public int length() {
return length;
}

@Override
public BytesReference slice(int from, int length) {
if (from < 0 || (from + length) > this.length) {
throw new ElasticSearchIllegalArgumentException("can't slice a buffer with length [" + this.length + "], with slice parameters from [" + from + "], length [" + length + "]");
}
return new BytesArray(bytes, offset + from, length);
}

@Override
public StreamInput streamInput() {
return new BytesStreamInput(bytes, offset, length, false);
}

@Override
public void writeTo(StreamOutput out, boolean withLength) throws IOException {
if (withLength) {
out.writeVInt(length);
}
out.writeBytes(bytes, offset, length);
}

@Override
public void writeTo(OutputStream os) throws IOException {
os.write(bytes, offset, length);
}

@Override
public byte[] toBytes() {
if (offset == 0 && bytes.length == length) {
return bytes;
}
return Arrays.copyOfRange(bytes, offset, offset + length);
}

@Override
public BytesArray toBytesArray() {
return this;
}

@Override
public BytesArray copyBytesArray() {
return new BytesArray(Arrays.copyOfRange(bytes, offset, offset + length));
}

@Override
public boolean hasArray() {
return true;
}

@Override
public byte[] array() {
return bytes;
}

@Override
public int arrayOffset() {
return offset;
}

@Override
public boolean equals(Object obj) {
return bytesEquals((BytesArray) obj);
}

public boolean bytesEquals(BytesArray other) {
if (length == other.length) {
int otherUpto = other.offset;
final byte[] otherBytes = other.bytes;
final int end = offset + length;
for (int upto = offset; upto < end; upto++, otherUpto++) {
if (bytes[upto] != otherBytes[otherUpto]) {
return false;
}
}
return true;
} else {
return false;
}
}

@Override
public int hashCode() {
int result = 0;
final int end = offset + length;
for (int i = offset; i < end; i++) {
result = 31 * result + bytes[i];
}
return result;
}
}
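A small usage sketch of BytesArray's sharing/copying semantics, derived from the methods above (BytesArraySketch is an illustrative name):

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

public class BytesArraySketch {
    public static void main(String[] args) {
        BytesArray full = new BytesArray("hello world");

        // slice() adjusts offset/length only; the underlying byte[] is shared
        BytesReference hello = full.slice(0, 5);

        // toBytes() hands back the backing array only when offset == 0 and the
        // lengths match; here the slice is shorter, so it copies the range
        byte[] copied = hello.toBytes();

        // copyBytesArray() always materializes a detached copy
        BytesArray detached = hello.copyBytesArray();

        System.out.println(new String(copied));  // hello
        System.out.println(detached.length());   // 5
    }
}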
@ -0,0 +1,89 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.bytes;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.io.OutputStream;

/**
* A reference to bytes.
*/
public interface BytesReference {

/**
* Returns the byte at the specified index. Needs to be between 0 and length.
*/
byte get(int index);

/**
* The length.
*/
int length();

/**
* Slice the bytes from the <tt>from</tt> index up to <tt>length</tt>.
*/
BytesReference slice(int from, int length);

/**
* A stream input of the bytes.
*/
StreamInput streamInput();

/**
* Writes the bytes into the output, with an optional length header (variable encoded).
*/
void writeTo(StreamOutput out, boolean withLength) throws IOException;

void writeTo(OutputStream os) throws IOException;

/**
* Returns the bytes as a single byte array.
*/
byte[] toBytes();

/**
* Returns the bytes as a byte array, possibly sharing the underlying byte buffer.
*/
BytesArray toBytesArray();

/**
* Returns the bytes copied over as a byte array.
*/
BytesArray copyBytesArray();

/**
* Is there an underlying byte array for this bytes reference.
*/
boolean hasArray();

/**
* The underlying byte array (if it exists).
*/
byte[] array();

/**
* The offset into the underlying byte array.
*/
int arrayOffset();
}
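A sketch of the interface from a caller's point of view: the same code path can stream out a plain byte[] wrapper or any other implementation without copying first (BytesReferenceSketch is an illustrative name):

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class BytesReferenceSketch {
    // Implementation-agnostic: works for BytesArray, HashedBytesArray,
    // or a netty ChannelBuffer wrapper alike.
    static void dump(BytesReference ref) throws IOException {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        ref.writeTo(os);                     // stream the bytes out directly
        StreamInput in = ref.streamInput();  // or re-read them as a stream
        System.out.println(os.size() + " bytes, first byte " + in.readByte());
    }

    public static void main(String[] args) throws IOException {
        dump(new BytesArray("payload"));
    }
}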
@ -0,0 +1,107 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.bytes;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.transport.netty.ChannelBufferStreamInputFactory;
import org.jboss.netty.buffer.ChannelBuffer;

import java.io.IOException;
import java.io.OutputStream;

/**
*/
public class ChannelBufferBytesReference implements BytesReference {

private final ChannelBuffer buffer;

public ChannelBufferBytesReference(ChannelBuffer buffer) {
this.buffer = buffer;
}

@Override
public byte get(int index) {
return buffer.getByte(buffer.readerIndex() + index);
}

@Override
public int length() {
return buffer.readableBytes();
}

@Override
public BytesReference slice(int from, int length) {
return new ChannelBufferBytesReference(buffer.slice(from, length));
}

@Override
public StreamInput streamInput() {
return ChannelBufferStreamInputFactory.create(buffer.duplicate());
}

@Override
public void writeTo(StreamOutput out, boolean withLength) throws IOException {
if (withLength) {
out.writeVInt(buffer.readableBytes());
}
buffer.getBytes(buffer.readerIndex(), out, length());
}

@Override
public void writeTo(OutputStream os) throws IOException {
buffer.getBytes(buffer.readerIndex(), os, length());
}

@Override
public byte[] toBytes() {
return copyBytesArray().toBytes();
}

@Override
public BytesArray toBytesArray() {
if (buffer.hasArray()) {
return new BytesArray(buffer.array(), buffer.arrayOffset() + buffer.readerIndex(), buffer.readableBytes());
}
return copyBytesArray();
}

@Override
public BytesArray copyBytesArray() {
byte[] copy = new byte[buffer.readableBytes()];
buffer.getBytes(buffer.readerIndex(), copy);
return new BytesArray(copy);
}

@Override
public boolean hasArray() {
return buffer.hasArray();
}

@Override
public byte[] array() {
return buffer.array();
}

@Override
public int arrayOffset() {
return buffer.arrayOffset() + buffer.readerIndex();
}
}
|
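Per the commit message, this wrapper is what lets an http body that netty hands over as a (possibly composite) buffer travel through the system without an eager copy. A hedged sketch of the idea — the sample buffers stand in for a real request body:

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.ChannelBufferBytesReference;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.util.CharsetUtil;

public class ChannelBufferRefExample {
    public static void main(String[] args) throws Exception {
        // wrappedBuffer over several buffers yields a composite, as chunked http can produce
        ChannelBuffer composite = ChannelBuffers.wrappedBuffer(
                ChannelBuffers.copiedBuffer("{\"field\":", CharsetUtil.UTF_8),
                ChannelBuffers.copiedBuffer("\"value\"}", CharsetUtil.UTF_8));

        BytesReference ref = new ChannelBufferBytesReference(composite);
        System.out.println(ref.length());        // 17, the readable bytes
        System.out.println((char) ref.get(0));   // '{'

        // only pay for a contiguous copy when one is actually required
        System.out.println(new String(ref.copyBytesArray().toBytes(), "UTF-8"));
    }
}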
@ -0,0 +1,130 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.bytes;

import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;

/**
 * A {@link BytesReference} over a full byte array that pre-computes its hash code,
 * so hash based lookups on ids do not rescan the bytes.
 */
public class HashedBytesArray implements BytesReference {

    private final byte[] bytes;

    // we pre-compute the hashCode for better performance (especially in IdCache)
    private final int hashCode;

    public HashedBytesArray(byte[] bytes) {
        this.bytes = bytes;
        this.hashCode = Arrays.hashCode(bytes);
    }

    public HashedBytesArray(String str) {
        this(Unicode.fromStringAsBytes(str));
    }

    @Override
    public byte get(int index) {
        return bytes[index];
    }

    @Override
    public int length() {
        return bytes.length;
    }

    @Override
    public BytesReference slice(int from, int length) {
        if (from < 0 || (from + length) > bytes.length) {
            throw new ElasticSearchIllegalArgumentException("can't slice a buffer with length [" + bytes.length + "], with slice parameters from [" + from + "], length [" + length + "]");
        }
        return new BytesArray(bytes, from, length);
    }

    @Override
    public StreamInput streamInput() {
        return new BytesStreamInput(bytes, false);
    }

    @Override
    public void writeTo(StreamOutput out, boolean withLength) throws IOException {
        if (withLength) {
            out.writeVInt(bytes.length);
        }
        out.writeBytes(bytes);
    }

    @Override
    public void writeTo(OutputStream os) throws IOException {
        os.write(bytes);
    }

    @Override
    public byte[] toBytes() {
        return bytes;
    }

    @Override
    public BytesArray toBytesArray() {
        return new BytesArray(bytes);
    }

    @Override
    public BytesArray copyBytesArray() {
        byte[] copy = new byte[bytes.length];
        System.arraycopy(bytes, 0, copy, 0, bytes.length);
        return new BytesArray(copy);
    }

    @Override
    public boolean hasArray() {
        return true;
    }

    @Override
    public byte[] array() {
        return bytes;
    }

    @Override
    public int arrayOffset() {
        return 0;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        HashedBytesArray bytesWrap = (HashedBytesArray) o;
        return Arrays.equals(bytes, bytesWrap.bytes);
    }

    @Override
    public int hashCode() {
        return hashCode;
    }
}
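The pre-computed hash is aimed at the id cache, where the same parent id is probed against hash maps many times; hashing once at construction amortizes the byte scan. An illustrative sketch (the map contents are hypothetical):

import org.elasticsearch.common.bytes.HashedBytesArray;

import java.util.HashMap;
import java.util.Map;

public class HashedBytesArrayExample {
    public static void main(String[] args) {
        Map<HashedBytesArray, Integer> idToDoc = new HashMap<HashedBytesArray, Integer>();
        idToDoc.put(new HashedBytesArray("parent-1"), 42);

        // hashCode() returns the value computed once in the constructor,
        // so this lookup never rescans the id bytes to hash them
        System.out.println(idToDoc.get(new HashedBytesArray("parent-1"))); // 42
    }
}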
@ -21,6 +21,8 @@ package org.elasticsearch.common.compress;

import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@ -46,6 +48,17 @@ public class CompressedString implements Streamable {
        this.bytes = compressed;
    }

    public CompressedString(BytesReference data) throws IOException {
        Compressor compressor = CompressorFactory.compressor(data);
        if (compressor != null) {
            // already compressed...
            this.bytes = data.toBytes();
        } else {
            BytesArray bytesArray = data.toBytesArray();
            this.bytes = CompressorFactory.defaultCompressor().compress(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
        }
    }

    /**
     * Constructs a new compressed string, assuming the bytes are UTF8, by copying it over.
     *
@ -21,6 +21,7 @@ package org.elasticsearch.common.compress;

import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
@ -36,6 +37,8 @@ public interface Compressor {

    void configure(Settings settings);

    boolean isCompressed(BytesReference bytes);

    boolean isCompressed(byte[] data, int offset, int length);

    boolean isCompressed(ChannelBuffer buffer);
@ -22,13 +22,17 @@ package org.elasticsearch.common.compress;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.lzf.LZFCompressor;
import org.elasticsearch.common.compress.snappy.UnavailableSnappyCompressor;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappy;
import org.elasticsearch.common.compress.snappy.xerial.XerialSnappyCompressor;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.jboss.netty.buffer.ChannelBuffer;
@ -98,6 +102,10 @@ public class CompressorFactory {
        return defaultCompressor;
    }

    public static boolean isCompressed(BytesReference bytes) {
        return compressor(bytes) != null;
    }

    public static boolean isCompressed(byte[] data) {
        return compressor(data, 0, data.length) != null;
    }
@ -111,8 +119,13 @@ public class CompressorFactory {
    }

    @Nullable
    public static Compressor compressor(BytesHolder bytes) {
        return compressor(bytes.bytes(), bytes.offset(), bytes.length());
    public static Compressor compressor(BytesReference bytes) {
        for (Compressor compressor : compressors) {
            if (compressor.isCompressed(bytes)) {
                return compressor;
            }
        }
        return null;
    }

    @Nullable
@ -157,10 +170,21 @@ public class CompressorFactory {
    /**
     * Uncompress the provided data; the data can be detected as compressed using {@link #isCompressed(byte[], int, int)}.
     */
    public static BytesHolder uncompressIfNeeded(BytesHolder bytes) throws IOException {
    public static BytesReference uncompressIfNeeded(BytesReference bytes) throws IOException {
        Compressor compressor = compressor(bytes);
        if (compressor != null) {
            return new BytesHolder(compressor.uncompress(bytes.bytes(), bytes.offset(), bytes.length()));
            if (bytes.hasArray()) {
                return new BytesArray(compressor.uncompress(bytes.array(), bytes.arrayOffset(), bytes.length()));
            }
            StreamInput compressed = compressor.streamInput(bytes.streamInput());
            CachedStreamOutput.Entry entry = CachedStreamOutput.popEntry();
            try {
                Streams.copy(compressed, entry.bytes());
                compressed.close();
                return new BytesArray(entry.bytes().bytes().toBytes());
            } finally {
                CachedStreamOutput.pushEntry(entry);
            }
        }
        return bytes;
    }
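Detection stays cheap because each compressor only peeks at its magic bytes through get(int), and the uncompress path copies only when no backing array exists. A sketch of the round trip, assuming only the factory methods visible in this diff:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;

public class CompressionDetectExample {
    public static void main(String[] args) throws Exception {
        byte[] raw = "some payload".getBytes("UTF-8");
        byte[] compressed = CompressorFactory.defaultCompressor().compress(raw, 0, raw.length);

        System.out.println(CompressorFactory.isCompressed(new BytesArray(raw)));        // false
        System.out.println(CompressorFactory.isCompressed(new BytesArray(compressed))); // true

        // a no-op on plain bytes, an inflate on compressed ones
        BytesReference round = CompressorFactory.uncompressIfNeeded(new BytesArray(compressed));
        System.out.println(new String(round.toBytes(), "UTF-8"));                       // some payload
    }
}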
@ -25,6 +25,7 @@ import com.ning.compress.lzf.LZFEncoder;
import com.ning.compress.lzf.util.ChunkDecoderFactory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.*;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -70,6 +71,14 @@ public class LZFCompressor implements Compressor {
        }
    }

    @Override
    public boolean isCompressed(BytesReference bytes) {
        return bytes.length() >= 3 &&
                bytes.get(0) == LZFChunk.BYTE_Z &&
                bytes.get(1) == LZFChunk.BYTE_V &&
                (bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED);
    }

    @Override
    public boolean isCompressed(byte[] data, int offset, int length) {
        return length >= 3 &&
@ -20,6 +20,7 @@
package org.elasticsearch.common.compress.snappy;

import org.apache.lucene.store.IndexInput;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamInput;
@ -69,6 +70,19 @@ public abstract class SnappyCompressor implements Compressor {
        return true;
    }

    @Override
    public boolean isCompressed(BytesReference bytes) {
        if (bytes.length() < HEADER.length) {
            return false;
        }
        for (int i = 0; i < HEADER.length; i++) {
            if (bytes.get(i) != HEADER[i]) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean isCompressed(ChannelBuffer buffer) {
        if (buffer.readableBytes() < HEADER.length) {
@ -108,7 +122,7 @@ public abstract class SnappyCompressor implements Compressor {
            StreamOutput compressed = entry.bytes(this);
            compressed.writeBytes(data, offset, length);
            compressed.close();
            return entry.bytes().copiedByteArray();
            return entry.bytes().bytes().copyBytesArray().toBytes();
        } finally {
            CachedStreamOutput.pushEntry(entry);
        }
@ -120,7 +134,7 @@ public abstract class SnappyCompressor implements Compressor {
        CachedStreamOutput.Entry entry = CachedStreamOutput.popEntry();
        try {
            Streams.copy(compressed, entry.bytes());
            return entry.bytes().copiedByteArray();
            return entry.bytes().bytes().copyBytesArray().toBytes();
        } finally {
            CachedStreamOutput.pushEntry(entry);
        }
@ -19,11 +19,9 @@

package org.elasticsearch.common.io;

import org.elasticsearch.common.bytes.BytesReference;

public interface BytesStream {

    byte[] underlyingBytes();

    int size();

    byte[] copiedByteArray();
    BytesReference bytes();
}
@ -19,6 +19,9 @@

package org.elasticsearch.common.io;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
@ -26,8 +29,6 @@ import java.util.Arrays;

/**
 * Similar to {@link java.io.ByteArrayOutputStream}, just not synchronized.
 */
public class FastByteArrayOutputStream extends OutputStream implements BytesStream {

@ -127,24 +128,13 @@ public class FastByteArrayOutputStream extends OutputStream implements BytesStre
        count = 0;
    }

    /**
     * Creates a newly allocated byte array. Its size is the current
     * size of this output stream and the valid contents of the buffer
     * have been copied into it.
     *
     * @return the current contents of this output stream, as a byte array.
     * @see java.io.ByteArrayOutputStream#size()
     */
    public byte copiedByteArray()[] {
        return Arrays.copyOf(buf, count);
    }

    /**
     * Returns the underlying byte array. Note, use {@link #size()} in order to know
     * the length of it.
     */
    public byte[] underlyingBytes() {
        return buf;
    @Override
    public BytesReference bytes() {
        return new BytesArray(buf, 0, count);
    }

    /**
@ -164,7 +164,7 @@ public abstract class Streams {
        try {
            BytesStreamOutput out = cachedEntry.bytes();
            copy(in, out);
            return out.copiedByteArray();
            return out.bytes().copyBytesArray().toBytes();
        } finally {
            CachedStreamOutput.pushEntry(cachedEntry);
        }
@ -1,6 +1,6 @@
package org.elasticsearch.common.io.stream;

import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.IOException;

@ -32,10 +32,15 @@ public abstract class AdapterStreamInput extends StreamInput {
    }

    @Override
    public BytesHolder readBytesReference() throws IOException {
    public BytesReference readBytesReference() throws IOException {
        return in.readBytesReference();
    }

    @Override
    public BytesReference readBytesReference(int length) throws IOException {
        return in.readBytesReference(length);
    }

    @Override
    public void reset() throws IOException {
        in.reset();
@ -19,8 +19,8 @@

package org.elasticsearch.common.io.stream;

import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.IOException;

@ -78,13 +78,8 @@ public class AdapterStreamOutput extends StreamOutput {
    }

    @Override
    public void writeBytesHolder(byte[] bytes, int offset, int length) throws IOException {
        out.writeBytesHolder(bytes, offset, length);
    }

    @Override
    public void writeBytesHolder(@Nullable BytesHolder bytes) throws IOException {
        out.writeBytesHolder(bytes);
    public void writeBytesReference(@Nullable BytesReference bytes, boolean withLength) throws IOException {
        out.writeBytesReference(bytes, withLength);
    }

    @Override
@ -19,7 +19,8 @@

package org.elasticsearch.common.io.stream;

import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.EOFException;
import java.io.IOException;
@ -37,6 +38,16 @@ public class BytesStreamInput extends StreamInput {

    private final boolean unsafe;

    public BytesStreamInput(BytesReference bytes) {
        if (!bytes.hasArray()) {
            bytes = bytes.toBytesArray();
        }
        this.buf = bytes.array();
        this.pos = bytes.arrayOffset();
        // count is the end index (one past the last readable byte), as in ByteArrayInputStream
        this.count = bytes.arrayOffset() + bytes.length();
        this.unsafe = false;
    }

    public BytesStreamInput(byte buf[], boolean unsafe) {
        this(buf, 0, buf.length, unsafe);
    }
@ -49,13 +60,12 @@ public class BytesStreamInput extends StreamInput {
    }

    @Override
    public BytesHolder readBytesReference() throws IOException {
    public BytesReference readBytesReference(int length) throws IOException {
        if (unsafe) {
            return readBytesHolder();
            return super.readBytesReference(length);
        }
        int size = readVInt();
        BytesHolder bytes = new BytesHolder(buf, pos, size);
        pos += size;
        BytesArray bytes = new BytesArray(buf, pos, length);
        pos += length;
        return bytes;
    }
@ -19,6 +19,8 @@

package org.elasticsearch.common.io.stream;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.BytesStream;

import java.io.IOException;
@ -88,24 +90,9 @@ public class BytesStreamOutput extends StreamOutput implements BytesStream {
        // nothing to do here
    }

    /**
     * Creates a newly allocated byte array. Its size is the current
     * size of this output stream and the valid contents of the buffer
     * have been copied into it.
     *
     * @return the current contents of this output stream, as a byte array.
     * @see java.io.ByteArrayOutputStream#size()
     */
    public byte copiedByteArray()[] {
        return Arrays.copyOf(buf, count);
    }

    /**
     * Returns the underlying byte array. Note, use {@link #size()} in order to know
     * the length of it.
     */
    public byte[] underlyingBytes() {
        return buf;
    @Override
    public BytesReference bytes() {
        return new BytesArray(buf, 0, count);
    }

    /**
@ -117,7 +117,7 @@ public class CachedStreamOutput {

    public static void pushEntry(Entry entry) {
        entry.reset();
        if (entry.bytes().underlyingBytes().length > BYTES_LIMIT) {
        if (entry.bytes().bytes().length() > BYTES_LIMIT) {
            return;
        }
        Queue<Entry> ref = cache.get();
@ -19,9 +19,10 @@

package org.elasticsearch.common.io.stream;

import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.joda.time.DateTime;

import java.io.IOException;
@ -48,24 +49,24 @@ public abstract class StreamInput extends InputStream {
    public abstract void readBytes(byte[] b, int offset, int len) throws IOException;

    /**
     * Reads a fresh copy of the bytes.
     * Reads a bytes reference from this stream; it might hold an actual reference to the underlying
     * bytes of the stream.
     */
    public BytesHolder readBytesHolder() throws IOException {
        int size = readVInt();
        if (size == 0) {
            return BytesHolder.EMPTY;
        }
        byte[] bytes = new byte[size];
        readBytes(bytes, 0, size);
        return new BytesHolder(bytes, 0, size);
    public BytesReference readBytesReference() throws IOException {
        return readBytesReference(readVInt());
    }

    /**
     * Reads a bytes reference of the given length from this stream; it might hold an actual
     * reference to the underlying bytes of the stream.
     */
    public BytesHolder readBytesReference() throws IOException {
        return readBytesHolder();
    public BytesReference readBytesReference(int length) throws IOException {
        if (length == 0) {
            return BytesArray.EMPTY;
        }
        byte[] bytes = new byte[length];
        readBytes(bytes, 0, length);
        return new BytesArray(bytes, 0, length);
    }

    public void readFully(byte[] b) throws IOException {
@ -19,8 +19,8 @@

package org.elasticsearch.common.io.stream;

import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.joda.time.ReadableInstant;

import java.io.IOException;
@ -68,18 +68,14 @@ public abstract class StreamOutput extends OutputStream {
     */
    public abstract void writeBytes(byte[] b, int offset, int length) throws IOException;

    public void writeBytesHolder(byte[] bytes, int offset, int length) throws IOException {
        writeVInt(length);
        writeBytes(bytes, offset, length);
    }

    public void writeBytesHolder(@Nullable BytesHolder bytes) throws IOException {
    public void writeBytesReference(@Nullable BytesReference bytes, boolean withLength) throws IOException {
        if (bytes == null) {
            writeVInt(0);
        } else {
            writeVInt(bytes.length());
            writeBytes(bytes.bytes(), bytes.offset(), bytes.length());
            if (withLength) {
                writeVInt(0);
            }
            return;
        }
        bytes.writeTo(this, withLength);
    }

    public final void writeShort(short v) throws IOException {
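The write and read sides are symmetric: writeBytesReference with withLength=true emits a vInt header followed by the raw bytes, and readBytesReference consumes them back. A minimal round trip over the stream classes touched in this commit:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;

public class BytesReferenceRoundTrip {
    public static void main(String[] args) throws Exception {
        BytesStreamOutput out = new BytesStreamOutput();
        // withLength=true prepends a vInt so the reader knows how many bytes follow
        out.writeBytesReference(new BytesArray("payload".getBytes("UTF-8")), true);

        BytesStreamInput in = new BytesStreamInput(out.bytes());
        BytesReference read = in.readBytesReference(); // reads the vInt, then the bytes
        System.out.println(new String(read.toBytes(), "UTF-8")); // payload
    }
}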
@ -19,7 +19,7 @@

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.*;

@ -68,7 +68,7 @@ public interface XContent {
    /**
     * Creates a parser over the provided bytes.
     */
    XContentParser createParser(BytesHolder bytes) throws IOException;
    XContentParser createParser(BytesReference bytes) throws IOException;

    /**
     * Creates a parser over the provided reader.
@ -19,9 +19,11 @@

package org.elasticsearch.common.xcontent;

import com.google.common.base.Charsets;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.io.FastByteArrayOutputStream;
import org.elasticsearch.common.xcontent.support.XContentMapConverter;
@ -39,7 +41,7 @@ import java.util.Map;
/**
 *
 */
public final class XContentBuilder {
public final class XContentBuilder implements BytesStream {

    public static enum FieldCaseConversion {
        /**
@ -467,6 +469,16 @@ public final class XContentBuilder {
        return this;
    }

    public XContentBuilder field(String name, BytesReference value) throws IOException {
        field(name);
        if (!value.hasArray()) {
            value = value.toBytesArray();
        }
        generator.writeBinary(value.array(), value.arrayOffset(), value.length());
        return this;
    }

    public XContentBuilder field(String name, byte[] value, int offset, int length) throws IOException {
        field(name);
        generator.writeBinary(value, offset, length);
@ -706,6 +718,8 @@ public final class XContentBuilder {
            field(name, (float[]) value);
        } else if (value instanceof double[]) {
            field(name, (double[]) value);
        } else if (value instanceof BytesReference) {
            field(name, (BytesReference) value);
        } else {
            field(name, value.toString());
        }
@ -739,6 +753,8 @@ public final class XContentBuilder {
            value((Date) value);
        } else if (value instanceof ReadableInstant) {
            value((ReadableInstant) value);
        } else if (value instanceof BytesReference) {
            value((BytesReference) value);
        } else if (value instanceof Map) {
            //noinspection unchecked
            value((Map<String, Object>) value);
@ -848,6 +864,11 @@ public final class XContentBuilder {
        return this;
    }

    public XContentBuilder rawField(String fieldName, BytesReference content) throws IOException {
        generator.writeRawField(fieldName, content, bos);
        return this;
    }

    public XContentBuilder value(Boolean value) throws IOException {
        if (value == null) {
            return nullValue();
@ -954,6 +975,17 @@ public final class XContentBuilder {
        return this;
    }

    public XContentBuilder value(BytesReference value) throws IOException {
        if (value == null) {
            return nullValue();
        }
        if (!value.hasArray()) {
            value = value.toBytesArray();
        }
        generator.writeBinary(value.array(), value.arrayOffset(), value.length());
        return this;
    }

    public XContentBuilder map(Map<String, Object> map) throws IOException {
        if (map == null) {
            return nullValue();
@ -1009,46 +1041,20 @@ public final class XContentBuilder {
        return this.bos;
    }

    /**
     * Returns the unsafe bytes (thread local bound). Make sure to use it with
     * {@link #underlyingBytesLength()}.
     * <p/>
     * <p>Only applicable when the builder is constructed with {@link FastByteArrayOutputStream}.
     */
    public byte[] underlyingBytes() throws IOException {
    @Override
    public BytesReference bytes() {
        close();
        return ((BytesStream) bos).underlyingBytes();
    }

    /**
     * Returns the unsafe bytes length (thread local bound). Make sure to use it with
     * {@link #underlyingBytes()}.
     * <p/>
     * <p>Only applicable when the builder is constructed with {@link FastByteArrayOutputStream}.
     */
    public int underlyingBytesLength() throws IOException {
        close();
        return ((BytesStream) bos).size();
        return ((BytesStream) bos).bytes();
    }

    /**
     * Returns the actual stream used.
     */
    public BytesStream underlyingStream() throws IOException {
    public BytesStream bytesStream() throws IOException {
        close();
        return (BytesStream) bos;
    }

    /**
     * Returns a copy of the bytes this builder generated.
     * <p/>
     * <p>Only applicable when the builder is constructed with {@link FastByteArrayOutputStream}.
     */
    public byte[] copiedBytes() throws IOException {
        close();
        return ((BytesStream) bos).copiedByteArray();
    }

    /**
     * Returns a string representation of the builder (only applicable for text based xcontent).
     * <p/>
@ -1056,6 +1062,7 @@ public final class XContentBuilder {
     */
    public String string() throws IOException {
        close();
        return Unicode.fromBytes(underlyingBytes(), 0, underlyingBytesLength());
        BytesArray bytesArray = bytes().toBytesArray();
        return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), Charsets.UTF_8);
    }
}
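With the builder itself now a BytesStream, callers swap the underlyingBytes()/underlyingBytesLength() pair for a single reference. A brief sketch:

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class BuilderBytesExample {
    public static void main(String[] args) throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder()
                .startObject()
                .field("user", "kimchy")
                .endObject();

        // bytes() closes the builder and returns a view over its buffer,
        // replacing the removed copiedBytes()/underlyingBytes() pair
        BytesReference bytes = builder.bytes();
        System.out.println(bytes.length());
        System.out.println(builder.string()); // {"user":"kimchy"}
    }
}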
@ -22,7 +22,7 @@ package org.elasticsearch.common.xcontent;
import org.codehaus.jackson.smile.SmileConstants;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.smile.SmileXContent;

@ -137,13 +137,6 @@ public class XContentFactory {
        return xContent(data, 0, data.length);
    }

    /**
     * Guesses the content type based on the provided bytes.
     */
    public static XContent xContent(BytesHolder bytes) {
        return xContent(bytes.bytes(), bytes.offset(), bytes.length());
    }

    /**
     * Guesses the content type based on the provided bytes.
     */
@ -211,4 +204,28 @@ public class XContentFactory {
        }
        return null;
    }

    public static XContent xContent(BytesReference bytes) {
        XContentType type = xContentType(bytes);
        if (type == null) {
            throw new ElasticSearchParseException("Failed to derive xcontent from " + bytes);
        }
        return xContent(type);
    }

    /**
     * Guesses the content type based on the provided bytes.
     */
    public static XContentType xContentType(BytesReference bytes) {
        int length = bytes.length() < GUESS_HEADER_LENGTH ? bytes.length() : GUESS_HEADER_LENGTH;
        if (length > 2 && bytes.get(0) == SmileConstants.HEADER_BYTE_1 && bytes.get(1) == SmileConstants.HEADER_BYTE_2 && bytes.get(2) == SmileConstants.HEADER_BYTE_3) {
            return XContentType.SMILE;
        }
        for (int i = 0; i < length; i++) {
            if (bytes.get(i) == '{') {
                return XContentType.JSON;
            }
        }
        return null;
    }
}
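The sniffing logic reads at most GUESS_HEADER_LENGTH bytes: the three-byte SMILE header wins first, otherwise a '{' anywhere in the window means JSON. A sketch:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentFactory;

public class ContentTypeSniffExample {
    public static void main(String[] args) throws Exception {
        System.out.println(XContentFactory.xContentType(
                new BytesArray("{\"a\":1}".getBytes("UTF-8"))));   // JSON
        System.out.println(XContentFactory.xContentType(
                new BytesArray(new byte[]{1, 2, 3})));             // null (unknown)
    }
}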
@ -19,6 +19,8 @@

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.bytes.BytesReference;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@ -111,6 +113,8 @@ public interface XContentGenerator {

    void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException;

    void writeRawField(String fieldName, BytesReference content, OutputStream bos) throws IOException;

    void copyCurrentStructure(XContentParser parser) throws IOException;

    void flush() throws IOException;
@ -22,7 +22,8 @@ package org.elasticsearch.common.xcontent;
import com.google.common.base.Charsets;
import com.google.common.collect.Maps;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.Compressor;
@ -41,6 +42,22 @@ import java.util.Map;
@SuppressWarnings("unchecked")
public class XContentHelper {

    public static XContentParser createParser(BytesReference bytes) throws IOException {
        if (bytes.hasArray()) {
            return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
        }
        Compressor compressor = CompressorFactory.compressor(bytes);
        if (compressor != null) {
            CompressedStreamInput compressedInput = compressor.streamInput(bytes.streamInput());
            XContentType contentType = XContentFactory.xContentType(compressedInput);
            compressedInput.resetToBufferStart();
            return XContentFactory.xContent(contentType).createParser(compressedInput);
        } else {
            return XContentFactory.xContent(bytes).createParser(bytes.streamInput());
        }
    }

    public static XContentParser createParser(byte[] data, int offset, int length) throws IOException {
        Compressor compressor = CompressorFactory.compressor(data, offset, length);
        if (compressor != null) {
@ -53,6 +70,33 @@ public class XContentHelper {
        }
    }

    public static Tuple<XContentType, Map<String, Object>> convertToMap(BytesReference bytes, boolean ordered) throws ElasticSearchParseException {
        if (bytes.hasArray()) {
            return convertToMap(bytes.array(), bytes.arrayOffset(), bytes.length(), ordered);
        }
        try {
            XContentParser parser;
            XContentType contentType;
            Compressor compressor = CompressorFactory.compressor(bytes);
            if (compressor != null) {
                CompressedStreamInput compressedStreamInput = compressor.streamInput(bytes.streamInput());
                contentType = XContentFactory.xContentType(compressedStreamInput);
                compressedStreamInput.resetToBufferStart();
                parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);
            } else {
                contentType = XContentFactory.xContentType(bytes);
                parser = XContentFactory.xContent(contentType).createParser(bytes.streamInput());
            }
            if (ordered) {
                return Tuple.tuple(contentType, parser.mapOrderedAndClose());
            } else {
                return Tuple.tuple(contentType, parser.mapAndClose());
            }
        } catch (IOException e) {
            throw new ElasticSearchParseException("Failed to parse content to map", e);
        }
    }

    public static Tuple<XContentType, Map<String, Object>> convertToMap(byte[] data, boolean ordered) throws ElasticSearchParseException {
        return convertToMap(data, 0, data.length, ordered);
    }
@ -81,12 +125,34 @@ public class XContentHelper {
        }
    }

    public static String convertToJson(BytesHolder bytes, boolean reformatJson) throws IOException {
        return convertToJson(bytes.bytes(), bytes.offset(), bytes.length(), reformatJson);
    public static String convertToJson(BytesReference bytes, boolean reformatJson) throws IOException {
        return convertToJson(bytes, reformatJson, false);
    }

    public static String convertToJson(BytesHolder bytes, boolean reformatJson, boolean prettyPrint) throws IOException {
        return convertToJson(bytes.bytes(), bytes.offset(), bytes.length(), reformatJson, prettyPrint);
    public static String convertToJson(BytesReference bytes, boolean reformatJson, boolean prettyPrint) throws IOException {
        if (bytes.hasArray()) {
            return convertToJson(bytes.array(), bytes.arrayOffset(), bytes.length(), reformatJson, prettyPrint);
        }
        XContentType xContentType = XContentFactory.xContentType(bytes);
        if (xContentType == XContentType.JSON && !reformatJson) {
            BytesArray bytesArray = bytes.toBytesArray();
            return new String(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length(), Charsets.UTF_8);
        }
        XContentParser parser = null;
        try {
            parser = XContentFactory.xContent(xContentType).createParser(bytes.streamInput());
            parser.nextToken();
            XContentBuilder builder = XContentFactory.jsonBuilder();
            if (prettyPrint) {
                builder.prettyPrint();
            }
            builder.copyCurrentStructure(parser);
            return builder.string();
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
    }

    public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson) throws IOException {
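Each helper follows the same shape: fast path through the backing array when hasArray() holds, stream (optionally through a decompressing wrapper) when it does not. A sketch of the map conversion, assuming the Tuple accessors v1()/v2() from the codebase:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;

import java.util.Map;

public class ConvertToMapExample {
    public static void main(String[] args) throws Exception {
        Tuple<XContentType, Map<String, Object>> converted = XContentHelper.convertToMap(
                new BytesArray("{\"k\":\"v\"}".getBytes("UTF-8")), false);
        System.out.println(converted.v1());          // JSON
        System.out.println(converted.v2().get("k")); // v
    }
}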
@ -23,7 +23,7 @@ import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.JsonParser;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.xcontent.*;

@ -92,8 +92,11 @@ public class JsonXContent implements XContent {
    }

    @Override
    public XContentParser createParser(BytesHolder bytes) throws IOException {
        return createParser(bytes.bytes(), bytes.offset(), bytes.length());
    public XContentParser createParser(BytesReference bytes) throws IOException {
        if (bytes.hasArray()) {
            return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
        }
        return createParser(bytes.streamInput());
    }

    @Override
@ -20,6 +20,7 @@
package org.elasticsearch.common.xcontent.json;

import org.codehaus.jackson.JsonGenerator;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.*;

@ -265,6 +266,15 @@ public class JsonXContentGenerator implements XContentGenerator {
        Streams.copy(content, bos);
    }

    @Override
    public void writeRawField(String fieldName, BytesReference content, OutputStream bos) throws IOException {
        generator.writeRaw(", \"");
        generator.writeRaw(fieldName);
        generator.writeRaw("\" : ");
        flush();
        content.writeTo(bos);
    }

    @Override
    public void copyCurrentStructure(XContentParser parser) throws IOException {
        // the start of the parser
@ -22,7 +22,7 @@ package org.elasticsearch.common.xcontent.smile;
import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.smile.SmileFactory;
import org.codehaus.jackson.smile.SmileGenerator;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.common.xcontent.json.JsonXContentParser;
@ -91,8 +91,11 @@ public class SmileXContent implements XContent {
    }

    @Override
    public XContentParser createParser(BytesHolder bytes) throws IOException {
        return createParser(bytes.bytes(), bytes.offset(), bytes.length());
    public XContentParser createParser(BytesReference bytes) throws IOException {
        if (bytes.hasArray()) {
            return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
        }
        return createParser(bytes.streamInput());
    }

    @Override
@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent.smile;

import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.smile.SmileParser;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContentGenerator;

@ -66,6 +67,23 @@ public class SmileXContentGenerator extends JsonXContentGenerator {
        }
    }

    @Override
    public void writeRawField(String fieldName, BytesReference content, OutputStream bos) throws IOException {
        writeFieldName(fieldName);
        SmileParser parser;
        if (content.hasArray()) {
            parser = SmileXContent.smileFactory.createJsonParser(content.array(), content.arrayOffset(), content.length());
        } else {
            parser = SmileXContent.smileFactory.createJsonParser(content.streamInput());
        }
        try {
            parser.nextToken();
            generator.copyCurrentStructure(parser);
        } finally {
            parser.close();
        }
    }

    @Override
    public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException {
        writeFieldName(fieldName);
@ -26,6 +26,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.io.stream.*;
import org.elasticsearch.common.network.NetworkService;
@ -275,7 +276,7 @@ public class MulticastZenPing extends AbstractLifecycleComponent<ZenPing> implem
                clusterName.writeTo(out);
                nodesProvider.nodes().localNode().writeTo(out);
                out.close();
                datagramPacketSend.setData(cachedEntry.bytes().copiedByteArray());
                datagramPacketSend.setData(cachedEntry.bytes().bytes().copyBytesArray().toBytes());
                multicastSocket.send(datagramPacketSend);
                if (logger.isTraceEnabled()) {
                    logger.trace("[{}] sending ping request", id);
@ -479,7 +480,8 @@ public class MulticastZenPing extends AbstractLifecycleComponent<ZenPing> implem

            builder.endObject().endObject();
            synchronized (sendMutex) {
                datagramPacketSend.setData(builder.underlyingBytes(), 0, builder.underlyingBytesLength());
                BytesReference bytes = builder.bytes();
                datagramPacketSend.setData(bytes.array(), bytes.arrayOffset(), bytes.length());
                multicastSocket.send(datagramPacketSend);
                if (logger.isTraceEnabled()) {
                    logger.trace("sending external ping response {}", builder.string());
@ -21,7 +21,8 @@ package org.elasticsearch.discovery.zen.publish;

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory;
@ -72,7 +73,7 @@ public class PublishClusterStateAction extends AbstractComponent {
            StreamOutput stream = cachedEntry.handles(CompressorFactory.defaultCompressor());
            ClusterState.Builder.writeTo(clusterState, stream);
            stream.close();
            clusterStateInBytes = cachedEntry.bytes().copiedByteArray();
            clusterStateInBytes = cachedEntry.bytes().bytes().copyBytesArray().toBytes();
        } catch (Exception e) {
            logger.warn("failed to serialize cluster_state before publishing it to nodes", e);
            return;
@ -100,13 +101,13 @@ public class PublishClusterStateAction extends AbstractComponent {

    class PublishClusterStateRequest implements Streamable {

        BytesHolder clusterStateInBytes;
        BytesReference clusterStateInBytes;

        private PublishClusterStateRequest() {
        }

        private PublishClusterStateRequest(byte[] clusterStateInBytes) {
            this.clusterStateInBytes = new BytesHolder(clusterStateInBytes);
            this.clusterStateInBytes = new BytesArray(clusterStateInBytes);
        }

        @Override
@ -116,7 +117,7 @@ public class PublishClusterStateAction extends AbstractComponent {

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeBytesHolder(clusterStateInBytes);
            out.writeBytesReference(clusterStateInBytes, true);
        }
    }

@ -131,13 +132,12 @@ public class PublishClusterStateAction extends AbstractComponent {

        @Override
        public void messageReceived(PublishClusterStateRequest request, TransportChannel channel) throws Exception {
            Compressor compressor = CompressorFactory.compressor(request.clusterStateInBytes.bytes(), request.clusterStateInBytes.offset(), request.clusterStateInBytes.length());
            BytesStreamInput bytes = new BytesStreamInput(request.clusterStateInBytes.bytes(), request.clusterStateInBytes.offset(), request.clusterStateInBytes.length(), false);
            Compressor compressor = CompressorFactory.compressor(request.clusterStateInBytes);
            StreamInput in;
            if (compressor != null) {
                in = CachedStreamInput.cachedHandlesCompressed(compressor, bytes);
                in = CachedStreamInput.cachedHandlesCompressed(compressor, request.clusterStateInBytes.streamInput());
            } else {
                in = CachedStreamInput.cachedHandles(bytes);
                in = CachedStreamInput.cachedHandles(request.clusterStateInBytes.streamInput());
            }
            ClusterState clusterState = ClusterState.Builder.readFrom(in, nodesProvider.nodes().localNode());
            listener.onNewClusterState(clusterState);
@ -41,7 +41,6 @@ import org.elasticsearch.index.gateway.CommitPoints;
import org.elasticsearch.index.gateway.blobstore.BlobStoreIndexGateway;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.List;

@ -164,7 +163,7 @@ public abstract class BlobStoreGateway extends SharedStorageGateway {
            MetaData.Builder.toXContent(metaData, builder, ToXContent.EMPTY_PARAMS);
            builder.endObject();
            builder.close();
            metaDataBlobContainer.writeBlob(newMetaData, new ByteArrayInputStream(cachedEntry.bytes().underlyingBytes(), 0, cachedEntry.bytes().size()), cachedEntry.bytes().size());
            metaDataBlobContainer.writeBlob(newMetaData, cachedEntry.bytes().bytes().streamInput(), cachedEntry.bytes().size());
        } catch (IOException e) {
            throw new GatewayException("Failed to write metadata [" + newMetaData + "]", e);
        } finally {
@ -30,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
@ -339,7 +340,8 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
            FileOutputStream fos = null;
            try {
                fos = new FileOutputStream(stateFile);
                fos.write(cachedEntry.bytes().underlyingBytes(), 0, cachedEntry.bytes().size());
                BytesReference bytes = cachedEntry.bytes().bytes();
                fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
                fos.getChannel().force(true);
                Closeables.closeQuietly(fos);
                wroteAtLeastOnce = true;
@ -402,7 +404,8 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
            FileOutputStream fos = null;
            try {
                fos = new FileOutputStream(stateFile);
                fos.write(cachedEntry.bytes().underlyingBytes(), 0, cachedEntry.bytes().size());
                BytesReference bytes = cachedEntry.bytes().bytes();
                fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
                fos.getChannel().force(true);
                Closeables.closeQuietly(fos);
                wroteAtLeastOnce = true;
@ -26,6 +26,7 @@ import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
@ -289,7 +290,8 @@ public class LocalGatewayShardsState extends AbstractComponent implements Cluste
            FileOutputStream fos = null;
            try {
                fos = new FileOutputStream(stateFile);
                fos.write(cachedEntry.bytes().underlyingBytes(), 0, cachedEntry.bytes().size());
                BytesReference bytes = cachedEntry.bytes().bytes();
                fos.write(bytes.array(), bytes.arrayOffset(), bytes.length());
                fos.getChannel().force(true);
                Closeables.closeQuietly(fos);
                wroteAtLeastOnce = true;
@ -19,6 +19,7 @@

package org.elasticsearch.http.netty;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.http.HttpChannel;
@ -98,7 +99,8 @@ public class NettyHttpChannel implements HttpChannel {
                XContentBuilder builder = ((XContentRestResponse) response).builder();
                if (builder.payload() instanceof CachedStreamOutput.Entry) {
                    releaseContentListener = new NettyTransport.CacheFutureListener((CachedStreamOutput.Entry) builder.payload());
                    buf = ChannelBuffers.wrappedBuffer(builder.underlyingBytes(), 0, builder.underlyingBytesLength());
                    BytesReference bytes = builder.bytes();
                    buf = ChannelBuffers.wrappedBuffer(bytes.array(), bytes.arrayOffset(), bytes.length());
                } else if (response.contentThreadSafe()) {
                    buf = ChannelBuffers.wrappedBuffer(response.content(), 0, response.contentLength());
                } else {
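On the response path the same reference flows into netty without an extra copy when an array backs it, mirroring what the channel code above does. A reduced sketch with the channel plumbing elided:

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;

public class ResponseBufferExample {
    public static void main(String[] args) throws Exception {
        BytesReference bytes = XContentFactory.jsonBuilder()
                .startObject().field("ok", true).endObject()
                .bytes();

        // wrappedBuffer shares the array; nothing is copied on this path
        ChannelBuffer buf = ChannelBuffers.wrappedBuffer(bytes.array(), bytes.arrayOffset(), bytes.length());
        System.out.println(buf.readableBytes() == bytes.length()); // true
    }
}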
@ -19,7 +19,7 @@

package org.elasticsearch.index.cache.id;

import org.elasticsearch.common.BytesWrap;
import org.elasticsearch.common.bytes.HashedBytesArray;

/**
 *
@ -30,7 +30,7 @@ public interface IdReaderCache {

    IdReaderTypeCache type(String type);

    BytesWrap parentIdByDoc(String type, int docId);
    HashedBytesArray parentIdByDoc(String type, int docId);

    int docById(String type, BytesWrap id);
    int docById(String type, HashedBytesArray id);
}
@ -19,14 +19,14 @@

package org.elasticsearch.index.cache.id;

import org.elasticsearch.common.BytesWrap;
import org.elasticsearch.common.bytes.HashedBytesArray;

/**
 *
 */
public interface IdReaderTypeCache {

    BytesWrap parentIdByDoc(int docId);
    HashedBytesArray parentIdByDoc(int docId);

    int docById(BytesWrap id);
    int docById(HashedBytesArray id);
}
@ -23,7 +23,7 @@ import gnu.trove.impl.Constants;
import org.apache.lucene.index.*;
import org.apache.lucene.util.StringHelper;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.BytesWrap;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -132,7 +132,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                            readerBuilder.put(StringHelper.intern(uid.type()), typeBuilder);
                        }

                        BytesWrap idAsBytes = checkIfCanReuse(builders, new BytesWrap(uid.id()));
                        HashedBytesArray idAsBytes = checkIfCanReuse(builders, new HashedBytesArray(uid.id()));
                        termDocs.seek(termEnum);
                        while (termDocs.next()) {
                            // when traversing, make sure to ignore deleted docs, so the key->docId will be correct
@ -174,7 +174,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                            readerBuilder.put(StringHelper.intern(uid.type()), typeBuilder);
                        }

                        BytesWrap idAsBytes = checkIfCanReuse(builders, new BytesWrap(uid.id()));
                        HashedBytesArray idAsBytes = checkIfCanReuse(builders, new HashedBytesArray(uid.id()));
                        boolean added = false; // optimize for when all the docs are deleted for this id

                        termDocs.seek(termEnum);
@ -205,7 +205,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                for (Map.Entry<String, TypeBuilder> typeBuilderEntry : entry.getValue().entrySet()) {
                    types.put(typeBuilderEntry.getKey(), new SimpleIdReaderTypeCache(typeBuilderEntry.getKey(),
                            typeBuilderEntry.getValue().idToDoc,
                            typeBuilderEntry.getValue().parentIdsValues.toArray(new BytesWrap[typeBuilderEntry.getValue().parentIdsValues.size()]),
                            typeBuilderEntry.getValue().parentIdsValues.toArray(new HashedBytesArray[typeBuilderEntry.getValue().parentIdsValues.size()]),
                            typeBuilderEntry.getValue().parentIdsOrdinals));
                }
                SimpleIdReaderCache readerCache = new SimpleIdReaderCache(entry.getKey(), types.immutableMap());
@ -215,8 +215,8 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
        }
    }

    private BytesWrap checkIfCanReuse(Map<Object, Map<String, TypeBuilder>> builders, BytesWrap idAsBytes) {
        BytesWrap finalIdAsBytes;
    private HashedBytesArray checkIfCanReuse(Map<Object, Map<String, TypeBuilder>> builders, HashedBytesArray idAsBytes) {
        HashedBytesArray finalIdAsBytes;
        // go over and see if we can reuse this id
        for (SimpleIdReaderCache idReaderCache : idReaders.values()) {
            finalIdAsBytes = idReaderCache.canReuse(idAsBytes);
@ -245,8 +245,8 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
    }

    static class TypeBuilder {
        final ExtTObjectIntHasMap<BytesWrap> idToDoc = new ExtTObjectIntHasMap<BytesWrap>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
        final ArrayList<BytesWrap> parentIdsValues = new ArrayList<BytesWrap>();
        final ExtTObjectIntHasMap<HashedBytesArray> idToDoc = new ExtTObjectIntHasMap<HashedBytesArray>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
        final ArrayList<HashedBytesArray> parentIdsValues = new ArrayList<HashedBytesArray>();
        final int[] parentIdsOrdinals;
        int t = 1;  // current term number (0 indicates a null value)

@ -259,7 +259,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
        /**
         * Returns an already stored instance if one exists; if not, returns null.
         */
        public BytesWrap canReuse(BytesWrap id) {
        public HashedBytesArray canReuse(HashedBytesArray id) {
            return idToDoc.key(id);
        }
    }
@ -20,7 +20,7 @@
package org.elasticsearch.index.cache.id.simple;

import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.BytesWrap;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.index.cache.id.IdReaderCache;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;

@ -49,7 +49,7 @@ public class SimpleIdReaderCache implements IdReaderCache {
    }

    @Override
    public BytesWrap parentIdByDoc(String type, int docId) {
    public HashedBytesArray parentIdByDoc(String type, int docId) {
        SimpleIdReaderTypeCache typeCache = types.get(type);
        if (typeCache != null) {
            return typeCache.parentIdByDoc(docId);
@ -58,7 +58,7 @@ public class SimpleIdReaderCache implements IdReaderCache {
    }

    @Override
    public int docById(String type, BytesWrap id) {
    public int docById(String type, HashedBytesArray id) {
        SimpleIdReaderTypeCache typeCache = types.get(type);
        if (typeCache != null) {
            return typeCache.docById(id);
@ -69,9 +69,9 @@ public class SimpleIdReaderCache implements IdReaderCache {
    /**
     * Returns an already stored instance if one exists; if not, returns null.
     */
    public BytesWrap canReuse(BytesWrap id) {
    public HashedBytesArray canReuse(HashedBytesArray id) {
        for (SimpleIdReaderTypeCache typeCache : types.values()) {
            BytesWrap wrap = typeCache.canReuse(id);
            HashedBytesArray wrap = typeCache.canReuse(id);
            if (wrap != null) {
                return wrap;
            }
@ -19,7 +19,7 @@
|
||||
|
||||
package org.elasticsearch.index.cache.id.simple;
|
||||
|
||||
import org.elasticsearch.common.BytesWrap;
|
||||
import org.elasticsearch.common.bytes.HashedBytesArray;
|
||||
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
|
||||
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
|
||||
|
||||
@ -30,14 +30,14 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
|
||||
|
||||
private final String type;
|
||||
|
||||
private final ExtTObjectIntHasMap<BytesWrap> idToDoc;
|
||||
private final ExtTObjectIntHasMap<HashedBytesArray> idToDoc;
|
||||
|
||||
private final BytesWrap[] parentIdsValues;
|
||||
private final HashedBytesArray[] parentIdsValues;
|
||||
|
||||
private final int[] parentIdsOrdinals;
|
||||
|
||||
public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap<BytesWrap> idToDoc,
|
||||
BytesWrap[] parentIdsValues, int[] parentIdsOrdinals) {
|
||||
public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap<HashedBytesArray> idToDoc,
|
||||
HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) {
|
||||
this.type = type;
|
||||
this.idToDoc = idToDoc;
|
||||
this.idToDoc.trimToSize();
|
||||
@ -49,18 +49,18 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
|
||||
return this.type;
|
||||
}
|
||||
|
||||
public BytesWrap parentIdByDoc(int docId) {
|
||||
public HashedBytesArray parentIdByDoc(int docId) {
|
||||
return parentIdsValues[parentIdsOrdinals[docId]];
|
||||
}
|
||||
|
||||
public int docById(BytesWrap id) {
|
||||
public int docById(HashedBytesArray id) {
|
||||
return idToDoc.get(id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an already stored instance if exists, if not, returns null;
|
||||
*/
|
||||
public BytesWrap canReuse(BytesWrap id) {
|
||||
public HashedBytesArray canReuse(HashedBytesArray id) {
|
||||
return idToDoc.key(id);
|
||||
}
|
||||
}
|
||||
|
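The canReuse chain above exists to deduplicate parent-id bytes across readers: before a new HashedBytesArray is stored, the cache first asks the existing readers for an already-interned instance and reuses it. Below is a minimal sketch of that interning pattern, using a plain HashMap in place of ExtTObjectIntHasMap (whose key(id) returns the stored key instance); the names are illustrative, not the actual API.

    import java.util.HashMap;
    import java.util.Map;

    // Minimal sketch of the id-interning pattern behind canReuse(): look up an
    // equal key that is already stored and return that instance, so equal ids
    // end up sharing one underlying array.
    final class IdInterner<K> {
        private final Map<K, K> stored = new HashMap<K, K>();

        // Returns an already stored instance if one exists, otherwise null
        // (mirrors the ExtTObjectIntHasMap#key(id) call used above).
        K canReuse(K id) {
            return stored.get(id);
        }

        // Stores the id, preferring an existing equal instance over a new one.
        K intern(K id) {
            K existing = stored.get(id);
            if (existing != null) {
                return existing;
            }
            stored.put(id, id);
            return id;
        }
    }
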
@ -28,8 +28,8 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.CloseableComponent;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.uid.UidField;

@ -442,18 +442,10 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
return this.doc.analyzer();
}

public byte[] source() {
public BytesReference source() {
return this.doc.source();
}

public int sourceOffset() {
return this.doc.sourceOffset();
}

public int sourceLength() {
return this.doc.sourceLength();
}

public UidField uidField() {
return (UidField) doc.rootDoc().getFieldable(UidFieldMapper.NAME);
}
@ -578,18 +570,10 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
return this.doc.ttl();
}

public byte[] source() {
public BytesReference source() {
return this.doc.source();
}

public int sourceOffset() {
return this.doc.sourceOffset();
}

public int sourceLength() {
return this.doc.sourceLength();
}

public UidField uidField() {
return (UidField) doc.rootDoc().getFieldable(UidFieldMapper.NAME);
}
@ -719,7 +703,7 @@ public interface Engine extends IndexShardComponent, CloseableComponent {

static class DeleteByQuery {
private final Query query;
private final BytesHolder source;
private final BytesReference source;
private final String[] filteringAliases;
private final Filter aliasFilter;
private final String[] types;
@ -727,7 +711,7 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
private long startTime;
private long endTime;

public DeleteByQuery(Query query, BytesHolder source, @Nullable String[] filteringAliases, @Nullable Filter aliasFilter, String... types) {
public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Filter aliasFilter, String... types) {
this.query = query;
this.source = source;
this.types = types;
@ -739,7 +723,7 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
return this.query;
}

public BytesHolder source() {
public BytesReference source() {
return this.source;
}

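The pattern in these Engine hunks repeats across the whole commit: every (byte[] source, int sourceOffset, int sourceLength) triple collapses into a single reference that knows its own offset and length, so slices can be passed around without copying. A minimal sketch of such a reference follows, with illustrative names; the real BytesReference interface is richer.

    // Minimal sketch of a sliceable byte reference: one object replaces the
    // (byte[] data, int offset, int length) triples removed in this commit.
    final class ByteSlice {
        private final byte[] data;
        private final int offset;
        private final int length;

        ByteSlice(byte[] data) {
            this(data, 0, data.length);
        }

        ByteSlice(byte[] data, int offset, int length) {
            this.data = data;
            this.offset = offset;
            this.length = length;
        }

        int length() {
            return length;
        }

        // Slicing shares the underlying array instead of copying it.
        ByteSlice slice(int from, int len) {
            return new ByteSlice(data, offset + from, len);
        }

        // Copy only when a caller genuinely needs a standalone array.
        byte[] toBytes() {
            byte[] copy = new byte[length];
            System.arraycopy(data, offset, copy, 0, length);
            return copy;
        }
    }
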
@ -116,7 +116,7 @@ public class CommitPoints implements Iterable<CommitPoint> {
builder.endObject();

builder.endObject();
return builder.copiedBytes();
return builder.bytes().toBytes();
}

public static CommitPoint fromXContent(byte[] data) throws Exception {

@ -467,7 +467,7 @@ public abstract class BlobStoreIndexShardGateway extends AbstractIndexShardCompo
if (bos.size() < 4) {
return;
}
BytesStreamInput si = new BytesStreamInput(bos.underlyingBytes(), 0, bos.size(), false);
BytesStreamInput si = new BytesStreamInput(bos.bytes());
int position;
while (true) {
try {
@ -502,7 +502,7 @@ public abstract class BlobStoreIndexShardGateway extends AbstractIndexShardCompo

int leftOver = bos.size() - position;
if (leftOver > 0) {
newBos.write(bos.underlyingBytes(), position, leftOver);
newBos.write(bos.bytes().array(), position, leftOver);
}

bos = newBos;

@ -21,7 +21,7 @@ package org.elasticsearch.index.get;

import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -59,14 +59,14 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {

private Map<String, Object> sourceAsMap;

private BytesHolder source;
private BytesReference source;

private byte[] sourceAsBytes;

GetResult() {
}

public GetResult(String index, String type, String id, long version, boolean exists, BytesHolder source, Map<String, GetField> fields) {
public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source, Map<String, GetField> fields) {
this.index = index;
this.type = type;
this.id = id;
@ -159,14 +159,14 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
if (sourceAsBytes != null) {
return sourceAsBytes;
}
this.sourceAsBytes = sourceRef().copyBytes();
this.sourceAsBytes = sourceRef().toBytes();
return this.sourceAsBytes;
}

/**
* Returns bytes reference, also un compress the source if needed.
*/
public BytesHolder sourceRef() {
public BytesReference sourceRef() {
try {
this.source = CompressorFactory.uncompressIfNeeded(this.source);
return this.source;
@ -178,7 +178,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
/**
* Internal source representation, might be compressed....
*/
public BytesHolder internalSourceRef() {
public BytesReference internalSourceRef() {
return source;
}

@ -196,7 +196,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
if (source == null) {
return null;
}
BytesHolder source = sourceRef();
BytesReference source = sourceRef();
try {
return XContentHelper.convertToJson(source, false);
} catch (IOException e) {
@ -216,7 +216,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
return sourceAsMap;
}

sourceAsMap = SourceLookup.sourceAsMap(source.bytes(), source.offset(), source.length());
sourceAsMap = SourceLookup.sourceAsMap(source);
return sourceAsMap;
}

@ -257,7 +257,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
builder.field(Fields.EXISTS, exists);

if (source != null) {
RestXContentBuilder.restDocumentSource(source.bytes(), source.offset(), source.length(), builder, params);
RestXContentBuilder.restDocumentSource(source, builder, params);
}

if (fields != null && !fields.isEmpty()) {
@ -345,7 +345,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
out.writeLong(version);
out.writeBoolean(exists);
if (exists) {
out.writeBytesHolder(source);
out.writeBytesReference(source, true);
if (fields == null) {
out.writeVInt(0);
} else {

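GetResult above keeps the source as a single reference and materializes it lazily: sourceRef() uncompresses in place on first access, and the byte[] copy made by toBytes() is cached in sourceAsBytes so repeated callers pay for at most one conversion. A small sketch of that access pattern follows, with a stand-in for CompressorFactory.uncompressIfNeeded and illustrative names.

    // Sketch: lazy, cached materialization of a possibly-compressed source.
    final class LazySource {
        private byte[] raw;           // possibly compressed bytes
        private boolean uncompressed; // whether raw is known to be plain
        private byte[] cachedCopy;    // standalone copy handed to callers

        LazySource(byte[] raw) {
            this.raw = raw;
        }

        // First access uncompresses in place; later accesses are free.
        byte[] sourceRef() {
            if (!uncompressed) {
                raw = decompressIfNeeded(raw);
                uncompressed = true;
            }
            return raw;
        }

        // Callers that need a standalone array all share one cached copy.
        byte[] sourceAsBytes() {
            if (cachedCopy == null) {
                cachedCopy = sourceRef().clone();
            }
            return cachedCopy;
        }

        // Stand-in for CompressorFactory.uncompressIfNeeded.
        private static byte[] decompressIfNeeded(byte[] bytes) {
            return bytes; // assume plain bytes in this sketch
        }
    }
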
@ -22,7 +22,7 @@ package org.elasticsearch.index.get;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.document.ResetFieldSelector;
import org.elasticsearch.common.lucene.uid.UidField;
@ -233,7 +233,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
}
}

return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), source == null ? null : new BytesHolder(source), fields);
return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), source == null ? null : new BytesArray(source), fields);
} else {
Translog.Source source = get.source();

@ -274,7 +274,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
searchLookup = new SearchLookup(mapperService, indexCache.fieldData(), new String[]{type});
}
if (sourceAsMap == null) {
sourceAsMap = SourceLookup.sourceAsMap(source.source.bytes(), source.source.offset(), source.source.length());
sourceAsMap = SourceLookup.sourceAsMap(source.source);
}
SearchScript searchScript = scriptService.search(searchLookup, "mvel", field, null);
// we can't do this, only allow to run scripts against the source
@ -295,7 +295,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
} else {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, indexCache.fieldData(), new String[]{type});
searchLookup.source().setNextSource(source.source.bytes(), source.source.offset(), source.source.length());
searchLookup.source().setNextSource(source.source);
}

FieldMapper<?> x = docMapper.mappers().smartNameFieldMapper(field);

@ -29,6 +29,7 @@ import org.apache.lucene.search.Filter;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.settings.Settings;
@ -425,11 +426,11 @@ public class DocumentMapper implements ToXContent {
return this.objectMappers;
}

public ParsedDocument parse(byte[] source) throws MapperParsingException {
public ParsedDocument parse(BytesReference source) throws MapperParsingException {
return parse(SourceToParse.source(source));
}

public ParsedDocument parse(String type, String id, byte[] source) throws MapperParsingException {
public ParsedDocument parse(String type, String id, BytesReference source) throws MapperParsingException {
return parse(SourceToParse.source(source).type(type).id(id));
}

@ -448,7 +449,7 @@ public class DocumentMapper implements ToXContent {
XContentParser parser = source.parser();
try {
if (parser == null) {
parser = XContentHelper.createParser(source.source(), source.sourceOffset(), source.sourceLength());
parser = XContentHelper.createParser(source.source());
}
context.reset(parser, new Document(), source, listener);
// on a newly created instance of document mapper, we always consider it as new mappers that have been added
@ -517,7 +518,7 @@ public class DocumentMapper implements ToXContent {
Collections.reverse(context.docs());
}
ParsedDocument doc = new ParsedDocument(context.uid(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(), context.analyzer(),
context.source(), context.sourceOffset(), context.sourceLength(), context.mappersAdded()).parent(source.parent());
context.source(), context.mappersAdded()).parent(source.parent());
// reset the context to free up memory
context.reset(null, null, null, null);
return doc;
@ -593,7 +594,7 @@ public class DocumentMapper implements ToXContent {
builder.startObject();
toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
this.mappingSource = new CompressedString(builder.underlyingBytes(), 0, builder.underlyingBytesLength());
this.mappingSource = new CompressedString(builder.bytes());
} catch (Exception e) {
throw new FailedToGenerateSourceMapperException(e.getMessage(), e);
}

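With DocumentMapper now accepting a BytesReference, a caller that still holds a plain array just wraps it; the wrap is O(1) and copies nothing. A hedged usage sketch, where the mapper instance, type, and id are illustrative:

    // Illustrative caller: wrap an existing byte[] without copying and parse it.
    // docMapper is assumed to be an already built DocumentMapper.
    ParsedDocument parseRaw(DocumentMapper docMapper, byte[] jsonBytes) {
        return docMapper.parse("type1", "1", new BytesArray(jsonBytes));
    }
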
@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
@ -59,9 +60,7 @@ public class ParseContext {
private final Settings indexSettings;

private SourceToParse sourceToParse;
private byte[] source;
private int sourceOffset;
private int sourceLength;
private BytesReference source;

private String id;

@ -103,8 +102,6 @@ public class ParseContext {
this.id = null;
this.sourceToParse = source;
this.source = source == null ? null : sourceToParse.source();
this.sourceOffset = source == null ? 0 : sourceToParse.sourceOffset();
this.sourceLength = source == null ? 0 : sourceToParse.sourceLength();
this.path.reset();
this.mappersAdded = false;
this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener;
@ -145,23 +142,13 @@ public class ParseContext {
return this.sourceToParse;
}

public byte[] source() {
public BytesReference source() {
return source;
}

public int sourceOffset() {
return this.sourceOffset;
}

public int sourceLength() {
return this.sourceLength;
}

// only should be used by SourceFieldMapper to update with a compressed source
public void source(byte[] source, int offset, int length) {
public void source(BytesReference source) {
this.source = source;
this.sourceOffset = offset;
this.sourceLength = length;
}

public ContentPath path() {

@ -21,14 +21,13 @@ package org.elasticsearch.index.mapper;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.elasticsearch.common.bytes.BytesReference;

import java.util.Arrays;
import java.util.List;

/**
* The result of parsing a document.
*
*
*/
public class ParsedDocument {

@ -48,19 +47,17 @@ public class ParsedDocument {

private final Analyzer analyzer;

private final byte[] source;
private final int sourceOffset;
private final int sourceLength;
private final BytesReference source;

private boolean mappersAdded;

private String parent;

public ParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, Analyzer analyzer, byte[] source, boolean mappersAdded) {
this(uid, id, type, routing, timestamp, ttl, Arrays.asList(document), analyzer, source, 0, source.length, mappersAdded);
public ParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, Analyzer analyzer, BytesReference source, boolean mappersAdded) {
this(uid, id, type, routing, timestamp, ttl, Arrays.asList(document), analyzer, source, mappersAdded);
}

public ParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, Analyzer analyzer, byte[] source, int sourceOffset, int sourceLength, boolean mappersAdded) {
public ParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, Analyzer analyzer, BytesReference source, boolean mappersAdded) {
this.uid = uid;
this.id = id;
this.type = type;
@ -69,8 +66,6 @@ public class ParsedDocument {
this.ttl = ttl;
this.documents = documents;
this.source = source;
this.sourceOffset = sourceOffset;
this.sourceLength = sourceLength;
this.analyzer = analyzer;
this.mappersAdded = mappersAdded;
}
@ -111,18 +106,10 @@ public class ParsedDocument {
return this.analyzer;
}

public byte[] source() {
public BytesReference source() {
return this.source;
}

public int sourceOffset() {
return this.sourceOffset;
}

public int sourceLength() {
return this.sourceLength;
}

public ParsedDocument parent(String parent) {
this.parent = parent;
return this;

@ -19,6 +19,7 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentParser;

/**
@ -26,21 +27,15 @@ import org.elasticsearch.common.xcontent.XContentParser;
*/
public class SourceToParse {

public static SourceToParse source(byte[] source) {
return new SourceToParse(source);
}

public static SourceToParse source(byte[] source, int offset, int length) {
return new SourceToParse(source, offset, length);
}

public static SourceToParse source(XContentParser parser) {
return new SourceToParse(parser);
}

private final byte[] source;
private final int sourceOffset;
private final int sourceLength;
public static SourceToParse source(BytesReference source) {
return new SourceToParse(source);
}

private final BytesReference source;

private final XContentParser parser;

@ -61,21 +56,12 @@ public class SourceToParse {
public SourceToParse(XContentParser parser) {
this.parser = parser;
this.source = null;
this.sourceOffset = 0;
this.sourceLength = 0;
}

public SourceToParse(byte[] source) {
this.source = source;
this.sourceOffset = 0;
this.sourceLength = source.length;
this.parser = null;
}

public SourceToParse(byte[] source, int offset, int length) {
this.source = source;
this.sourceOffset = offset;
this.sourceLength = length;
public SourceToParse(BytesReference source) {
// we always convert back to byte array, since we store it and Field only supports bytes..
// so, we might as well do it here, and improve the performance of working with direct byte arrays
this.source = source.toBytesArray();
this.parser = null;
}

@ -83,18 +69,10 @@ public class SourceToParse {
return this.parser;
}

public byte[] source() {
public BytesReference source() {
return this.source;
}

public int sourceOffset() {
return this.sourceOffset;
}

public int sourceLength() {
return this.sourceLength;
}

public String type() {
return this.type;
}

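The comment in the SourceToParse constructor records a deliberate trade-off: the stored Lucene field ultimately needs a plain array, so the reference is flattened once at construction instead of on every access. A sketch of that normalize-once-at-the-boundary idea, reusing the illustrative ByteSlice sketch from the Engine section above:

    // Sketch: flatten a sliceable reference once at the boundary where a plain
    // array is ultimately required, so every later access is a plain array read.
    final class EagerSource {
        private final byte[] flattened;

        EagerSource(ByteSlice source) {
            // one copy here instead of a copy at every downstream use
            this.flattened = source.toBytes();
        }

        byte[] source() {
            return flattened;
        }
    }
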
@ -23,8 +23,8 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -131,7 +131,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper<byte[]> {
return value;
}
try {
return CompressorFactory.uncompressIfNeeded(new BytesHolder(value)).bytes();
return CompressorFactory.uncompressIfNeeded(new BytesArray(value)).toBytes();
} catch (IOException e) {
throw new ElasticSearchParseException("failed to decompress source", e);
}
@ -175,7 +175,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper<byte[]> {
streamOutput.close();
// we copy over the byte array, since we need to push back the cached entry
// TODO, we we had a handle into when we are done with parsing, then we push back then and not copy over bytes
value = cachedEntry.bytes().copiedByteArray();
value = cachedEntry.bytes().bytes().copyBytesArray().toBytes();
CachedStreamOutput.pushEntry(cachedEntry);
}
}

@ -135,7 +135,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
if (!enabled) {
return null;
}
return new CustomIntegerNumericField(this, context.sourceLength());
return new CustomIntegerNumericField(this, context.source().length());
}

@Override

@ -24,13 +24,13 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
@ -242,19 +242,17 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
if (context.flyweight()) {
return null;
}
byte[] data = context.source();
int dataOffset = context.sourceOffset();
int dataLength = context.sourceLength();
BytesReference source = context.source();

boolean filtered = includes.length > 0 || excludes.length > 0;
if (filtered) {
// we don't update the context source if we filter, we want to keep it as is...

Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(data, dataOffset, dataLength, true);
Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
CachedStreamOutput.Entry cachedEntry = CachedStreamOutput.popEntry();
StreamOutput streamOutput;
if (compress != null && compress && (compressThreshold == -1 || dataLength > compressThreshold)) {
if (compress != null && compress && (compressThreshold == -1 || source.length() > compressThreshold)) {
streamOutput = cachedEntry.bytes(CompressorFactory.defaultCompressor());
} else {
streamOutput = cachedEntry.bytes();
@ -266,41 +264,37 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
XContentBuilder builder = XContentFactory.contentBuilder(contentType, streamOutput).map(filteredSource);
builder.close();

data = cachedEntry.bytes().copiedByteArray();
dataOffset = 0;
dataLength = data.length;
source = cachedEntry.bytes().bytes().copyBytesArray();

CachedStreamOutput.pushEntry(cachedEntry);
} else if (compress != null && compress && !CompressorFactory.isCompressed(data, dataOffset, dataLength)) {
if (compressThreshold == -1 || dataLength > compressThreshold) {
} else if (compress != null && compress && !CompressorFactory.isCompressed(source)) {
if (compressThreshold == -1 || source.length() > compressThreshold) {
CachedStreamOutput.Entry cachedEntry = CachedStreamOutput.popEntry();
try {
XContentType contentType = XContentFactory.xContentType(data, dataOffset, dataLength);
XContentType contentType = XContentFactory.xContentType(source);
if (formatContentType != null && formatContentType != contentType) {
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, cachedEntry.bytes(CompressorFactory.defaultCompressor()));
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(data, dataOffset, dataLength));
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
builder.close();
} else {
StreamOutput streamOutput = cachedEntry.bytes(CompressorFactory.defaultCompressor());
streamOutput.writeBytes(data, dataOffset, dataLength);
streamOutput.writeBytesReference(source, false);
streamOutput.close();
}
// we copy over the byte array, since we need to push back the cached entry
// TODO, we we had a handle into when we are done with parsing, then we push back then and not copy over bytes
data = cachedEntry.bytes().copiedByteArray();
dataOffset = 0;
dataLength = data.length;
source = cachedEntry.bytes().bytes().copyBytesArray();
// update the data in the context, so it can be compressed and stored compressed outside...
context.source(data, dataOffset, dataLength);
context.source(source);
} finally {
CachedStreamOutput.pushEntry(cachedEntry);
}
}
} else if (formatContentType != null) {
// see if we need to convert the content type
Compressor compressor = CompressorFactory.compressor(data, dataOffset, dataLength);
Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(new BytesStreamInput(data, dataOffset, dataLength, false));
CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput());
XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
if (contentType != formatContentType) {
@ -311,11 +305,9 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, streamOutput);
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(compressedStreamInput));
builder.close();
data = cachedEntry.bytes().copiedByteArray();
dataOffset = 0;
dataLength = data.length;
source = cachedEntry.bytes().bytes().copyBytesArray();
// update the data in the context, so we store it in the translog in this format
context.source(data, dataOffset, dataLength);
context.source(source);
} finally {
CachedStreamOutput.pushEntry(cachedEntry);
}
@ -323,27 +315,26 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
compressedStreamInput.close();
}
} else {
XContentType contentType = XContentFactory.xContentType(data, dataOffset, dataLength);
XContentType contentType = XContentFactory.xContentType(source);
if (contentType != formatContentType) {
// we need to reread and store back
// we need to reread and store back, compressed....
CachedStreamOutput.Entry cachedEntry = CachedStreamOutput.popEntry();
try {
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, cachedEntry.bytes());
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(data, dataOffset, dataLength));
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
builder.close();
data = cachedEntry.bytes().copiedByteArray();
dataOffset = 0;
dataLength = data.length;
source = cachedEntry.bytes().bytes().copyBytesArray();
// update the data in the context, so we store it in the translog in this format
context.source(data, dataOffset, dataLength);
context.source(source);
} finally {
CachedStreamOutput.pushEntry(cachedEntry);
}
}
}
}
return new Field(names().indexName(), data, dataOffset, dataLength);
assert source.hasArray();
return new Field(names().indexName(), source.array(), source.arrayOffset(), source.length());
}

public byte[] value(Document document) {
@ -362,7 +353,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
return value;
}
try {
return CompressorFactory.uncompressIfNeeded(new BytesHolder(value)).bytes();
return CompressorFactory.uncompressIfNeeded(new BytesArray(value)).toBytes();
} catch (IOException e) {
throw new ElasticSearchParseException("failed to decompress source", e);
}

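Both field mappers above follow the same recycled-buffer discipline: pop a pooled stream entry, write the transformed source into it, copy the finished bytes out, and only then push the entry back (the in-code TODO notes the copy could disappear given a parse-completion hook). A generic sketch of that pop/write/copy/push pattern, independent of the CachedStreamOutput API:

    import java.io.ByteArrayOutputStream;
    import java.util.ArrayDeque;
    import java.util.Deque;

    // Sketch: pooled output buffers; the copy before push-back is what keeps
    // the pooled buffer safe to hand to the next user.
    final class BufferPool {
        private final Deque<ByteArrayOutputStream> pool = new ArrayDeque<ByteArrayOutputStream>();

        ByteArrayOutputStream pop() {
            ByteArrayOutputStream entry = pool.poll();
            return entry != null ? entry : new ByteArrayOutputStream();
        }

        void push(ByteArrayOutputStream entry) {
            entry.reset();
            pool.push(entry);
        }

        byte[] transform(byte[] input) {
            ByteArrayOutputStream entry = pop();
            try {
                entry.write(input, 0, input.length); // stand-in for re-encoding or compressing
                return entry.toByteArray();          // copy out before releasing the buffer
            } finally {
                push(entry);
            }
        }
    }
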
@ -31,8 +31,8 @@ import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
@ -73,36 +73,20 @@ public class PercolatorExecutor extends AbstractIndexComponent {

public static class SourceRequest {
private final String type;
private final byte[] source;
private final int offset;
private final int length;
private final BytesReference source;

public SourceRequest(String type, byte[] source) {
this(type, source, 0, source.length);
}

public SourceRequest(String type, byte[] source, int offset, int length) {
public SourceRequest(String type, BytesReference source) {
this.type = type;
this.source = source;
this.offset = offset;
this.length = length;
}

public String type() {
return this.type;
}

public byte[] source() {
public BytesReference source() {
return source;
}

public int offset() {
return this.offset;
}

public int length() {
return this.length;
}
}

public static class DocAndSourceQueryRequest {
@ -197,25 +181,20 @@ public class PercolatorExecutor extends AbstractIndexComponent {
try {
XContentBuilder builder = XContentFactory.smileBuilder()
.startObject().field("query", queryBuilder).endObject();
BytesStream unsafeBytes = builder.underlyingStream();
addQuery(name, unsafeBytes.underlyingBytes(), 0, unsafeBytes.size());
addQuery(name, builder.bytes());
} catch (IOException e) {
throw new ElasticSearchException("Failed to add query [" + name + "]", e);
}
}

public void addQuery(String name, byte[] source) throws ElasticSearchException {
addQuery(name, source, 0, source.length);
public void addQuery(String name, BytesReference source) throws ElasticSearchException {
addQuery(name, parseQuery(name, source));
}

public void addQuery(String name, byte[] source, int sourceOffset, int sourceLength) throws ElasticSearchException {
addQuery(name, parseQuery(name, source, sourceOffset, sourceLength));
}

public Query parseQuery(String name, byte[] source, int sourceOffset, int sourceLength) throws ElasticSearchException {
public Query parseQuery(String name, BytesReference source) throws ElasticSearchException {
XContentParser parser = null;
try {
parser = XContentHelper.createParser(source, sourceOffset, sourceLength);
parser = XContentHelper.createParser(source);
Query query = null;
String currentFieldName = null;
XContentParser.Token token = parser.nextToken(); // move the START_OBJECT
@ -265,7 +244,7 @@ public class PercolatorExecutor extends AbstractIndexComponent {
XContentParser parser = null;
try {

parser = XContentFactory.xContent(request.source(), request.offset(), request.length()).createParser(request.source(), request.offset(), request.length());
parser = XContentFactory.xContent(request.source()).createParser(request.source());
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {

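After this change the percolator takes the query source as one reference end to end: the builder's bytes go straight into addQuery, which parses them through the single-argument createParser path. A hedged usage sketch, where executor stands for an already wired PercolatorExecutor and the query shape is illustrative:

    // Illustrative caller (inside a method that may throw IOException):
    // register a percolator query straight from a builder's bytes.
    XContentBuilder builder = XContentFactory.jsonBuilder()
            .startObject().field("query", QueryBuilders.matchAllQuery()).endObject();
    executor.addQuery("my-query", builder.bytes());
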
@ -24,6 +24,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.*;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.TermFilter;
import org.elasticsearch.common.settings.Settings;
@ -175,7 +176,7 @@ public class PercolatorService extends AbstractIndexComponent {
String id = Uid.createUid(document.get(UidFieldMapper.NAME)).id();
try {
Fieldable sourceField = document.getFieldable(SourceFieldMapper.NAME);
queries.put(id, percolator.parseQuery(id, sourceField.getBinaryValue(), sourceField.getBinaryOffset(), sourceField.getBinaryLength()));
queries.put(id, percolator.parseQuery(id, new BytesArray(sourceField.getBinaryValue(), sourceField.getBinaryOffset(), sourceField.getBinaryLength())));
} catch (Exception e) {
logger.warn("failed to add query [{}]", e, id);
}
@ -259,7 +260,7 @@ public class PercolatorService extends AbstractIndexComponent {
public Engine.Create preCreate(Engine.Create create) {
// validate the query here, before we index
if (create.type().equals(index().name())) {
percolator.parseQuery(create.id(), create.source(), create.sourceOffset(), create.sourceLength());
percolator.parseQuery(create.id(), create.source());
}
return create;
}
@ -268,7 +269,7 @@ public class PercolatorService extends AbstractIndexComponent {
public void postCreateUnderLock(Engine.Create create) {
// add the query under a doc lock
if (create.type().equals(index().name())) {
percolator.addQuery(create.id(), create.source(), create.sourceOffset(), create.sourceLength());
percolator.addQuery(create.id(), create.source());
}
}

@ -276,7 +277,7 @@ public class PercolatorService extends AbstractIndexComponent {
public Engine.Index preIndex(Engine.Index index) {
// validate the query here, before we index
if (index.type().equals(index().name())) {
percolator.parseQuery(index.id(), index.source(), index.sourceOffset(), index.sourceLength());
percolator.parseQuery(index.id(), index.source());
}
return index;
}
@ -285,7 +286,7 @@ public class PercolatorService extends AbstractIndexComponent {
public void postIndexUnderLock(Engine.Index index) {
// add the query under a doc lock
if (index.type().equals(index().name())) {
percolator.addQuery(index.id(), index.source(), index.sourceOffset(), index.sourceLength());
percolator.addQuery(index.id(), index.source());
}
}

@ -19,7 +19,7 @@

package org.elasticsearch.index.query;

import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@ -44,16 +44,16 @@ public abstract class BaseQueryBuilder implements QueryBuilder {
}

@Override
public BytesStream buildAsBytes() throws QueryBuilderException {
public BytesReference buildAsBytes() throws QueryBuilderException {
return buildAsBytes(XContentType.JSON);
}

@Override
public BytesStream buildAsBytes(XContentType contentType) throws QueryBuilderException {
public BytesReference buildAsBytes(XContentType contentType) throws QueryBuilderException {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
toXContent(builder, EMPTY_PARAMS);
return builder.underlyingStream();
return builder.bytes();
} catch (Exception e) {
throw new QueryBuilderException("Failed to build query", e);
}

@ -24,8 +24,8 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -175,8 +175,8 @@ public class IndexQueryParserService extends AbstractIndexComponent {
public ParsedQuery parse(QueryBuilder queryBuilder) throws ElasticSearchException {
XContentParser parser = null;
try {
BytesStream bytes = queryBuilder.buildAsBytes();
parser = XContentFactory.xContent(bytes.underlyingBytes(), 0, bytes.size()).createParser(bytes.underlyingBytes(), 0, bytes.size());
BytesReference bytes = queryBuilder.buildAsBytes();
parser = XContentFactory.xContent(bytes).createParser(bytes);
return parse(cache.get(), parser);
} catch (QueryParsingException e) {
throw e;
@ -209,6 +209,22 @@ public class IndexQueryParserService extends AbstractIndexComponent {
}
}

public ParsedQuery parse(BytesReference source) throws ElasticSearchException {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return parse(cache.get(), parser);
} catch (QueryParsingException e) {
throw e;
} catch (Exception e) {
throw new QueryParsingException(index, "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}

public ParsedQuery parse(String source) throws QueryParsingException {
XContentParser parser = null;
try {

@ -19,7 +19,7 @@

package org.elasticsearch.index.query;

import org.elasticsearch.common.io.BytesStream;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentType;

@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentType;
*/
public interface QueryBuilder extends ToXContent {

BytesStream buildAsBytes() throws QueryBuilderException;
BytesReference buildAsBytes() throws QueryBuilderException;

BytesStream buildAsBytes(XContentType contentType) throws QueryBuilderException;
BytesReference buildAsBytes(XContentType contentType) throws QueryBuilderException;
}

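With buildAsBytes now returning a BytesReference, a built query can flow into the new parse(BytesReference) overload with no intermediate array copy. A hedged usage sketch, where queryParserService stands for an already wired IndexQueryParserService:

    // Illustrative caller: build once, parse straight from the reference.
    QueryBuilder qb = QueryBuilders.termQuery("user", "kimchy");
    ParsedQuery parsed = queryParserService.parse(qb.buildAsBytes());
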
@ -23,7 +23,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.BytesWrap;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.SearchContext;
@ -71,7 +71,7 @@ public class ChildCollector extends Collector {

@Override
public void collect(int doc) throws IOException {
BytesWrap parentId = typeCache.parentIdByDoc(doc);
HashedBytesArray parentId = typeCache.parentIdByDoc(doc);
if (parentId == null) {
return;
}

@ -26,7 +26,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.BytesWrap;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.search.EmptyScorer;
import org.elasticsearch.search.internal.ScopePhase;
import org.elasticsearch.search.internal.SearchContext;
@ -125,7 +125,7 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
int subDoc = scoreDoc.doc - context.searcher().docStarts()[readerIndex];

// find the parent id
BytesWrap parentId = context.idCache().reader(subReader).parentIdByDoc(parentType, subDoc);
HashedBytesArray parentId = context.idCache().reader(subReader).parentIdByDoc(parentType, subDoc);
if (parentId == null) {
// no parent found
continue;

@ -189,18 +189,18 @@ public class ShardSlowLogSearchService extends AbstractIndexShardComponent {
StringBuilder sb = new StringBuilder();
sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ");
sb.append("search_type[").append(context.searchType()).append("], total_shards[").append(context.numberOfShards()).append("], ");
if (context.request().sourceLength() > 0) {
if (context.request().source() != null && context.request().source().length() > 0) {
try {
sb.append("source[").append(XContentHelper.convertToJson(context.request().source(), context.request().sourceOffset(), context.request().sourceLength(), reformat)).append("], ");
sb.append("source[").append(XContentHelper.convertToJson(context.request().source(), reformat)).append("], ");
} catch (IOException e) {
sb.append("source[_failed_to_convert_], ");
}
} else {
sb.append("source[], ");
}
if (context.request().extraSourceLength() > 0) {
if (context.request().extraSource() != null && context.request().extraSource().length() > 0) {
try {
sb.append("extra_source[").append(XContentHelper.convertToJson(context.request().extraSource(), context.request().extraSourceOffset(), context.request().extraSourceLength(), reformat)).append("], ");
sb.append("extra_source[").append(XContentHelper.convertToJson(context.request().extraSource(), reformat)).append("], ");
} catch (IOException e) {
sb.append("extra_source[_failed_to_convert_], ");
}

@ -21,8 +21,8 @@ package org.elasticsearch.index.shard.service;

import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.BytesHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.flush.FlushStats;
@ -90,15 +90,13 @@ public interface IndexShard extends IndexShardComponent {

void delete(Engine.Delete delete) throws ElasticSearchException;

Engine.DeleteByQuery prepareDeleteByQuery(BytesHolder querySource, @Nullable String[] filteringAliases, String... types) throws ElasticSearchException;
Engine.DeleteByQuery prepareDeleteByQuery(BytesReference querySource, @Nullable String[] filteringAliases, String... types) throws ElasticSearchException;

void deleteByQuery(Engine.DeleteByQuery deleteByQuery) throws ElasticSearchException;

Engine.GetResult get(Engine.Get get) throws ElasticSearchException;

long count(float minScore, byte[] querySource, @Nullable String[] filteringAliases, String... types) throws ElasticSearchException;

long count(float minScore, byte[] querySource, int querySourceOffset, int querySourceLength, @Nullable String[] filteringAliases, String... types) throws ElasticSearchException;
long count(float minScore, BytesReference querySource, @Nullable String[] filteringAliases, String... types) throws ElasticSearchException;

void refresh(Engine.Refresh refresh) throws ElasticSearchException;

Some files were not shown because too many files have changed in this diff.