Remove all dependencies from XContentBuilder (#29225)
* Remove all dependencies from XContentBuilder. This commit removes all of the non-JDK dependencies from XContentBuilder, with the exception of `CollectionUtils.ensureNoSelfReferences`. It adds a third extension point for dealing with time-based fields and formatters, to work around the Joda dependency. This decoupling allows us to move XContentBuilder to a separate lib so it can be available for things like the high-level REST client. Relates to #28504
This commit is contained in:
parent
3db6f1c9d5
commit
b4c78019b0
|
@ -121,7 +121,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
builder.startObject();
|
||||
{
|
||||
builder.field("user", "kimchy");
|
||||
builder.field("postDate", new Date());
|
||||
builder.timeField("postDate", new Date());
|
||||
builder.field("message", "trying out Elasticsearch");
|
||||
}
|
||||
builder.endObject();
|
||||
|
@ -331,7 +331,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
{
|
||||
builder.field("updated", new Date());
|
||||
builder.timeField("updated", new Date());
|
||||
builder.field("reason", "daily update");
|
||||
}
|
||||
builder.endObject();
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
|
@ -141,8 +142,8 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
|
|||
builder.startObject(Fields.STATS);
|
||||
builder.field(Fields.NUMBER_OF_FILES, getNumberOfFiles());
|
||||
builder.field(Fields.PROCESSED_FILES, getProcessedFiles());
|
||||
builder.byteSizeField(Fields.TOTAL_SIZE_IN_BYTES, Fields.TOTAL_SIZE, getTotalSize());
|
||||
builder.byteSizeField(Fields.PROCESSED_SIZE_IN_BYTES, Fields.PROCESSED_SIZE, getProcessedSize());
|
||||
builder.humanReadableField(Fields.TOTAL_SIZE_IN_BYTES, Fields.TOTAL_SIZE, new ByteSizeValue(getTotalSize()));
|
||||
builder.humanReadableField(Fields.PROCESSED_SIZE_IN_BYTES, Fields.PROCESSED_SIZE, new ByteSizeValue(getProcessedSize()));
|
||||
builder.field(Fields.START_TIME_IN_MILLIS, getStartTime());
|
||||
builder.humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
|
||||
builder.endObject();
|
||||
|
|
|
@ -501,8 +501,8 @@ public class ClusterStatsNodes implements ToXContentFragment {
|
|||
}
|
||||
builder.endArray();
|
||||
builder.startObject(Fields.MEM);
|
||||
builder.byteSizeField(Fields.HEAP_USED_IN_BYTES, Fields.HEAP_USED, heapUsed);
|
||||
builder.byteSizeField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, heapMax);
|
||||
builder.humanReadableField(Fields.HEAP_USED_IN_BYTES, Fields.HEAP_USED, getHeapUsed());
|
||||
builder.humanReadableField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, getHeapMax());
|
||||
builder.endObject();
|
||||
|
||||
builder.field(Fields.THREADS, threads);
|
||||
|
|
|
@ -132,8 +132,8 @@ public class IndicesSegmentResponse extends BroadcastResponse {
|
|||
builder.field(Fields.GENERATION, segment.getGeneration());
|
||||
builder.field(Fields.NUM_DOCS, segment.getNumDocs());
|
||||
builder.field(Fields.DELETED_DOCS, segment.getDeletedDocs());
|
||||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, segment.getSizeInBytes());
|
||||
builder.byteSizeField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, segment.getMemoryInBytes());
|
||||
builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, segment.getSize());
|
||||
builder.humanReadableField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, new ByteSizeValue(segment.getMemoryInBytes()));
|
||||
builder.field(Fields.COMMITTED, segment.isCommitted());
|
||||
builder.field(Fields.SEARCH, segment.isSearch());
|
||||
if (segment.getVersion() != null) {
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException;
|
|||
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -116,9 +117,10 @@ public class UpgradeStatusResponse extends BroadcastResponse {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, getTotalBytes());
|
||||
builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, getToUpgradeBytes());
|
||||
builder.byteSizeField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT, getToUpgradeBytesAncient());
|
||||
builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalBytes()));
|
||||
builder.humanReadableField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, new ByteSizeValue(getToUpgradeBytes()));
|
||||
builder.humanReadableField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT,
|
||||
new ByteSizeValue(getToUpgradeBytesAncient()));
|
||||
|
||||
String level = params.param("level", "indices");
|
||||
boolean outputShards = "shards".equals(level);
|
||||
|
@ -128,9 +130,11 @@ public class UpgradeStatusResponse extends BroadcastResponse {
|
|||
for (IndexUpgradeStatus indexUpgradeStatus : getIndices().values()) {
|
||||
builder.startObject(indexUpgradeStatus.getIndex());
|
||||
|
||||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, indexUpgradeStatus.getTotalBytes());
|
||||
builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, indexUpgradeStatus.getToUpgradeBytes());
|
||||
builder.byteSizeField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT, indexUpgradeStatus.getToUpgradeBytesAncient());
|
||||
builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(indexUpgradeStatus.getTotalBytes()));
|
||||
builder.humanReadableField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE,
|
||||
new ByteSizeValue(indexUpgradeStatus.getToUpgradeBytes()));
|
||||
builder.humanReadableField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT,
|
||||
new ByteSizeValue(indexUpgradeStatus.getToUpgradeBytesAncient()));
|
||||
if (outputShards) {
|
||||
builder.startObject(Fields.SHARDS);
|
||||
for (IndexShardUpgradeStatus indexShardUpgradeStatus : indexUpgradeStatus) {
|
||||
|
@ -138,9 +142,11 @@ public class UpgradeStatusResponse extends BroadcastResponse {
|
|||
for (ShardUpgradeStatus shardUpgradeStatus : indexShardUpgradeStatus) {
|
||||
builder.startObject();
|
||||
|
||||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, getTotalBytes());
|
||||
builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, getToUpgradeBytes());
|
||||
builder.byteSizeField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT, getToUpgradeBytesAncient());
|
||||
builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalBytes()));
|
||||
builder.humanReadableField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE,
|
||||
new ByteSizeValue(getToUpgradeBytes()));
|
||||
builder.humanReadableField(Fields.SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, Fields.SIZE_TO_UPGRADE_ANCIENT,
|
||||
new ByteSizeValue(getToUpgradeBytesAncient()));
|
||||
|
||||
builder.startObject(Fields.ROUTING);
|
||||
builder.field(Fields.STATE, shardUpgradeStatus.getShardRouting().state());
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
|
@ -133,7 +134,7 @@ public class ClusterInfo implements ToXContentFragment, Writeable {
|
|||
builder.endObject(); // end "nodes"
|
||||
builder.startObject("shard_sizes"); {
|
||||
for (ObjectObjectCursor<String, Long> c : this.shardSizes) {
|
||||
builder.byteSizeField(c.key + "_bytes", c.key, c.value);
|
||||
builder.humanReadableField(c.key + "_bytes", c.key, new ByteSizeValue(c.value));
|
||||
}
|
||||
}
|
||||
builder.endObject(); // end "shard_sizes"
|
||||
|
|
|
@ -75,9 +75,9 @@ public class DiskUsage implements ToXContentFragment, Writeable {
|
|||
|
||||
XContentBuilder toShortXContent(XContentBuilder builder) throws IOException {
|
||||
builder.field("path", this.path);
|
||||
builder.byteSizeField("total_bytes", "total", this.totalBytes);
|
||||
builder.byteSizeField("used_bytes", "used", this.getUsedBytes());
|
||||
builder.byteSizeField("free_bytes", "free", this.freeBytes);
|
||||
builder.humanReadableField("total_bytes", "total", new ByteSizeValue(this.totalBytes));
|
||||
builder.humanReadableField("used_bytes", "used", new ByteSizeValue(this.getUsedBytes()));
|
||||
builder.humanReadableField("free_bytes", "free", new ByteSizeValue(this.freeBytes));
|
||||
builder.field("free_disk_percent", truncatePercent(this.getFreeDiskAsPercentage()));
|
||||
builder.field("used_disk_percent", truncatePercent(this.getUsedDiskAsPercentage()));
|
||||
return builder;
|
||||
|
|
|
@ -434,7 +434,7 @@ public final class IndexGraveyard implements MetaData.Custom {
|
|||
builder.startObject();
|
||||
builder.field(INDEX_KEY);
|
||||
index.toXContent(builder, params);
|
||||
builder.dateField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis);
|
||||
builder.timeField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.Nullable;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -289,7 +290,7 @@ public class NodeAllocationResult implements ToXContentObject, Writeable, Compar
|
|||
if (hasMatchingSyncId()) {
|
||||
builder.field("matching_sync_id", true);
|
||||
} else {
|
||||
builder.byteSizeField("matching_size_in_bytes", "matching_size", matchingBytes);
|
||||
builder.humanReadableField("matching_size_in_bytes", "matching_size", new ByteSizeValue(matchingBytes));
|
||||
}
|
||||
}
|
||||
if (storeException != null) {
|
||||
|
|
|
@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectLongCursor;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -85,7 +86,7 @@ public final class FieldMemoryStats implements Writeable, Iterable<ObjectLongCur
|
|||
builder.startObject(key);
|
||||
for (ObjectLongCursor<String> entry : stats) {
|
||||
builder.startObject(entry.key);
|
||||
builder.byteSizeField(rawKey, readableKey, entry.value);
|
||||
builder.humanReadableField(rawKey, readableKey, new ByteSizeValue(entry.value));
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
|
|
|
@ -19,12 +19,7 @@
|
|||
|
||||
package org.elasticsearch.common.xcontent;
|
||||
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.util.CollectionUtils;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.ReadableInstant;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
import org.joda.time.format.ISODateTimeFormat;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.Closeable;
|
||||
|
@ -38,12 +33,14 @@ import java.util.Arrays;
|
|||
import java.util.Calendar;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.GregorianCalendar;
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.ServiceLoader;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
|
||||
/**
|
||||
* A utility to build XContent (ie json).
|
||||
|
@ -81,16 +78,15 @@ public final class XContentBuilder implements Closeable, Flushable {
|
|||
return new XContentBuilder(xContent, new ByteArrayOutputStream(), includes, excludes);
|
||||
}
|
||||
|
||||
public static final DateTimeFormatter DEFAULT_DATE_PRINTER = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
|
||||
|
||||
private static final Map<Class<?>, Writer> WRITERS;
|
||||
private static final Map<Class<?>, HumanReadableTransformer> HUMAN_READABLE_TRANSFORMERS;
|
||||
private static final Map<Class<?>, Function<Object, Object>> DATE_TRANSFORMERS;
|
||||
static {
|
||||
Map<Class<?>, Writer> writers = new HashMap<>();
|
||||
writers.put(Boolean.class, (b, v) -> b.value((Boolean) v));
|
||||
writers.put(Byte.class, (b, v) -> b.value((Byte) v));
|
||||
writers.put(byte[].class, (b, v) -> b.value((byte[]) v));
|
||||
writers.put(Date.class, (b, v) -> b.value((Date) v));
|
||||
writers.put(Date.class, XContentBuilder::timeValue);
|
||||
writers.put(Double.class, (b, v) -> b.value((Double) v));
|
||||
writers.put(double[].class, (b, v) -> b.values((double[]) v));
|
||||
writers.put(Float.class, (b, v) -> b.value((Float) v));
|
||||
|
@ -106,26 +102,37 @@ public final class XContentBuilder implements Closeable, Flushable {
|
|||
writers.put(Locale.class, (b, v) -> b.value(v.toString()));
|
||||
writers.put(Class.class, (b, v) -> b.value(v.toString()));
|
||||
writers.put(ZonedDateTime.class, (b, v) -> b.value(v.toString()));
|
||||
writers.put(Calendar.class, XContentBuilder::timeValue);
|
||||
writers.put(GregorianCalendar.class, XContentBuilder::timeValue);
|
||||
|
||||
|
||||
Map<Class<?>, HumanReadableTransformer> humanReadableTransformer = new HashMap<>();
|
||||
Map<Class<?>, Function<Object, Object>> dateTransformers = new HashMap<>();
|
||||
|
||||
// treat strings as already converted
|
||||
dateTransformers.put(String.class, Function.identity());
|
||||
|
||||
// Load pluggable extensions
|
||||
for (XContentBuilderExtension service : ServiceLoader.load(XContentBuilderExtension.class)) {
|
||||
Map<Class<?>, Writer> addlWriters = service.getXContentWriters();
|
||||
Map<Class<?>, HumanReadableTransformer> addlTransformers = service.getXContentHumanReadableTransformers();
|
||||
Map<Class<?>, Function<Object, Object>> addlDateTransformers = service.getDateTransformers();
|
||||
|
||||
addlWriters.forEach((key, value) -> Objects.requireNonNull(value,
|
||||
"invalid null xcontent writer for class " + key));
|
||||
addlTransformers.forEach((key, value) -> Objects.requireNonNull(value,
|
||||
"invalid null xcontent transformer for human readable class " + key));
|
||||
dateTransformers.forEach((key, value) -> Objects.requireNonNull(value,
|
||||
"invalid null xcontent date transformer for class " + key));
|
||||
|
||||
writers.putAll(addlWriters);
|
||||
humanReadableTransformer.putAll(addlTransformers);
|
||||
dateTransformers.putAll(addlDateTransformers);
|
||||
}
|
||||
|
||||
WRITERS = Collections.unmodifiableMap(writers);
|
||||
HUMAN_READABLE_TRANSFORMERS = Collections.unmodifiableMap(humanReadableTransformer);
|
||||
DATE_TRANSFORMERS = Collections.unmodifiableMap(dateTransformers);
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
|
@ -610,15 +617,6 @@ public final class XContentBuilder implements Closeable, Flushable {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the binary content of the given byte array as UTF-8 bytes.
|
||||
*
|
||||
* Use {@link XContentParser#charBuffer()} to read the value back
|
||||
*/
|
||||
public XContentBuilder utf8Field(String name, byte[] bytes, int offset, int length) throws IOException {
|
||||
return field(name).utf8Value(bytes, offset, length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the binary content of the given byte array as UTF-8 bytes.
|
||||
*
|
||||
|
@ -634,63 +632,49 @@ public final class XContentBuilder implements Closeable, Flushable {
|
|||
// Date
|
||||
//////////////////////////////////
|
||||
|
||||
public XContentBuilder field(String name, ReadableInstant value) throws IOException {
|
||||
return field(name).value(value);
|
||||
/**
|
||||
* Write a time-based field and value, if the passed timeValue is null a
|
||||
* null value is written, otherwise a date transformers lookup is performed.
|
||||
|
||||
* @throws IllegalArgumentException if there is no transformers for the type of object
|
||||
*/
|
||||
public XContentBuilder timeField(String name, Object timeValue) throws IOException {
|
||||
return field(name).timeValue(timeValue);
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, ReadableInstant value, DateTimeFormatter formatter) throws IOException {
|
||||
return field(name).value(value, formatter);
|
||||
}
|
||||
|
||||
public XContentBuilder value(ReadableInstant value) throws IOException {
|
||||
return value(value, DEFAULT_DATE_PRINTER);
|
||||
}
|
||||
|
||||
public XContentBuilder value(ReadableInstant value, DateTimeFormatter formatter) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
ensureFormatterNotNull(formatter);
|
||||
return value(formatter.print(value));
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Date value) throws IOException {
|
||||
return field(name).value(value);
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Date value, DateTimeFormatter formatter) throws IOException {
|
||||
return field(name).value(value, formatter);
|
||||
}
|
||||
|
||||
public XContentBuilder value(Date value) throws IOException {
|
||||
return value(value, DEFAULT_DATE_PRINTER);
|
||||
}
|
||||
|
||||
public XContentBuilder value(Date value, DateTimeFormatter formatter) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
return value(formatter, value.getTime());
|
||||
}
|
||||
|
||||
public XContentBuilder dateField(String name, String readableName, long value) throws IOException {
|
||||
/**
|
||||
* If the {@code humanReadable} flag is set, writes both a formatted and
|
||||
* unformatted version of the time value using the date transformer for the
|
||||
* {@link Long} class.
|
||||
*/
|
||||
public XContentBuilder timeField(String name, String readableName, long value) throws IOException {
|
||||
if (humanReadable) {
|
||||
field(readableName).value(DEFAULT_DATE_PRINTER, value);
|
||||
Function<Object, Object> longTransformer = DATE_TRANSFORMERS.get(Long.class);
|
||||
if (longTransformer == null) {
|
||||
throw new IllegalArgumentException("cannot write time value xcontent for unknown value of type Long");
|
||||
}
|
||||
field(readableName).value(longTransformer.apply(value));
|
||||
}
|
||||
field(name, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
XContentBuilder value(Calendar value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
return value(DEFAULT_DATE_PRINTER, value.getTimeInMillis());
|
||||
}
|
||||
/**
|
||||
* Write a time-based value, if the value is null a null value is written,
|
||||
* otherwise a date transformers lookup is performed.
|
||||
|
||||
XContentBuilder value(DateTimeFormatter formatter, long value) throws IOException {
|
||||
ensureFormatterNotNull(formatter);
|
||||
return value(formatter.print(value));
|
||||
* @throws IllegalArgumentException if there is no transformers for the type of object
|
||||
*/
|
||||
public XContentBuilder timeValue(Object timeValue) throws IOException {
|
||||
if (timeValue == null) {
|
||||
return nullValue();
|
||||
} else {
|
||||
Function<Object, Object> transformer = DATE_TRANSFORMERS.get(timeValue.getClass());
|
||||
if (transformer == null) {
|
||||
throw new IllegalArgumentException("cannot write time value xcontent for unknown value of type " + timeValue.getClass());
|
||||
}
|
||||
return value(transformer.apply(timeValue));
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -761,10 +745,6 @@ public final class XContentBuilder implements Closeable, Flushable {
|
|||
value((Iterable<?>) value, ensureNoSelfReferences);
|
||||
} else if (value instanceof Object[]) {
|
||||
values((Object[]) value, ensureNoSelfReferences);
|
||||
} else if (value instanceof Calendar) {
|
||||
value((Calendar) value);
|
||||
} else if (value instanceof ReadableInstant) {
|
||||
value((ReadableInstant) value);
|
||||
} else if (value instanceof ToXContent) {
|
||||
value((ToXContent) value);
|
||||
} else if (value instanceof Enum<?>) {
|
||||
|
@ -895,14 +875,6 @@ public final class XContentBuilder implements Closeable, Flushable {
|
|||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder byteSizeField(String rawFieldName, String readableFieldName, long rawSize) throws IOException {
|
||||
if (humanReadable) {
|
||||
field(readableFieldName, new ByteSizeValue(rawSize).toString());
|
||||
}
|
||||
field(rawFieldName, rawSize);
|
||||
return this;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////
|
||||
// Raw fields
|
||||
//////////////////////////////////
|
||||
|
@ -960,10 +932,6 @@ public final class XContentBuilder implements Closeable, Flushable {
|
|||
ensureNotNull(name, "Field name cannot be null");
|
||||
}
|
||||
|
||||
static void ensureFormatterNotNull(DateTimeFormatter formatter) {
|
||||
ensureNotNull(formatter, "DateTimeFormatter cannot be null");
|
||||
}
|
||||
|
||||
static void ensureNotNull(Object value, String message) {
|
||||
if (value == null) {
|
||||
throw new IllegalArgumentException(message);
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.common.xcontent;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
/**
|
||||
* This interface provides a way for non-JDK classes to plug in a way to serialize to xcontent.
|
||||
|
@ -61,4 +62,20 @@ public interface XContentBuilderExtension {
|
|||
* @return a map of class name to transformer used to retrieve raw value
|
||||
*/
|
||||
Map<Class<?>, XContentBuilder.HumanReadableTransformer> getXContentHumanReadableTransformers();
|
||||
|
||||
/**
|
||||
* Used for plugging a transformer for a date or time type object into a String (or other
|
||||
* encodable object).
|
||||
*
|
||||
* For example:
|
||||
*
|
||||
* <pre>
|
||||
* {@code
|
||||
* final DateTimeFormatter datePrinter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
|
||||
* Map<Class<?>, Function<Object, Object>> transformers = new HashMap<>();
|
||||
* transformers.put(Date.class, d -> datePrinter.print(((Date) d).getTime()));
|
||||
* }
|
||||
* </pre>
|
||||
*/
|
||||
Map<Class<?>, Function<Object, Object>> getDateTransformers();
|
||||
}
|
||||
|
|
|
@ -23,13 +23,23 @@ import org.apache.lucene.util.BytesRef;
|
|||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.Instant;
|
||||
import org.joda.time.MutableDateTime;
|
||||
import org.joda.time.ReadableInstant;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
import org.joda.time.format.ISODateTimeFormat;
|
||||
import org.joda.time.tz.CachedDateTimeZone;
|
||||
import org.joda.time.tz.FixedDateTimeZone;
|
||||
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
import java.util.GregorianCalendar;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
|
||||
/**
|
||||
* SPI extensions for Elasticsearch-specific classes (like the Lucene or Joda
|
||||
|
@ -38,6 +48,8 @@ import java.util.Objects;
|
|||
*/
|
||||
public class XContentElasticsearchExtension implements XContentBuilderExtension {
|
||||
|
||||
public static final DateTimeFormatter DEFAULT_DATE_PRINTER = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
|
||||
|
||||
@Override
|
||||
public Map<Class<?>, XContentBuilder.Writer> getXContentWriters() {
|
||||
Map<Class<?>, XContentBuilder.Writer> writers = new HashMap<>();
|
||||
|
@ -47,6 +59,8 @@ public class XContentElasticsearchExtension implements XContentBuilderExtension
|
|||
writers.put(DateTimeZone.class, (b, v) -> b.value(Objects.toString(v)));
|
||||
writers.put(CachedDateTimeZone.class, (b, v) -> b.value(Objects.toString(v)));
|
||||
writers.put(FixedDateTimeZone.class, (b, v) -> b.value(Objects.toString(v)));
|
||||
writers.put(MutableDateTime.class, XContentBuilder::timeValue);
|
||||
writers.put(DateTime.class, XContentBuilder::timeValue);
|
||||
|
||||
writers.put(BytesReference.class, (b, v) -> {
|
||||
if (v == null) {
|
||||
|
@ -75,4 +89,18 @@ public class XContentElasticsearchExtension implements XContentBuilderExtension
|
|||
transformers.put(ByteSizeValue.class, v -> ((ByteSizeValue) v).getBytes());
|
||||
return transformers;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<Class<?>, Function<Object, Object>> getDateTransformers() {
|
||||
Map<Class<?>, Function<Object, Object>> transformers = new HashMap<>();
|
||||
transformers.put(Date.class, d -> DEFAULT_DATE_PRINTER.print(((Date) d).getTime()));
|
||||
transformers.put(DateTime.class, d -> DEFAULT_DATE_PRINTER.print((DateTime) d));
|
||||
transformers.put(MutableDateTime.class, d -> DEFAULT_DATE_PRINTER.print((MutableDateTime) d));
|
||||
transformers.put(ReadableInstant.class, d -> DEFAULT_DATE_PRINTER.print((ReadableInstant) d));
|
||||
transformers.put(Long.class, d -> DEFAULT_DATE_PRINTER.print((long) d));
|
||||
transformers.put(Calendar.class, d -> DEFAULT_DATE_PRINTER.print(((Calendar) d).getTimeInMillis()));
|
||||
transformers.put(GregorianCalendar.class, d -> DEFAULT_DATE_PRINTER.print(((Calendar) d).getTimeInMillis()));
|
||||
transformers.put(Instant.class, d -> DEFAULT_DATE_PRINTER.print((Instant) d));
|
||||
return transformers;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -235,7 +235,7 @@ public interface XContentParser extends Closeable {
|
|||
* as well as via their <code>String</code> variants of the separated value methods.
|
||||
* Note: Do not use this method to read values written with:
|
||||
* <ul>
|
||||
* <li>{@link XContentBuilder#utf8Field(String, byte[], int, int)}</li>
|
||||
* <li>{@link XContentBuilder#utf8Value(byte[], int, int)}</li>
|
||||
* </ul>
|
||||
*
|
||||
* these methods write UTF-8 encoded strings and must be read through:
|
||||
|
|
|
@ -63,7 +63,7 @@ public class HttpInfo implements Writeable, ToXContentFragment {
|
|||
builder.startObject(Fields.HTTP);
|
||||
builder.array(Fields.BOUND_ADDRESS, (Object[]) address.boundAddresses());
|
||||
builder.field(Fields.PUBLISH_ADDRESS, address.publishAddress().toString());
|
||||
builder.byteSizeField(Fields.MAX_CONTENT_LENGTH_IN_BYTES, Fields.MAX_CONTENT_LENGTH, maxContentLength);
|
||||
builder.humanReadableField(Fields.MAX_CONTENT_LENGTH_IN_BYTES, Fields.MAX_CONTENT_LENGTH, maxContentLength());
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -128,7 +128,7 @@ public class QueryCacheStats implements Streamable, ToXContentFragment {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||
builder.startObject(Fields.QUERY_CACHE);
|
||||
builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, ramBytesUsed);
|
||||
builder.humanReadableField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, getMemorySize());
|
||||
builder.field(Fields.TOTAL_COUNT, getTotalCount());
|
||||
builder.field(Fields.HIT_COUNT, getHitCount());
|
||||
builder.field(Fields.MISS_COUNT, getMissCount());
|
||||
|
|
|
@ -92,7 +92,7 @@ public class RequestCacheStats implements Streamable, ToXContentFragment {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(Fields.REQUEST_CACHE_STATS);
|
||||
builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, memorySize);
|
||||
builder.humanReadableField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, getMemorySize());
|
||||
builder.field(Fields.EVICTIONS, getEvictions());
|
||||
builder.field(Fields.HIT_COUNT, getHitCount());
|
||||
builder.field(Fields.MISS_COUNT, getMissCount());
|
||||
|
|
|
@ -291,22 +291,22 @@ public class SegmentsStats implements Streamable, ToXContentFragment {
|
|||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(Fields.SEGMENTS);
|
||||
builder.field(Fields.COUNT, count);
|
||||
builder.byteSizeField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, memoryInBytes);
|
||||
builder.byteSizeField(Fields.TERMS_MEMORY_IN_BYTES, Fields.TERMS_MEMORY, termsMemoryInBytes);
|
||||
builder.byteSizeField(Fields.STORED_FIELDS_MEMORY_IN_BYTES, Fields.STORED_FIELDS_MEMORY, storedFieldsMemoryInBytes);
|
||||
builder.byteSizeField(Fields.TERM_VECTORS_MEMORY_IN_BYTES, Fields.TERM_VECTORS_MEMORY, termVectorsMemoryInBytes);
|
||||
builder.byteSizeField(Fields.NORMS_MEMORY_IN_BYTES, Fields.NORMS_MEMORY, normsMemoryInBytes);
|
||||
builder.byteSizeField(Fields.POINTS_MEMORY_IN_BYTES, Fields.POINTS_MEMORY, pointsMemoryInBytes);
|
||||
builder.byteSizeField(Fields.DOC_VALUES_MEMORY_IN_BYTES, Fields.DOC_VALUES_MEMORY, docValuesMemoryInBytes);
|
||||
builder.byteSizeField(Fields.INDEX_WRITER_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MEMORY, indexWriterMemoryInBytes);
|
||||
builder.byteSizeField(Fields.VERSION_MAP_MEMORY_IN_BYTES, Fields.VERSION_MAP_MEMORY, versionMapMemoryInBytes);
|
||||
builder.byteSizeField(Fields.FIXED_BIT_SET_MEMORY_IN_BYTES, Fields.FIXED_BIT_SET, bitsetMemoryInBytes);
|
||||
builder.humanReadableField(Fields.MEMORY_IN_BYTES, Fields.MEMORY, getMemory());
|
||||
builder.humanReadableField(Fields.TERMS_MEMORY_IN_BYTES, Fields.TERMS_MEMORY, getTermsMemory());
|
||||
builder.humanReadableField(Fields.STORED_FIELDS_MEMORY_IN_BYTES, Fields.STORED_FIELDS_MEMORY, getStoredFieldsMemory());
|
||||
builder.humanReadableField(Fields.TERM_VECTORS_MEMORY_IN_BYTES, Fields.TERM_VECTORS_MEMORY, getTermVectorsMemory());
|
||||
builder.humanReadableField(Fields.NORMS_MEMORY_IN_BYTES, Fields.NORMS_MEMORY, getNormsMemory());
|
||||
builder.humanReadableField(Fields.POINTS_MEMORY_IN_BYTES, Fields.POINTS_MEMORY, getPointsMemory());
|
||||
builder.humanReadableField(Fields.DOC_VALUES_MEMORY_IN_BYTES, Fields.DOC_VALUES_MEMORY, getDocValuesMemory());
|
||||
builder.humanReadableField(Fields.INDEX_WRITER_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MEMORY, getIndexWriterMemory());
|
||||
builder.humanReadableField(Fields.VERSION_MAP_MEMORY_IN_BYTES, Fields.VERSION_MAP_MEMORY, getVersionMapMemory());
|
||||
builder.humanReadableField(Fields.FIXED_BIT_SET_MEMORY_IN_BYTES, Fields.FIXED_BIT_SET, getBitsetMemory());
|
||||
builder.field(Fields.MAX_UNSAFE_AUTO_ID_TIMESTAMP, maxUnsafeAutoIdTimestamp);
|
||||
builder.startObject(Fields.FILE_SIZES);
|
||||
for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
|
||||
ObjectObjectCursor<String, Long> entry = it.next();
|
||||
builder.startObject(entry.key);
|
||||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, entry.value);
|
||||
builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(entry.value));
|
||||
builder.field(Fields.DESCRIPTION, fileDescriptions.getOrDefault(entry.key, "Others"));
|
||||
builder.endObject();
|
||||
}
|
||||
|
|
|
@ -99,7 +99,7 @@ public class FieldDataStats implements Streamable, ToXContentFragment {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(FIELDDATA);
|
||||
builder.byteSizeField(MEMORY_SIZE_IN_BYTES, MEMORY_SIZE, memorySize);
|
||||
builder.humanReadableField(MEMORY_SIZE_IN_BYTES, MEMORY_SIZE, getMemorySize());
|
||||
builder.field(EVICTIONS, getEvictions());
|
||||
if (fields != null) {
|
||||
fields.toXContent(builder, FIELDS, MEMORY_SIZE_IN_BYTES, MEMORY_SIZE);
|
||||
|
|
|
@ -187,14 +187,17 @@ public class MergeStats implements Streamable, ToXContentFragment {
|
|||
builder.startObject(Fields.MERGES);
|
||||
builder.field(Fields.CURRENT, current);
|
||||
builder.field(Fields.CURRENT_DOCS, currentNumDocs);
|
||||
builder.byteSizeField(Fields.CURRENT_SIZE_IN_BYTES, Fields.CURRENT_SIZE, currentSizeInBytes);
|
||||
builder.humanReadableField(Fields.CURRENT_SIZE_IN_BYTES, Fields.CURRENT_SIZE, getCurrentSize());
|
||||
builder.field(Fields.TOTAL, total);
|
||||
builder.humanReadableField(Fields.TOTAL_TIME_IN_MILLIS, Fields.TOTAL_TIME, getTotalTime());
|
||||
builder.field(Fields.TOTAL_DOCS, totalNumDocs);
|
||||
builder.byteSizeField(Fields.TOTAL_SIZE_IN_BYTES, Fields.TOTAL_SIZE, totalSizeInBytes);
|
||||
builder.humanReadableField(Fields.TOTAL_SIZE_IN_BYTES, Fields.TOTAL_SIZE, getTotalSize());
|
||||
builder.humanReadableField(Fields.TOTAL_STOPPED_TIME_IN_MILLIS, Fields.TOTAL_STOPPED_TIME, getTotalStoppedTime());
|
||||
builder.humanReadableField(Fields.TOTAL_THROTTLED_TIME_IN_MILLIS, Fields.TOTAL_THROTTLED_TIME, getTotalThrottledTime());
|
||||
builder.byteSizeField(Fields.TOTAL_THROTTLE_BYTES_PER_SEC_IN_BYTES, Fields.TOTAL_THROTTLE_BYTES_PER_SEC, totalBytesPerSecAutoThrottle);
|
||||
if (builder.humanReadable() && totalBytesPerSecAutoThrottle != -1) {
|
||||
builder.field(Fields.TOTAL_THROTTLE_BYTES_PER_SEC).value(new ByteSizeValue(totalBytesPerSecAutoThrottle).toString());
|
||||
}
|
||||
builder.field(Fields.TOTAL_THROTTLE_BYTES_PER_SEC_IN_BYTES, totalBytesPerSecAutoThrottle);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -85,7 +85,7 @@ public class StoreStats implements Streamable, ToXContentFragment {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(Fields.STORE);
|
||||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, sizeInBytes);
|
||||
builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, size());
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
|
@ -100,9 +101,9 @@ public class TranslogStats implements Streamable, ToXContentFragment {
|
|||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("translog");
|
||||
builder.field("operations", numberOfOperations);
|
||||
builder.byteSizeField("size_in_bytes", "size", translogSizeInBytes);
|
||||
builder.humanReadableField("size_in_bytes", "size", new ByteSizeValue(translogSizeInBytes));
|
||||
builder.field("uncommitted_operations", uncommittedOperations);
|
||||
builder.byteSizeField("uncommitted_size_in_bytes", "uncommitted_size", uncommittedSizeInBytes);
|
||||
builder.humanReadableField("uncommitted_size_in_bytes", "uncommitted_size", new ByteSizeValue(uncommittedSizeInBytes));
|
||||
builder.field("earliest_last_modified_age", earliestLastModifiedAge);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.Strings;
|
|||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
|
@ -262,9 +263,9 @@ public class RecoveryState implements ToXContentFragment, Streamable {
|
|||
builder.field(Fields.TYPE, recoverySource.getType());
|
||||
builder.field(Fields.STAGE, stage.toString());
|
||||
builder.field(Fields.PRIMARY, primary);
|
||||
builder.dateField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime);
|
||||
builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime);
|
||||
if (timer.stopTime > 0) {
|
||||
builder.dateField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime);
|
||||
builder.timeField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime);
|
||||
}
|
||||
builder.humanReadableField(Fields.TOTAL_TIME_IN_MILLIS, Fields.TOTAL_TIME, new TimeValue(timer.time()));
|
||||
|
||||
|
@ -634,9 +635,9 @@ public class RecoveryState implements ToXContentFragment, Streamable {
|
|||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(Fields.NAME, name);
|
||||
builder.byteSizeField(Fields.LENGTH_IN_BYTES, Fields.LENGTH, length);
|
||||
builder.humanReadableField(Fields.LENGTH_IN_BYTES, Fields.LENGTH, new ByteSizeValue(length));
|
||||
builder.field(Fields.REUSED, reused);
|
||||
builder.byteSizeField(Fields.RECOVERED_IN_BYTES, Fields.RECOVERED, recovered);
|
||||
builder.humanReadableField(Fields.RECOVERED_IN_BYTES, Fields.RECOVERED, new ByteSizeValue(recovered));
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
@ -905,9 +906,9 @@ public class RecoveryState implements ToXContentFragment, Streamable {
|
|||
public synchronized XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
// stream size first, as it matters more and the files section can be long
|
||||
builder.startObject(Fields.SIZE);
|
||||
builder.byteSizeField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, totalBytes());
|
||||
builder.byteSizeField(Fields.REUSED_IN_BYTES, Fields.REUSED, reusedBytes());
|
||||
builder.byteSizeField(Fields.RECOVERED_IN_BYTES, Fields.RECOVERED, recoveredBytes());
|
||||
builder.humanReadableField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, new ByteSizeValue(totalBytes()));
|
||||
builder.humanReadableField(Fields.REUSED_IN_BYTES, Fields.REUSED, new ByteSizeValue(reusedBytes()));
|
||||
builder.humanReadableField(Fields.RECOVERED_IN_BYTES, Fields.RECOVERED, new ByteSizeValue(recoveredBytes()));
|
||||
builder.field(Fields.PERCENT, String.format(Locale.ROOT, "%1.1f%%", recoveredBytesPercent()));
|
||||
builder.endObject();
|
||||
|
||||
|
|
|
@ -165,13 +165,13 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContentFragm
|
|||
}
|
||||
|
||||
if (total != -1) {
|
||||
builder.byteSizeField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, total);
|
||||
builder.humanReadableField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, getTotal());
|
||||
}
|
||||
if (free != -1) {
|
||||
builder.byteSizeField(Fields.FREE_IN_BYTES, Fields.FREE, free);
|
||||
builder.humanReadableField(Fields.FREE_IN_BYTES, Fields.FREE, getFree());
|
||||
}
|
||||
if (available != -1) {
|
||||
builder.byteSizeField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE, available);
|
||||
builder.humanReadableField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE, getAvailable());
|
||||
}
|
||||
|
||||
builder.endObject();
|
||||
|
@ -530,8 +530,9 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContentFragm
|
|||
builder.startObject(Fields.LEAST_ESTIMATE);
|
||||
{
|
||||
builder.field(Fields.PATH, leastDiskEstimate.getPath());
|
||||
builder.byteSizeField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, leastDiskEstimate.getTotalBytes());
|
||||
builder.byteSizeField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE, leastDiskEstimate.getFreeBytes());
|
||||
builder.humanReadableField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, new ByteSizeValue(leastDiskEstimate.getTotalBytes()));
|
||||
builder.humanReadableField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE,
|
||||
new ByteSizeValue(leastDiskEstimate.getFreeBytes()));
|
||||
builder.field(Fields.USAGE_PERCENTAGE, leastDiskEstimate.getUsedDiskAsPercentage());
|
||||
}
|
||||
builder.endObject();
|
||||
|
@ -541,8 +542,8 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContentFragm
|
|||
builder.startObject(Fields.MOST_ESTIMATE);
|
||||
{
|
||||
builder.field(Fields.PATH, mostDiskEstimate.getPath());
|
||||
builder.byteSizeField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, mostDiskEstimate.getTotalBytes());
|
||||
builder.byteSizeField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE, mostDiskEstimate.getFreeBytes());
|
||||
builder.humanReadableField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, new ByteSizeValue(mostDiskEstimate.getTotalBytes()));
|
||||
builder.humanReadableField(Fields.AVAILABLE_IN_BYTES, Fields.AVAILABLE, new ByteSizeValue(mostDiskEstimate.getFreeBytes()));
|
||||
builder.field(Fields.USAGE_PERCENTAGE, mostDiskEstimate.getUsedDiskAsPercentage());
|
||||
}
|
||||
builder.endObject();
|
||||
|
|
|
@ -436,14 +436,14 @@ public class JvmInfo implements Writeable, ToXContentFragment {
|
|||
builder.field(Fields.VM_NAME, vmName);
|
||||
builder.field(Fields.VM_VERSION, vmVersion);
|
||||
builder.field(Fields.VM_VENDOR, vmVendor);
|
||||
builder.dateField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime);
|
||||
builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime);
|
||||
|
||||
builder.startObject(Fields.MEM);
|
||||
builder.byteSizeField(Fields.HEAP_INIT_IN_BYTES, Fields.HEAP_INIT, mem.heapInit);
|
||||
builder.byteSizeField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, mem.heapMax);
|
||||
builder.byteSizeField(Fields.NON_HEAP_INIT_IN_BYTES, Fields.NON_HEAP_INIT, mem.nonHeapInit);
|
||||
builder.byteSizeField(Fields.NON_HEAP_MAX_IN_BYTES, Fields.NON_HEAP_MAX, mem.nonHeapMax);
|
||||
builder.byteSizeField(Fields.DIRECT_MAX_IN_BYTES, Fields.DIRECT_MAX, mem.directMemoryMax);
|
||||
builder.humanReadableField(Fields.HEAP_INIT_IN_BYTES, Fields.HEAP_INIT, new ByteSizeValue(mem.heapInit));
|
||||
builder.humanReadableField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, new ByteSizeValue(mem.heapMax));
|
||||
builder.humanReadableField(Fields.NON_HEAP_INIT_IN_BYTES, Fields.NON_HEAP_INIT, new ByteSizeValue(mem.nonHeapInit));
|
||||
builder.humanReadableField(Fields.NON_HEAP_MAX_IN_BYTES, Fields.NON_HEAP_MAX, new ByteSizeValue(mem.nonHeapMax));
|
||||
builder.humanReadableField(Fields.DIRECT_MAX_IN_BYTES, Fields.DIRECT_MAX, new ByteSizeValue(mem.directMemoryMax));
|
||||
builder.endObject();
|
||||
|
||||
builder.array(Fields.GC_COLLECTORS, gcCollectors);
|
||||
|
|
|
@ -194,23 +194,23 @@ public class JvmStats implements Writeable, ToXContentFragment {
|
|||
|
||||
builder.startObject(Fields.MEM);
|
||||
|
||||
builder.byteSizeField(Fields.HEAP_USED_IN_BYTES, Fields.HEAP_USED, mem.heapUsed);
|
||||
builder.humanReadableField(Fields.HEAP_USED_IN_BYTES, Fields.HEAP_USED, new ByteSizeValue(mem.heapUsed));
|
||||
if (mem.getHeapUsedPercent() >= 0) {
|
||||
builder.field(Fields.HEAP_USED_PERCENT, mem.getHeapUsedPercent());
|
||||
}
|
||||
builder.byteSizeField(Fields.HEAP_COMMITTED_IN_BYTES, Fields.HEAP_COMMITTED, mem.heapCommitted);
|
||||
builder.byteSizeField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, mem.heapMax);
|
||||
builder.byteSizeField(Fields.NON_HEAP_USED_IN_BYTES, Fields.NON_HEAP_USED, mem.nonHeapUsed);
|
||||
builder.byteSizeField(Fields.NON_HEAP_COMMITTED_IN_BYTES, Fields.NON_HEAP_COMMITTED, mem.nonHeapCommitted);
|
||||
builder.humanReadableField(Fields.HEAP_COMMITTED_IN_BYTES, Fields.HEAP_COMMITTED, new ByteSizeValue(mem.heapCommitted));
|
||||
builder.humanReadableField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, new ByteSizeValue(mem.heapMax));
|
||||
builder.humanReadableField(Fields.NON_HEAP_USED_IN_BYTES, Fields.NON_HEAP_USED, new ByteSizeValue(mem.nonHeapUsed));
|
||||
builder.humanReadableField(Fields.NON_HEAP_COMMITTED_IN_BYTES, Fields.NON_HEAP_COMMITTED, new ByteSizeValue(mem.nonHeapCommitted));
|
||||
|
||||
builder.startObject(Fields.POOLS);
|
||||
for (MemoryPool pool : mem) {
|
||||
builder.startObject(pool.getName());
|
||||
builder.byteSizeField(Fields.USED_IN_BYTES, Fields.USED, pool.used);
|
||||
builder.byteSizeField(Fields.MAX_IN_BYTES, Fields.MAX, pool.max);
|
||||
builder.humanReadableField(Fields.USED_IN_BYTES, Fields.USED, new ByteSizeValue(pool.used));
|
||||
builder.humanReadableField(Fields.MAX_IN_BYTES, Fields.MAX, new ByteSizeValue(pool.max));
|
||||
|
||||
builder.byteSizeField(Fields.PEAK_USED_IN_BYTES, Fields.PEAK_USED, pool.peakUsed);
|
||||
builder.byteSizeField(Fields.PEAK_MAX_IN_BYTES, Fields.PEAK_MAX, pool.peakMax);
|
||||
builder.humanReadableField(Fields.PEAK_USED_IN_BYTES, Fields.PEAK_USED, new ByteSizeValue(pool.peakUsed));
|
||||
builder.humanReadableField(Fields.PEAK_MAX_IN_BYTES, Fields.PEAK_MAX, new ByteSizeValue(pool.peakMax));
|
||||
|
||||
builder.endObject();
|
||||
}
|
||||
|
@ -241,8 +241,9 @@ public class JvmStats implements Writeable, ToXContentFragment {
|
|||
for (BufferPool bufferPool : bufferPools) {
|
||||
builder.startObject(bufferPool.getName());
|
||||
builder.field(Fields.COUNT, bufferPool.getCount());
|
||||
builder.byteSizeField(Fields.USED_IN_BYTES, Fields.USED, bufferPool.used);
|
||||
builder.byteSizeField(Fields.TOTAL_CAPACITY_IN_BYTES, Fields.TOTAL_CAPACITY, bufferPool.totalCapacity);
|
||||
builder.humanReadableField(Fields.USED_IN_BYTES, Fields.USED, new ByteSizeValue(bufferPool.used));
|
||||
builder.humanReadableField(Fields.TOTAL_CAPACITY_IN_BYTES, Fields.TOTAL_CAPACITY,
|
||||
new ByteSizeValue(bufferPool.totalCapacity));
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
|
|
|
@ -113,7 +113,7 @@ public class ProcessStats implements Writeable, ToXContentFragment {
|
|||
}
|
||||
if (mem != null) {
|
||||
builder.startObject(Fields.MEM);
|
||||
builder.byteSizeField(Fields.TOTAL_VIRTUAL_IN_BYTES, Fields.TOTAL_VIRTUAL, mem.totalVirtual);
|
||||
builder.humanReadableField(Fields.TOTAL_VIRTUAL_IN_BYTES, Fields.TOTAL_VIRTUAL, new ByteSizeValue(mem.totalVirtual));
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
|
|
|
@ -75,7 +75,7 @@ public class CompletionStats implements Streamable, ToXContentFragment {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(COMPLETION);
|
||||
builder.byteSizeField(SIZE_IN_BYTES, SIZE, sizeInBytes);
|
||||
builder.humanReadableField(SIZE_IN_BYTES, SIZE, getSize());
|
||||
if (fields != null) {
|
||||
fields.toXContent(builder, FIELDS, SIZE_IN_BYTES, SIZE);
|
||||
}
|
||||
|
|
|
@ -196,7 +196,7 @@ public final class TaskInfo implements Writeable, ToXContentFragment {
|
|||
if (description != null) {
|
||||
builder.field("description", description);
|
||||
}
|
||||
builder.dateField("start_time_in_millis", "start_time", startTime);
|
||||
builder.timeField("start_time_in_millis", "start_time", startTime);
|
||||
if (builder.humanReadable()) {
|
||||
builder.field("running_time", new TimeValue(runningTimeNanos, TimeUnit.NANOSECONDS).toString());
|
||||
}
|
||||
|
|
|
@ -107,9 +107,9 @@ public class TransportStats implements Writeable, ToXContentFragment {
|
|||
builder.startObject(Fields.TRANSPORT);
|
||||
builder.field(Fields.SERVER_OPEN, serverOpen);
|
||||
builder.field(Fields.RX_COUNT, rxCount);
|
||||
builder.byteSizeField(Fields.RX_SIZE_IN_BYTES, Fields.RX_SIZE, rxSize);
|
||||
builder.humanReadableField(Fields.RX_SIZE_IN_BYTES, Fields.RX_SIZE, new ByteSizeValue(rxSize));
|
||||
builder.field(Fields.TX_COUNT, txCount);
|
||||
builder.byteSizeField(Fields.TX_SIZE_IN_BYTES, Fields.TX_SIZE, txSize);
|
||||
builder.humanReadableField(Fields.TX_SIZE_IN_BYTES, Fields.TX_SIZE, new ByteSizeValue(txSize));
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentElasticsearchExtension;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.Index;
|
||||
|
@ -72,7 +73,8 @@ public class IndexGraveyardTests extends ESTestCase {
|
|||
if (graveyard.getTombstones().size() > 0) {
|
||||
// check that date properly printed
|
||||
assertThat(Strings.toString(graveyard, false, true),
|
||||
containsString(XContentBuilder.DEFAULT_DATE_PRINTER.print(graveyard.getTombstones().get(0).getDeleteDateInMillis())));
|
||||
containsString(XContentElasticsearchExtension.DEFAULT_DATE_PRINTER
|
||||
.print(graveyard.getTombstones().get(0).getDeleteDateInMillis())));
|
||||
}
|
||||
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
|
||||
parser.nextToken(); // the beginning of the parser
|
||||
|
|
|
@ -330,11 +330,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
|
|||
|
||||
final BytesRef randomBytesRef = new BytesRef(randomBytes());
|
||||
XContentBuilder builder = builder().startObject();
|
||||
if (randomBoolean()) {
|
||||
builder.utf8Field("utf8", randomBytesRef.bytes, randomBytesRef.offset, randomBytesRef.length);
|
||||
} else {
|
||||
builder.field("utf8").utf8Value(randomBytesRef.bytes, randomBytesRef.offset, randomBytesRef.length);
|
||||
}
|
||||
builder.field("utf8").utf8Value(randomBytesRef.bytes, randomBytesRef.offset, randomBytesRef.length);
|
||||
builder.endObject();
|
||||
|
||||
XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder));
|
||||
|
@ -366,81 +362,73 @@ public abstract class BaseXContentTestCase extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testReadableInstant() throws Exception {
|
||||
assertResult("{'instant':null}", () -> builder().startObject().field("instant", (ReadableInstant) null).endObject());
|
||||
assertResult("{'instant':null}", () -> builder().startObject().field("instant").value((ReadableInstant) null).endObject());
|
||||
assertResult("{'instant':null}", () -> builder().startObject().timeField("instant", (ReadableInstant) null).endObject());
|
||||
assertResult("{'instant':null}", () -> builder().startObject().field("instant").timeValue((ReadableInstant) null).endObject());
|
||||
|
||||
final DateTime t1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC);
|
||||
|
||||
String expected = "{'t1':'2016-01-01T00:00:00.000Z'}";
|
||||
assertResult(expected, () -> builder().startObject().field("t1", t1).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t1").value(t1).endObject());
|
||||
assertResult(expected, () -> builder().startObject().timeField("t1", t1).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t1").timeValue(t1).endObject());
|
||||
|
||||
final DateTime t2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC);
|
||||
|
||||
expected = "{'t2':'2016-12-25T07:59:42.213Z'}";
|
||||
assertResult(expected, () -> builder().startObject().field("t2", t2).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t2").value(t2).endObject());
|
||||
assertResult(expected, () -> builder().startObject().timeField("t2", t2).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t2").timeValue(t2).endObject());
|
||||
|
||||
final DateTimeFormatter formatter = randomFrom(ISODateTimeFormat.basicDate(), ISODateTimeFormat.dateTimeNoMillis());
|
||||
final DateTime t3 = DateTime.now();
|
||||
|
||||
expected = "{'t3':'" + formatter.print(t3) + "'}";
|
||||
assertResult(expected, () -> builder().startObject().field("t3", t3, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t3").value(t3, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().timeField("t3", formatter.print(t3)).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t3").value(formatter.print(t3)).endObject());
|
||||
|
||||
final DateTime t4 = new DateTime(randomDateTimeZone());
|
||||
|
||||
expected = "{'t4':'" + formatter.print(t4) + "'}";
|
||||
assertResult(expected, () -> builder().startObject().field("t4", t4, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t4").value(t4, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().timeField("t4", formatter.print(t4)).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t4").value(formatter.print(t4)).endObject());
|
||||
|
||||
long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00
|
||||
final DateTime t5 = new DateTime(date, randomDateTimeZone());
|
||||
|
||||
expected = "{'t5':'" + XContentBuilder.DEFAULT_DATE_PRINTER.print(t5) + "'}";
|
||||
assertResult(expected, () -> builder().startObject().field("t5", t5).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t5").value(t5).endObject());
|
||||
expected = "{'t5':'" + XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(t5) + "'}";
|
||||
assertResult(expected, () -> builder().startObject().timeField("t5", t5).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t5").timeValue(t5).endObject());
|
||||
|
||||
expected = "{'t5':'" + formatter.print(t5) + "'}";
|
||||
assertResult(expected, () -> builder().startObject().field("t5", t5, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t5").value(t5, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().timeField("t5", formatter.print(t5)).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("t5").value(formatter.print(t5)).endObject());
|
||||
|
||||
Instant i1 = new Instant(1451606400000L); // 2016-01-01T00:00:00.000Z
|
||||
expected = "{'i1':'2016-01-01T00:00:00.000Z'}";
|
||||
assertResult(expected, () -> builder().startObject().field("i1", i1).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("i1").value(i1).endObject());
|
||||
assertResult(expected, () -> builder().startObject().timeField("i1", i1).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("i1").timeValue(i1).endObject());
|
||||
|
||||
Instant i2 = new Instant(1482652782213L); // 2016-12-25T07:59:42.213Z
|
||||
expected = "{'i2':'" + formatter.print(i2) + "'}";
|
||||
assertResult(expected, () -> builder().startObject().field("i2", i2, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("i2").value(i2, formatter).endObject());
|
||||
|
||||
expectNonNullFormatterException(() -> builder().startObject().field("t3", t3, null).endObject());
|
||||
expectNonNullFormatterException(() -> builder().startObject().field("t3").value(t3, null).endObject());
|
||||
assertResult(expected, () -> builder().startObject().timeField("i2", formatter.print(i2)).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("i2").value(formatter.print(i2)).endObject());
|
||||
}
|
||||
|
||||
public void testDate() throws Exception {
|
||||
assertResult("{'date':null}", () -> builder().startObject().field("date", (Date) null).endObject());
|
||||
assertResult("{'date':null}", () -> builder().startObject().field("date").value((Date) null).endObject());
|
||||
assertResult("{'date':null}", () -> builder().startObject().timeField("date", (Date) null).endObject());
|
||||
assertResult("{'date':null}", () -> builder().startObject().field("date").timeValue((Date) null).endObject());
|
||||
|
||||
final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
|
||||
assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1", d1).endObject());
|
||||
assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").value(d1).endObject());
|
||||
assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().timeField("d1", d1).endObject());
|
||||
assertResult("{'d1':'2016-01-01T00:00:00.000Z'}", () -> builder().startObject().field("d1").timeValue(d1).endObject());
|
||||
|
||||
final Date d2 = new DateTime(2016, 12, 25, 7, 59, 42, 213, DateTimeZone.UTC).toDate();
|
||||
assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().field("d2", d2).endObject());
|
||||
assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().field("d2").value(d2).endObject());
|
||||
assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().timeField("d2", d2).endObject());
|
||||
assertResult("{'d2':'2016-12-25T07:59:42.213Z'}", () -> builder().startObject().field("d2").timeValue(d2).endObject());
|
||||
|
||||
final DateTimeFormatter formatter = randomFrom(ISODateTimeFormat.basicDate(), ISODateTimeFormat.dateTimeNoMillis());
|
||||
final Date d3 = DateTime.now().toDate();
|
||||
|
||||
String expected = "{'d3':'" + formatter.print(d3.getTime()) + "'}";
|
||||
assertResult(expected, () -> builder().startObject().field("d3", d3, formatter).endObject());
|
||||
assertResult(expected, () -> builder().startObject().field("d3").value(d3, formatter).endObject());
|
||||
|
||||
expectNonNullFormatterException(() -> builder().startObject().field("d3", d3, null).endObject());
|
||||
expectNonNullFormatterException(() -> builder().startObject().field("d3").value(d3, null).endObject());
|
||||
expectNonNullFormatterException(() -> builder().value(null, 1L));
|
||||
assertResult(expected, () -> builder().startObject().field("d3").value(formatter.print(d3.getTime())).endObject());
|
||||
}
|
||||
|
||||
public void testDateField() throws Exception {
|
||||
|
@ -448,12 +436,12 @@ public abstract class BaseXContentTestCase extends ESTestCase {
|
|||
|
||||
assertResult("{'date_in_millis':1451606400000}", () -> builder()
|
||||
.startObject()
|
||||
.dateField("date_in_millis", "date", d.getTime())
|
||||
.timeField("date_in_millis", "date", d.getTime())
|
||||
.endObject());
|
||||
assertResult("{'date':'2016-01-01T00:00:00.000Z','date_in_millis':1451606400000}", () -> builder()
|
||||
.humanReadable(true)
|
||||
.startObject
|
||||
().dateField("date_in_millis", "date", d.getTime())
|
||||
().timeField("date_in_millis", "date", d.getTime())
|
||||
.endObject());
|
||||
}
|
||||
|
||||
|
@ -462,7 +450,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
|
|||
assertResult("{'calendar':'2016-01-01T00:00:00.000Z'}", () -> builder()
|
||||
.startObject()
|
||||
.field("calendar")
|
||||
.value(calendar)
|
||||
.timeValue(calendar)
|
||||
.endObject());
|
||||
}
|
||||
|
||||
|
@ -514,7 +502,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
|
|||
final String paths = Constants.WINDOWS ? "{'objects':['a\\\\b\\\\c','d\\\\e']}" : "{'objects':['a/b/c','d/e']}";
|
||||
objects.put(paths, new Object[]{PathUtils.get("a", "b", "c"), PathUtils.get("d", "e")});
|
||||
|
||||
final DateTimeFormatter formatter = XContentBuilder.DEFAULT_DATE_PRINTER;
|
||||
final DateTimeFormatter formatter = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER;
|
||||
final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
|
||||
final Date d2 = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
|
||||
objects.put("{'objects':['" + formatter.print(d1.getTime()) + "','" + formatter.print(d2.getTime()) + "']}", new Object[]{d1, d2});
|
||||
|
@ -562,7 +550,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
|
|||
final String path = Constants.WINDOWS ? "{'object':'a\\\\b\\\\c'}" : "{'object':'a/b/c'}";
|
||||
object.put(path, PathUtils.get("a", "b", "c"));
|
||||
|
||||
final DateTimeFormatter formatter = XContentBuilder.DEFAULT_DATE_PRINTER;
|
||||
final DateTimeFormatter formatter = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER;
|
||||
final Date d1 = new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC).toDate();
|
||||
object.put("{'object':'" + formatter.print(d1.getTime()) + "'}", d1);
|
||||
|
||||
|
@ -846,11 +834,6 @@ public abstract class BaseXContentTestCase extends ESTestCase {
|
|||
assertThat(e.getMessage(), containsString("Field name cannot be null"));
|
||||
}
|
||||
|
||||
public void testFormatterNameNotNull() {
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> XContentBuilder.ensureFormatterNotNull(null));
|
||||
assertThat(e.getMessage(), containsString("DateTimeFormatter cannot be null"));
|
||||
}
|
||||
|
||||
public void testEnsureNotNull() {
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> XContentBuilder.ensureNotNull(null, "message"));
|
||||
assertThat(e.getMessage(), containsString("message"));
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.geo.GeoPoint;
|
|||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentElasticsearchExtension;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentGenerator;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -176,11 +177,11 @@ public class XContentBuilderTests extends ESTestCase {
|
|||
|
||||
public void testDateTypesConversion() throws Exception {
|
||||
Date date = new Date();
|
||||
String expectedDate = XContentBuilder.DEFAULT_DATE_PRINTER.print(date.getTime());
|
||||
String expectedDate = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(date.getTime());
|
||||
Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT);
|
||||
String expectedCalendar = XContentBuilder.DEFAULT_DATE_PRINTER.print(calendar.getTimeInMillis());
|
||||
String expectedCalendar = XContentElasticsearchExtension.DEFAULT_DATE_PRINTER.print(calendar.getTimeInMillis());
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
|
||||
builder.startObject().field("date", date).endObject();
|
||||
builder.startObject().timeField("date", date).endObject();
|
||||
assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}"));
|
||||
|
||||
builder = XContentFactory.contentBuilder(XContentType.JSON);
|
||||
|
|
|
@ -97,9 +97,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
private IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
|
||||
return client().prepareIndex(idx, "type").setSource(jsonBuilder()
|
||||
.startObject()
|
||||
.field("date", date)
|
||||
.timeField("date", date)
|
||||
.field("value", value)
|
||||
.startArray("dates").value(date).value(date.plusMonths(1).plusDays(1)).endArray()
|
||||
.startArray("dates").timeValue(date).timeValue(date.plusMonths(1).plusDays(1)).endArray()
|
||||
.endObject());
|
||||
}
|
||||
|
||||
|
@ -108,8 +108,8 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
.startObject()
|
||||
.field("value", value)
|
||||
.field("constant", 1)
|
||||
.field("date", date(month, day))
|
||||
.startArray("dates").value(date(month, day)).value(date(month + 1, day + 1)).endArray()
|
||||
.timeField("date", date(month, day))
|
||||
.startArray("dates").timeValue(date(month, day)).timeValue(date(month + 1, day + 1)).endArray()
|
||||
.endObject());
|
||||
}
|
||||
|
||||
|
@ -161,26 +161,26 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
.addMapping("type", "date", "type=date").get());
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 1)).field("l", 1).field("d", i).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 1)).field("l", 1).field("d", i).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 2)).field("l", 2).field("d", i).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 2)).field("l", 2).field("d", i).endObject()));
|
||||
}
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 3)).field("l", 3).field("d", 1).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 3)).field("l", 3).field("d", 1).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 4)).field("l", 3).field("d", 1).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 4)).field("l", 3).field("d", 1).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 5)).field("l", 5).field("d", 1).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 5)).field("l", 5).field("d", 1).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 6)).field("l", 5).field("d", 1).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 6)).field("l", 5).field("d", 1).endObject()));
|
||||
builders.add(client().prepareIndex("sort_idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date(1, 7)).field("l", 5).field("d", 1).endObject()));
|
||||
jsonBuilder().startObject().timeField("date", date(1, 7)).field("l", 5).field("d", 1).endObject()));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -968,7 +968,7 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
|
||||
DateTime date = date("2014-03-11T00:00:00+00:00");
|
||||
for (int i = 0; i < reqs.length; i++) {
|
||||
reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
|
||||
reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().timeField("date", date).endObject());
|
||||
date = date.plusHours(1);
|
||||
}
|
||||
indexRandom(true, reqs);
|
||||
|
|
|
@ -67,7 +67,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
|
|||
private void prepareIndex(DateTime date, int numHours, int stepSizeHours, int idxIdStart) throws IOException, InterruptedException, ExecutionException {
|
||||
IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
|
||||
for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
|
||||
reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
|
||||
reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().timeField("date", date).endObject());
|
||||
date = date.plusHours(stepSizeHours);
|
||||
}
|
||||
indexRandom(true, reqs);
|
||||
|
|
|
@ -65,8 +65,8 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
return client().prepareIndex("idx", "type").setSource(jsonBuilder()
|
||||
.startObject()
|
||||
.field("value", value)
|
||||
.field("date", date(month, day))
|
||||
.startArray("dates").value(date(month, day)).value(date(month + 1, day + 1)).endArray()
|
||||
.timeField("date", date(month, day))
|
||||
.startArray("dates").timeValue(date(month, day)).timeValue(date(month + 1, day + 1)).endArray()
|
||||
.endObject());
|
||||
}
|
||||
|
||||
|
@ -889,9 +889,9 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
.get());
|
||||
indexRandom(true,
|
||||
client().prepareIndex("cache_test_idx", "type", "1")
|
||||
.setSource(jsonBuilder().startObject().field("date", date(1, 1)).endObject()),
|
||||
.setSource(jsonBuilder().startObject().timeField("date", date(1, 1)).endObject()),
|
||||
client().prepareIndex("cache_test_idx", "type", "2")
|
||||
.setSource(jsonBuilder().startObject().field("date", date(2, 1)).endObject()));
|
||||
.setSource(jsonBuilder().startObject().timeField("date", date(2, 1)).endObject()));
|
||||
|
||||
// Make sure we are starting with a clear cache
|
||||
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
|
||||
|
|
|
@ -75,13 +75,13 @@ public class DateDerivativeIT extends ESIntegTestCase {
|
|||
|
||||
private static IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
|
||||
return client().prepareIndex(idx, "type").setSource(
|
||||
jsonBuilder().startObject().field("date", date).field("value", value).endObject());
|
||||
jsonBuilder().startObject().timeField("date", date).field("value", value).endObject());
|
||||
}
|
||||
|
||||
private IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception {
|
||||
return client().prepareIndex("idx", "type").setSource(
|
||||
jsonBuilder().startObject().field("value", value).field("date", date(month, day)).startArray("dates")
|
||||
.value(date(month, day)).value(date(month + 1, day + 1)).endArray().endObject());
|
||||
jsonBuilder().startObject().field("value", value).timeField("date", date(month, day)).startArray("dates")
|
||||
.timeValue(date(month, day)).timeValue(date(month + 1, day + 1)).endArray().endObject());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
Loading…
Reference in New Issue