Clean up serialization on some stats

Removes readFrom, which is no longer required and is actively discouraged, and replaces some instances of `Streamable` with `Writeable`. Relates to #17085.

parent 860559e543
commit edf1c82007
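
For context, every change in this diff follows one recurring pattern: a class stops implementing Streamable, whose contract is a no-arg constructor plus a readFrom(StreamInput) method (usually paired with a static read helper), and instead implements Writeable, doing its deserialization in a constructor that takes a StreamInput so fields can be final. The sketch below illustrates that pattern; OldStyleStats and NewStyleStats are hypothetical classes invented for this example, not part of the commit, and the plain (non-generic) Writeable shown is the end state the refactoring works toward rather than the exact interface at the time.

import java.io.IOException;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;

// Before: Streamable forces mutable fields, a no-arg constructor, and a readFrom
// method that fills the instance in after construction.
class OldStyleStats implements Streamable {
    long count;

    OldStyleStats() {
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        count = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(count);
    }
}

// After: deserialization happens in a StreamInput constructor, the field can be
// final, and the static readXyz/readFrom helpers disappear.
class NewStyleStats implements Writeable {
    final long count;

    NewStyleStats(StreamInput in) throws IOException {
        count = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(count);
    }
}

That is why, throughout the hunks below, calls such as FsInfo.readFsInfo(in) and FsInfo.Path.readInfoFrom(in) become new FsInfo(in) and new FsInfo.Path(in), and the readFrom overrides are deleted outright.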
@@ -224,7 +224,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
             threadPool = ThreadPoolStats.readThreadPoolStats(in);
         }
         if (in.readBoolean()) {
-            fs = FsInfo.readFsInfo(in);
+            fs = new FsInfo(in);
         }
         if (in.readBoolean()) {
             transport = TransportStats.readTransportStats(in);
@@ -21,13 +21,13 @@ package org.elasticsearch.action.admin.cluster.stats;

 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectIntCursor;

 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.transport.InetSocketTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
@@ -70,7 +70,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
         this.os = new OsStats(in);
         this.process = new ProcessStats(in);
         this.jvm = new JvmStats(in);
-        this.fs = FsInfo.Path.readInfoFrom(in);
+        this.fs = new FsInfo.Path(in);

         size = in.readVInt();
         this.plugins = new HashSet<>(size);
@@ -140,12 +140,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
         return plugins;
     }

-    @Override
-    public ClusterStatsNodes readFrom(StreamInput in) throws IOException {
-        return new ClusterStatsNodes(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         counts.writeTo(out);
@@ -249,11 +243,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            return roles;
        }

-        @Override
-        public Counts readFrom(StreamInput in) throws IOException {
-            return new Counts(in);
-        }
-
        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(total);
@@ -279,17 +268,9 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
        final int allocatedProcessors;
        final ObjectIntHashMap<String> names;

-        @SuppressWarnings("unchecked")
-        private OsStats(StreamInput in) throws IOException {
-            this.availableProcessors = in.readVInt();
-            this.allocatedProcessors = in.readVInt();
-            int size = in.readVInt();
-            this.names = new ObjectIntHashMap<>();
-            for (int i = 0; i < size; i++) {
-                names.addTo(in.readString(), in.readVInt());
-            }
-        }
-
+        /**
+         * Build the stats from information about each node.
+         */
        private OsStats(List<NodeInfo> nodeInfos) {
            this.names = new ObjectIntHashMap<>();
            int availableProcessors = 0;
@@ -306,17 +287,17 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            this.allocatedProcessors = allocatedProcessors;
        }

-        public int getAvailableProcessors() {
-            return availableProcessors;
-        }
-
-        public int getAllocatedProcessors() {
-            return allocatedProcessors;
-        }
-
-        @Override
-        public OsStats readFrom(StreamInput in) throws IOException {
-            return new OsStats(in);
-        }
+        /**
+         * Read from a stream.
+         */
+        private OsStats(StreamInput in) throws IOException {
+            this.availableProcessors = in.readVInt();
+            this.allocatedProcessors = in.readVInt();
+            int size = in.readVInt();
+            this.names = new ObjectIntHashMap<>();
+            for (int i = 0; i < size; i++) {
+                names.addTo(in.readString(), in.readVInt());
+            }
+        }

        @Override
@@ -330,6 +311,14 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            }
        }

+        public int getAvailableProcessors() {
+            return availableProcessors;
+        }
+
+        public int getAllocatedProcessors() {
+            return allocatedProcessors;
+        }
+
        static final class Fields {
            static final String AVAILABLE_PROCESSORS = "available_processors";
            static final String ALLOCATED_PROCESSORS = "allocated_processors";
@@ -362,14 +351,9 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
        final long minOpenFileDescriptors;
        final long maxOpenFileDescriptors;

-        private ProcessStats(StreamInput in) throws IOException {
-            this.count = in.readVInt();
-            this.cpuPercent = in.readVInt();
-            this.totalOpenFileDescriptors = in.readVLong();
-            this.minOpenFileDescriptors = in.readLong();
-            this.maxOpenFileDescriptors = in.readLong();
-        }
-
+        /**
+         * Build from looking at a list of node statistics.
+         */
        private ProcessStats(List<NodeStats> nodeStatsList) {
            int count = 0;
            int cpuPercent = 0;
@@ -400,6 +384,27 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            this.maxOpenFileDescriptors = maxOpenFileDescriptors;
        }

+        /**
+         * Read from a stream.
+         */
+        private ProcessStats(StreamInput in) throws IOException {
+            this.count = in.readVInt();
+            this.cpuPercent = in.readVInt();
+            this.totalOpenFileDescriptors = in.readVLong();
+            this.minOpenFileDescriptors = in.readLong();
+            this.maxOpenFileDescriptors = in.readLong();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeVInt(count);
+            out.writeVInt(cpuPercent);
+            out.writeVLong(totalOpenFileDescriptors);
+            out.writeLong(minOpenFileDescriptors);
+            out.writeLong(maxOpenFileDescriptors);
+        }
+
        /**
         * Cpu usage in percentages - 100 is 1 core.
         */
@@ -428,20 +433,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            return minOpenFileDescriptors;
        }

-        @Override
-        public ProcessStats readFrom(StreamInput in) throws IOException {
-            return new ProcessStats(in);
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            out.writeVInt(count);
-            out.writeVInt(cpuPercent);
-            out.writeVLong(totalOpenFileDescriptors);
-            out.writeLong(minOpenFileDescriptors);
-            out.writeLong(maxOpenFileDescriptors);
-        }
-
        static final class Fields {
            static final String CPU = "cpu";
            static final String PERCENT = "percent";
@@ -473,18 +464,9 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
        private final long heapUsed;
        private final long heapMax;

-        private JvmStats(StreamInput in) throws IOException {
-            int size = in.readVInt();
-            this.versions = new ObjectIntHashMap<>(size);
-            for (int i = 0; i < size; i++) {
-                this.versions.addTo(JvmVersion.readJvmVersion(in), in.readVInt());
-            }
-            this.threads = in.readVLong();
-            this.maxUptime = in.readVLong();
-            this.heapUsed = in.readVLong();
-            this.heapMax = in.readVLong();
-        }
-
+        /**
+         * Build from lists of information about each node.
+         */
        private JvmStats(List<NodeInfo> nodeInfos, List<NodeStats> nodeStatsList) {
            this.versions = new ObjectIntHashMap<>();
            long threads = 0;
@@ -515,6 +497,34 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            this.heapMax = heapMax;
        }

+        /**
+         * Read from a stream.
+         */
+        private JvmStats(StreamInput in) throws IOException {
+            int size = in.readVInt();
+            this.versions = new ObjectIntHashMap<>(size);
+            for (int i = 0; i < size; i++) {
+                this.versions.addTo(new JvmVersion(in), in.readVInt());
+            }
+            this.threads = in.readVLong();
+            this.maxUptime = in.readVLong();
+            this.heapUsed = in.readVLong();
+            this.heapMax = in.readVLong();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeVInt(versions.size());
+            for (ObjectIntCursor<JvmVersion> v : versions) {
+                v.key.writeTo(out);
+                out.writeVInt(v.value);
+            }
+            out.writeVLong(threads);
+            out.writeVLong(maxUptime);
+            out.writeVLong(heapUsed);
+            out.writeVLong(heapMax);
+        }
+
        public ObjectIntHashMap<JvmVersion> getVersions() {
            return versions;
        }
@@ -547,24 +557,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            return new ByteSizeValue(heapMax);
        }

-        @Override
-        public JvmStats readFrom(StreamInput in) throws IOException {
-            return new JvmStats(in);
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            out.writeVInt(versions.size());
-            for (ObjectIntCursor<JvmVersion> v : versions) {
-                v.key.writeTo(out);
-                out.writeVInt(v.value);
-            }
-            out.writeVLong(threads);
-            out.writeVLong(maxUptime);
-            out.writeVLong(heapUsed);
-            out.writeVLong(heapMax);
-        }
-
        static final class Fields {
            static final String VERSIONS = "versions";
            static final String VERSION = "version";
@@ -606,7 +598,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            }
        }

-    public static class JvmVersion implements Streamable {
+    public static class JvmVersion implements Writeable {
        String version;
        String vmName;
        String vmVersion;
@@ -619,6 +611,24 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
            vmVendor = jvmInfo.getVmVendor();
        }

+        /**
+         * Read from a stream.
+         */
+        JvmVersion(StreamInput in) throws IOException {
+            version = in.readString();
+            vmName = in.readString();
+            vmVersion = in.readString();
+            vmVendor = in.readString();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(version);
+            out.writeString(vmName);
+            out.writeString(vmVersion);
+            out.writeString(vmVendor);
+        }
+
        JvmVersion() {
        }
@@ -640,27 +650,5 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
        public int hashCode() {
            return vmVersion.hashCode();
        }
-
-        public static JvmVersion readJvmVersion(StreamInput in) throws IOException {
-            JvmVersion jvm = new JvmVersion();
-            jvm.readFrom(in);
-            return jvm;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            version = in.readString();
-            vmName = in.readString();
-            vmVersion = in.readString();
-            vmVendor = in.readString();
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            out.writeString(version);
-            out.writeString(vmName);
-            out.writeString(vmVersion);
-            out.writeString(vmVendor);
-        }
    }
}
@@ -22,7 +22,7 @@ package org.elasticsearch.monitor.fs;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -33,9 +33,9 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;

-public class FsInfo implements Iterable<FsInfo.Path>, Streamable, ToXContent {
+public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {

-    public static class Path implements Streamable, ToXContent {
+    public static class Path implements Writeable, ToXContent {

        String path;
        @Nullable
@@ -62,14 +62,10 @@ public class FsInfo implements Iterable<FsInfo.Path>, Streamable, ToXContent {
            this.available = available;
        }

-        static public Path readInfoFrom(StreamInput in) throws IOException {
-            Path i = new Path();
-            i.readFrom(in);
-            return i;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
+        /**
+         * Read from a stream.
+         */
+        public Path(StreamInput in) throws IOException {
            path = in.readOptionalString();
            mount = in.readOptionalString();
            type = in.readOptionalString();
@@ -192,13 +188,9 @@ public class FsInfo implements Iterable<FsInfo.Path>, Streamable, ToXContent {
        }
    }

-    long timestamp;
+    final long timestamp;
+    final Path[] paths;
    Path total;
-    Path[] paths;
-
-    FsInfo() {
-
-    }

    public FsInfo(long timestamp, Path[] paths) {
        this.timestamp = timestamp;
@@ -206,6 +198,26 @@ public class FsInfo implements Iterable<FsInfo.Path>, Streamable, ToXContent {
        this.total = null;
    }

+    /**
+     * Read from a stream.
+     */
+    public FsInfo(StreamInput in) throws IOException {
+        timestamp = in.readVLong();
+        paths = new Path[in.readVInt()];
+        for (int i = 0; i < paths.length; i++) {
+            paths[i] = new Path(in);
+        }
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeVLong(timestamp);
+        out.writeVInt(paths.length);
+        for (Path path : paths) {
+            path.writeTo(out);
+        }
+    }
+
    public Path getTotal() {
        return total();
    }
@@ -237,30 +249,6 @@ public class FsInfo implements Iterable<FsInfo.Path>, Streamable, ToXContent {
        return Arrays.stream(paths).iterator();
    }

-    public static FsInfo readFsInfo(StreamInput in) throws IOException {
-        FsInfo stats = new FsInfo();
-        stats.readFrom(in);
-        return stats;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        timestamp = in.readVLong();
-        paths = new Path[in.readVInt()];
-        for (int i = 0; i < paths.length; i++) {
-            paths[i] = Path.readInfoFrom(in);
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVLong(timestamp);
-        out.writeVInt(paths.length);
-        for (Path path : paths) {
-            path.writeTo(out);
-        }
-    }
-
    static final class Fields {
        static final String FS = "fs";
        static final String TIMESTAMP = "timestamp";
|
@ -64,7 +64,7 @@ public class FsService extends AbstractComponent {
|
||||
return probe.stats();
|
||||
} catch (IOException ex) {
|
||||
logger.warn("Failed to fetch fs stats - returning empty instance");
|
||||
return new FsInfo();
|
||||
return new FsInfo(0, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|