Remove unused dump infra

Way back when ES started, there was an idea for a dump infrastructure, but ES ended up supporting its serviceability aspects through APIs instead. Remove the unused code.
Shay Banon 2014-05-06 11:50:17 +02:00
parent a8b6f81525
commit 66296de38d
18 changed files with 8 additions and 1089 deletions


@@ -123,7 +123,10 @@ public class Environment {
     }
     /**
-     * The work location.
+     * The work location, path to temp files.
+     *
+     * Note, currently, we don't use it in ES at all, we should strive to see if we can keep it like that,
+     * but if we do, we have the infra for it.
      */
     public File workFile() {
         return workFile;
@@ -131,6 +134,9 @@
     /**
      * The work location with the cluster name as a sub directory.
+     *
+     * Note, currently, we don't use it in ES at all, we should strive to see if we can keep it like that,
+     * but if we do, we have the infra for it.
      */
     public File workWithClusterFile() {
         return workWithClusterFile;


@@ -25,12 +25,6 @@ import org.elasticsearch.common.inject.assistedinject.FactoryProvider;
 import org.elasticsearch.common.inject.multibindings.MapBinder;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.monitor.dump.DumpContributorFactory;
-import org.elasticsearch.monitor.dump.DumpMonitorService;
-import org.elasticsearch.monitor.dump.cluster.ClusterDumpContributor;
-import org.elasticsearch.monitor.dump.heap.HeapDumpContributor;
-import org.elasticsearch.monitor.dump.summary.SummaryDumpContributor;
-import org.elasticsearch.monitor.dump.thread.ThreadDumpContributor;
 import org.elasticsearch.monitor.fs.FsProbe;
 import org.elasticsearch.monitor.fs.FsService;
 import org.elasticsearch.monitor.fs.JmxFsProbe;
@@ -53,11 +47,6 @@ import org.elasticsearch.monitor.sigar.SigarService;
 import java.util.Map;
-import static org.elasticsearch.monitor.dump.cluster.ClusterDumpContributor.CLUSTER;
-import static org.elasticsearch.monitor.dump.heap.HeapDumpContributor.HEAP_DUMP;
-import static org.elasticsearch.monitor.dump.summary.SummaryDumpContributor.SUMMARY;
-import static org.elasticsearch.monitor.dump.thread.ThreadDumpContributor.THREAD_DUMP;
 /**
  *
  */
@@ -106,36 +95,5 @@ public class MonitorModule extends AbstractModule {
         bind(FsService.class).asEagerSingleton();
         bind(JvmMonitorService.class).asEagerSingleton();
-        MapBinder<String, DumpContributorFactory> tokenFilterBinder
-                = MapBinder.newMapBinder(binder(), String.class, DumpContributorFactory.class);
-        Map<String, Settings> dumpContSettings = settings.getGroups("monitor.dump");
-        for (Map.Entry<String, Settings> entry : dumpContSettings.entrySet()) {
-            String dumpContributorName = entry.getKey();
-            Settings dumpContributorSettings = entry.getValue();
-            Class<? extends DumpContributorFactory> type = dumpContributorSettings.getAsClass("type", null, "org.elasticsearch.monitor.dump." + dumpContributorName + ".", "DumpContributor");
-            if (type == null) {
-                throw new IllegalArgumentException("Dump Contributor [" + dumpContributorName + "] must have a type associated with it");
-            }
-            tokenFilterBinder.addBinding(dumpContributorName).toProvider(FactoryProvider.newFactory(DumpContributorFactory.class, type)).in(Scopes.SINGLETON);
-        }
-        // add default
-        if (!dumpContSettings.containsKey(SUMMARY)) {
-            tokenFilterBinder.addBinding(SUMMARY).toProvider(FactoryProvider.newFactory(DumpContributorFactory.class, SummaryDumpContributor.class)).in(Scopes.SINGLETON);
-        }
-        if (!dumpContSettings.containsKey(THREAD_DUMP)) {
-            tokenFilterBinder.addBinding(THREAD_DUMP).toProvider(FactoryProvider.newFactory(DumpContributorFactory.class, ThreadDumpContributor.class)).in(Scopes.SINGLETON);
-        }
-        if (!dumpContSettings.containsKey(HEAP_DUMP)) {
-            tokenFilterBinder.addBinding(HEAP_DUMP).toProvider(FactoryProvider.newFactory(DumpContributorFactory.class, HeapDumpContributor.class)).in(Scopes.SINGLETON);
-        }
-        if (!dumpContSettings.containsKey(CLUSTER)) {
-            tokenFilterBinder.addBinding(CLUSTER).toProvider(FactoryProvider.newFactory(DumpContributorFactory.class, ClusterDumpContributor.class)).in(Scopes.SINGLETON);
-        }
-        bind(DumpMonitorService.class).asEagerSingleton();
     }
 }


@@ -1,94 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.Nullable;
import java.io.*;
import java.util.ArrayList;
import java.util.Map;
/**
*
*/
public abstract class AbstractDump implements Dump {
private final long timestamp;
private final String cause;
private final Map<String, Object> context;
private final ArrayList<File> files = new ArrayList<>();
protected AbstractDump(long timestamp, String cause, @Nullable Map<String, Object> context) {
this.timestamp = timestamp;
this.cause = cause;
if (context == null) {
context = ImmutableMap.of();
}
this.context = context;
}
@Override
public long timestamp() {
return timestamp;
}
@Override
public Map<String, Object> context() {
return this.context;
}
@Override
public String cause() {
return cause;
}
@Override
public File[] files() {
return files.toArray(new File[files.size()]);
}
@Override
public File createFile(String name) throws DumpException {
File file = doCreateFile(name);
files.add(file);
return file;
}
protected abstract File doCreateFile(String name) throws DumpException;
@Override
public OutputStream createFileOutputStream(String name) throws DumpException {
try {
return new FileOutputStream(createFile(name));
} catch (FileNotFoundException e) {
throw new DumpException("Failed to create file [" + name + "]", e);
}
}
@Override
public Writer createFileWriter(String name) throws DumpException {
return new OutputStreamWriter(createFileOutputStream(name), Charsets.UTF_8);
}
}


@@ -1,47 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import java.io.File;
import java.io.OutputStream;
import java.io.Writer;
import java.util.Map;
/**
*
*/
public interface Dump {
long timestamp();
Map<String, Object> context();
String cause();
File createFile(String name) throws DumpException;
Writer createFileWriter(String name) throws DumpException;
OutputStream createFileOutputStream(String name) throws DumpException;
File[] files();
void finish() throws DumpException;
}


@@ -1,41 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
/**
*
*/
public class DumpContributionFailedException extends DumpException {
private final String name;
public DumpContributionFailedException(String name, String msg) {
this(name, msg, null);
}
public DumpContributionFailedException(String name, String msg, Throwable cause) {
super(name + ": " + msg, cause);
this.name = name;
}
public String name() {
return this.name;
}
}


@@ -1,30 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
/**
*
*/
public interface DumpContributor {
String getName();
void contribute(Dump dump) throws DumpContributionFailedException;
}


@@ -1,30 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import org.elasticsearch.common.settings.Settings;
/**
*
*/
public interface DumpContributorFactory {
DumpContributor create(String name, Settings settings);
}


@@ -1,36 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import org.elasticsearch.ElasticsearchException;
/**
*
*/
public class DumpException extends ElasticsearchException {
public DumpException(String msg) {
super(msg);
}
public DumpException(String msg, Throwable cause) {
super(msg, cause);
}
}


@@ -1,34 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
/**
*
*/
public class DumpGenerationFailedException extends DumpException {
public DumpGenerationFailedException(String msg) {
super(msg);
}
public DumpGenerationFailedException(String msg, Throwable cause) {
super(msg, cause);
}
}


@@ -1,54 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import org.elasticsearch.common.Nullable;
import java.io.File;
import java.util.Map;
/**
*
*/
public interface DumpGenerator {
Result generateDump(String cause, @Nullable Map<String, Object> context) throws DumpGenerationFailedException;
Result generateDump(String cause, @Nullable Map<String, Object> context, String... contributors) throws DumpGenerationFailedException;
static class Result {
private final File location;
private Iterable<DumpContributionFailedException> failedContributors;
public Result(File location, Iterable<DumpContributionFailedException> failedContributors) {
this.location = location;
this.failedContributors = failedContributors;
}
public String location() {
return location.toString();
}
public Iterable<DumpContributionFailedException> failedContributors() {
return failedContributors;
}
}
}


@@ -1,121 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.monitor.dump.heap.HeapDumpContributor;
import org.elasticsearch.monitor.dump.summary.SummaryDumpContributor;
import org.elasticsearch.monitor.dump.thread.ThreadDumpContributor;
import java.io.File;
import java.util.Map;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.monitor.dump.heap.HeapDumpContributor.HEAP_DUMP;
import static org.elasticsearch.monitor.dump.summary.SummaryDumpContributor.SUMMARY;
import static org.elasticsearch.monitor.dump.thread.ThreadDumpContributor.THREAD_DUMP;
/**
*
*/
public class DumpMonitorService extends AbstractComponent {
private final String dumpLocation;
private final DumpGenerator generator;
private final ClusterService clusterService;
private final Map<String, Settings> contSettings;
private final Map<String, DumpContributorFactory> contributors;
private final File workFile;
public DumpMonitorService() {
this(EMPTY_SETTINGS, new Environment(EMPTY_SETTINGS), null, null);
}
@Inject
public DumpMonitorService(Settings settings, Environment environment,
@Nullable ClusterService clusterService, @Nullable Map<String, DumpContributorFactory> contributors) {
super(settings);
this.clusterService = clusterService;
this.contributors = contributors;
contSettings = settings.getGroups("monitor.dump");
workFile = environment.workWithClusterFile();
this.dumpLocation = settings.get("dump_location");
File dumpLocationFile;
if (dumpLocation != null) {
dumpLocationFile = new File(dumpLocation);
} else {
dumpLocationFile = new File(workFile, "dump");
}
Map<String, DumpContributor> contributorMap = newHashMap();
if (contributors != null) {
for (Map.Entry<String, DumpContributorFactory> entry : contributors.entrySet()) {
String contName = entry.getKey();
DumpContributorFactory dumpContributorFactory = entry.getValue();
Settings analyzerSettings = contSettings.get(contName);
if (analyzerSettings == null) {
analyzerSettings = EMPTY_SETTINGS;
}
DumpContributor analyzerFactory = dumpContributorFactory.create(contName, analyzerSettings);
contributorMap.put(contName, analyzerFactory);
}
}
if (!contributorMap.containsKey(SUMMARY)) {
contributorMap.put(SUMMARY, new SummaryDumpContributor(SUMMARY, EMPTY_SETTINGS));
}
if (!contributorMap.containsKey(HEAP_DUMP)) {
contributorMap.put(HEAP_DUMP, new HeapDumpContributor(HEAP_DUMP, EMPTY_SETTINGS));
}
if (!contributorMap.containsKey(THREAD_DUMP)) {
contributorMap.put(THREAD_DUMP, new ThreadDumpContributor(THREAD_DUMP, EMPTY_SETTINGS));
}
generator = new SimpleDumpGenerator(dumpLocationFile, contributorMap);
}
public DumpGenerator.Result generateDump(String cause, @Nullable Map<String, Object> context) throws DumpGenerationFailedException {
return generator.generateDump(cause, fillContextMap(context));
}
public DumpGenerator.Result generateDump(String cause, @Nullable Map<String, Object> context, String... contributors) throws DumpGenerationFailedException {
return generator.generateDump(cause, fillContextMap(context), contributors);
}
private Map<String, Object> fillContextMap(Map<String, Object> context) {
if (context == null) {
context = newHashMap();
}
if (clusterService != null) {
context.put("localNode", clusterService.localNode());
}
return context;
}
}


@@ -1,48 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import org.elasticsearch.common.Nullable;
import java.io.File;
import java.util.Map;
/**
*
*/
public class SimpleDump extends AbstractDump {
private final File location;
public SimpleDump(long timestamp, String cause, @Nullable Map<String, Object> context, File location) {
super(timestamp, cause, context);
this.location = location;
}
@Override
protected File doCreateFile(String name) throws DumpException {
return new File(location, name);
}
@Override
public void finish() throws DumpException {
}
}


@@ -1,80 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.FileSystemUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Map;
/**
*
*/
public class SimpleDumpGenerator implements DumpGenerator {
private final File dumpLocation;
private final ImmutableMap<String, DumpContributor> contributors;
public SimpleDumpGenerator(File dumpLocation, Map<String, DumpContributor> contributors) {
this.dumpLocation = dumpLocation;
this.contributors = ImmutableMap.copyOf(contributors);
}
public Result generateDump(String cause, @Nullable Map<String, Object> context) throws DumpGenerationFailedException {
return generateDump(cause, context, contributors.keySet().toArray(new String[contributors.size()]));
}
public Result generateDump(String cause, @Nullable Map<String, Object> context, String... contributors) throws DumpGenerationFailedException {
long timestamp = System.currentTimeMillis();
String fileName = "";
if (context.containsKey("localNode")) {
DiscoveryNode localNode = (DiscoveryNode) context.get("localNode");
if (localNode.name() != null) {
fileName += localNode.name() + "-";
}
fileName += localNode.id() + "-";
}
File file = new File(dumpLocation, fileName + cause + "-" + timestamp);
FileSystemUtils.mkdirs(file);
SimpleDump dump = new SimpleDump(System.currentTimeMillis(), cause, context, file);
ArrayList<DumpContributionFailedException> failedContributors = new ArrayList<>();
for (String name : contributors) {
DumpContributor contributor = this.contributors.get(name);
if (contributor == null) {
failedContributors.add(new DumpContributionFailedException(name, "No contributor"));
continue;
}
try {
contributor.contribute(dump);
} catch (DumpContributionFailedException e) {
failedContributors.add(e);
} catch (Exception e) {
failedContributors.add(new DumpContributionFailedException(contributor.getName(), "Failed", e));
}
}
dump.finish();
return new Result(file, failedContributors);
}
}


@@ -1,73 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump.cluster;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.assistedinject.Assisted;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.dump.Dump;
import org.elasticsearch.monitor.dump.DumpContributionFailedException;
import org.elasticsearch.monitor.dump.DumpContributor;
import java.io.PrintWriter;
/**
*
*/
public class ClusterDumpContributor implements DumpContributor {
public static final String CLUSTER = "cluster";
private final String name;
private final ClusterService clusterService;
@Inject
public ClusterDumpContributor(ClusterService clusterService, @Assisted String name, @Assisted Settings settings) {
this.clusterService = clusterService;
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public void contribute(Dump dump) throws DumpContributionFailedException {
ClusterState clusterState = clusterService.state();
DiscoveryNodes nodes = clusterState.nodes();
RoutingTable routingTable = clusterState.routingTable();
PrintWriter writer = new PrintWriter(dump.createFileWriter("cluster.txt"));
writer.println("===== CLUSTER NODES ======");
writer.print(nodes.prettyPrint());
writer.println("===== ROUTING TABLE ======");
writer.print(routingTable.prettyPrint());
writer.close();
}
}


@@ -1,77 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump.heap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.assistedinject.Assisted;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.dump.Dump;
import org.elasticsearch.monitor.dump.DumpContributionFailedException;
import org.elasticsearch.monitor.dump.DumpContributor;
import java.lang.reflect.Method;
/**
*
*/
public class HeapDumpContributor implements DumpContributor {
public static final String HEAP_DUMP = "heap";
private final Method heapDumpMethod;
private final Object diagnosticMBean;
private final String name;
@Inject
public HeapDumpContributor(@Assisted String name, @Assisted Settings settings) {
this.name = name;
Method heapDumpMethod;
Object diagnosticMBean;
try {
Class managementFactoryClass = Class.forName("sun.management.ManagementFactory", true, HeapDumpContributor.class.getClassLoader());
Method method = managementFactoryClass.getMethod("getDiagnosticMXBean");
diagnosticMBean = method.invoke(null);
heapDumpMethod = diagnosticMBean.getClass().getMethod("dumpHeap", String.class, boolean.class);
} catch (Exception _ex) {
heapDumpMethod = null;
diagnosticMBean = null;
}
this.heapDumpMethod = heapDumpMethod;
this.diagnosticMBean = diagnosticMBean;
}
@Override
public String getName() {
return name;
}
@Override
public void contribute(Dump dump) throws DumpContributionFailedException {
if (heapDumpMethod == null) {
throw new DumpContributionFailedException(getName(), "Heap dump not enabled on this JVM");
}
try {
heapDumpMethod.invoke(diagnosticMBean, dump.createFile("heap.hprof").getAbsolutePath(), true);
} catch (Exception e) {
throw new DumpContributionFailedException(getName(), "Failed to generate heap dump", e);
}
}
}


@@ -1,114 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump.summary;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.assistedinject.Assisted;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.dump.Dump;
import org.elasticsearch.monitor.dump.DumpContributionFailedException;
import org.elasticsearch.monitor.dump.DumpContributor;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Locale;
/**
*
*/
public class SummaryDumpContributor implements DumpContributor {
private final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS", Locale.ROOT);
private final Object formatterLock = new Object();
public static final String SUMMARY = "summary";
private final String name;
@Inject
public SummaryDumpContributor(@Assisted String name, @Assisted Settings settings) {
this.name = name;
}
public String getName() {
return name;
}
public void contribute(Dump dump) throws DumpContributionFailedException {
PrintWriter writer = new PrintWriter(dump.createFileWriter("summary.txt"));
try {
processHeader(writer, dump.timestamp());
processCause(writer, dump.cause());
processThrowables(writer, dump);
} catch (Exception e) {
throw new DumpContributionFailedException(getName(), "Failed to generate", e);
} finally {
try {
writer.close();
} catch (Exception e) {
// ignore
}
}
}
private void processHeader(PrintWriter writer, long timestamp) {
synchronized (formatterLock) {
writer.println("===== TIME =====");
writer.println(dateFormat.format(new Date(timestamp)));
writer.println();
}
}
private void processCause(PrintWriter writer, String cause) {
writer.println("===== CAUSE =====");
writer.println(cause);
writer.println();
}
private void processThrowables(PrintWriter writer, Dump dump) {
writer.println("===== EXCEPTIONS =====");
Object throwables = dump.context().get("throwables");
if (throwables == null) {
return;
}
if (throwables instanceof Throwable[]) {
Throwable[] array = (Throwable[]) throwables;
for (Throwable t : array) {
writer.println();
writer.println("---- Exception ----");
t.printStackTrace(writer);
}
} else if (throwables instanceof Collection) {
Collection collection = (Collection) throwables;
for (Object o : collection) {
Throwable t = (Throwable) o;
writer.println();
writer.println("---- Exception ----");
t.printStackTrace(writer);
}
} else {
throw new DumpContributionFailedException(getName(), "Can't handle throwables type [" + throwables.getClass() + "]");
}
writer.println();
}
}


@@ -1,136 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.monitor.dump.thread;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.assistedinject.Assisted;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.monitor.dump.Dump;
import org.elasticsearch.monitor.dump.DumpContributionFailedException;
import org.elasticsearch.monitor.dump.DumpContributor;
import java.io.PrintWriter;
import java.lang.management.ManagementFactory;
import java.lang.management.MonitorInfo;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.Locale;
/**
*
*/
public class ThreadDumpContributor implements DumpContributor {
private static final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
public static final String THREAD_DUMP = "thread";
private final String name;
@Inject
public ThreadDumpContributor(@Assisted String name, @Assisted Settings settings) {
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public void contribute(Dump dump) throws DumpContributionFailedException {
PrintWriter writer = new PrintWriter(dump.createFileWriter("threads.txt"));
try {
processDeadlocks(writer);
processAllThreads(writer);
} catch (Exception e) {
throw new DumpContributionFailedException(getName(), "Failed to generate", e);
} finally {
try {
writer.close();
} catch (Exception e) {
// ignore
}
}
}
private void processDeadlocks(PrintWriter dump) {
dump.println("===== Deadlocked Threads =====");
long deadlockedThreadIds[] = findDeadlockedThreads();
if (deadlockedThreadIds != null)
dumpThreads(dump, getThreadInfo(deadlockedThreadIds));
}
private void processAllThreads(PrintWriter dump) {
dump.println();
dump.println("===== All Threads =====");
dumpThreads(dump, dumpAllThreads());
}
private void dumpThreads(PrintWriter dump, ThreadInfo infos[]) {
for (ThreadInfo info : infos) {
dump.println();
write(info, dump);
}
}
private ThreadInfo[] dumpAllThreads() {
return threadBean.dumpAllThreads(true, true);
}
public long[] findDeadlockedThreads() {
return threadBean.findDeadlockedThreads();
}
public ThreadInfo[] getThreadInfo(long[] threadIds) {
return threadBean.getThreadInfo(threadIds, true, true);
}
private void write(ThreadInfo threadInfo, PrintWriter writer) {
writer.print(String.format(Locale.ROOT, "\"%s\" Id=%s %s", threadInfo.getThreadName(), threadInfo.getThreadId(), threadInfo.getThreadState()));
if (threadInfo.getLockName() != null) {
writer.print(String.format(Locale.ROOT, " on %s", threadInfo.getLockName()));
if (threadInfo.getLockOwnerName() != null)
writer.print(String.format(Locale.ROOT, " owned by \"%s\" Id=%s", threadInfo.getLockOwnerName(), threadInfo.getLockOwnerId()));
}
if (threadInfo.isInNative())
writer.println(" (in native)");
else
writer.println();
MonitorInfo[] lockedMonitors = threadInfo.getLockedMonitors();
StackTraceElement stackTraceElements[] = threadInfo.getStackTrace();
for (StackTraceElement stackTraceElement : stackTraceElements) {
writer.println(" at " + stackTraceElement);
MonitorInfo lockedMonitor = findLockedMonitor(stackTraceElement, lockedMonitors);
if (lockedMonitor != null)
writer.println((" - locked " + lockedMonitor.getClassName() + "@" + lockedMonitor.getIdentityHashCode()));
}
}
private static MonitorInfo findLockedMonitor(StackTraceElement stackTraceElement, MonitorInfo lockedMonitors[]) {
for (MonitorInfo monitorInfo : lockedMonitors) {
if (stackTraceElement.equals(monitorInfo.getLockedStackFrame()))
return monitorInfo;
}
return null;
}
}


@@ -27,8 +27,6 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.monitor.dump.DumpGenerator;
-import org.elasticsearch.monitor.dump.DumpMonitorService;
 import org.elasticsearch.threadpool.ThreadPool;
 import java.util.HashSet;
@@ -37,8 +35,6 @@ import java.util.Set;
 import java.util.concurrent.ScheduledFuture;
 import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
-import static org.elasticsearch.monitor.dump.summary.SummaryDumpContributor.SUMMARY;
-import static org.elasticsearch.monitor.dump.thread.ThreadDumpContributor.THREAD_DUMP;
 import static org.elasticsearch.monitor.jvm.DeadlockAnalyzer.deadlockAnalyzer;
 import static org.elasticsearch.monitor.jvm.JvmStats.GarbageCollector;
 import static org.elasticsearch.monitor.jvm.JvmStats.jvmStats;
@@ -49,13 +45,8 @@ import static org.elasticsearch.monitor.jvm.JvmStats.jvmStats;
 public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorService> {
     private final ThreadPool threadPool;
-    private final DumpMonitorService dumpMonitorService;
     private final boolean enabled;
     private final TimeValue interval;
     private final ImmutableMap<String, GcThreshold> gcThresholds;
     private volatile ScheduledFuture scheduledFuture;
@@ -85,10 +76,9 @@ public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorService> {
     }
     @Inject
-    public JvmMonitorService(Settings settings, ThreadPool threadPool, DumpMonitorService dumpMonitorService) {
+    public JvmMonitorService(Settings settings, ThreadPool threadPool) {
         super(settings);
         this.threadPool = threadPool;
-        this.dumpMonitorService = dumpMonitorService;
         this.enabled = componentSettings.getAsBoolean("enabled", true);
         this.interval = componentSettings.getAsTime("interval", timeValueSeconds(1));
@@ -238,25 +228,5 @@ public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorService> {
             }
             return sb.toString();
         }
-        private void monitorDeadlock() {
-            DeadlockAnalyzer.Deadlock[] deadlocks = deadlockAnalyzer().findDeadlocks();
-            if (deadlocks != null && deadlocks.length > 0) {
-                ImmutableSet<DeadlockAnalyzer.Deadlock> asSet = new ImmutableSet.Builder<DeadlockAnalyzer.Deadlock>().add(deadlocks).build();
-                if (!asSet.equals(lastSeenDeadlocks)) {
-                    DumpGenerator.Result genResult = dumpMonitorService.generateDump("deadlock", null, SUMMARY, THREAD_DUMP);
-                    StringBuilder sb = new StringBuilder("Detected Deadlock(s)");
-                    for (DeadlockAnalyzer.Deadlock deadlock : asSet) {
-                        sb.append("\n ----> ").append(deadlock);
-                    }
-                    sb.append("\nDump generated [").append(genResult.location()).append("]");
-                    logger.error(sb.toString());
-                    lastSeenDeadlocks.clear();
-                    lastSeenDeadlocks.addAll(asSet);
-                }
-            } else {
-                lastSeenDeadlocks.clear();
-            }
-        }
     }
 }