nicer logs

commit 8ecf71ffb8
parent b0494a8415
@@ -54,7 +54,7 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
 
         this.readerCleanerSchedule = componentSettings.getAsTime("reader_cleaner_schedule", TimeValue.timeValueMinutes(1));
 
-        logger.debug("Using [" + type() + "] filter cache with readerCleanerSchedule [{}]", readerCleanerSchedule);
+        logger.debug("Using [" + type() + "] filter cache with reader_cleaner_schedule[{}]", readerCleanerSchedule);
 
         this.cache = newConcurrentMap();
         this.scheduleFuture = threadPool.scheduleWithFixedDelay(new IndexReaderCleaner(), readerCleanerSchedule);
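Nearly every hunk in this commit applies the same convention: the bracketed label in a log message is renamed to the snake_case form of the settings key it reports (readerCleanerSchedule becomes reader_cleaner_schedule, [KeepLastN] becomes [keep_last_n]), and the value bracket is placed directly after the name. A minimal sketch of the convention, shown here against the standard SLF4J API as a stand-in for the project's internal logger abstraction:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of the log-naming convention this commit applies: labels match the
// snake_case settings keys, and the {} placeholder bracket hugs the key name.
public class LogNamingSketch {
    private static final Logger logger = LoggerFactory.getLogger(LogNamingSketch.class);

    public static void main(String[] args) {
        int numToKeep = 5;
        // before: "Using [KeepLastN] deletion policy with num_to_keep [{}]"
        // after:  the label is the settings-style name, bracket directly after it
        logger.debug("Using [keep_last_n] deletion policy with num_to_keep[{}]", numToKeep);
    }
}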
@@ -40,7 +40,7 @@ public class KeepLastNDeletionPolicy extends AbstractIndexShardComponent impleme
     @Inject public KeepLastNDeletionPolicy(ShardId shardId, @IndexSettings Settings indexSettings) {
         super(shardId, indexSettings);
         this.numToKeep = componentSettings.getAsInt("num_to_keep", 5);
-        logger.debug("Using [KeepLastN] deletion policy with num_to_keep [{}]", numToKeep);
+        logger.debug("Using [keep_last_n] deletion policy with num_to_keep[{}]", numToKeep);
     }
 
     public void onInit(List<? extends IndexCommit> commits) throws IOException {
@@ -39,7 +39,7 @@ public class KeepOnlyLastDeletionPolicy extends AbstractIndexShardComponent impl
 
     @Inject public KeepOnlyLastDeletionPolicy(ShardId shardId, @IndexSettings Settings indexSettings) {
         super(shardId, indexSettings);
-        logger.debug("Using [KeepOnlyLast] deletion policy");
+        logger.debug("Using [keep_only_last] deletion policy");
     }
 
     /**
@@ -123,7 +123,7 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine,
             throw new EngineAlreadyStartedException(shardId);
         }
         if (logger.isDebugEnabled()) {
-            logger.debug("Starting engine with ram_buffer_size [" + ramBufferSize + "], refresh_interval [" + refreshInterval + "]");
+            logger.debug("Starting engine with ram_buffer_size[" + ramBufferSize + "], refresh_interval[" + refreshInterval + "]");
         }
         IndexWriter indexWriter = null;
         try {
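A side note on this hunk: the message is built by string concatenation, so RobinEngine wraps it in logger.isDebugEnabled() to skip the concatenation cost when debug logging is off. The {}-placeholder calls elsewhere in the commit defer formatting to the logger and need no guard. A short sketch of the trade-off, again using SLF4J as a stand-in for the project's logger:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// The two debug-logging styles seen in this commit: eager concatenation is
// guarded, while placeholder formatting is deferred until debug is enabled.
public class DebugGuardSketch {
    private static final Logger logger = LoggerFactory.getLogger(DebugGuardSketch.class);

    public static void main(String[] args) {
        String ramBufferSize = "64mb";
        String refreshInterval = "1s";

        // eager: the string is built even if debug is off, hence the guard
        if (logger.isDebugEnabled()) {
            logger.debug("Starting engine with ram_buffer_size[" + ramBufferSize + "], refresh_interval[" + refreshInterval + "]");
        }

        // deferred: formatting only happens when debug is actually enabled
        logger.debug("Starting engine with ram_buffer_size[{}], refresh_interval[{}]", ramBufferSize, refreshInterval);
    }
}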
@@ -127,7 +127,7 @@ public class MapperService extends AbstractIndexComponent implements Iterable<Do
         } else {
             dynamicMappingSource = null;
         }
-        logger.debug("Using dynamic [{}] with location [{}] and source [{}]", new Object[]{dynamic, dynamicMappingLocation, dynamicMappingSource});
+        logger.debug("Using dynamic[{}] with location[{}] and source[{}]", new Object[]{dynamic, dynamicMappingLocation, dynamicMappingSource});
     }
 
     @Override public UnmodifiableIterator<DocumentMapper> iterator() {
@@ -55,7 +55,7 @@ public class BalancedSegmentMergePolicyProvider extends AbstractIndexShardCompon
 
         this.useCompoundFile = componentSettings.getAsBoolean("use_compound_file", store == null || store.suggestUseCompoundFile());
 
-        logger.debug("Using [Balanced] merge policy with merge_factor[{}], min_merge_size[{}], max_merge_size[{}], max_merge_docs[{}] use_compound_file[{}]",
+        logger.debug("Using [balanced] merge policy with merge_factor[{}], min_merge_size[{}], max_merge_size[{}], max_merge_docs[{}] use_compound_file[{}]",
                 new Object[]{mergeFactor, minMergeSize, maxMergeSize, maxMergeDocs, useCompoundFile});
     }
 
@@ -50,7 +50,7 @@ public class LogByteSizeMergePolicyProvider extends AbstractIndexShardComponent
         this.maxMergeDocs = componentSettings.getAsInt("max_merge_docs", LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_DOCS);
         this.calibrateSizeByDeletes = componentSettings.getAsBoolean("calibrate_size_by_deletes", false);
         this.useCompoundFile = componentSettings.getAsBoolean("use_compound_file", store == null || store.suggestUseCompoundFile());
-        logger.debug("Using [LogByteSize] merge policy with merge_factor[{}], min_merge_size[{}], max_merge_size[{}], max_merge_docs[{}] use_compound_file[{}], calibrate_size_by_deletes[{}]",
+        logger.debug("Using [log_bytes_size] merge policy with merge_factor[{}], min_merge_size[{}], max_merge_size[{}], max_merge_docs[{}] use_compound_file[{}], calibrate_size_by_deletes[{}]",
                 new Object[]{mergeFactor, minMergeSize, maxMergeSize, maxMergeDocs, useCompoundFile, calibrateSizeByDeletes});
     }
 
@@ -46,7 +46,7 @@ public class LogDocMergePolicyProvider extends AbstractIndexShardComponent imple
         this.mergeFactor = componentSettings.getAsInt("merge_factor", LogDocMergePolicy.DEFAULT_MERGE_FACTOR);
         this.calibrateSizeByDeletes = componentSettings.getAsBoolean("calibrate_size_by_deletes", false);
         this.useCompoundFile = componentSettings.getAsBoolean("use_compound_file", store == null || store.suggestUseCompoundFile());
-        logger.debug("Using [LogDoc] merge policy with merge_factor[{}] min_merge_docs[{}], max_merge_docs[{}], use_compound_file[{}], calibrate_size_by_deletes[{}]",
+        logger.debug("Using [log_doc] merge policy with merge_factor[{}] min_merge_docs[{}], max_merge_docs[{}], use_compound_file[{}], calibrate_size_by_deletes[{}]",
                 new Object[]{mergeFactor, minMergeDocs, maxMergeDocs, useCompoundFile, calibrateSizeByDeletes});
     }
 
@@ -187,7 +187,7 @@ public class InternalIndexService extends AbstractIndexComponent implements Inde
 
         indicesLifecycle.beforeIndexShardCreated(shardId);
 
-        logger.debug("Creating Shard Id [{}]", shardId.id());
+        logger.debug("Creating shard_id[{}]", shardId.id());
 
         Injector shardInjector = injector.createChildInjector(
                 new ShardsPluginsModule(indexSettings, pluginsService),
@@ -234,7 +234,7 @@ public class InternalIndexService extends AbstractIndexComponent implements Inde
         }
         shardsInjectors = ImmutableMap.copyOf(tmpShardInjectors);
         if (delete) {
-            logger.debug("Deleting Shard Id [{}]", shardId);
+            logger.debug("Deleting shard_id[{}]", shardId);
         }
 
         Map<Integer, IndexShard> tmpShardsMap = newHashMap(shards);
@@ -168,7 +168,7 @@ public class RecoveryAction extends AbstractIndexShardComponent implements Close
             stopWatch.stop();
             if (logger.isDebugEnabled()) {
                 StringBuilder sb = new StringBuilder();
-                sb.append("Recovery completed from ").append(targetNode).append(", took [").append(stopWatch.totalTime()).append("]\n");
+                sb.append("Recovery completed from ").append(targetNode).append(", took[").append(stopWatch.totalTime()).append("]\n");
                 sb.append(" Phase1: recovered [").append(recoveryStatus.phase1FileNames.size()).append("]")
                         .append(" files with total size of [").append(new SizeValue(recoveryStatus.phase1TotalSize)).append("]")
                         .append(", took [").append(new TimeValue(recoveryStatus.phase1Time, MILLISECONDS)).append("]")
@@ -53,11 +53,11 @@ public class StoreModule extends AbstractModule {
             storeModule = MemoryStoreModule.class;
         } else if ("fs".equalsIgnoreCase(storeType)) {
             // nothing to set here ... (we default to fs)
-        } else if ("simplefs".equalsIgnoreCase(storeType)) {
+        } else if ("simplefs".equalsIgnoreCase(storeType) || "simple_fs".equals(storeType)) {
             storeModule = SimpleFsStoreModule.class;
-        } else if ("niofs".equalsIgnoreCase(storeType)) {
+        } else if ("niofs".equalsIgnoreCase(storeType) || "nio_fs".equalsIgnoreCase(storeType)) {
             storeModule = NioFsStoreModule.class;
-        } else if ("mmapfs".equalsIgnoreCase(storeType)) {
+        } else if ("mmapfs".equalsIgnoreCase(storeType) || "mmap_fs".equalsIgnoreCase(storeType)) {
             storeModule = MmapFsStoreModule.class;
         } else if (storeType != null) {
             storeModule = settings.getAsClass("index.store.type", storeModule, "org.elasticsearch.index.store.", "StoreModule");
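This hunk makes the snake_case spellings (simple_fs, nio_fs, mmap_fs) valid aliases for the compact store type names; note that the diff checks "simple_fs" with equals() while the other aliases use equalsIgnoreCase(). A hedged sketch of the same dispatch reduced to a normalized lookup table; StoreType and ALIASES are illustrative names for this sketch, not the project's API:

import java.util.Locale;
import java.util.Map;

// Illustrative alias table for the store types accepted after this commit.
// Unlike the diff, which checks each spelling explicitly, this sketch
// normalizes case once and falls back to the fs default, matching the
// "nothing to set here" branch above.
public class StoreTypeAliases {
    enum StoreType { MEMORY, FS, SIMPLE_FS, NIO_FS, MMAP_FS }

    private static final Map<String, StoreType> ALIASES = Map.of(
            "memory", StoreType.MEMORY,
            "fs", StoreType.FS,
            "simplefs", StoreType.SIMPLE_FS,
            "simple_fs", StoreType.SIMPLE_FS,
            "niofs", StoreType.NIO_FS,
            "nio_fs", StoreType.NIO_FS,
            "mmapfs", StoreType.MMAP_FS,
            "mmap_fs", StoreType.MMAP_FS);

    static StoreType resolve(String storeType) {
        return ALIASES.getOrDefault(storeType.toLowerCase(Locale.ROOT), StoreType.FS);
    }

    public static void main(String[] args) {
        System.out.println(resolve("NioFS"));   // NIO_FS
        System.out.println(resolve("mmap_fs")); // MMAP_FS
    }
}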
@@ -58,12 +58,12 @@ public class MmapFsStore extends AbstractFsStore<Directory> {
         SwitchDirectory switchDirectory = buildSwitchDirectoryIfNeeded(fsDirectory);
         if (switchDirectory != null) {
             suggestUseCompoundFile = false;
-            logger.debug("Using [MmapFs] Store with path [{}], cache [true] with extensions [{}]", new Object[]{fsDirectory.getFile(), switchDirectory.primaryExtensions()});
+            logger.debug("Using [mmap_fs] Store with path [{}], cache [true] with extensions [{}]", new Object[]{fsDirectory.getFile(), switchDirectory.primaryExtensions()});
             directory = switchDirectory;
         } else {
             suggestUseCompoundFile = true;
             directory = fsDirectory;
-            logger.debug("Using [MmapFs] Store with path [{}]", fsDirectory.getFile());
+            logger.debug("Using [mmap_fs] Store with path [{}]", fsDirectory.getFile());
         }
     }
 
@@ -58,12 +58,12 @@ public class NioFsStore extends AbstractFsStore<Directory> {
         SwitchDirectory switchDirectory = buildSwitchDirectoryIfNeeded(fsDirectory);
         if (switchDirectory != null) {
             suggestUseCompoundFile = false;
-            logger.debug("Using [NioFs] Store with path [{}], cache [true] with extensions [{}]", new Object[]{fsDirectory.getFile(), switchDirectory.primaryExtensions()});
+            logger.debug("Using [nio_fs] Store with path [{}], cache [true] with extensions [{}]", new Object[]{fsDirectory.getFile(), switchDirectory.primaryExtensions()});
             directory = switchDirectory;
         } else {
             suggestUseCompoundFile = true;
             directory = fsDirectory;
-            logger.debug("Using [NioFs] Store with path [{}]", fsDirectory.getFile());
+            logger.debug("Using [nio_fs] Store with path [{}]", fsDirectory.getFile());
         }
     }
 
@@ -58,12 +58,12 @@ public class SimpleFsStore extends AbstractFsStore<Directory> {
         SwitchDirectory switchDirectory = buildSwitchDirectoryIfNeeded(fsDirectory);
         if (switchDirectory != null) {
             suggestUseCompoundFile = false;
-            logger.debug("Using [SimpleFs] Store with path [{}], cache [true] with extensions [{}]", new Object[]{fsDirectory.getFile(), switchDirectory.primaryExtensions()});
+            logger.debug("Using [simple_fs] Store with path [{}], cache [true] with extensions [{}]", new Object[]{fsDirectory.getFile(), switchDirectory.primaryExtensions()});
             directory = switchDirectory;
         } else {
             suggestUseCompoundFile = true;
             directory = fsDirectory;
-            logger.debug("Using [SimpleFs] Store with path [{}]", fsDirectory.getFile());
+            logger.debug("Using [simple_fs] Store with path [{}]", fsDirectory.getFile());
         }
     }
 
@@ -50,7 +50,7 @@ public class ByteBufferStore extends AbstractStore<ByteBufferDirectory> {
         this.direct = componentSettings.getAsBoolean("direct", true);
         this.warmCache = componentSettings.getAsBoolean("warm_cache", true);
        this.directory = new ByteBufferDirectory((int) bufferSize.bytes(), (int) cacheSize.bytes(), direct, warmCache);
-        logger.debug("Using [ByteBuffer] Store with buffer_size[{}], cache_size[{}], direct[{}], warm_cache[{}]",
+        logger.debug("Using [byte_buffer] store with buffer_size[{}], cache_size[{}], direct[{}], warm_cache[{}]",
                 new Object[]{bufferSize, cacheSize, directory.isDirect(), warmCache});
     }
 
@@ -48,7 +48,7 @@ public class HeapStore extends AbstractStore<HeapDirectory> {
         this.warmCache = componentSettings.getAsBoolean("warm_cache", true);
 
         this.directory = new HeapDirectory(bufferSize, cacheSize, warmCache);
-        logger.debug("Using [Memory] Store with buffer_size[{}], cache_size[{}], warm_cache[{}]",
+        logger.debug("Using [heap] Store with buffer_size[{}], cache_size[{}], warm_cache[{}]",
                 new Object[]{directory.bufferSize(), directory.cacheSize(), warmCache});
     }
 
@@ -40,7 +40,7 @@ public class RamStore extends AbstractStore<RAMDirectory> {
     @Inject public RamStore(ShardId shardId, @IndexSettings Settings indexSettings) {
         super(shardId, indexSettings);
         this.directory = new RAMDirectory();
-        logger.debug("Using [RAM] Store");
+        logger.debug("Using [ram] Store");
     }
 
     @Override public RAMDirectory directory() {
@@ -37,6 +37,7 @@ import org.elasticsearch.cluster.routing.RoutingNode;
 import org.elasticsearch.cluster.routing.RoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.index.IndexShardAlreadyExistsException;
+import org.elasticsearch.index.IndexShardMissingException;
 import org.elasticsearch.index.gateway.IgnoreGatewayRecoveryException;
 import org.elasticsearch.index.gateway.IndexShardGatewayService;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -290,6 +291,8 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
             logger.warn("Failed to create shard for index [" + indexService.index().name() + "] and shard id [" + shardRouting.id() + "]", e);
             try {
                 indexService.deleteShard(shardId);
+            } catch (IndexShardMissingException e1) {
+                // ignore
             } catch (Exception e1) {
                 logger.warn("Failed to delete shard after failed creation for index [" + indexService.index().name() + "] and shard id [" + shardRouting.id() + "]", e1);
             }
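The cleanup after a failed shard creation now tolerates the shard already being gone: the newly imported IndexShardMissingException is caught and ignored ahead of the generic Exception handler. Ordering matters, because Java picks the first matching catch clause. A minimal illustration; MissingException and cleanup() are hypothetical stand-ins for this sketch:

// Catch ordering as used in the hunk above: the specific exception must come
// first, or the generic handler would swallow it and log a spurious warning.
public class CatchOrderSketch {
    static class MissingException extends RuntimeException {}

    static void cleanup(boolean alreadyGone) {
        if (alreadyGone) throw new MissingException();
    }

    public static void main(String[] args) {
        try {
            cleanup(true);
        } catch (MissingException e1) {
            // ignore: the resource is already gone, which is the desired end state
        } catch (Exception e1) {
            System.out.println("failed to clean up: " + e1);
        }
    }
}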
@@ -128,7 +128,7 @@ public class JmxService {
             if (!success) {
                 throw new JmxConnectorCreationException("Failed to bind to [" + port + "]", lastException.get());
             }
-            logger.info("boundAddress [{}], publishAddress [{}]", serviceUrl, publishUrl);
+            logger.info("bound_address[{}], publish_address[{}]", serviceUrl, publishUrl);
         }
 
         for (ResourceDMBean resource : constructionMBeans) {
@@ -482,7 +482,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
             throw new ConnectTransportException(node, "connectTimeout[" + connectTimeout + "], connectRetries[" + connectRetries + "], reason unknown");
         }
         if (logger.isDebugEnabled()) {
-            logger.debug("Connected to node [{}], numberOfConnections [{}]", node, channels.size());
+            logger.debug("Connected to node[{}], number_of_connections[{}]", node, channels.size());
         }
         clientChannels.put(node.id(), new NodeConnections(channels.toArray(new Channel[channels.size()])));
     }
@@ -186,6 +186,7 @@ public class ImmutableSettings implements Settings {
         }
     }
 
+    @SuppressWarnings({"unchecked"})
     @Override public <T> Class<? extends T> getAsClass(String setting, Class<? extends T> defaultClazz, String prefixPackage, String suffixClassName) throws NoClassSettingsException {
         String sValue = get(setting);
         if (sValue == null) {
@@ -199,11 +200,16 @@ public class ImmutableSettings implements Settings {
         try {
             return (Class<? extends T>) getClassLoader().loadClass(fullClassName);
         } catch (ClassNotFoundException e1) {
-            fullClassName = prefixPackage + sValue + "." + Strings.capitalize(toCamelCase(sValue)) + suffixClassName;
+            fullClassName = prefixPackage + toCamelCase(sValue) + "." + Strings.capitalize(toCamelCase(sValue)) + suffixClassName;
             try {
                 return (Class<? extends T>) getClassLoader().loadClass(fullClassName);
             } catch (ClassNotFoundException e2) {
-                throw new NoClassSettingsException("Failed to load class setting [" + setting + "] with value [" + sValue + "]", e);
+                fullClassName = prefixPackage + toCamelCase(sValue).toLowerCase() + "." + Strings.capitalize(toCamelCase(sValue)) + suffixClassName;
+                try {
+                    return (Class<? extends T>) getClassLoader().loadClass(fullClassName);
+                } catch (ClassNotFoundException e3) {
+                    throw new NoClassSettingsException("Failed to load class setting [" + setting + "] with value [" + sValue + "]", e);
+                }
             }
         }
     }
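The getAsClass() change extends the lookup chain to three candidate package layouts: the setting value as given, its camel-cased form, and its lower-cased camel form, throwing only when all of them fail. For a value like nio_fs this lets a layout like org.elasticsearch.index.store.niofs.NioFsStoreModule resolve. A self-contained sketch of the same try-in-order pattern; toCamelCase and capitalize here are simplified stand-ins for the project's helpers:

// Fallback class resolution in the style of the hunk above: candidate
// fully-qualified names are tried in order, and the first failure is rethrown
// only after every candidate misses.
public class ClassLookupSketch {

    static String toCamelCase(String value) {
        StringBuilder sb = new StringBuilder();
        boolean upper = false;
        for (char c : value.toCharArray()) {
            if (c == '_') { upper = true; continue; }
            sb.append(upper ? Character.toUpperCase(c) : c);
            upper = false;
        }
        return sb.toString();
    }

    static String capitalize(String value) {
        return Character.toUpperCase(value.charAt(0)) + value.substring(1);
    }

    static Class<?> resolve(String prefixPackage, String value, String suffix) throws ClassNotFoundException {
        String className = capitalize(toCamelCase(value)) + suffix;
        String[] candidates = {
                prefixPackage + value + "." + className,                           // e.g. ...store.nio_fs.
                prefixPackage + toCamelCase(value) + "." + className,              // e.g. ...store.nioFs.
                prefixPackage + toCamelCase(value).toLowerCase() + "." + className // e.g. ...store.niofs.
        };
        ClassNotFoundException first = null;
        for (String candidate : candidates) {
            try {
                return Class.forName(candidate);
            } catch (ClassNotFoundException e) {
                if (first == null) first = e;
            }
        }
        throw first;
    }

    public static void main(String[] args) {
        try {
            resolve("org.elasticsearch.index.store.", "nio_fs", "StoreModule");
        } catch (ClassNotFoundException e) {
            System.out.println("not on this classpath: " + e.getMessage());
        }
    }
}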
@@ -46,6 +46,6 @@ public class BoundTransportAddress {
     }
 
     @Override public String toString() {
-        return "boundAddress [" + boundAddress + "], publishAddress [" + publishAddress + "]";
+        return "bound_address[" + boundAddress + "], publish_address[" + publishAddress + "]";
     }
 }