From 9262ed7e56085f125f666d24a375d12e9cdef089 Mon Sep 17 00:00:00 2001
From: Christine Poerschke
Date: Tue, 31 Jul 2018 16:58:51 +0100
Subject: [PATCH] SOLR-12558: solr/core (private) logger renames

---
 .../apache/solr/core/ConfigSetService.java    |  8 +++----
 .../solr/core/CorePropertiesLocator.java      | 22 +++++++++----------
 .../org/apache/solr/handler/GraphHandler.java |  4 ++--
 .../org/apache/solr/handler/SQLHandler.java   |  4 ++--
 .../apache/solr/handler/StreamHandler.java    |  4 ++--
 .../solr/handler/sql/SolrEnumerator.java      |  4 ++--
 .../java/org/apache/solr/search/Grouping.java |  8 +++----
 .../solr/search/grouping/CommandHandler.java  |  4 ++--
 .../solr/store/hdfs/HdfsLocalityReporter.java |  8 +++----
 9 files changed, 33 insertions(+), 33 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
index 13ac9ce1db4..69e160b5da1 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
@@ -44,7 +44,7 @@ import org.slf4j.LoggerFactory;
  */
 public abstract class ConfigSetService {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static ConfigSetService createConfigSetService(NodeConfig nodeConfig, SolrResourceLoader loader, ZkController zkController) {
     if (zkController != null) return new CloudConfigSetService(loader, zkController);
@@ -228,15 +228,15 @@ public abstract class ConfigSetService {
       try {
         String cachedName = cacheName(schemaFile);
         return schemaCache.get(cachedName, () -> {
-          logger.info("Creating new index schema for core {}", cd.getName());
+          log.info("Creating new index schema for core {}", cd.getName());
           return IndexSchemaFactory.buildIndexSchema(cd.getSchemaName(), solrConfig);
         });
       } catch (ExecutionException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
             "Error creating index schema for core " + cd.getName(), e);
       } catch (IOException e) {
-        logger.warn("Couldn't get last modified time for schema file {}: {}", schemaFile, e.getMessage());
-        logger.warn("Will not use schema cache");
+        log.warn("Couldn't get last modified time for schema file {}: {}", schemaFile, e.getMessage());
+        log.warn("Will not use schema cache");
       }
     }
     return IndexSchemaFactory.buildIndexSchema(cd.getSchemaName(), solrConfig);
diff --git a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
index 3c8a40db224..76eb5c42f74 100644
--- a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
+++ b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
@@ -50,13 +50,13 @@ public class CorePropertiesLocator implements CoresLocator {
 
   public static final String PROPERTIES_FILENAME = "core.properties";
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Path rootDirectory;
 
   public CorePropertiesLocator(Path coreDiscoveryRoot) {
     this.rootDirectory = coreDiscoveryRoot;
-    logger.debug("Config-defined core root directory: {}", this.rootDirectory);
+    log.debug("Config-defined core root directory: {}", this.rootDirectory);
   }
 
   @Override
@@ -92,7 +92,7 @@ public class CorePropertiesLocator implements CoresLocator {
       }
     }
     catch (IOException e) {
-      logger.error("Couldn't persist core properties to {}: {}", propfile, e.getMessage());
+      log.error("Couldn't persist core properties to {}: {}", propfile, e.getMessage());
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Couldn't persist core properties to " +
           propfile.toAbsolutePath().toString() + " : " + e.getMessage());
     }
@@ -109,7 +109,7 @@ public class CorePropertiesLocator implements CoresLocator {
       try {
         Files.deleteIfExists(propfile);
       } catch (IOException e) {
-        logger.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
+        log.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
       }
     }
   }
@@ -132,7 +132,7 @@
 
   @Override
   public List discover(final CoreContainer cc) {
-    logger.debug("Looking for core definitions underneath {}", rootDirectory);
+    log.debug("Looking for core definitions underneath {}", rootDirectory);
     final List cds = Lists.newArrayList();
     try {
       Set options = new HashSet<>();
@@ -144,7 +144,7 @@
           if (file.getFileName().toString().equals(PROPERTIES_FILENAME)) {
             CoreDescriptor cd = buildCoreDescriptor(file, cc);
             if (cd != null) {
-              logger.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
+              log.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
               cds.add(cd);
             }
             return FileVisitResult.SKIP_SIBLINGS;
@@ -157,19 +157,19 @@
           // if we get an error on the root, then fail the whole thing
           // otherwise, log a warning and continue to try and load other cores
           if (file.equals(rootDirectory)) {
-            logger.error("Error reading core root directory {}: {}", file, exc);
+            log.error("Error reading core root directory {}: {}", file, exc);
             throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error reading core root directory");
           }
-          logger.warn("Error visiting {}: {}", file, exc);
+          log.warn("Error visiting {}: {}", file, exc);
           return FileVisitResult.CONTINUE;
         }
       });
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Couldn't walk file tree under " + this.rootDirectory, e);
     }
-    logger.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
+    log.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
     if (cds.size() > 0) {
-      logger.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
+      log.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
     }
     return cds;
   }
@@ -190,7 +190,7 @@
       return ret;
     }
     catch (IOException e) {
-      logger.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());
+      log.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());
       return null;
     }
 
diff --git a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
index 13874c93727..ed5ae0aeec1 100644
--- a/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/GraphHandler.java
@@ -54,7 +54,7 @@
 public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
 
   private StreamFactory streamFactory = new DefaultStreamFactory();
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private String coreName;
 
   @Override
@@ -110,7 +110,7 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
       tupleStream = this.streamFactory.constructStream(params.get("expr"));
     } catch (Exception e) {
       //Catch exceptions that occur while the stream is being created. This will include streaming expression parse rules.
-      SolrException.log(logger, e);
+      SolrException.log(log, e);
       Map requestContext = req.getContext();
       requestContext.put("stream", new DummyErrorStream(e));
       return;
diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
index 67ea1f69c7f..6b0330add83 100644
--- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java
@@ -49,7 +49,7 @@ import org.slf4j.LoggerFactory;
 
 public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static String defaultZkhost = null;
   private static String defaultWorkerCollection = null;
@@ -124,7 +124,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, Per
       if(tupleStream != null) {
         tupleStream.close();
       }
-      SolrException.log(logger, e);
+      SolrException.log(log, e);
       rsp.add("result-set", new StreamHandler.DummyErrorStream(e));
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 269d12dc814..4e43e1ceb06 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -66,7 +66,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
   static SolrClientCache clientCache = new SolrClientCache();
   static ModelCache modelCache = null;
   private StreamFactory streamFactory = new DefaultStreamFactory();
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private String coreName;
   private Map daemons = Collections.synchronizedMap(new HashMap());
 
@@ -157,7 +157,7 @@
     } catch (Exception e) {
       // Catch exceptions that occur while the stream is being created. This will include streaming expression parse
       // rules.
-      SolrException.log(logger, e);
+      SolrException.log(log, e);
       rsp.add("result-set", new DummyErrorStream(e));
       return;
 
diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
index 7ba3838ce79..8c06f3204c5 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrEnumerator.java
@@ -30,7 +30,7 @@ import java.util.Map;
 
 /** Enumerator that reads from a Solr collection. */
 class SolrEnumerator implements Enumerator {
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final TupleStream tupleStream;
   private final List<Map.Entry<String, Class>> fields;
@@ -126,7 +126,7 @@ class SolrEnumerator implements Enumerator {
         return true;
       }
     } catch (IOException e) {
-      logger.error("IOException", e);
+      log.error("IOException", e);
       return false;
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java
index 8342bb72b46..938562df84f 100644
--- a/solr/core/src/java/org/apache/solr/search/Grouping.java
+++ b/solr/core/src/java/org/apache/solr/search/Grouping.java
@@ -77,7 +77,7 @@ import org.slf4j.LoggerFactory;
  */
 public class Grouping {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final SolrIndexSearcher searcher;
   private final QueryResult qr;
@@ -384,8 +384,8 @@
         cachedCollector.replay(secondPhaseCollectors);
       } else {
         signalCacheWarning = true;
-        logger.warn(String.format(Locale.ROOT, "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache));
-        logger.warn("Please increase cache size or disable group caching.");
+        log.warn(String.format(Locale.ROOT, "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache));
+        log.warn("Please increase cache size or disable group caching.");
         searchWithTimeLimiter(luceneFilter, secondPhaseCollectors);
       }
     } else {
@@ -447,7 +447,7 @@
       }
       searcher.search(q, collector);
     } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
-      logger.warn( "Query: " + query + "; " + x.getMessage() );
+      log.warn( "Query: " + query + "; " + x.getMessage() );
       qr.setPartialResults(true);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
index ec421e494d0..336c27be4b9 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
@@ -115,7 +115,7 @@
 
   }
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final QueryCommand queryCommand;
   private final List commands;
@@ -243,7 +243,7 @@
       searcher.search(query, collector);
     } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
       partialResults = true;
-      logger.warn( "Query: " + query + "; " + x.getMessage() );
+      log.warn( "Query: " + query + "; " + x.getMessage() );
     }
 
     if (includeHitCount) {
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
index d10216b423f..2bf60cbd3a3 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
@@ -45,7 +45,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
   public static final String LOCALITY_BLOCKS_LOCAL = "locality.blocks.local";
   public static final String LOCALITY_BLOCKS_RATIO = "locality.blocks.ratio";
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String hostname;
   private final ConcurrentMap<HdfsDirectory, ConcurrentMap<FileStatus, BlockLocation[]>> cache;
@@ -129,7 +129,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
           }
         }
       } catch (IOException e) {
-        logger.warn("Could not retrieve locality information for {} due to exception: {}",
+        log.warn("Could not retrieve locality information for {} due to exception: {}",
             hdfsDirectory.getHdfsDirPath(), e);
       }
     }
@@ -160,7 +160,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
    *          The directory to keep metrics on.
    */
   public void registerDirectory(HdfsDirectory dir) {
-    logger.info("Registering direcotry {} for locality metrics.", dir.getHdfsDirPath().toString());
+    log.info("Registering direcotry {} for locality metrics.", dir.getHdfsDirPath().toString());
     cache.put(dir, new ConcurrentHashMap());
   }
 
@@ -181,7 +181,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
 
       FileStatus[] statuses = fs.listStatus(dir.getHdfsDirPath());
       List statusList = Arrays.asList(statuses);
-      logger.debug("Updating locality information for: {}", statusList);
+      log.debug("Updating locality information for: {}", statusList);
 
       // Keep only the files that still exist
      cachedStatuses.retainAll(statusList);
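Note (not part of the patch above): every hunk follows the same shape, renaming a private static final SLF4J logger field from "logger" to "log" while keeping the MethodHandles-based class lookup and the parameterized log calls unchanged. The following is a minimal, hypothetical Java sketch of that declaration and usage pattern for context only; the class and method names here are invented and do not come from the Solr source.

import java.lang.invoke.MethodHandles;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerNamingExample {

  // Same idiom as the patched classes: the logger is resolved from the current
  // class via MethodHandles, so the declaration can be copied between classes
  // without editing a class literal, and the field is named "log".
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  public void doWork(String coreName) {
    // Parameterized messages ({} placeholders) defer string building until the
    // log level is actually enabled, matching the calls touched by the patch.
    log.info("Doing work for core {}", coreName);
    try {
      // ... work that may fail at runtime ...
    } catch (RuntimeException e) {
      log.warn("Work failed for core {}: {}", coreName, e.getMessage());
    }
  }
}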