SOLR-12558: solr/core (private) logger renames

Christine Poerschke 2018-07-31 16:58:51 +01:00
parent abd6b07ea9
commit 9262ed7e56
9 changed files with 33 additions and 33 deletions
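Every file in this commit makes the same mechanical change: the private SLF4J logger field is renamed from `logger` to `log`; no behavior changes. For context, all of the touched classes declare their logger with the `MethodHandles` idiom, which resolves the enclosing class automatically. A minimal standalone sketch of that declaration pattern (the class name `Example` is hypothetical; the declaration itself mirrors the diffs below):

```java
import java.lang.invoke.MethodHandles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {
  // MethodHandles.lookup().lookupClass() resolves to the enclosing class at
  // class-initialization time, so this line can be copied between classes
  // without editing a class literal. The commit only renames the field.
  private static final Logger log =
      LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  public static void main(String[] args) {
    log.info("Logger name: {}", log.getName()); // prints "Example"
  }
}
```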

ConfigSetService.java

@@ -44,7 +44,7 @@ import org.slf4j.LoggerFactory;
*/
public abstract class ConfigSetService {
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static ConfigSetService createConfigSetService(NodeConfig nodeConfig, SolrResourceLoader loader, ZkController zkController) {
if (zkController != null)
return new CloudConfigSetService(loader, zkController);
@@ -228,15 +228,15 @@ public abstract class ConfigSetService {
try {
String cachedName = cacheName(schemaFile);
return schemaCache.get(cachedName, () -> {
logger.info("Creating new index schema for core {}", cd.getName());
log.info("Creating new index schema for core {}", cd.getName());
return IndexSchemaFactory.buildIndexSchema(cd.getSchemaName(), solrConfig);
});
} catch (ExecutionException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Error creating index schema for core " + cd.getName(), e);
} catch (IOException e) {
logger.warn("Couldn't get last modified time for schema file {}: {}", schemaFile, e.getMessage());
logger.warn("Will not use schema cache");
log.warn("Couldn't get last modified time for schema file {}: {}", schemaFile, e.getMessage());
log.warn("Will not use schema cache");
}
}
return IndexSchemaFactory.buildIndexSchema(cd.getSchemaName(), solrConfig);
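An aside on the two warn calls above (unchanged by this commit): SLF4J prints a stack trace only when the last argument is a `Throwable` that is not consumed by a `{}` placeholder, so passing `e.getMessage()` records the message text alone. A sketch of both styles (the `WarnStyles` wrapper is hypothetical, added only to make the snippet self-contained):

```java
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class WarnStyles {
  private static final Logger log =
      LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  static void report(Object schemaFile, IOException e) {
    // Style used in the hunk above: only the exception message is recorded.
    log.warn("Couldn't get last modified time for schema file {}: {}",
        schemaFile, e.getMessage());
    // Trailing Throwable with no matching placeholder: stack trace included.
    log.warn("Couldn't get last modified time for schema file {}", schemaFile, e);
  }
}
```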

CorePropertiesLocator.java

@@ -50,13 +50,13 @@ public class CorePropertiesLocator implements CoresLocator {
public static final String PROPERTIES_FILENAME = "core.properties";
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final Path rootDirectory;
public CorePropertiesLocator(Path coreDiscoveryRoot) {
this.rootDirectory = coreDiscoveryRoot;
logger.debug("Config-defined core root directory: {}", this.rootDirectory);
log.debug("Config-defined core root directory: {}", this.rootDirectory);
}
@Override
@@ -92,7 +92,7 @@ public class CorePropertiesLocator implements CoresLocator {
}
}
catch (IOException e) {
logger.error("Couldn't persist core properties to {}: {}", propfile, e.getMessage());
log.error("Couldn't persist core properties to {}: {}", propfile, e.getMessage());
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Couldn't persist core properties to " + propfile.toAbsolutePath().toString() + " : " + e.getMessage());
}
@@ -109,7 +109,7 @@ public class CorePropertiesLocator implements CoresLocator {
try {
Files.deleteIfExists(propfile);
} catch (IOException e) {
logger.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
log.warn("Couldn't delete core properties file {}: {}", propfile, e.getMessage());
}
}
}
@@ -132,7 +132,7 @@ public class CorePropertiesLocator implements CoresLocator {
@Override
public List<CoreDescriptor> discover(final CoreContainer cc) {
logger.debug("Looking for core definitions underneath {}", rootDirectory);
log.debug("Looking for core definitions underneath {}", rootDirectory);
final List<CoreDescriptor> cds = Lists.newArrayList();
try {
Set<FileVisitOption> options = new HashSet<>();
@@ -144,7 +144,7 @@ public class CorePropertiesLocator implements CoresLocator {
if (file.getFileName().toString().equals(PROPERTIES_FILENAME)) {
CoreDescriptor cd = buildCoreDescriptor(file, cc);
if (cd != null) {
logger.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
log.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
cds.add(cd);
}
return FileVisitResult.SKIP_SIBLINGS;
@@ -157,19 +157,19 @@ public class CorePropertiesLocator implements CoresLocator {
// if we get an error on the root, then fail the whole thing
// otherwise, log a warning and continue to try and load other cores
if (file.equals(rootDirectory)) {
logger.error("Error reading core root directory {}: {}", file, exc);
log.error("Error reading core root directory {}: {}", file, exc);
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error reading core root directory");
}
logger.warn("Error visiting {}: {}", file, exc);
log.warn("Error visiting {}: {}", file, exc);
return FileVisitResult.CONTINUE;
}
});
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Couldn't walk file tree under " + this.rootDirectory, e);
}
logger.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
log.info("Found {} core definitions underneath {}", cds.size(), rootDirectory);
if (cds.size() > 0) {
logger.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
log.info("Cores are: {}", cds.stream().map(CoreDescriptor::getName).collect(Collectors.toList()));
}
return cds;
}
@@ -190,7 +190,7 @@ public class CorePropertiesLocator implements CoresLocator {
return ret;
}
catch (IOException e) {
logger.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());
log.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());
return null;
}

GraphHandler.java

@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
private StreamFactory streamFactory = new DefaultStreamFactory();
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private String coreName;
@Override
@@ -110,7 +110,7 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
tupleStream = this.streamFactory.constructStream(params.get("expr"));
} catch (Exception e) {
//Catch exceptions that occur while the stream is being created. This will include streaming expression parse rules.
- SolrException.log(logger, e);
+ SolrException.log(log, e);
Map requestContext = req.getContext();
requestContext.put("stream", new DummyErrorStream(e));
return;

SQLHandler.java

@@ -49,7 +49,7 @@ import org.slf4j.LoggerFactory;
public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private static String defaultZkhost = null;
private static String defaultWorkerCollection = null;
@@ -124,7 +124,7 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
if(tupleStream != null) {
tupleStream.close();
}
- SolrException.log(logger, e);
+ SolrException.log(log, e);
rsp.add("result-set", new StreamHandler.DummyErrorStream(e));
}
}

StreamHandler.java

@@ -66,7 +66,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
static SolrClientCache clientCache = new SolrClientCache();
static ModelCache modelCache = null;
private StreamFactory streamFactory = new DefaultStreamFactory();
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private String coreName;
private Map<String,DaemonStream> daemons = Collections.synchronizedMap(new HashMap());
@@ -157,7 +157,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
} catch (Exception e) {
// Catch exceptions that occur while the stream is being created. This will include streaming expression parse
// rules.
- SolrException.log(logger, e);
+ SolrException.log(log, e);
rsp.add("result-set", new DummyErrorStream(e));
return;
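GraphHandler, SQLHandler, and StreamHandler all route stream-construction failures through `SolrException.log(log, e)`, so the renamed field is passed as an argument rather than called directly; the helper logs the throwable via the supplied logger. A hedged sketch of the call pattern (the class name `StreamErrorExample` is hypothetical; `SolrException.log`'s exact formatting is Solr-internal):

```java
import java.lang.invoke.MethodHandles;
import org.apache.solr.common.SolrException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class StreamErrorExample {
  private static final Logger log =
      LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  static void handle(Exception e) {
    // The handler's logger is handed to the utility, which logs through it;
    // this commit only changes the name of the field being passed.
    SolrException.log(log, e);
  }
}
```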

SolrEnumerator.java

@@ -30,7 +30,7 @@ import java.util.Map;
/** Enumerator that reads from a Solr collection. */
class SolrEnumerator implements Enumerator<Object> {
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final TupleStream tupleStream;
private final List<Map.Entry<String, Class>> fields;
@@ -126,7 +126,7 @@ class SolrEnumerator implements Enumerator<Object> {
return true;
}
} catch (IOException e) {
logger.error("IOException", e);
log.error("IOException", e);
return false;
}
}

Grouping.java

@@ -77,7 +77,7 @@ import org.slf4j.LoggerFactory;
*/
public class Grouping {
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrIndexSearcher searcher;
private final QueryResult qr;
@@ -384,8 +384,8 @@ public class Grouping {
cachedCollector.replay(secondPhaseCollectors);
} else {
signalCacheWarning = true;
- logger.warn(String.format(Locale.ROOT, "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache));
- logger.warn("Please increase cache size or disable group caching.");
+ log.warn(String.format(Locale.ROOT, "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache));
+ log.warn("Please increase cache size or disable group caching.");
searchWithTimeLimiter(luceneFilter, secondPhaseCollectors);
}
} else {
@@ -447,7 +447,7 @@ public class Grouping {
}
searcher.search(q, collector);
} catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
logger.warn( "Query: " + query + "; " + x.getMessage() );
log.warn( "Query: " + query + "; " + x.getMessage() );
qr.setPartialResults(true);
}
}
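Unrelated to the rename: the timeout warnings here in `Grouping` (and in `CommandHandler` below) build their message by string concatenation, which runs even when WARN is disabled; the parameterized form defers formatting. A sketch of both (the class name `TimeoutWarning` and the `query`/`x` parameters are stand-ins for the variables above):

```java
import java.lang.invoke.MethodHandles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class TimeoutWarning {
  private static final Logger log =
      LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  static void warn(Object query, Exception x) {
    // As in the source: the message String is built unconditionally.
    log.warn("Query: " + query + "; " + x.getMessage());
    // Parameterized alternative: formatting is deferred until actually logged.
    log.warn("Query: {}; {}", query, x.getMessage());
  }
}
```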

CommandHandler.java

@@ -115,7 +115,7 @@ public class CommandHandler {
}
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final QueryCommand queryCommand;
private final List<Command> commands;
@@ -243,7 +243,7 @@ public class CommandHandler {
searcher.search(query, collector);
} catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
partialResults = true;
logger.warn( "Query: " + query + "; " + x.getMessage() );
log.warn( "Query: " + query + "; " + x.getMessage() );
}
if (includeHitCount) {

HdfsLocalityReporter.java

@@ -45,7 +45,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
public static final String LOCALITY_BLOCKS_LOCAL = "locality.blocks.local";
public static final String LOCALITY_BLOCKS_RATIO = "locality.blocks.ratio";
- private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private String hostname;
private final ConcurrentMap<HdfsDirectory,ConcurrentMap<FileStatus,BlockLocation[]>> cache;
@@ -129,7 +129,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
}
}
} catch (IOException e) {
logger.warn("Could not retrieve locality information for {} due to exception: {}",
log.warn("Could not retrieve locality information for {} due to exception: {}",
hdfsDirectory.getHdfsDirPath(), e);
}
}
@@ -160,7 +160,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
* The directory to keep metrics on.
*/
public void registerDirectory(HdfsDirectory dir) {
logger.info("Registering direcotry {} for locality metrics.", dir.getHdfsDirPath().toString());
log.info("Registering direcotry {} for locality metrics.", dir.getHdfsDirPath().toString());
cache.put(dir, new ConcurrentHashMap<FileStatus, BlockLocation[]>());
}
@@ -181,7 +181,7 @@ public class HdfsLocalityReporter implements SolrInfoBean, SolrMetricProducer {
FileStatus[] statuses = fs.listStatus(dir.getHdfsDirPath());
List<FileStatus> statusList = Arrays.asList(statuses);
logger.debug("Updating locality information for: {}", statusList);
log.debug("Updating locality information for: {}", statusList);
// Keep only the files that still exist
cachedStatuses.retainAll(statusList);