SOLR-14910: Use in-line tags for non-standard logger declarations in the Gradle ValidateLogCalls check; change //logok to //nowarn

commit f6c4f8a755
parent 7e04e4d0ca
Author: Erick Erickson
Date: 2020-10-03 09:47:37 -04:00

39 changed files with 65 additions and 73 deletions
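At a glance, the change is mechanical: a line that used to be exempted from the log-call validation with a "// logOk" tag (in any casing or spacing) is now exempted with "// nowarn". A representative before/after, taken from the pattern repeated throughout this commit:

    // before this commit
    log.info("Strategy: " + strategy.toString()); // logOk
    // after this commit
    log.info("Strategy: " + strategy.toString()); // nowarn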

View File

@@ -67,7 +67,7 @@ class ValidateLogCallsTask extends DefaultTask {
     boolean violation = false
     // If the line has been explicitly OK'd, then it's OK!
-    if (line.replaceAll("\\s", "").toLowerCase().contains("//logok")) {
+    if (line.replaceAll("\\s", "").toLowerCase().contains("//nowarn")) {
       return
     }
     // Strip all of the comments, things in quotes and the like.
@@ -133,7 +133,7 @@ class ValidateLogCallsTask extends DefaultTask {
     }
     // Always report toString(). Note, this over-reports some constructs
-    // but just add //logOK if it's really OK.
+    // but just add //nowarn if it's really OK.
     if (violation == false) {
       if (line.contains("toString(") == true && prevLineNotIf) {
         cause = "Line contains toString"
@@ -151,27 +151,13 @@ class ValidateLogCallsTask extends DefaultTask {
     return
   }
-  // Require all our logger definitions lower case "log", except a couple of special ones.
+  // Require all our logger definitions lower case "log", except if they have //nowarn
   def checkLogName(File file, String line) {
     // It's many times faster to do the check this way than to use a regex
     if (line.contains("static ") && line.contains("getLogger") && line.contains(" log ") == false) {
-      String name = file.name
-      if (name.equals("LoggerFactory.java")) {
+      if (line.replaceAll("\\s", "").toLowerCase().contains("//nowarn")) {
         return
       }
-      if (name.equals("SolrCore.java") && (line.contains("requestLog") || line.contains("slowLog"))) {
-        return
-      }
-      if (name.equals("StartupLoggingUtils.java") && line.contains("getLoggerImplStr")) {
-        return
-      }
-      // Apparently the Hadoop code expects upper-case LOG, so...
-      if ((name.equals("HttpServer2.java") || name.equals("BlockPoolSlice.java") || name.equals("FileUtil.java"))
-          && line.contains(" LOG ")) {
-        return
-      }
       reportViolation("Change the logger name to lower-case 'log' in " + file.name + " " + line + " project" + project)
     }
   }
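The suppression check itself is deliberately forgiving: the line is stripped of all whitespace and lower-cased before matching, so "// nowarn", "//NOWARN" and "//  NoWarn" are all honored. A minimal Java sketch of the same rule (method name hypothetical):

    // Mirrors the Groovy check above: normalize the line, then substring-match the tag.
    static boolean isSuppressed(String line) {
        return line.replaceAll("\\s", "").toLowerCase().contains("//nowarn");
    }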

View File

@@ -60,7 +60,7 @@ NOTES:
   simple concatenation. This last is something of a style check.
 - You can get into some pretty convoluted constructs trying to pass some of these
-  checks. Adding //logok, with or without spaces, will cause the line to pass
+  checks. Adding //nowarn, with or without spaces, will cause the line to pass
   no matter what. Please use this hack sparingly and be conscientious about
   surrounding with 'if (log.is*Enabled)'.
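The recommended guard is the pattern used throughout the test changes in this commit: the suppressed call only runs when the level is enabled, so the concatenation/toString() cost is not paid unconditionally.

    if (log.isInfoEnabled()) {
        // Only evaluated when INFO is enabled; the tag silences the style check.
        log.info("Strategy: " + strategy.toString()); // nowarn
    }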

View File

@@ -67,7 +67,7 @@ public abstract class StrategyTestCase extends SpatialTestCase {
   protected boolean storeShape = true;
   protected void executeQueries(SpatialMatchConcern concern, String... testQueryFile) throws IOException {
-    log.info("testing queried for strategy "+strategy); // logOk
+    log.info("testing queried for strategy "+strategy); // nowarn
     for( String path : testQueryFile ) {
       Iterator<SpatialTestQuery> testQueryIterator = getTestQueries(path, ctx);
       runTestQueries(testQueryIterator, concern);

View File

@@ -67,7 +67,7 @@ public class HeatmapFacetCounterTest extends StrategyTestCase {
   @After
   public void after() {
-    log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); // logOK
+    log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); // nowarn
   }
   @Test

View File

@@ -87,7 +87,7 @@ public class RandomSpatialOpFuzzyPrefixTreeTest extends StrategyTestCase {
       ((PrefixTreeStrategy) strategy).setPointsOnly(true);
     }
-    log.info("Strategy: " + strategy.toString()); // logOk
+    log.info("Strategy: " + strategy.toString()); // nowarn
   }
   private void setupCtx2D(SpatialContext ctx) {

View File

@@ -294,6 +294,9 @@ Other Changes
 * SOLR-12987: Deprecated plugins/features are now logged once and with log category org.apache.solr.DEPRECATED
   (David Smiley)
+* SOLR-14910: Use in-line tags for logger declarations in Gradle ValidateLogCalls that are non-standard,
+  change //logok to //nowarn (Erick Erickson)
 ================== 8.6.2 ==================
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

View File

@@ -163,7 +163,7 @@ public class CarrotClusteringEngine extends SearchClusteringEngine {
     if (attributeXmls.length > 0) {
       if (attributeXmls.length > 1) {
         log.warn("More than one attribute file found, first one will be used: {}"
-            , Arrays.toString(attributeXmls)); // logOk
+            , Arrays.toString(attributeXmls)); // nowarn
       }
       withContextClassLoader(core.getResourceLoader().getClassLoader(), () -> {

View File

@@ -230,7 +230,7 @@ public class ExtractingDocumentLoader extends ContentStreamLoader {
     } catch (TikaException e) {
       if(ignoreTikaException)
         log.warn(new StringBuilder("skip extracting text due to ").append(e.getLocalizedMessage())
-            .append(". metadata=").append(metadata.toString()).toString()); // logOk
+            .append(". metadata=").append(metadata.toString()).toString()); // nowarn
       else
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }

View File

@@ -93,7 +93,7 @@ public class SchedulerMetricsCollector implements Closeable {
       try {
         metricSamples.addAll(future.get());
       } catch (ExecutionException e) {
-        log.error("Error occurred during metrics collection", e.getCause());//logok
+        log.error("Error occurred during metrics collection", e.getCause());//nowarn
         // continue any ways; do not fail
       }
     }

View File

@@ -119,11 +119,11 @@ public class ActiveReplicaWatcher implements CollectionStateWatcher {
       log.debug("-- onStateChanged@{}: replicaIds={}, solrCoreNames={} {}\ncollectionState {}"
           , Long.toHexString(hashCode()), replicaIds, solrCoreNames
           , (latch != null ? "\nlatch count=" + latch.getCount() : "")
-          , collectionState); // logOk
+          , collectionState); // nowarn
     }
     if (collectionState == null) { // collection has been deleted - don't wait
       if (log.isDebugEnabled()) {
-        log.debug("-- collection deleted, decrementing latch by {} ", replicaIds.size() + solrCoreNames.size()); // logOk
+        log.debug("-- collection deleted, decrementing latch by {} ", replicaIds.size() + solrCoreNames.size()); // nowarn
       }
       if (latch != null) {
         for (int i = 0; i < replicaIds.size() + solrCoreNames.size(); i++) {

View File

@@ -93,7 +93,7 @@ public class CloudUtil {
     }
     log.error("{}",
         new SolrException(ErrorCode.SERVER_ERROR, "Will not load SolrCore " + desc.getName()
-            + " because it has been replaced due to failover.")); // logOk
+            + " because it has been replaced due to failover.")); // nowarn
     throw new SolrException(ErrorCode.SERVER_ERROR,
         "Will not load SolrCore " + desc.getName()
             + " because it has been replaced due to failover.");

View File

@@ -632,9 +632,9 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
   private void printTrackingMaps() {
     if (log.isDebugEnabled()) {
       log.debug("RunningTasks: {}", runningTasks);
-      log.debug("BlockedTasks: {}", blockedTasks.keySet()); // logOk
-      log.debug("CompletedTasks: {}", completedTasks.keySet()); // logOk
-      log.debug("RunningZKTasks: {}", runningZKTasks); // logOk
+      log.debug("BlockedTasks: {}", blockedTasks.keySet()); // nowarn
+      log.debug("CompletedTasks: {}", completedTasks.keySet()); // nowarn
+      log.debug("RunningZKTasks: {}", runningZKTasks); // nowarn
     }
   }

View File

@@ -189,8 +189,8 @@ public final class SolrCore implements SolrInfoBean, Closeable {
   public static final String version = "1.0";
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final Logger requestLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".Request");
-  private static final Logger slowLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".SlowRequest");
+  private static final Logger requestLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".Request"); //nowarn
+  private static final Logger slowLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".SlowRequest"); //nowarn
   private String name;
   private String logid; // used to show what name is set
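This is the declaration half of the change: instead of the validator white-listing SolrCore.java by file name (see the checkLogName() diff above), the non-standard logger fields themselves now carry the tag. Any declaration needing a non-standard name can opt out the same way, e.g. (hypothetical field name):

    // The in-line tag exempts just this declaration from the lower-case-'log' rule.
    private static final Logger metricsLog =
        LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".Metrics"); //nowarn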

View File

@@ -437,7 +437,7 @@ public class IndexFetcher {
     if (log.isInfoEnabled()) {
       log.info("Follower's generation: {}", commit.getGeneration());
-      log.info("Follower's version: {}", IndexDeletionPolicyWrapper.getCommitTimestamp(commit)); // logOK
+      log.info("Follower's version: {}", IndexDeletionPolicyWrapper.getCommitTimestamp(commit)); // nowarn
     }
     if (latestVersion == 0L) {
@@ -1249,7 +1249,7 @@ public class IndexFetcher {
     try {
       if (log.isInfoEnabled()) {
         log.info("From dir files: {}", Arrays.asList(tmpIdxDir.listAll()));
-        log.info("To dir files: {}", Arrays.asList(indexDir.listAll())); //logOk
+        log.info("To dir files: {}", Arrays.asList(indexDir.listAll())); //nowarn
       }
     } catch (IOException e) {
       throw new RuntimeException(e);

View File

@@ -1606,7 +1606,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     }
     fos.write(buf, 0, read);
     fos.flush();
-    log.debug("Wrote {} bytes for file {}", offset + read, fileName); // logOK
+    log.debug("Wrote {} bytes for file {}", offset + read, fileName); // nowarn
     //Pause if necessary
     maxBytesBeforePause += read;

View File

@@ -1349,7 +1349,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
         success = true;
         break;
       }
-      log.warn("Force leader attempt {}. Waiting 5 secs for an active leader. State of the slice: {}", (i + 1), slice); //logok
+      log.warn("Force leader attempt {}. Waiting 5 secs for an active leader. State of the slice: {}", (i + 1), slice); //nowarn
     }
     if (success) {

View File

@@ -135,7 +135,7 @@ class PrepRecoveryOp implements CoreAdminHandler.CoreAdminOp {
             ", live=" + live + ", checkLive=" + checkLive + ", currentState=" + state
             + ", localState=" + localState + ", nodeName=" + nodeName +
             ", coreNodeName=" + coreNodeName + ", onlyIfActiveCheckResult=" + onlyIfActiveCheckResult
-            + ", nodeProps: " + replica); //LOGOK
+            + ", nodeProps: " + replica); //nowarn
       }
       if (!onlyIfActiveCheckResult && replica != null && (state == waitForState || leaderDoesNotNeedRecovery)) {
         if (checkLive == null) {

View File

@@ -233,7 +233,7 @@ public class HadoopAuthPlugin extends AuthenticationPlugin {
     log.info("----------HTTP Request---------");
     if (log.isInfoEnabled()) {
       log.info("{} : {}", request.getMethod(), request.getRequestURI());
-      log.info("Query : {}", request.getQueryString()); // logOk
+      log.info("Query : {}", request.getQueryString()); // nowarn
     }
     log.info("Headers :");
     Enumeration<String> headers = request.getHeaderNames();

View File

@@ -493,7 +493,7 @@ public class HttpSolrCall {
     }
     if (statusCode == AuthorizationResponse.FORBIDDEN.statusCode) {
       if (log.isDebugEnabled()) {
-        log.debug("UNAUTHORIZED auth header {} context : {}, msg: {}", req.getHeader("Authorization"), context, authResponse.getMessage()); // logOk
+        log.debug("UNAUTHORIZED auth header {} context : {}, msg: {}", req.getHeader("Authorization"), context, authResponse.getMessage()); // nowarn
       }
       sendError(statusCode,
           "Unauthorized request, Response code: " + statusCode);
@@ -503,7 +503,7 @@ public class HttpSolrCall {
       return RETURN;
     }
     if (!(statusCode == HttpStatus.SC_ACCEPTED) && !(statusCode == HttpStatus.SC_OK)) {
-      log.warn("ERROR {} during authentication: {}", statusCode, authResponse.getMessage()); // logOk
+      log.warn("ERROR {} during authentication: {}", statusCode, authResponse.getMessage()); // nowarn
       sendError(statusCode,
           "ERROR during authorization, Response code: " + statusCode);
       if (shouldAudit(EventType.ERROR)) {

View File

@@ -184,7 +184,7 @@ public class SpellCheckCollator {
         collations.add(collation);
       }
       if (log.isDebugEnabled()) {
-        log.debug("Collation: {} {}", collationQueryStr, (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : "")); // logOk
+        log.debug("Collation: {} {}", collationQueryStr, (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : "")); // nowarn
       }
     }
     return collations;

View File

@@ -312,7 +312,7 @@ public class SolrIndexSplitter {
       for (int segmentNumber = 0; segmentNumber<leaves.size(); segmentNumber++) {
         if (log.isInfoEnabled()) {
           log.info("SolrIndexSplitter: partition # {} partitionCount={} {} segment #={} segmentCount={}", partitionNumber, numPieces
-              , (ranges != null ? " range=" + ranges.get(partitionNumber) : ""), segmentNumber, leaves.size()); // logOk
+              , (ranges != null ? " range=" + ranges.get(partitionNumber) : ""), segmentNumber, leaves.size()); // nowarn
         }
         CodecReader subReader = SlowCodecReaderWrapper.wrap(leaves.get(segmentNumber).reader());
         iw.addIndexes(new LiveDocsReader(subReader, segmentDocSets.get(segmentNumber)[partitionNumber]));

View File

@@ -53,7 +53,7 @@ public final class StartupLoggingUtils {
     }
   }
-  public static String getLoggerImplStr() {
+  public static String getLoggerImplStr() { //nowarn
     return binder.getLoggerFactoryClassStr();
   }

View File

@@ -76,7 +76,8 @@ import org.slf4j.LoggerFactory;
 public class FileUtil {
   public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
-  private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);
+  // Apparently the Hadoop code expects upper-case LOG, so...
+  private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class); //nowarn
   /* The error code is defined in winutils to indicate insufficient
    * privilege to create symbolic links. This value need to keep in

View File

@@ -81,7 +81,8 @@ import com.google.common.annotations.VisibleForTesting;
 public class BlockPoolSlice {
   public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
-  static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class);
+  // Apparently the Hadoop code expects upper-case LOG, so...
+  static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class); //nowarn
   private final String bpid;
   private final FsVolumeImpl volume; // volume to which this BlockPool belongs to

View File

@@ -118,7 +118,8 @@ import org.slf4j.LoggerFactory;
 public final class HttpServer2 implements FilterContainer {
   public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
-  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
+  // Apparently the Hadoop code expects upper-case LOG, so...
+  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); //nowarn
   public static final String HTTP_SCHEME = "http";
   public static final String HTTPS_SCHEME = "https";

View File

@@ -206,7 +206,7 @@ public class ChaosMonkeySafeLeaderWithPullReplicasTest extends AbstractFullDistr
       if (log.isInfoEnabled()) {
         log.info("control docs:{}\n\n", controlClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-        log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // logOk
+        log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // nowarn
       }
       waitForReplicationFromReplicas(DEFAULT_COLLECTION, cloudClient.getZkStateReader(), new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME));

View File

@@ -99,7 +99,7 @@ public class OverseerRolesTest extends SolrCloudTestCase {
   private void logOverseerState() throws KeeperException, InterruptedException {
     if (log.isInfoEnabled()) {
       log.info("Overseer: {}", getLeaderNode(zkClient()));
-      log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // logOk
+      log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // nowarn
     }
   }

View File

@@ -1171,14 +1171,14 @@ public class OverseerTest extends SolrTestCaseJ4 {
     Snapshot snapshot = timer.getSnapshot();
     if (log.isInfoEnabled()) {
       log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate());
-      log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); // logOk
-      log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); // logOk
-      log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); // logOk
-      log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); // logOk
-      log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); // logOk
-      log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); // logOk
-      log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); // logOk
-      log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); // logOk
+      log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); // nowarn
+      log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); // nowarn
+      log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); // nowarn
+      log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); // nowarn
+      log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); // nowarn
+      log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); // nowarn
+      log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); // nowarn
+      log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); // nowarn
     }
   }

View File

@@ -83,7 +83,7 @@ public class RollingRestartTest extends AbstractFullDistribZkTestBase {
     boolean sawLiveDesignate = false;
     int numRestarts = 1 + random().nextInt(TEST_NIGHTLY ? 12 : 2);
     for (int i = 0; i < numRestarts; i++) {
-      log.info("Rolling restart #{}", i + 1); // logOk
+      log.info("Rolling restart #{}", i + 1); // nowarn
       for (CloudJettyRunner cloudJetty : designateJettys) {
         log.info("Restarting {}", cloudJetty);
         chaosMonkey.stopJetty(cloudJetty);

View File

@@ -752,7 +752,7 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
         break;
       } catch (NumberFormatException | AssertionError notYet) {
         if (log.isInfoEnabled()) {
-          log.info("{}th attempt failure on {} details are {}", retries + 1, notYet, followerDetails); // logOk
+          log.info("{}th attempt failure on {} details are {}", retries + 1, notYet, followerDetails); // nowarn
         }
         if (retries>9) {
           log.error("giving up: ", notYet);

View File

@@ -265,7 +265,7 @@ public class BasicAuthIntegrationTest extends SolrCloudAuthTestCase {
       assertTrue(obj.containsKey("memory"));
     } catch (Exception e) {
       log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}"
-          , e, baos.toString(StandardCharsets.UTF_8.name())); // logOk
+          , e, baos.toString(StandardCharsets.UTF_8.name())); // nowarn
     }
     SolrParams params = new MapSolrParams(Collections.singletonMap("q", "*:*"));

View File

@@ -943,7 +943,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
     if (log.isInfoEnabled()) {
       log.info("Non leader 0: {}", ((HttpSolrClient) NONLEADERS.get(0)).getBaseURL());
-      log.info("Non leader 1: {}", ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); // logOk
+      log.info("Non leader 1: {}", ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); // nowarn
     }
     SolrDocument doc0 = NONLEADERS.get(0).getById(String.valueOf(0), params("distrib", "false"));
@@ -1034,7 +1034,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
       if (log.isInfoEnabled()) {
         log.info("Testing client (Fetch missing test): {}", ((HttpSolrClient) client).getBaseURL());
         log.info("Version at {} is: {}"
-            , ((HttpSolrClient) client).getBaseURL(), getReplicaValue(client, 1, "_version_")); // logOk
+            , ((HttpSolrClient) client).getBaseURL(), getReplicaValue(client, 1, "_version_")); // nowarn
       }
       assertReplicaValue(client, 1, "inplace_updatable_float", (newinplace_updatable_float + 2.0f),
           "inplace_updatable_float didn't match for replica at client: " + ((HttpSolrClient) client).getBaseURL());
@@ -1344,7 +1344,7 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
       if (log.isInfoEnabled()) {
         log.info("Testing client (testDBQUsingUpdatedFieldFromDroppedUpdate): {}", ((HttpSolrClient) client).getBaseURL());
         log.info("Version at {} is: {}", ((HttpSolrClient) client).getBaseURL(),
-            getReplicaValue(client, 1, "_version_")); // logOk
+            getReplicaValue(client, 1, "_version_")); // nowarn
       }
       assertNull(client.getById("1", params("distrib", "false")));
     }

View File

@@ -76,7 +76,7 @@ public class CategoryRoutedAliasUpdateProcessorTest extends RoutedAliasUpdatePro
     //log this to help debug potential causes of problems
     if (log.isInfoEnabled()) {
       log.info("SolrClient: {}", solrClient);
-      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk
+      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn
     }
   }

View File

@@ -74,7 +74,7 @@ public class DimensionalRoutedAliasUpdateProcessorTest extends RoutedAliasUpdate
     //log this to help debug potential causes of problems
     if (log.isInfoEnabled()) {
       log.info("SolrClient: {}", solrClient);
-      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk
+      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn
     }
   }

View File

@@ -91,7 +91,7 @@ public class TimeRoutedAliasUpdateProcessorTest extends RoutedAliasUpdateProcess
     //log this to help debug potential causes of problems
     if (log.isInfoEnabled()) {
       log.info("SolrClient: {}", solrClient);
-      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk
+      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn
     }
   }

View File

@@ -359,7 +359,7 @@ public class TestSolrCLIRunExample extends SolrTestCaseJ4 {
       assertEquals("it should be ok "+tool+" "+Arrays.toString(toolArgs),0, status);
     } catch (Exception e) {
       log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}"
-          , e , baos.toString(StandardCharsets.UTF_8.name())); // logOk
+          , e , baos.toString(StandardCharsets.UTF_8.name())); // nowarn
       throw e;
     }

View File

@@ -154,10 +154,10 @@ public class NodesSysPropsCacher implements SolrCloseable {
           Thread.sleep(backOffTime);
         } catch (InterruptedException e1) {
           Thread.currentThread().interrupt();
-          log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // logOk
+          log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // nowarn
          break;
        }
-        log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // logOk
+        log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // nowarn
       }
     }
   }

View File

@@ -800,7 +800,7 @@ public class Utils {
     int statusCode = rsp.getStatusLine().getStatusCode();
     if (statusCode != 200) {
       try {
-        log.error("Failed a request to: {}, status: {}, body: {}", url, rsp.getStatusLine(), EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8)); // logOk
+        log.error("Failed a request to: {}, status: {}, body: {}", url, rsp.getStatusLine(), EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8)); // nowarn
       } catch (IOException e) {
         log.error("could not print error", e);
       }

View File

@@ -431,7 +431,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
       if (useTlogReplicas()) {
         if (log.isInfoEnabled()) {
           log.info("create jetty {} in directory {} of type {} in shard {}"
-              , i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1)); // logOk
+              , i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1)); // nowarn
         }
         customThreadPool.submit(() -> {
           try {
@@ -463,7 +463,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
       } else {
         if (log.isInfoEnabled()) {
           log.info("create jetty {} in directory {} of type {} for shard{}"
-              , i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1)); // logOk
+              , i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1)); // nowarn
         }
         customThreadPool.submit(() -> {
@@ -492,7 +492,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
           addedReplicas++;
         }
       } else {
-        log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1)); // logOk
+        log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1)); // nowarn
         customThreadPool.submit(() -> {
           try {
             JettySolrRunner j = createJetty(jettyDir, useJettyDataDir ? getDataDir(testDir + "/jetty"