remove unnecessary log that potentially takes up lots of heap (#5286)

Himanshu authored 2018-01-24 13:30:51 -06:00; committed by Slim
parent cc32640642
commit d54330be76
2 changed files with 4 additions and 8 deletions

@@ -573,13 +573,6 @@ public class HadoopDruidIndexerConfig
 public void verify()
 {
-  try {
-    log.info("Running with config:%n%s", JSON_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(this));
-  }
-  catch (IOException e) {
-    throw Throwables.propagate(e);
-  }
   Preconditions.checkNotNull(schema.getDataSchema().getDataSource(), "dataSource");
   Preconditions.checkNotNull(schema.getDataSchema().getParser().getParseSpec(), "parseSpec");
   Preconditions.checkNotNull(schema.getDataSchema().getParser().getParseSpec().getTimestampSpec(), "timestampSpec");
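The deleted log serialized the entire ingest spec to pretty-printed JSON just to print it. A minimal sketch of the cost, using only standard Jackson (the config object and sizes below are made-up stand-ins, not Druid code): writeValueAsString materializes the whole object graph as one transient String, and a String.format-style log call then copies it into a second String.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;

public class ConfigLogCostSketch
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    // Stand-in for a large HadoopDruidIndexerConfig with many intervals/segments.
    Object config = Collections.nCopies(100_000, "interval-or-segment-descriptor");

    // First large transient allocation: the whole config as a single String.
    String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);

    // Second allocation: format-style logging copies the JSON into the final
    // message, so the heap briefly holds the config text twice over.
    String message = String.format("Running with config:%n%s", json);
    System.out.println(message.length() + " chars allocated just to log the config");
  }
}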


@@ -81,7 +81,10 @@ public class DatasourceInputFormat extends InputFormat<NullWritable, InputRow>
   throw new ISE("No segments found to read");
 }
-logger.info("segments to read [%s]", segmentsStr);
+// Note: the log is split across two lines so that a new String is not generated to print it.
+// segmentsStr could be quite large when re-indexing multiple months of data.
+logger.info("Segments to read are...");
+logger.info(segmentsStr);
 long maxSize = conf.getLong(CONF_MAX_SPLIT_SIZE, 0);
 if (maxSize < 0) {
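The comment in the added lines carries the reasoning: with a String.format-style logger, formatting segmentsStr into "segments to read [%s]" allocates a brand-new String at least as large as segmentsStr itself. A self-contained sketch of the two-line pattern, using a hypothetical info() helper that mimics format-style logging (not Druid's actual Logger):

import java.util.Collections;

public class SplitLogSketch
{
  // Hypothetical stand-in for a format-style logger method: it only calls
  // String.format when format arguments are actually supplied.
  static void info(String message, Object... formatArgs)
  {
    System.out.println(formatArgs.length == 0 ? message : String.format(message, formatArgs));
  }

  public static void main(String[] args)
  {
    // Stand-in for a very long segment list built while re-indexing months of data.
    String segmentsStr = String.join(",", Collections.nCopies(1_000_000, "segmentId"));

    // Single-call form: String.format builds a second copy of segmentsStr.
    // info("segments to read [%s]", segmentsStr);

    // Two-line form from this commit: the constant header is logged first and
    // the large String is then logged as-is, with no extra copy created.
    info("Segments to read are...");
    info(segmentsStr);
  }
}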