log exceptions correctly in DatasourceInputFormat and IndexGeneratorJob

Bingkun Guo 2016-03-09 12:43:04 -06:00
parent 4c3a3f8da6
commit c20d7682a9
2 changed files with 6 additions and 4 deletions
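Why the argument order matters: Druid's logger wrapper exposes both a message-first overload, error(String, Object...), and a throwable-first overload, error(Throwable, String, Object...). With the old calls, the exception was passed as a trailing format argument to the message-first overload and its stack trace was never emitted; putting the Throwable first selects the overload that actually logs it. The snippet below is a minimal, self-contained sketch of that overload resolution; the Logger class in it is a hypothetical stand-in for Druid's logger, not the real implementation.

// Sketch only: a toy Logger illustrating why error("msg", e) drops the
// stack trace while error(e, "msg") keeps it, assuming the overload pair
// described above.
public class LoggerOverloadDemo
{
  static class Logger
  {
    // Message-first overload: trailing arguments are format arguments,
    // so a Throwable passed here is never printed as a stack trace.
    void error(String message, Object... formatArgs)
    {
      System.out.println("ERROR " + String.format(message, formatArgs));
    }

    // Throwable-first overload: the stack trace is logged along with the message.
    void error(Throwable t, String message, Object... formatArgs)
    {
      System.out.println("ERROR " + String.format(message, formatArgs));
      t.printStackTrace(System.out);
    }
  }

  public static void main(String[] args)
  {
    Logger log = new Logger();
    Exception e = new RuntimeException("boom");

    // Before the fix: binds to error(String, Object...); the exception is ignored.
    log.error("persist index error", e);

    // After the fix: binds to error(Throwable, String, Object...); the stack trace is printed.
    log.error(e, "persist index error");
  }
}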

IndexGeneratorJob.java

@@ -633,7 +633,7 @@ public class IndexGeneratorJob implements Jobby
         persist(persistIndex, interval, file, progressIndicator);
       }
       catch (Exception e) {
-        log.error("persist index error", e);
+        log.error(e, "persist index error");
         throw Throwables.propagate(e);
       }
       finally {

DatasourceInputFormat.java

@@ -144,7 +144,8 @@ public class DatasourceInputFormat extends InputFormat<NullWritable, InputRow>
   };
   @VisibleForTesting
-  DatasourceInputFormat setSupplier(Supplier<org.apache.hadoop.mapred.InputFormat> supplier) {
+  DatasourceInputFormat setSupplier(Supplier<org.apache.hadoop.mapred.InputFormat> supplier)
+  {
     this.supplier = supplier;
     return this;
   }
@@ -160,7 +161,7 @@ public class DatasourceInputFormat extends InputFormat<NullWritable, InputRow>
       locations = getFrequentLocations(segments, fio, conf);
     }
     catch (Exception e) {
-      logger.error("Exception thrown finding location of splits", e);
+      logger.error(e, "Exception thrown finding location of splits");
     }
     return new DatasourceInputSplit(segments, locations);
   }
@@ -181,7 +182,8 @@ public class DatasourceInputFormat extends InputFormat<NullWritable, InputRow>
     return getFrequentLocations(locations);
   }
-  private static String[] getFrequentLocations(Iterable<String> hosts) {
+  private static String[] getFrequentLocations(Iterable<String> hosts)
+  {
     final CountingMap<String> counter = new CountingMap<>();
     for (String location : hosts) {