mirror of https://github.com/apache/druid.git
Rejigger exception handling in standalone realtime

- Throw FormattedExceptions on null or unparseable timestamps
- Throw away events (and increment thrownAway) on non-FormattedExceptions in the fire chief
commit ec53a30c6e
parent 930b089f7f
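In outline, the change splits responsibilities: parsers signal malformed input by throwing FormattedException, and the fire chief decides how each failure is counted. A minimal sketch of that contract ahead of the full diff below; the IngestSketch class and the metrics comments are hypothetical stand-ins, not code from this patch:

import com.metamx.common.exception.FormattedException;
import com.metamx.druid.indexer.data.InputRowParser;
import com.metamx.druid.input.InputRow;

import java.util.Map;

class IngestSketch
{
  // Classify one event the way the reworked fire chief does:
  // FormattedException counts as unparseable, everything else is thrown away.
  static String ingest(InputRowParser<Map<String, Object>> parser, Map<String, Object> rawEvent)
  {
    try {
      final InputRow row = parser.parse(rawEvent); // may now throw FormattedException
      return "processed: " + row;
    }
    catch (FormattedException e) {
      return "unparseable";                        // metrics.incrementUnparseable()
    }
    catch (Exception e) {
      return "thrownAway";                         // metrics.incrementThrownAway()
    }
  }
}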
InputRowParser.java
@@ -1,9 +1,10 @@
 package com.metamx.druid.indexer.data;
 
+import com.metamx.common.exception.FormattedException;
 import com.metamx.druid.input.InputRow;
 
 public interface InputRowParser<T>
 {
-  public InputRow parse(T input);
+  public InputRow parse(T input) throws FormattedException;
   public void addDimensionExclusion(String dimension);
 }
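Every implementation now inherits the throws clause on parse. A hedged sketch of what an implementer might look like; FixedFieldParser and its fixed "timestamp" key are invented for illustration, while the interface, FormattedException.Builder, and MapBasedInputRow come from this diff:

import com.google.common.collect.Lists;
import com.metamx.common.exception.FormattedException;
import com.metamx.druid.indexer.data.InputRowParser;
import com.metamx.druid.input.InputRow;
import com.metamx.druid.input.MapBasedInputRow;
import org.joda.time.DateTime;

import java.util.Map;

public class FixedFieldParser implements InputRowParser<Map<String, Object>>
{
  @Override
  public InputRow parse(Map<String, Object> input) throws FormattedException
  {
    try {
      // Hypothetical convention: the event time always lives under "timestamp"
      final Object ts = input.get("timestamp");
      if (ts == null) {
        throw new NullPointerException("Null timestamp in input: " + input);
      }
      final DateTime timestamp = new DateTime(ts);
      // Treat every key as a dimension in this sketch
      return new MapBasedInputRow(timestamp.getMillis(), Lists.newArrayList(input.keySet()), input);
    }
    catch (Exception e) {
      // The same translation MapInputRowParser performs in the next hunk
      throw new FormattedException.Builder()
          .withErrorCode(FormattedException.ErrorCode.UNPARSABLE_TIMESTAMP)
          .withMessage(e.toString())
          .build();
    }
  }

  @Override
  public void addDimensionExclusion(String dimension)
  {
    // No exclusions in this sketch
  }
}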
MapInputRowParser.java
@@ -4,6 +4,7 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
+import com.metamx.common.exception.FormattedException;
 import com.metamx.druid.input.InputRow;
 import com.metamx.druid.input.MapBasedInputRow;
 import org.joda.time.DateTime;
@@ -37,21 +38,30 @@ public class MapInputRowParser implements InputRowParser<Map<String, Object>>
   }
 
   @Override
-  public InputRow parse(Map<String, Object> theMap)
+  public InputRow parse(Map<String, Object> theMap) throws FormattedException
   {
     final List<String> dimensions = dataSpec.hasCustomDimensions()
                                     ? dataSpec.getDimensions()
                                     : Lists.newArrayList(Sets.difference(theMap.keySet(), dimensionExclusions));
 
-    final DateTime timestamp = timestampSpec.extractTimestamp(theMap);
-    if (timestamp == null) {
-      final String input = theMap.toString();
-      throw new NullPointerException(
-          String.format(
-              "Null timestamp in input: %s",
-              input.length() < 100 ? input : input.substring(0, 100) + "..."
-          )
-      );
+    final DateTime timestamp;
+    try {
+      timestamp = timestampSpec.extractTimestamp(theMap);
+      if (timestamp == null) {
+        final String input = theMap.toString();
+        throw new NullPointerException(
+            String.format(
+                "Null timestamp in input: %s",
+                input.length() < 100 ? input : input.substring(0, 100) + "..."
+            )
+        );
+      }
     }
+    catch (Exception e) {
+      throw new FormattedException.Builder()
+          .withErrorCode(FormattedException.ErrorCode.UNPARSABLE_TIMESTAMP)
+          .withMessage(e.toString())
+          .build();
+    }
 
     return new MapBasedInputRow(timestamp.getMillis(), dimensions, theMap);
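At a call site, a bad timestamp now surfaces as one exception type instead of a stray NullPointerException or a joda-time parse error. A usage sketch; the parser argument and the sample row are assumptions:

import com.google.common.collect.ImmutableMap;
import com.metamx.common.exception.FormattedException;
import com.metamx.druid.indexer.data.InputRowParser;

import java.util.Map;

class ParseDemo
{
  static void demo(InputRowParser<Map<String, Object>> parser)
  {
    final Map<String, Object> badRow = ImmutableMap.<String, Object>of(
        "timestamp", "not-a-time",
        "page", "home"
    );
    try {
      parser.parse(badRow);
    }
    catch (FormattedException e) {
      // getDetails() is the same payload RealtimeManager logs below
      // for unparseable lines
      System.out.println("unparseable: " + e.getDetails());
    }
  }
}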
RealtimeManager.java
@@ -164,32 +164,37 @@ public class RealtimeManager implements QuerySegmentWalker
           final InputRow inputRow;
           try {
             inputRow = firehose.nextRow();
-
-            final Sink sink = plumber.getSink(inputRow.getTimestampFromEpoch());
-            if (sink == null) {
-              metrics.incrementThrownAway();
-              log.debug("Throwing away event[%s]", inputRow);
-
-              if (System.currentTimeMillis() > nextFlush) {
-                plumber.persist(firehose.commit());
-                nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
-              }
-
-              continue;
-            }
-
-            int currCount = sink.add(inputRow);
-            metrics.incrementProcessed();
-            if (currCount >= config.getMaxRowsInMemory() || System.currentTimeMillis() > nextFlush) {
-              plumber.persist(firehose.commit());
-              nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
-            }
           }
           catch (FormattedException e) {
             log.info(e, "unparseable line: %s", e.getDetails());
             metrics.incrementUnparseable();
             continue;
           }
+          catch (Exception e) {
+            log.info(e, "thrown away line due to exception");
+            metrics.incrementThrownAway();
+            continue;
+          }
+
+          final Sink sink = plumber.getSink(inputRow.getTimestampFromEpoch());
+          if (sink == null) {
+            metrics.incrementThrownAway();
+            log.debug("Throwing away event[%s]", inputRow);
+
+            if (System.currentTimeMillis() > nextFlush) {
+              plumber.persist(firehose.commit());
+              nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
+            }
+
+            continue;
+          }
+
+          int currCount = sink.add(inputRow);
+          metrics.incrementProcessed();
+          if (currCount >= config.getMaxRowsInMemory() || System.currentTimeMillis() > nextFlush) {
+            plumber.persist(firehose.commit());
+            nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
+          }
         }
       } catch (RuntimeException e) {
         log.makeAlert(e, "RuntimeException aborted realtime processing[%s]", fireDepartment.getSchema().getDataSource())
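The net effect of the narrowed try block: only firehose.nextRow() is guarded per event, so a RuntimeException from a misbehaving firehose now costs one event (counted as thrownAway) instead of escaping to the outer RuntimeException alert and ending the run. A toy harness, entirely hypothetical, showing that per-event containment:

import java.util.Arrays;
import java.util.Iterator;

class LoopSketch
{
  public static void main(String[] args)
  {
    Iterator<String> firehose = Arrays.asList("good", "boom", "good").iterator();
    int processed = 0;
    int thrownAway = 0;
    while (firehose.hasNext()) {
      final String line;
      try {
        line = firehose.next();
        if ("boom".equals(line)) {
          throw new IllegalStateException("firehose hiccup");
        }
      }
      catch (Exception e) {
        thrownAway++;      // event dropped, loop keeps running
        continue;
      }
      processed++;         // sink processing would happen here
    }
    System.out.printf("processed=%d thrownAway=%d%n", processed, thrownAway);
    // prints: processed=2 thrownAway=1
  }
}

Note the catch order in the real code above: the more specific FormattedException must precede the blanket Exception clause, or the compiler rejects the second handler as unreachable.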