fix log typo (#6755)

* fix log typo, add DataSegmentUtils.getIdentifiersString util method

* fix indecisive oops
Clint Wylie 2018-12-18 15:10:25 -08:00 committed by Gian Merlino
parent f0ee6bf898
commit 9505074530
7 changed files with 23 additions and 7 deletions

DataSegmentUtils.java

@@ -28,7 +28,9 @@ import org.apache.druid.java.util.common.logger.Logger;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 
+import java.util.Collection;
 import java.util.Objects;
+import java.util.stream.Collectors;
 
 @PublicApi
 public class DataSegmentUtils
@@ -108,6 +110,16 @@ public class DataSegmentUtils
     return segmentDesc.withInterval(newInterval).toString();
   }
 
+  /**
+   * Creates a comma delimited list of segment identifiers
+   * @param segments
+   * @return
+   */
+  public static String getIdentifiersString(Collection<DataSegment> segments)
+  {
+    return segments.stream().map(DataSegment::getIdentifier).collect(Collectors.joining(", "));
+  }
+
   public static class SegmentIdentifierParts
   {
     private final String dataSource;
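A minimal, runnable sketch (not part of the commit) of what the new helper produces; the identifiers here are hypothetical stand-ins for DataSegment.getIdentifier() output:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class JoiningSketch
    {
      public static void main(String[] args)
      {
        // Hypothetical identifiers; real ones encode dataSource, interval, and version.
        List<String> ids = Arrays.asList("wiki_2018-12-01_2018-12-02_v1", "wiki_2018-12-02_2018-12-03_v1");
        // Same terminal operation as getIdentifiersString above.
        System.out.println(ids.stream().collect(Collectors.joining(", ")));
        // Prints: wiki_2018-12-01_2018-12-02_v1, wiki_2018-12-02_2018-12-03_v1
      }
    }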

SegmentInsertAction.java

@@ -25,9 +25,9 @@ import com.fasterxml.jackson.core.type.TypeReference;
 import com.google.common.collect.ImmutableSet;
 import org.apache.druid.indexing.common.task.Task;
 import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.DataSegmentUtils;
 
 import java.util.Set;
-import java.util.stream.Collectors;
 
 /**
  * Insert segments into metadata storage. The segment versions must all be less than or equal to a lock held by
@@ -84,7 +84,7 @@ public class SegmentInsertAction implements TaskAction<Set<DataSegment>>
   public String toString()
   {
     return "SegmentInsertAction{" +
-           "segments=" + segments.stream().map(DataSegment::getIdentifier).collect(Collectors.joining(",")) +
+           "segments=" + DataSegmentUtils.getIdentifiersString(segments) +
            '}';
   }
 }
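The same inline-join-to-helper swap repeats in the three task actions and the coordinator helper below. One observable side effect, visible in the diff itself: the old call sites joined with "," while the helper joins with ", ", so these toString() strings gain a space after each comma. A small sketch with hypothetical identifiers:

    class ToStringDelimiterSketch
    {
      public static void main(String[] args)
      {
        String[] ids = {"idA", "idB"};
        System.out.println("before: SegmentInsertAction{segments=" + String.join(",", ids) + "}");
        System.out.println("after:  SegmentInsertAction{segments=" + String.join(", ", ids) + "}");
        // before: SegmentInsertAction{segments=idA,idB}
        // after:  SegmentInsertAction{segments=idA, idB}
      }
    }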

SegmentMetadataUpdateAction.java

@@ -31,6 +31,7 @@ import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
 import org.apache.druid.query.DruidMetrics;
 import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.DataSegmentUtils;
 import org.joda.time.Interval;
 
 import java.util.List;
@@ -114,7 +115,7 @@ public class SegmentMetadataUpdateAction implements TaskAction<Void>
   public String toString()
   {
     return "SegmentMetadataUpdateAction{" +
-           "segments=" + segments.stream().map(DataSegment::getIdentifier).collect(Collectors.joining(",")) +
+           "segments=" + DataSegmentUtils.getIdentifiersString(segments) +
            '}';
   }
 }

SegmentNukeAction.java

@@ -31,6 +31,7 @@ import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
 import org.apache.druid.query.DruidMetrics;
 import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.DataSegmentUtils;
 import org.joda.time.Interval;
 
 import java.util.List;
@@ -116,7 +117,7 @@ public class SegmentNukeAction implements TaskAction<Void>
   public String toString()
   {
     return "SegmentNukeAction{" +
-           "segments=" + segments.stream().map(DataSegment::getIdentifier).collect(Collectors.joining(",")) +
+           "segments=" + DataSegmentUtils.getIdentifiersString(segments) +
            '}';
   }
 }

SegmentTransactionalInsertAction.java

@@ -31,6 +31,7 @@ import org.apache.druid.indexing.overlord.SegmentPublishResult;
 import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
 import org.apache.druid.query.DruidMetrics;
 import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.DataSegmentUtils;
 
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -154,7 +155,7 @@ public class SegmentTransactionalInsertAction implements TaskAction<SegmentPubli
   public String toString()
   {
     return "SegmentInsertAction{" +
-           "segments=" + segments.stream().map(DataSegment::getIdentifier).collect(Collectors.joining(",")) +
+           "segments=" + DataSegmentUtils.getIdentifiersString(segments) +
            ", startMetadata=" + startMetadata +
            ", endMetadata=" + endMetadata +
            '}';

IndexMerger.java

@@ -493,7 +493,7 @@ public interface IndexMerger
         mergeBufferTotalSize += bufferAllocation.rhs;
         ByteBufferUtils.free(bufferAllocation.lhs);
       }
-      log.info("Freed [,%d] bytes of dictionary merging direct buffers", mergeBufferTotalSize);
+      log.info("Freed [%,d] bytes of dictionary merging direct buffers", mergeBufferTotalSize);
     }
   }
 }
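This is the log typo the commit title refers to. In java.util.Formatter syntax (which Druid's Logger delegates to via String.format-style formatting), the "," grouping flag must sit inside the conversion, as %,d, to insert locale-specific thousands separators; the old string [,%d] printed a literal comma before an ungrouped number. A small sketch (grouped output shown for a locale that groups with commas):

    class FormatFlagSketch
    {
      public static void main(String[] args)
      {
        long bytes = 268435456L;
        System.out.println(String.format("Freed [,%d] bytes", bytes)); // Freed [,268435456] bytes
        System.out.println(String.format("Freed [%,d] bytes", bytes)); // Freed [268,435,456] bytes
      }
    }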

DruidCoordinatorSegmentCompactor.java

@@ -30,6 +30,7 @@ import org.apache.druid.server.coordinator.CoordinatorStats;
 import org.apache.druid.server.coordinator.DataSourceCompactionConfig;
 import org.apache.druid.server.coordinator.DruidCoordinatorRuntimeParams;
 import org.apache.druid.timeline.DataSegment;
+import org.apache.druid.timeline.DataSegmentUtils;
 import org.apache.druid.timeline.VersionedIntervalTimeline;
 
 import javax.annotation.Nullable;
@@ -163,7 +164,7 @@ public class DruidCoordinatorSegmentCompactor implements DruidCoordinatorHelper
       LOG.info(
           "Submitted a compactTask[%s] for segments[%s]",
           taskId,
-          segmentsToCompact.stream().map(DataSegment::getIdentifier).collect(Collectors.joining(","))
+          DataSegmentUtils.getIdentifiersString(segmentsToCompact)
       );
     } else if (segmentsToCompact.size() == 1) {
       throw new ISE("Found one segments[%s] to compact", segmentsToCompact);