Add Checkstyle rule to force comments to classes and methods to be Javadoc comments (#4239)

Roman Leventov 2017-05-04 21:14:41 +03:00 committed by Fangjin Yang
parent f0fd8ba191
commit 8277284d67
10 changed files with 19 additions and 14 deletions


@@ -25,7 +25,7 @@ import org.apache.commons.math3.util.Pair;
import java.util.List;
import java.util.TreeMap;
-/*
+/**
* EnumeratedDistribution's sample() method does a linear scan through the array of probabilities.
*
* This is too slow with high cardinality value sets, so this subclass overrides sample() to use

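The hunk above is truncated by the diff view. Given the java.util.TreeMap import shown in the context, a common way to avoid the linear scan is to binary-search a cumulative-weight map; the sketch below shows that general idea only (class and method names are illustrative, not the actual subclass):

import java.util.List;
import java.util.Random;
import java.util.TreeMap;

// Sketch: O(log n) sampling via a cumulative-weight TreeMap instead of a linear scan.
// Assumes strictly positive weights.
public class CumulativeSamplerSketch<T>
{
  private final TreeMap<Double, T> cumulative = new TreeMap<>();
  private final Random random = new Random();
  private final double total;

  public CumulativeSamplerSketch(List<T> values, List<Double> weights)
  {
    double running = 0;
    for (int i = 0; i < values.size(); i++) {
      running += weights.get(i);
      cumulative.put(running, values.get(i)); // key = cumulative weight so far
    }
    this.total = running;
  }

  public T sample()
  {
    // ceilingEntry performs a binary search over the cumulative weights
    return cumulative.ceilingEntry(random.nextDouble() * total).getValue();
  }
}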

@@ -22,7 +22,7 @@ package io.druid.benchmark.datagen;
import org.apache.commons.math3.distribution.AbstractIntegerDistribution;
import org.apache.commons.math3.distribution.AbstractRealDistribution;
-/*
+/**
* Rounds the output values from the sample() function of an AbstractRealDistribution.
*/
public class RealRoundingDistribution extends AbstractIntegerDistribution

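The class above adapts a real-valued distribution to an integer one by rounding its samples; a minimal sketch of that rounding idea (not the full AbstractIntegerDistribution subclass from the diff):

import org.apache.commons.math3.distribution.AbstractRealDistribution;
import org.apache.commons.math3.distribution.NormalDistribution;

public class RoundedSampleSketch
{
  // Core idea: draw from the real-valued distribution, then round to the nearest int.
  static int roundedSample(AbstractRealDistribution realDistribution)
  {
    return (int) Math.round(realDistribution.sample());
  }

  public static void main(String[] args)
  {
    System.out.println(roundedSample(new NormalDistribution(100, 15))); // prints a rounded draw
  }
}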

@@ -53,5 +53,10 @@
<property name="format" value="^// {2}"/>
<property name="illegalPattern" value="true"/>
</module>
+<module name="Regexp">
+<!-- Force comments to classes and methods to be Javadoc comments -->
+<property name="format" value="/\*[^\*].*?\n(\s*\*.*?\n)*\s+\*/[\s\n]*(transient|volatile|strictfp|synchronized|native|abstract|class|interface|enum|static|private|public|protected|default|void|byte|char|short|int|float|long|double|[A-Z])"/>
+<property name="illegalPattern" value="true"/>
+</module>
</module>
</module>

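The new Regexp module flags a plain block comment (/* ... */) placed directly above a class, method, or field declaration, while the same text written as a Javadoc comment (/** ... */) passes — which is exactly the mechanical change made in the remaining files of this commit. Illustrated in Java:

// Flagged by the new rule: a plain block comment immediately precedes a declaration.
/*
 * Rounds the output values from the sample() function.
 */
public class BadlyCommented
{
}

// Passes the rule: the same comment written as Javadoc.
/**
 * Rounds the output values from the sample() function.
 */
class WellCommented
{
}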

@@ -82,7 +82,7 @@ public class DetermineHashedPartitionsJob implements Jobby
* Group by (timestamp, dimensions) so we can correctly count dimension values as they would appear
* in the final segment.
*/
-long startTime = System.currentTimeMillis();
+final long startTime = System.currentTimeMillis();
final Job groupByJob = Job.getInstance(
new Configuration(),
String.format("%s-determine_partitions_hashed-%s", config.getDataSource(), config.getIntervals())


@@ -2211,7 +2211,7 @@ public class TestNG
private URLClassLoader m_serviceLoaderClassLoader;
private List<ITestNGListener> m_serviceLoaderListeners = Lists.newArrayList();
-/*
+/**
* Used to test ServiceClassLoader
*/
public void setServiceLoaderClassLoader(URLClassLoader ucl)
@@ -2219,7 +2219,7 @@ public class TestNG
m_serviceLoaderClassLoader = ucl;
}
-/*
+/**
* Used to test ServiceClassLoader
*/
private void addServiceLoaderListener(ITestNGListener l)
@@ -2227,7 +2227,7 @@ public class TestNG
m_serviceLoaderListeners.add(l);
}
-/*
+/**
* Used to test ServiceClassLoader
*/
public List<ITestNGListener> getServiceLoaderListeners()


@@ -127,7 +127,7 @@ public interface BufferAggregator extends HotLoopCallee
{
}
-/*
+/**
* Relocates any cached objects.
* If the underlying ByteBuffer used for the aggregation buffer relocates to a new ByteBuffer, positional caches (if any)
* built on top of the old ByteBuffer cannot be used for further {@link BufferAggregator#aggregate(ByteBuffer, int)}

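The Javadoc above spells out the contract: once the aggregation buffer moves to a new ByteBuffer, anything cached against the old buffer and position must be rebuilt before further aggregate(ByteBuffer, int) calls. The sketch below assumes a hypothetical relocate(oldPosition, newPosition, oldBuffer, newBuffer) shape purely for illustration; the actual method signature is not shown in this hunk.

import java.nio.ByteBuffer;

// Illustrative only: the relocate(...) signature is an assumption for this sketch,
// not necessarily the actual BufferAggregator method.
public class RelocationAwareSketch
{
  // A "positional cache": state that is only valid for one (buffer, position) pair.
  private ByteBuffer cachedBuffer;
  private int cachedPosition = -1;

  public void relocate(int oldPosition, int newPosition, ByteBuffer oldBuffer, ByteBuffer newBuffer)
  {
    if (cachedBuffer == oldBuffer && cachedPosition == oldPosition) {
      // The backing buffer moved: re-point (or rebuild) everything derived from the old one.
      cachedBuffer = newBuffer;
      cachedPosition = newPosition;
    }
  }
}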

@@ -34,7 +34,7 @@ public class Groupers
private static final int C1 = 0xcc9e2d51;
private static final int C2 = 0x1b873593;
-/*
+/**
* This method was rewritten in Java from an intermediate step of the Murmur hash function in
* https://github.com/aappleby/smhasher/blob/master/src/MurmurHash3.cpp, which contained the
* following header:

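C1 and C2 above are the MurmurHash3 x86_32 block constants; the intermediate per-block mixing step referenced by the comment looks like the sketch below (shown as the generic MurmurHash3 step, not necessarily Druid's exact method):

public class MurmurMixSketch
{
  private static final int C1 = 0xcc9e2d51;
  private static final int C2 = 0x1b873593;

  // MurmurHash3_x86_32 intermediate step: k *= c1; k = rotl32(k, 15); k *= c2;
  static int mix(int k)
  {
    k *= C1;
    k = Integer.rotateLeft(k, 15);
    k *= C2;
    return k;
  }

  public static void main(String[] args)
  {
    System.out.println(Integer.toHexString(mix("druid".hashCode())));
  }
}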

@@ -45,7 +45,8 @@ import java.util.Objects;
public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
{
-/* The SegmentMetadataQuery cache key may contain UTF-8 column name strings.
+/**
+ * The SegmentMetadataQuery cache key may contain UTF-8 column name strings.
* Prepend 0xFF before the analysisTypes as a separator to avoid
* any potential confusion with string values.
*/

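The separator trick works because 0xFF is never a valid byte in UTF-8, so it cannot be confused with column-name bytes. A sketch of that key layout (method and parameter names here are illustrative, not the actual cache-key builder):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class CacheKeySketch
{
  // Layout: UTF-8 column-name bytes, then 0xFF as a separator, then the analysis-type bytes.
  static byte[] cacheKey(String columnNames, byte[] analysisTypeBytes)
  {
    byte[] columnBytes = columnNames.getBytes(StandardCharsets.UTF_8);
    return ByteBuffer.allocate(columnBytes.length + 1 + analysisTypeBytes.length)
                     .put(columnBytes)
                     .put((byte) 0xFF) // never produced by UTF-8, so it cannot collide
                     .put(analysisTypeBytes)
                     .array();
  }
}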

@@ -63,7 +63,7 @@ public class CompressionFactory
// encoding format for segments created prior to the introduction of encoding formats
public static final LongEncodingFormat LEGACY_LONG_ENCODING_FORMAT = LongEncodingFormat.LONGS;
-/*
+/**
* Delta Encoding Header v1:
* Byte 1 : version
* Byte 2 - 9 : base value
@@ -71,7 +71,7 @@ public class CompressionFactory
*/
public static final byte DELTA_ENCODING_VERSION = 0x1;
-/*
+/**
* Table Encoding Header v1 :
* Byte 1 : version
* Byte 2 - 5 : table size
@@ -112,7 +112,7 @@ public class CompressionFactory
return hasEncodingFlag(strategyId) ? (byte) (strategyId + FLAG_VALUE) : strategyId;
}
-/*
+/**
* The compression and decompression of encodings are separated into different enums. EncodingStrategy refers to the
* strategy used to encode the data, and EncodingFormat refers to the format the data is encoded in. Note there is not
* necessarily a one-to-one mapping between the two. For instance, the AUTO LongEncodingStrategy scans the data once
@@ -120,7 +120,6 @@ public class CompressionFactory
* write in any of the LongEncodingFormat. On the other hand, there is no LongEncodingStrategy that always writes in
* TABLE LongEncodingFormat since it only works for data with low cardinality.
*/
public enum LongEncodingStrategy
{
/**

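The comment above documents a small binary header for delta-encoded longs; only the first two fields (version byte, 8-byte base value) are visible in this hunk. A sketch of writing just those visible fields, with the method and buffer assumed for illustration:

import java.nio.ByteBuffer;

public class DeltaHeaderSketch
{
  public static final byte DELTA_ENCODING_VERSION = 0x1;

  // Writes only the fields visible in the diff:
  //   Byte 1      : version
  //   Bytes 2 - 9 : base value
  // (the remaining header bytes are cut off in the hunk and not reproduced here)
  static void writeDeltaHeader(ByteBuffer out, long baseValue)
  {
    out.put(DELTA_ENCODING_VERSION);
    out.putLong(baseValue);
  }
}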

@@ -595,7 +595,7 @@ public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>,
return capabilities;
}
-/*
+/**
* Currently called to initialize IncrementalIndex dimension order during index creation
* Index dimension ordering could be changed to initialize from DimensionsSpec after resolution of
* https://github.com/druid-io/druid/issues/2011