LUCENE-9856: Static analysis take 3: Remove redundant interfaces (#38)

Co-authored-by: Robert Muir <rmuir@apache.org>
Author: Uwe Schindler
Date: 2021-03-24 18:26:12 +01:00 (committed by GitHub)
parent c23ea2f537
commit 3214e365e3
57 changed files with 87 additions and 78 deletions

View File

@@ -85,7 +85,7 @@ org.eclipse.jdt.core.compiler.problem.rawTypeReference=ignore
 org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=error
 org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
 org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
+org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=error
 org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
 org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore
 org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=error
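The line above is the crux of the commit: ecj's redundantSuperinterface check is promoted from ignore to error. As an illustration only (simplified stand-ins for the real Lucene classes, not code from this commit), the pattern the compiler now rejects looks like this:

interface PayloadEncoder {}

abstract class AbstractEncoder implements PayloadEncoder {}

// ecj now reports an error here: PayloadEncoder is a redundant superinterface,
// because AbstractEncoder already implements it. The fix throughout this commit
// is simply to drop the repeated interface from the implements clause.
class FloatEncoder extends AbstractEncoder implements PayloadEncoder {}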

View File

@@ -147,6 +147,9 @@ public final class ConcatenateGraphFilter extends TokenStream {
     super.reset();
     // we only capture this if we really need it to save the UTF-8 to UTF-16 conversion
     charTermAttribute = getAttribute(CharTermAttribute.class); // may return null
+    // make sure the TermToBytesRefAttribute attribute is implemented by our class, not via
+    // CharTermAttribute's
+    assert getAttribute(TermToBytesRefAttribute.class) instanceof BytesRefBuilderTermAttributeImpl;
     wasReset = true;
   }
@@ -347,8 +350,9 @@ public final class ConcatenateGraphFilter extends TokenStream {
    *
    * @lucene.internal
    */
+  @SuppressWarnings("unused") // do not warn/error on redundant interface
   public static final class BytesRefBuilderTermAttributeImpl extends AttributeImpl
-      implements BytesRefBuilderTermAttribute, TermToBytesRefAttribute {
+      implements BytesRefBuilderTermAttribute, TermToBytesRefAttribute /*required*/ {
     private final BytesRefBuilder bytes = new BytesRefBuilder();
     private transient CharsRefBuilder charsRef;
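Why keep a "redundant" interface here and suppress the new error? Attribute lookup discovers interfaces through Class.getInterfaces(), which lists only directly declared interfaces, not those inherited through another interface. A minimal standalone sketch (invented names, not part of this commit) of that Java behavior:

import java.util.Arrays;

public class DirectInterfacesDemo {
  interface Base {}

  interface Derived extends Base {}

  static class Impl implements Derived {}

  public static void main(String[] args) {
    // Prints [interface DirectInterfacesDemo$Derived] only: Base is reachable
    // through Derived but is not listed, which is why BytesRefBuilderTermAttributeImpl
    // must name TermToBytesRefAttribute explicitly for attribute lookup to find it.
    System.out.println(Arrays.toString(Impl.class.getInterfaces()));
  }
}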

View File

@@ -31,7 +31,6 @@ import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.TokenFilterFactory;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.util.ResourceLoader;
-import org.apache.lucene.util.ResourceLoaderAware;
 /**
  * Factory for a {@link ProtectedTermFilter}
@@ -82,8 +81,7 @@ import org.apache.lucene.util.ResourceLoaderAware;
  * @since 7.4.0
  * @lucene.spi {@value #NAME}
  */
-public class ProtectedTermFilterFactory extends ConditionalTokenFilterFactory
-    implements ResourceLoaderAware {
+public class ProtectedTermFilterFactory extends ConditionalTokenFilterFactory {
   public static final String NAME = "protectedTerm";

View File

@@ -23,7 +23,7 @@ import org.apache.lucene.util.BytesRef;
  *
  * @see org.apache.lucene.analysis.payloads.PayloadHelper#encodeFloat(float, byte[], int)
  */
-public class FloatEncoder extends AbstractEncoder implements PayloadEncoder {
+public class FloatEncoder extends AbstractEncoder {
   @Override
   public BytesRef encode(char[] buffer, int offset, int length) {

View File

@@ -23,7 +23,7 @@ import java.nio.charset.StandardCharsets;
 import org.apache.lucene.util.BytesRef;
 /** Does nothing other than convert the char array to a byte array using the specified encoding. */
-public class IdentityEncoder extends AbstractEncoder implements PayloadEncoder {
+public class IdentityEncoder extends AbstractEncoder {
   protected Charset charset = StandardCharsets.UTF_8;
   public IdentityEncoder() {}

View File

@@ -24,7 +24,7 @@ import org.apache.lucene.util.BytesRef;
  *
  * <p>See {@link org.apache.lucene.analysis.payloads.PayloadHelper#encodeInt(int, byte[], int)}.
  */
-public class IntegerEncoder extends AbstractEncoder implements PayloadEncoder {
+public class IntegerEncoder extends AbstractEncoder {
   @Override
   public BytesRef encode(char[] buffer, int offset, int length) {

View File

@@ -25,7 +25,7 @@ import org.apache.lucene.util.AttributeReflector;
  *
  * @lucene.experimental
  */
-public class ScriptAttributeImpl extends AttributeImpl implements ScriptAttribute, Cloneable {
+public class ScriptAttributeImpl extends AttributeImpl implements ScriptAttribute {
   private int code = UScript.COMMON;
   /** Initializes this attribute with <code>UScript.COMMON</code> */

View File

@@ -21,7 +21,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Attribute for {@link Token#getBaseForm()}. */
-public class BaseFormAttributeImpl extends AttributeImpl implements BaseFormAttribute, Cloneable {
+public class BaseFormAttributeImpl extends AttributeImpl implements BaseFormAttribute {
   private Token token;
   @Override

View File

@@ -22,8 +22,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Attribute for Kuromoji inflection data. */
-public class InflectionAttributeImpl extends AttributeImpl
-    implements InflectionAttribute, Cloneable {
+public class InflectionAttributeImpl extends AttributeImpl implements InflectionAttribute {
   private Token token;
   @Override

View File

@@ -22,8 +22,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Attribute for {@link Token#getPartOfSpeech()}. */
-public class PartOfSpeechAttributeImpl extends AttributeImpl
-    implements PartOfSpeechAttribute, Cloneable {
+public class PartOfSpeechAttributeImpl extends AttributeImpl implements PartOfSpeechAttribute {
   private Token token;
   @Override

View File

@@ -22,7 +22,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Attribute for Kuromoji reading data */
-public class ReadingAttributeImpl extends AttributeImpl implements ReadingAttribute, Cloneable {
+public class ReadingAttributeImpl extends AttributeImpl implements ReadingAttribute {
   private Token token;
   @Override

View File

@@ -27,7 +27,7 @@ import org.apache.lucene.util.AttributeReflector;
  * @see MorphosyntacticTagsAttribute
  */
 public class MorphosyntacticTagsAttributeImpl extends AttributeImpl
-    implements MorphosyntacticTagsAttribute, Cloneable {
+    implements MorphosyntacticTagsAttribute {
   /** Initializes this attribute with no tags */
   public MorphosyntacticTagsAttributeImpl() {}

View File

@@ -28,8 +28,7 @@ import org.apache.lucene.util.AttributeReflector;
  *
  * @lucene.experimental
  */
-public class PartOfSpeechAttributeImpl extends AttributeImpl
-    implements PartOfSpeechAttribute, Cloneable {
+public class PartOfSpeechAttributeImpl extends AttributeImpl implements PartOfSpeechAttribute {
   private Token token;
   @Override

View File

@@ -25,7 +25,7 @@ import org.apache.lucene.util.AttributeReflector;
  *
  * @lucene.experimental
  */
-public class ReadingAttributeImpl extends AttributeImpl implements ReadingAttribute, Cloneable {
+public class ReadingAttributeImpl extends AttributeImpl implements ReadingAttribute {
   private Token token;
   @Override

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.backward_codecs.lucene50.compressing;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
@@ -58,8 +57,7 @@ import org.apache.lucene.util.packed.PackedInts;
  *
  * @lucene.experimental
  */
-public final class Lucene50CompressingTermVectorsReader extends TermVectorsReader
-    implements Closeable {
+public final class Lucene50CompressingTermVectorsReader extends TermVectorsReader {
   // hard limit on the maximum number of documents per chunk
   static final int MAX_DOCUMENTS_PER_CHUNK = 128;

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.backward_codecs.lucene60;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -32,7 +31,7 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.bkd.BKDReader;
 /** Reads point values previously written with Lucene60PointsWriter */
-public class Lucene60PointsReader extends PointsReader implements Closeable {
+public class Lucene60PointsReader extends PointsReader {
   final IndexInput dataIn;
   final SegmentReadState readState;
   final Map<Integer, BKDReader> readers = new HashMap<>();

View File

@@ -20,7 +20,6 @@ import static org.apache.lucene.backward_codecs.lucene70.Lucene70DocValuesFormat
 import static org.apache.lucene.backward_codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SHIFT;
 import static org.apache.lucene.backward_codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE;
-import java.io.Closeable; // javadocs
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -54,7 +53,7 @@ import org.apache.lucene.util.packed.DirectMonotonicWriter;
 import org.apache.lucene.util.packed.DirectWriter;
 /** writer for {@link Lucene70DocValuesFormat} */
-final class Lucene70DocValuesConsumer extends DocValuesConsumer implements Closeable {
+final class Lucene70DocValuesConsumer extends DocValuesConsumer {
   IndexOutput data, meta;
   final int maxDoc;

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.backward_codecs.lucene70;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -49,7 +48,7 @@ import org.apache.lucene.util.packed.DirectMonotonicReader;
 import org.apache.lucene.util.packed.DirectReader;
 /** reader for {@link Lucene70DocValuesFormat} */
-final class Lucene70DocValuesProducer extends DocValuesProducer implements Closeable {
+final class Lucene70DocValuesProducer extends DocValuesProducer {
   private final Map<String, NumericEntry> numerics = new HashMap<>();
   private final Map<String, BinaryEntry> binaries = new HashMap<>();
   private final Map<String, SortedEntry> sorted = new HashMap<>();

View File

@@ -62,7 +62,7 @@ import org.apache.lucene.util.packed.DirectMonotonicWriter;
 import org.apache.lucene.util.packed.DirectWriter;
 /** writer for {@link Lucene80DocValuesFormat} */
-final class Lucene80DocValuesConsumer extends DocValuesConsumer implements Closeable {
+final class Lucene80DocValuesConsumer extends DocValuesConsumer {
   final Lucene80DocValuesFormat.Mode mode;
   IndexOutput data, meta;

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.backward_codecs.lucene80;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -52,7 +51,7 @@ import org.apache.lucene.util.packed.DirectMonotonicReader;
 import org.apache.lucene.util.packed.DirectReader;
 /** reader for {@link Lucene80DocValuesFormat} */
-final class Lucene80DocValuesProducer extends DocValuesProducer implements Closeable {
+final class Lucene80DocValuesProducer extends DocValuesProducer {
   private final Map<String, NumericEntry> numerics = new HashMap<>();
   private final Map<String, BinaryEntry> binaries = new HashMap<>();
   private final Map<String, SortedEntry> sorted = new HashMap<>();

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.backward_codecs.lucene60;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -41,7 +40,7 @@ import org.apache.lucene.util.bkd.BKDReader;
 import org.apache.lucene.util.bkd.BKDWriter;
 /** Writes dimensional values */
-public class Lucene60PointsWriter extends PointsWriter implements Closeable {
+public class Lucene60PointsWriter extends PointsWriter {
   /** Output used to write the BKD tree data file */
   protected final IndexOutput dataOut;

View File

@@ -36,7 +36,7 @@ import org.apache.lucene.search.spans.SpanTermQuery;
 * A QueryMaker that uses common and uncommon actual Wikipedia queries for searching the English
 * Wikipedia collection. 90 queries total.
 */
-public class EnwikiQueryMaker extends AbstractQueryMaker implements QueryMaker {
+public class EnwikiQueryMaker extends AbstractQueryMaker {
   // common and a few uncommon queries from wikipedia search logs
   private static String[] STANDARD_QUERIES = {

View File

@@ -43,7 +43,7 @@ import org.apache.lucene.util.IOUtils;
 * <pre>file.query.maker.file=c:/myqueries.txt
 * file.query.maker.default.field=body</pre>
 */
-public class FileBasedQueryMaker extends AbstractQueryMaker implements QueryMaker {
+public class FileBasedQueryMaker extends AbstractQueryMaker {
   @Override
   protected Query[] prepareQueries() throws Exception {

View File

@@ -34,7 +34,7 @@ import org.apache.lucene.search.spans.SpanTermQuery;
 * A QueryMaker that makes queries devised manually (by Grant Ingersoll) for searching in the
 * Reuters collection.
 */
-public class ReutersQueryMaker extends AbstractQueryMaker implements QueryMaker {
+public class ReutersQueryMaker extends AbstractQueryMaker {
   private static String[] STANDARD_QUERIES = {
     // Start with some short queries

View File

@@ -30,7 +30,7 @@ import org.apache.lucene.search.TermQuery;
 * A QueryMaker that makes queries for a collection created using {@link
 * org.apache.lucene.benchmark.byTask.feeds.SingleDocSource}.
 */
-public class SimpleQueryMaker extends AbstractQueryMaker implements QueryMaker {
+public class SimpleQueryMaker extends AbstractQueryMaker {
   /**
    * Prepare the queries for this test. Extending classes can override this method for preparing

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.codecs.blockterms;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -52,7 +51,7 @@ import org.apache.lucene.util.IOUtils;
 *
 * @lucene.experimental
 */
-public class BlockTermsWriter extends FieldsConsumer implements Closeable {
+public class BlockTermsWriter extends FieldsConsumer {
   static final String CODEC_NAME = "BlockTermsWriter";

View File

@@ -26,8 +26,9 @@ import org.apache.lucene.util.BytesRef;
 *
 * @lucene.internal
 */
+@SuppressWarnings("unused") // do not warn/error on redundant interface
 public class BytesTermAttributeImpl extends AttributeImpl
-    implements BytesTermAttribute, TermToBytesRefAttribute {
+    implements BytesTermAttribute, TermToBytesRefAttribute /*required*/ {
   private BytesRef bytes;
   /** Initialize this attribute with no bytes. */

View File

@@ -26,7 +26,7 @@ import org.apache.lucene.util.BytesRefBuilder;
 /** Default implementation of {@link CharTermAttribute}. */
 public class CharTermAttributeImpl extends AttributeImpl
-    implements CharTermAttribute, TermToBytesRefAttribute, Cloneable {
+    implements CharTermAttribute, TermToBytesRefAttribute {
   private static int MIN_BUFFER_SIZE = 10;
   private char[] termBuffer = new char[ArrayUtil.oversize(MIN_BUFFER_SIZE, Character.BYTES)];

View File

@@ -20,7 +20,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Default implementation of {@link FlagsAttribute}. */
-public class FlagsAttributeImpl extends AttributeImpl implements FlagsAttribute, Cloneable {
+public class FlagsAttributeImpl extends AttributeImpl implements FlagsAttribute {
   private int flags = 0;
   /** Initialize this attribute with no bits set */

View File

@@ -20,7 +20,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Default implementation of {@link OffsetAttribute}. */
-public class OffsetAttributeImpl extends AttributeImpl implements OffsetAttribute, Cloneable {
+public class OffsetAttributeImpl extends AttributeImpl implements OffsetAttribute {
   private int startOffset;
   private int endOffset;

View File

@@ -21,7 +21,7 @@ import org.apache.lucene.util.AttributeReflector;
 import org.apache.lucene.util.BytesRef;
 /** Default implementation of {@link PayloadAttribute}. */
-public class PayloadAttributeImpl extends AttributeImpl implements PayloadAttribute, Cloneable {
+public class PayloadAttributeImpl extends AttributeImpl implements PayloadAttribute {
   private BytesRef payload;
   /** Initialize this attribute with no payload. */

View File

@@ -21,7 +21,7 @@ import org.apache.lucene.util.AttributeReflector;
 /** Default implementation of {@link PositionIncrementAttribute}. */
 public class PositionIncrementAttributeImpl extends AttributeImpl
-    implements PositionIncrementAttribute, Cloneable {
+    implements PositionIncrementAttribute {
   private int positionIncrement = 1;
   /** Initialize this attribute with position increment of 1 */

View File

@@ -20,8 +20,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Default implementation of {@link PositionLengthAttribute}. */
-public class PositionLengthAttributeImpl extends AttributeImpl
-    implements PositionLengthAttribute, Cloneable {
+public class PositionLengthAttributeImpl extends AttributeImpl implements PositionLengthAttribute {
   private int positionLength = 1;
   /** Initializes this attribute with position length of 1. */

View File

@@ -20,8 +20,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Default implementation of {@link TermFrequencyAttribute}. */
-public class TermFrequencyAttributeImpl extends AttributeImpl
-    implements TermFrequencyAttribute, Cloneable {
+public class TermFrequencyAttributeImpl extends AttributeImpl implements TermFrequencyAttribute {
   private int termFrequency = 1;
   /** Initialize this attribute with term frequency of 1 */

View File

@@ -20,7 +20,7 @@ import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.AttributeReflector;
 /** Default implementation of {@link TypeAttribute}. */
-public class TypeAttributeImpl extends AttributeImpl implements TypeAttribute, Cloneable {
+public class TypeAttributeImpl extends AttributeImpl implements TypeAttribute {
   private String type;
   /** Initialize this attribute with {@link TypeAttribute#DEFAULT_TYPE} */

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.codecs.lucene86;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -33,7 +32,7 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.bkd.BKDReader;
 /** Reads point values previously written with {@link Lucene86PointsWriter} */
-public class Lucene86PointsReader extends PointsReader implements Closeable {
+public class Lucene86PointsReader extends PointsReader {
   final IndexInput indexIn, dataIn;
   final SegmentReadState readState;
   final Map<Integer, BKDReader> readers = new HashMap<>();

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.codecs.lucene86;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -39,7 +38,7 @@ import org.apache.lucene.util.bkd.BKDReader;
 import org.apache.lucene.util.bkd.BKDWriter;
 /** Writes dimensional values */
-public class Lucene86PointsWriter extends PointsWriter implements Closeable {
+public class Lucene86PointsWriter extends PointsWriter {
   /** Outputs used to write the BKD tree data files. */
   protected final IndexOutput metaOut, indexOut, dataOut;

View File

@@ -62,7 +62,7 @@ import org.apache.lucene.util.packed.DirectMonotonicWriter;
 import org.apache.lucene.util.packed.DirectWriter;
 /** writer for {@link Lucene90DocValuesFormat} */
-final class Lucene90DocValuesConsumer extends DocValuesConsumer implements Closeable {
+final class Lucene90DocValuesConsumer extends DocValuesConsumer {
   final Lucene90DocValuesFormat.Mode mode;
   IndexOutput data, meta;

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.lucene.codecs.lucene90;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -52,7 +51,7 @@ import org.apache.lucene.util.packed.DirectMonotonicReader;
 import org.apache.lucene.util.packed.DirectReader;
 /** reader for {@link Lucene90DocValuesFormat} */
-final class Lucene90DocValuesProducer extends DocValuesProducer implements Closeable {
+final class Lucene90DocValuesProducer extends DocValuesProducer {
   private final Map<String, NumericEntry> numerics = new HashMap<>();
   private final Map<String, BinaryEntry> binaries = new HashMap<>();
   private final Map<String, SortedEntry> sorted = new HashMap<>();

View File

@@ -29,7 +29,6 @@ import static org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingT
 import static org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingTermVectorsWriter.VERSION_CURRENT;
 import static org.apache.lucene.codecs.lucene90.compressing.Lucene90CompressingTermVectorsWriter.VERSION_START;
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
@@ -71,8 +70,7 @@ import org.apache.lucene.util.packed.PackedInts;
 *
 * @lucene.experimental
 */
-public final class Lucene90CompressingTermVectorsReader extends TermVectorsReader
-    implements Closeable {
+public final class Lucene90CompressingTermVectorsReader extends TermVectorsReader {
   private final FieldInfos fieldInfos;
   final FieldsIndex indexReader;

View File

@@ -37,7 +37,7 @@ import java.io.IOException;
 *
 * @see Directory
 */
-public abstract class IndexInput extends DataInput implements Cloneable, Closeable {
+public abstract class IndexInput extends DataInput implements Closeable {
   private final String resourceDescription;

View File

@@ -21,6 +21,10 @@ package org.apache.lucene.util;
  *
  * <p>Attributes are used to add data in a dynamic, yet type-safe way to a source of usually
  * streamed objects, e. g. a {@link org.apache.lucene.analysis.TokenStream}.
+ *
+ * <p>All implementations must list all implemented {@link Attribute} interfaces in their {@code
+ * implements} clause. {@code AttributeSource} reflectively identifies all attributes and makes them
+ * available to consumers like {@code TokenStream}s.
  */
 public abstract class AttributeImpl implements Cloneable, Attribute {
   /**
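To make the documented contract concrete, here is a hedged sketch of a custom attribute. ColorAttribute and ColorAttributeImpl are invented for illustration; the overridden hooks are the standard abstract methods of AttributeImpl.

import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeReflector;

// Invented attribute interface; consumers look the attribute up by this type.
interface ColorAttribute extends Attribute {
  void setColor(String color);

  String getColor();
}

// The implementation names ColorAttribute directly in its implements clause,
// so AttributeSource can register the instance under that interface.
final class ColorAttributeImpl extends AttributeImpl implements ColorAttribute {
  private String color;

  @Override
  public void setColor(String color) {
    this.color = color;
  }

  @Override
  public String getColor() {
    return color;
  }

  @Override
  public void clear() {
    color = null;
  }

  @Override
  public void copyTo(AttributeImpl target) {
    ((ColorAttribute) target).setColor(color);
  }

  @Override
  public void reflectWith(AttributeReflector reflector) {
    reflector.reflect(ColorAttribute.class, "color", color);
  }
}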

View File

@@ -181,6 +181,9 @@ public class AttributeSource {
    * retrieve the wanted attributes using {@link #getAttribute} after adding with this method and
    * cast to your class. The recommended way to use custom implementations is using an {@link
    * AttributeFactory}.
+   *
+   * <p>This method will only add the Attribute interfaces directly implemented by the class and its
+   * super classes.
    */
   public final void addAttributeImpl(final AttributeImpl att) {
     final Class<? extends AttributeImpl> clazz = att.getClass();
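A small usage sketch of the documented behavior (the demo class name is invented; CharTermAttributeImpl does directly implement both interfaces, which is why both lookups below succeed):

import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.util.AttributeSource;

public class AddAttributeImplDemo {
  public static void main(String[] args) {
    AttributeSource source = new AttributeSource();
    // One call registers the instance under every Attribute interface the
    // class (and its superclasses) directly implements.
    source.addAttributeImpl(new CharTermAttributeImpl());
    CharTermAttribute term = source.getAttribute(CharTermAttribute.class);
    TermToBytesRefAttribute bytes = source.getAttribute(TermToBytesRefAttribute.class);
    System.out.println(term == (Object) bytes); // true: one instance, two interfaces
  }
}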

View File

@@ -28,7 +28,7 @@ import org.apache.lucene.search.DocIdSetIterator;
 *
 * @lucene.internal
 */
-public final class FixedBitSet extends BitSet implements Bits, Accountable {
+public final class FixedBitSet extends BitSet {
   private static final long BASE_RAM_BYTES_USED =
       RamUsageEstimator.shallowSizeOfInstance(FixedBitSet.class);

View File

@@ -33,7 +33,7 @@ import org.apache.lucene.search.DocIdSetIterator;
 *
 * @lucene.internal
 */
-public class SparseFixedBitSet extends BitSet implements Bits, Accountable {
+public class SparseFixedBitSet extends BitSet {
   private static final long BASE_RAM_BYTES_USED =
       RamUsageEstimator.shallowSizeOfInstance(SparseFixedBitSet.class);

View File

@@ -16,6 +16,7 @@
  */
 package org.apache.lucene.analysis.tokenattributes;
+import java.util.stream.Stream;
 import org.apache.lucene.util.AttributeImpl;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
@@ -44,4 +45,11 @@ public class TestBytesRefAttImpl extends LuceneTestCase {
     assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode());
     return copy;
   }
+
+  public void testLucene9856() {
+    assertTrue(
+        "BytesTermAttributeImpl must explicitly declare to implement TermToBytesRefAttribute",
+        Stream.of(BytesTermAttributeImpl.class.getInterfaces())
+            .anyMatch(TermToBytesRefAttribute.class::equals));
+  }
 }

View File

@@ -46,8 +46,7 @@ import org.apache.lucene.util.DocIdSetBuilder;
 * instantiate one of the {@link Facets} subclasses to do the facet counting. Use the {@code search}
 * utility methods to perform an "ordinary" search but also collect into a {@link Collector}.
 */
-// redundant 'implements Collector' to workaround javadocs bugs
-public class FacetsCollector extends SimpleCollector implements Collector {
+public class FacetsCollector extends SimpleCollector {
   private LeafReaderContext context;
   private Scorable scorer;

View File

@@ -34,7 +34,7 @@ import org.xml.sax.ErrorHandler;
 import org.xml.sax.SAXException;
 /** Assembles a QueryBuilder which uses only core Lucene Query objects */
-public class CoreParser implements QueryBuilder, SpanQueryBuilder {
+public class CoreParser implements SpanQueryBuilder {
   protected String defaultField;
   protected Analyzer analyzer;

View File

@@ -24,8 +24,8 @@ import java.io.InputStream;
 *
 * @lucene.experimental
 */
-public class GeoCompositeMembershipShape extends GeoBaseCompositeMembershipShape<GeoMembershipShape>
-    implements GeoMembershipShape {
+public class GeoCompositeMembershipShape
+    extends GeoBaseCompositeMembershipShape<GeoMembershipShape> {
   /** Constructor. */
   public GeoCompositeMembershipShape(PlanetModel planetModel) {

View File

@@ -21,4 +21,4 @@ package org.apache.lucene.spatial3d.geom;
 *
 * @lucene.experimental
 */
-public interface GeoMembershipShape extends GeoShape, GeoOutsideDistance, Membership {}
+public interface GeoMembershipShape extends GeoShape, GeoOutsideDistance {}

View File

@@ -97,8 +97,7 @@ import org.apache.lucene.util.fst.Util.TopResults;
 *
 * @lucene.experimental
 */
-// redundant 'implements Accountable' to workaround javadocs bugs
-public class AnalyzingSuggester extends Lookup implements Accountable {
+public class AnalyzingSuggester extends Lookup {
   /**
    * FST&lt;Weight,Surface&gt;: input is the analyzed form, with a null byte between terms weights

View File

@@ -100,8 +100,7 @@ import org.apache.lucene.util.fst.Util.TopResults;
 *
 * @lucene.experimental
 */
-// redundant 'implements Accountable' to workaround javadocs bugs
-public class FreeTextSuggester extends Lookup implements Accountable {
+public class FreeTextSuggester extends Lookup {
   /** Codec name used in the header for the saved model. */
   public static final String CODEC_NAME = "freetextsuggest";

View File

@@ -68,7 +68,7 @@ import org.apache.lucene.util.fst.NoOutputs;
 * @see FSTCompletion
 * @lucene.experimental
 */
-public class FSTCompletionLookup extends Lookup implements Accountable {
+public class FSTCompletionLookup extends Lookup {
   /**
    * An invalid bucket count if we're creating an object of this class from an existing FST.
    *

View File

@@ -57,8 +57,7 @@ import org.apache.lucene.util.fst.Util.TopResults;
 *
 * @lucene.experimental
 */
-// redundant 'implements Accountable' to workaround javadocs bugs
-public class WFSTCompletionLookup extends Lookup implements Accountable {
+public class WFSTCompletionLookup extends Lookup {
   /** FST<Long>, weights are encoded as costs: (Integer.MAX_VALUE-weight) */
   // NOTE: like FSTSuggester, this is really a WFSA, if you want to

View File

@@ -25,7 +25,6 @@ import org.apache.lucene.search.suggest.Lookup;
 import org.apache.lucene.search.suggest.jaspell.JaspellTernarySearchTrie.TSTNode;
 import org.apache.lucene.store.DataInput;
 import org.apache.lucene.store.DataOutput;
-import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.CharsRefBuilder;
@@ -37,7 +36,7 @@ import org.apache.lucene.util.CharsRefBuilder;
 * @deprecated Migrate to one of the newer suggesters which are much more RAM efficient.
 */
 @Deprecated
-public class JaspellLookup extends Lookup implements Accountable {
+public class JaspellLookup extends Lookup {
   JaspellTernarySearchTrie trie = new JaspellTernarySearchTrie();
   private boolean usePrefix = true;
   private int editDistance = 2;

View File

@@ -143,6 +143,18 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
     if (output.length > 0) {
       assertTrue("has no CharTermAttribute", ts.hasAttribute(CharTermAttribute.class));
       termAtt = ts.getAttribute(CharTermAttribute.class);
+      // every UTF-16 character-based TokenStream MUST provide a TermToBytesRefAttribute,
+      // implemented by same instance like the CharTermAttribute:
+      assertTrue("has no TermToBytesRefAttribute", ts.hasAttribute(TermToBytesRefAttribute.class));
+      TermToBytesRefAttribute bytesAtt = ts.getAttribute(TermToBytesRefAttribute.class);
+      // ConcatenateGraphFilter has some tricky logic violating this. We have an extra assert there:
+      if (!Objects.equals(
+          bytesAtt.getClass().getSimpleName(), "BytesRefBuilderTermAttributeImpl")) {
+        assertSame(
+            "TermToBytesRefAttribute must be implemented by same instance", termAtt, bytesAtt);
+      }
     }
     OffsetAttribute offsetAtt = null;

View File

@@ -20,6 +20,7 @@ import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
+import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
 import org.apache.lucene.util.BytesRef;
 /** TokenStream from a canned list of binary (BytesRef-based) tokens. */
@@ -57,6 +58,7 @@ public final class CannedBinaryTokenStream extends TokenStream {
   public CannedBinaryTokenStream(BinaryToken... tokens) {
     super(Token.TOKEN_ATTRIBUTE_FACTORY);
     this.tokens = tokens;
+    assert termAtt == getAttribute(TermToBytesRefAttribute.class);
   }
   @Override