From 945b1cb8726223d5b0d5f61e85174ca93931b86e Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 23 Mar 2021 13:59:00 -0400 Subject: [PATCH] LUCENE-9856: fail precommit on unused local variables, take two (#37) Enable ecj unused local variable, private instance and method detection. Allow SuppressWarnings("unused") to disable unused checks (e.g. for generated code or very special tests). Fix gradlew regenerate for python 3.9 SuppressWarnings("unused") for generated javacc and jflex code. Enable a few other easy ecj checks such as Deprecated annotation, hashcode/equals, equals across different types. Co-authored-by: Mike McCandless --- build.gradle | 2 +- gradle/generation/javacc.gradle | 15 ++ .../dot.settings/org.eclipse.jdt.core.prefs | 2 + gradle/validation/ecj-lint/ecj.javadocs.prefs | 51 +++++- .../charfilter/HTMLStripCharFilter.java | 2 +- .../charfilter/HTMLStripCharFilter.jflex | 2 +- .../classic/ClassicTokenizerImpl.java | 2 +- .../classic/ClassicTokenizerImpl.jflex | 2 +- .../compound/hyphenation/PatternParser.java | 21 --- .../email/UAX29URLEmailTokenizerImpl.java | 2 +- .../email/UAX29URLEmailTokenizerImpl.jflex | 2 +- .../apache/lucene/analysis/en/KStemmer.java | 4 - .../hunspell/HunspellStemFilterFactory.java | 2 - .../miscellaneous/FixBrokenOffsetsFilter.java | 3 - .../wikipedia/WikipediaTokenizerImpl.java | 2 +- .../wikipedia/WikipediaTokenizerImpl.jflex | 2 +- .../charfilter/TestHTMLStripCharFilter.java | 3 +- .../cjk/TestCJKBigramFilterFactory.java | 11 +- .../TestWordDelimiterFilter.java | 12 -- .../TestWordDelimiterGraphFilter.java | 10 -- .../analysis/ngram/TestNGramTokenizer.java | 2 +- .../TestSimplePatternSplitTokenizer.java | 12 +- .../synonym/TestSynonymMapFilter.java | 8 - .../TestWikipediaTokenizerFactory.java | 3 +- .../analysis/opennlp/OpenNLPTokenizer.java | 2 - .../opennlp/TestOpenNLPTokenizerFactory.java | 14 +- .../phonetic/DoubleMetaphoneFilter.java | 2 - .../cn/smart/hhmm/BigramDictionary.java | 7 +- .../src/java/org/egothor/stemmer/Trie.java | 3 +- .../lucene70/Lucene70NormsProducer.java | 14 -- .../lucene70/Lucene70SegmentInfoFormat.java | 17 +- .../lucene80/TestIndexedDISI.java | 5 +- .../byTask/tasks/ReadTokensTask.java | 6 - .../tasks/SearchTravRetHighlightTask.java | 1 + .../Test20NewsgroupsClassification.java | 10 +- .../codecs/memory/DirectPostingsFormat.java | 20 --- .../lucene/codecs/memory/FSTTermsReader.java | 2 +- .../simpletext/SimpleTextBKDReader.java | 54 ------ .../simpletext/SimpleTextBKDWriter.java | 34 ---- .../simpletext/SimpleTextPointsWriter.java | 8 - .../simpletext/SimpleTextVectorWriter.java | 4 +- .../standard/StandardTokenizerImpl.java | 2 +- .../standard/StandardTokenizerImpl.jflex | 2 +- .../apache/lucene/codecs/VectorWriter.java | 2 - .../lucene86/Lucene86SegmentInfoFormat.java | 7 +- .../lucene90/Lucene90FieldInfosFormat.java | 15 +- .../codecs/lucene90/Lucene90VectorReader.java | 3 - .../blocktree/IntersectTermsEnumFrame.java | 2 - .../Lucene90BlockTreeTermsReader.java | 7 - .../blocktree/SegmentTermsEnumFrame.java | 2 - .../org/apache/lucene/index/CheckIndex.java | 169 ------------------ .../org/apache/lucene/index/OrdinalMap.java | 1 - .../lucene/search/spans/SpanScorer.java | 6 +- .../org/apache/lucene/util/OfflineSorter.java | 2 - .../java/org/apache/lucene/util/fst/FST.java | 2 - .../apache/lucene/util/hnsw/HnswGraph.java | 4 +- .../lucene/util/hnsw/HnswGraphBuilder.java | 2 +- .../lucene/util/hnsw/NeighborArray.java | 1 - .../lucene/util/packed/gen_BulkOperation.py | 7 +- .../lucene/analysis/TestStopFilter.java | 
3 +- .../codecs/lucene90/TestIndexedDISI.java | 5 +- .../index/Test2BSortedDocValuesOrds.java | 1 - .../org/apache/lucene/index/TestCodecs.java | 4 - .../index/TestDemoParallelLeafReader.java | 8 +- .../test/org/apache/lucene/index/TestDoc.java | 2 +- .../apache/lucene/index/TestIndexSorting.java | 4 - .../apache/lucene/index/TestIndexWriter.java | 1 - .../lucene/index/TestIndexWriterCommit.java | 2 - .../index/TestIndexWriterExceptions.java | 2 +- .../index/TestIndexWriterOnDiskFull.java | 3 - .../lucene/index/TestIndexWriterReader.java | 2 - .../TestIndexWriterThreadsToSegments.java | 2 - .../index/TestIndexWriterWithThreads.java | 2 - .../index/TestNumericDocValuesUpdates.java | 7 - .../org/apache/lucene/index/TestPayloads.java | 5 - .../lucene/index/TestStressIndexing.java | 8 +- .../org/apache/lucene/index/TestTerm.java | 1 + .../lucene/index/TestTermVectorsReader.java | 1 - .../lucene/index/TestTermsHashPerField.java | 2 +- .../lucene/index/TestTwoPhaseCommitTool.java | 4 - .../apache/lucene/search/TestBoolean2.java | 3 - .../TestControlledRealTimeReopenThread.java | 2 +- .../lucene/search/TestLongValuesSource.java | 1 - .../lucene/search/TestMatchesIterator.java | 2 - .../search/TestSortedNumericSortField.java | 1 + .../lucene/search/TestSortedSetSortField.java | 1 + .../apache/lucene/search/TestTermQuery.java | 3 - .../apache/lucene/search/TestTotalHits.java | 1 + .../apache/lucene/search/TestWildcard.java | 1 + .../apache/lucene/store/TestMultiMMap.java | 2 +- .../lucene/store/TestNRTCachingDirectory.java | 3 +- .../org/apache/lucene/util/bkd/TestBKD.java | 2 - .../org/apache/lucene/util/fst/TestFSTs.java | 19 +- .../org/apache/lucene/util/fst/TestUtil.java | 23 --- .../expressions/ExpressionRescorer.java | 16 -- .../expressions/js/JavascriptCompiler.java | 2 +- .../TestExpressionValueSource.java | 1 + .../org/apache/lucene/facet/FacetsConfig.java | 8 - .../lucene/facet/LongValueFacetCounts.java | 6 +- .../facet/taxonomy/TaxonomyFacetLabels.java | 4 - .../facet/TestLongValueFacetCounts.java | 3 - .../lucene/facet/taxonomy/TestFacetLabel.java | 1 + .../taxonomy/TestSearcherTaxonomyManager.java | 15 -- .../TestDirectoryTaxonomyReader.java | 3 - .../grouping/TestAllGroupHeadsCollector.java | 1 - .../lucene/search/grouping/TestGrouping.java | 1 - .../lucene/search/highlight/TokenGroup.java | 3 +- .../uhighlight/TestUnifiedHighlighterMTQ.java | 2 +- .../lucene/search/join/TestJoinUtil.java | 8 +- .../components/AnalysisPanelProvider.java | 2 - .../menubar/OptimizeIndexDialogFactory.java | 5 - .../models/documents/TestDocumentsImpl.java | 18 +- .../lucene/index/memory/MemoryIndex.java | 9 +- .../TestMemoryIndexAgainstDirectory.java | 11 -- .../lucene/misc/util/fst/TestFSTsMisc.java | 1 - .../monitor/TestBooleanTermExtractor.java | 1 - .../function/valuesource/EnumFieldSource.java | 2 - .../queries/intervals/IntervalQuery.java | 6 +- .../lucene/queries/mlt/MoreLikeThis.java | 17 +- .../queryparser/classic/QueryParser.java | 2 +- .../classic/QueryParserTokenManager.java | 2 +- .../queryparser/flexible/messages/NLS.java | 5 +- .../standard/parser/ParseException.java | 52 +++--- .../standard/parser/StandardSyntaxParser.java | 2 +- .../StandardSyntaxParserTokenManager.java | 2 +- .../surround/parser/QueryParser.java | 2 +- .../parser/QueryParserTokenManager.java | 2 +- .../standard/TestMultiFieldQPHelper.java | 1 - .../flexible/standard/TestQPHelper.java | 12 -- .../lucene/replicator/nrt/ReplicaNode.java | 2 - ...TestIndexAndTaxonomyReplicationClient.java | 3 +- 
.../replicator/nrt/SimplePrimaryNode.java | 3 - .../replicator/nrt/SimpleReplicaNode.java | 2 +- .../replicator/nrt/TestNRTReplication.java | 2 - .../nrt/TestStressNRTReplication.java | 9 - .../sandbox/search/TermAutomatonQuery.java | 6 +- .../sandbox/search/TermAutomatonScorer.java | 12 +- .../TokenStreamToTermAutomatonQuery.java | 1 - .../TestIDVersionPostingsFormat.java | 6 - .../search/TestTermAutomatonQuery.java | 2 +- .../prefix/tree/DateRangePrefixTree.java | 1 - .../lucene/spatial/SpatialTestCase.java | 5 - .../lucene/spatial/bbox/TestBBoxStrategy.java | 3 +- .../TestRandomSpatialOpFuzzyPrefixTree.java | 2 - .../TestTermQueryPrefixGridStrategy.java | 3 +- .../prefix/tree/TestDateRangePrefixTree.java | 2 - .../TestGeo3dShapeWGS84ModelRectRelation.java | 2 - .../spatial3d/geom/GeoComplexPolygon.java | 10 -- .../spatial3d/geom/GeoDegeneratePath.java | 49 ----- .../spatial3d/geom/GeoPolygonFactory.java | 4 - .../spatial3d/geom/GeoStandardPath.java | 27 --- .../lucene/spatial3d/geom/XYZBounds.java | 7 - .../lucene/spatial3d/TestGeo3DPoint.java | 8 +- .../lucene/spatial3d/geom/TestGeoBBox.java | 1 - .../spatial3d/geom/TestGeoExactCircle.java | 24 +-- .../lucene/spatial3d/geom/TestGeoPath.java | 21 +-- .../lucene/spatial3d/geom/TestGeoPolygon.java | 53 ++---- .../spatial3d/geom/TestRandomGeoPolygon.java | 16 +- .../search/suggest/document/NRTSuggester.java | 2 +- .../lucene/analysis/CollationTestBase.java | 19 -- .../asserting/AssertingDocValuesFormat.java | 6 +- .../asserting/AssertingNormsFormat.java | 2 +- .../index/BasePostingsFormatTestCase.java | 1 - .../index/BaseTermVectorsFormatTestCase.java | 1 - .../org/apache/lucene/index/RandomCodec.java | 22 --- .../lucene/index/RandomPostingsTester.java | 1 - .../apache/lucene/util/LuceneTestCase.java | 23 --- .../apache/lucene/util/TestExpectThrows.java | 37 ++-- 168 files changed, 276 insertions(+), 1101 deletions(-) diff --git a/build.gradle b/build.gradle index 31d390eccfe..8a335a510da 100644 --- a/build.gradle +++ b/build.gradle @@ -86,7 +86,7 @@ ext { scriptDepVersions = [ "apache-rat": "0.11", "commons-codec": "1.13", - "ecj": "3.19.0", + "ecj": "3.25.0", "javacc": "7.0.4", "jflex": "1.7.0", "jgit": "5.9.0.202009080501-r", diff --git a/gradle/generation/javacc.gradle b/gradle/generation/javacc.gradle index df1c7fa0ca8..c60c1ce2025 100644 --- a/gradle/generation/javacc.gradle +++ b/gradle/generation/javacc.gradle @@ -95,6 +95,12 @@ def commonCleanups = { FileTree generatedFiles -> text = text.replace( "public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }", "// (setDebugStream omitted).") + text = text.replace( + "public class QueryParserTokenManager ", + '@SuppressWarnings("unused") public class QueryParserTokenManager ') + text = text.replace( + "public class StandardSyntaxParserTokenManager ", + '@SuppressWarnings("unused") public class StandardSyntaxParserTokenManager ') return text }) } @@ -123,6 +129,9 @@ configure(project(":lucene:queryparser")) { text = text.replace( "final private LookaheadSuccess jj_ls =", "static final private LookaheadSuccess jj_ls =") + text = text.replace( + "public class QueryParser ", + '@SuppressWarnings("unused") public class QueryParser ') return text }) } @@ -145,6 +154,9 @@ configure(project(":lucene:queryparser")) { text = text.replace( "new java.util.ArrayList", "new java.util.ArrayList<>") + text = text.replace( + "public class QueryParser ", + '@SuppressWarnings("unused") public class QueryParser ') return text }) } @@ -221,6 +233,9 @@ 
configure(project(":lucene:queryparser")) { text = text.replace( "Collections. singletonList", "Collections.singletonList") + text = text.replace( + "public class StandardSyntaxParser ", + '@SuppressWarnings("unused") public class StandardSyntaxParser ') return text }) } diff --git a/gradle/ide/eclipse/dot.settings/org.eclipse.jdt.core.prefs b/gradle/ide/eclipse/dot.settings/org.eclipse.jdt.core.prefs index 8dba52608be..a7754219067 100644 --- a/gradle/ide/eclipse/dot.settings/org.eclipse.jdt.core.prefs +++ b/gradle/ide/eclipse/dot.settings/org.eclipse.jdt.core.prefs @@ -3,6 +3,7 @@ eclipse.preferences.version=1 org.eclipse.jdt.core.compiler.codegen.targetPlatform=11 org.eclipse.jdt.core.compiler.compliance=11 org.eclipse.jdt.core.compiler.doc.comment.support=enabled +org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=enabled org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=error org.eclipse.jdt.core.compiler.problem.assertIdentifier=error org.eclipse.jdt.core.compiler.problem.comparingIdentical=error @@ -32,6 +33,7 @@ org.eclipse.jdt.core.compiler.problem.noEffectAssignment=error org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=error org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=error org.eclipse.jdt.core.compiler.problem.unusedImport=error +org.eclipse.jdt.core.compiler.problem.unusedLocal=error org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled org.eclipse.jdt.core.compiler.source=11 diff --git a/gradle/validation/ecj-lint/ecj.javadocs.prefs b/gradle/validation/ecj-lint/ecj.javadocs.prefs index 975707055ff..8bfb42da977 100644 --- a/gradle/validation/ecj-lint/ecj.javadocs.prefs +++ b/gradle/validation/ecj-lint/ecj.javadocs.prefs @@ -1,13 +1,24 @@ -#Sun Sep 23 20:55:03 EDT 2012 eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled +org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull +org.eclipse.jdt.core.compiler.annotation.nonnull.secondary= org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault -org.eclipse.jdt.core.compiler.annotation.nonnullisdefault=disabled +org.eclipse.jdt.core.compiler.annotation.nonnullbydefault.secondary= org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable +org.eclipse.jdt.core.compiler.annotation.nullable.secondary= org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate org.eclipse.jdt.core.compiler.codegen.targetPlatform=11 +org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve org.eclipse.jdt.core.compiler.compliance=11 +org.eclipse.jdt.core.compiler.debug.lineNumber=generate +org.eclipse.jdt.core.compiler.debug.localVariable=generate +org.eclipse.jdt.core.compiler.debug.sourceFile=generate org.eclipse.jdt.core.compiler.doc.comment.support=enabled +org.eclipse.jdt.core.compiler.problem.APILeak=error +org.eclipse.jdt.core.compiler.problem.annotatedTypeArgumentToUnannotated=error org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=error org.eclipse.jdt.core.compiler.problem.assertIdentifier=error org.eclipse.jdt.core.compiler.problem.autoboxing=ignore @@ -18,7 +29,9 @@ 
org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled org.eclipse.jdt.core.compiler.problem.discouragedReference=error org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore +org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=ignore org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore @@ -37,8 +50,10 @@ org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=disabled org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=error -org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore -org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore +org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore +org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=error +org.eclipse.jdt.core.compiler.problem.missingEnumCaseDespiteDefault=disabled +org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=error org.eclipse.jdt.core.compiler.problem.missingJavadocComments=ignore org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=public @@ -54,43 +69,63 @@ org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignor org.eclipse.jdt.core.compiler.problem.noEffectAssignment=error org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=error org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore +org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=error +org.eclipse.jdt.core.compiler.problem.nonnullTypeVariableFromLegacyInvocation=error +org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error org.eclipse.jdt.core.compiler.problem.nullReference=ignore +org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error +org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=error org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=error org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore +org.eclipse.jdt.core.compiler.problem.pessimisticNullAnalysisForFreeTypeVariables=error org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore +org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore org.eclipse.jdt.core.compiler.problem.rawTypeReference=ignore +org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=error org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore +org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=error org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=ignore 
-org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled +org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=enabled org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled +org.eclipse.jdt.core.compiler.problem.suppressWarningsNotFullyAnalysed=error +org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore +org.eclipse.jdt.core.compiler.problem.terminalDeprecation=ignore org.eclipse.jdt.core.compiler.problem.typeParameterHiding=ignore org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=ignore +org.eclipse.jdt.core.compiler.problem.unclosedCloseable=ignore org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=ignore +org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentType=error +org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentTypeStrict=disabled +org.eclipse.jdt.core.compiler.problem.unlikelyEqualsArgumentType=error org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore +org.eclipse.jdt.core.compiler.problem.unstableAutoModuleName=ignore org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled +org.eclipse.jdt.core.compiler.problem.unusedExceptionParameter=ignore org.eclipse.jdt.core.compiler.problem.unusedImport=error -org.eclipse.jdt.core.compiler.problem.unusedLabel=ignore -org.eclipse.jdt.core.compiler.problem.unusedLocal=ignore +org.eclipse.jdt.core.compiler.problem.unusedLabel=error +org.eclipse.jdt.core.compiler.problem.unusedLocal=error org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled -org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=ignore +org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=error +org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore org.eclipse.jdt.core.compiler.problem.unusedWarningToken=ignore org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error +org.eclipse.jdt.core.compiler.release=disabled org.eclipse.jdt.core.compiler.source=11 diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java index ebb92cdcaa1..699de295ea6 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.java @@ -32,7 +32,7 @@ import org.apache.lucene.analysis.util.OpenStringBuilder; /** * A CharFilter that wraps another Reader and attempts to strip out HTML 
constructs. */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) public final class HTMLStripCharFilter extends BaseCharFilter { diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex index 8b83de0642e..1540df656e9 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/HTMLStripCharFilter.jflex @@ -30,7 +30,7 @@ import org.apache.lucene.analysis.util.OpenStringBuilder; /** * A CharFilter that wraps another Reader and attempts to strip out HTML constructs. */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) %% %unicode 9.0 diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.java index 9f526a9715b..55672d5cfb3 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.java @@ -22,7 +22,7 @@ package org.apache.lucene.analysis.classic; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; /** This class implements the classic lucene StandardTokenizer up until 3.0 */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused", "fallthrough"}) class ClassicTokenizerImpl { /** This character denotes the end of file */ diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.jflex index 798d9a5dc97..c170962f1db 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.jflex +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/classic/ClassicTokenizerImpl.jflex @@ -22,7 +22,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; /** * This class implements the classic lucene StandardTokenizer up until 3.0 */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) %% %class ClassicTokenizerImpl diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/hyphenation/PatternParser.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/hyphenation/PatternParser.java index 33a762bb60e..886f3ffaaad 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/hyphenation/PatternParser.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/hyphenation/PatternParser.java @@ -22,7 +22,6 @@ import javax.xml.parsers.SAXParserFactory; import org.xml.sax.Attributes; import org.xml.sax.InputSource; import org.xml.sax.SAXException; -import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; @@ -341,24 +340,4 @@ public class PatternParser extends DefaultHandler { word = readToken(chars); } } - - /** Returns a string of the location. 
*/ - private String getLocationString(SAXParseException ex) { - StringBuilder str = new StringBuilder(); - - String systemId = ex.getSystemId(); - if (systemId != null) { - int index = systemId.lastIndexOf('/'); - if (index != -1) { - systemId = systemId.substring(index + 1); - } - str.append(systemId); - } - str.append(':'); - str.append(ex.getLineNumber()); - str.append(':'); - str.append(ex.getColumnNumber()); - - return str.toString(); - } // getLocationString(SAXParseException):String } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.java index fa5b7887c85..20d9bc42f91 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.java @@ -42,7 +42,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; *
 *   <li>&lt;EMOJI&gt;: A sequence of Emoji characters
  • * */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) public final class UAX29URLEmailTokenizerImpl { diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.jflex index 6f2028f0827..ec7ebc5be73 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.jflex +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/email/UAX29URLEmailTokenizerImpl.jflex @@ -40,7 +40,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; *
 *   <li>&lt;EMOJI&gt;: A sequence of Emoji characters
  • * */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) %% %unicode 9.0 diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/KStemmer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/KStemmer.java index e60bed8a9bb..204d7e53eea 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/KStemmer.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/en/KStemmer.java @@ -619,10 +619,6 @@ public class KStemmer { * CharArrayMap(maxCacheSize,false); } ***/ - private char finalChar() { - return word.charAt(k); - } - private char penultChar() { return word.charAt(k - 1); } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/HunspellStemFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/HunspellStemFilterFactory.java index 2fa2ec8a419..63ae9826506 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/HunspellStemFilterFactory.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/HunspellStemFilterFactory.java @@ -59,8 +59,6 @@ public class HunspellStemFilterFactory extends TokenFilterFactory implements Res private static final String PARAM_DICTIONARY = "dictionary"; private static final String PARAM_AFFIX = "affix"; - // NOTE: this one is currently unused?: - private static final String PARAM_RECURSION_CAP = "recursionCap"; private static final String PARAM_IGNORE_CASE = "ignoreCase"; private static final String PARAM_LONGEST_ONLY = "longestOnly"; diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/FixBrokenOffsetsFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/FixBrokenOffsetsFilter.java index 1b758f52066..426d9d89688 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/FixBrokenOffsetsFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/FixBrokenOffsetsFilter.java @@ -31,7 +31,6 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; public final class FixBrokenOffsetsFilter extends TokenFilter { private int lastStartOffset; - private int lastEndOffset; private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); @@ -58,7 +57,6 @@ public final class FixBrokenOffsetsFilter extends TokenFilter { public void reset() throws IOException { super.reset(); lastStartOffset = 0; - lastEndOffset = 0; } private void fixOffsets() { @@ -72,6 +70,5 @@ public final class FixBrokenOffsetsFilter extends TokenFilter { } offsetAtt.setOffset(startOffset, endOffset); lastStartOffset = startOffset; - lastEndOffset = endOffset; } } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java index 93886bf745c..3e97b54ac63 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java @@ -22,7 +22,7 @@ package org.apache.lucene.analysis.wikipedia; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; /** JFlex-generated tokenizer that is aware of Wikipedia syntax. 
*/ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused", "fallthrough"}) class WikipediaTokenizerImpl { /** This character denotes the end of file */ diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex index 3ac31e45fc5..4b4c67761bd 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.jflex @@ -22,7 +22,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; /** * JFlex-generated tokenizer that is aware of Wikipedia syntax. */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) %% %class WikipediaTokenizerImpl diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/charfilter/TestHTMLStripCharFilter.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/charfilter/TestHTMLStripCharFilter.java index 76aa70b2cca..d91baa657e5 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/charfilter/TestHTMLStripCharFilter.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/charfilter/TestHTMLStripCharFilter.java @@ -358,9 +358,8 @@ public class TestHTMLStripCharFilter extends BaseTokenStreamTestCase { static void assertLegalOffsets(String in) throws Exception { int length = in.length(); HTMLStripCharFilter reader = new HTMLStripCharFilter(new BufferedReader(new StringReader(in))); - int ch = 0; int off = 0; - while ((ch = reader.read()) != -1) { + while (reader.read() != -1) { int correction = reader.correctOffset(off); assertTrue( "invalid offset correction: " + off + "->" + correction + " for doc of length: " + length, diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/cjk/TestCJKBigramFilterFactory.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/cjk/TestCJKBigramFilterFactory.java index b7bbb248e92..2bf8bedb955 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/cjk/TestCJKBigramFilterFactory.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/cjk/TestCJKBigramFilterFactory.java @@ -56,11 +56,10 @@ public class TestCJKBigramFilterFactory extends BaseTokenStreamFactoryTestCase { /** Test that bogus arguments result in exception */ public void testBogusArguments() throws Exception { - IllegalArgumentException expected = - expectThrows( - IllegalArgumentException.class, - () -> { - tokenFilterFactory("CJKBigram", "bogusArg", "bogusValue"); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + tokenFilterFactory("CJKBigram", "bogusArg", "bogusValue"); + }); } } diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java index 013c254d256..9f8ab64dddc 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterFilter.java @@ -673,16 +673,4 @@ public class TestWordDelimiterFilter extends BaseTokenStreamTestCase { null, false); } - - private Analyzer getAnalyzer(final int flags) { - return new Analyzer() { - - @Override - protected TokenStreamComponents createComponents(String fieldName) 
{ - Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false); - return new TokenStreamComponents( - tokenizer, new WordDelimiterFilter(tokenizer, flags, null)); - } - }; - } } diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java index 670faf6e4f5..f0fe89c57ca 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java @@ -800,16 +800,6 @@ public class TestWordDelimiterGraphFilter extends BaseTokenStreamTestCase { return (flags & flag) != 0; } - private static boolean isEnglishPossessive(String text, int pos) { - if (pos > 2) { - if ((text.charAt(pos - 1) == 's' || text.charAt(pos - 1) == 'S') - && (pos == text.length() || text.charAt(pos) != '-')) { - text = text.substring(0, text.length() - 2); - } - } - return true; - } - private static class WordPart { final String part; final int startOffset; diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/TestNGramTokenizer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/TestNGramTokenizer.java index f0b10660c9b..5908cdf0f10 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/TestNGramTokenizer.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/ngram/TestNGramTokenizer.java @@ -44,7 +44,7 @@ public class TestNGramTokenizer extends BaseTokenStreamTestCase { expectThrows( IllegalArgumentException.class, () -> { - NGramTokenizer tok = new NGramTokenizer(2, 1); + new NGramTokenizer(2, 1); }); } diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestSimplePatternSplitTokenizer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestSimplePatternSplitTokenizer.java index 2cafcfc1549..82fc2ece185 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestSimplePatternSplitTokenizer.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/pattern/TestSimplePatternSplitTokenizer.java @@ -70,7 +70,7 @@ public class TestSimplePatternSplitTokenizer extends BaseTokenStreamTestCase { public void testNoTokens() throws Exception { Tokenizer t = new SimplePatternSplitTokenizer(".*"); - CharTermAttribute termAtt = t.getAttribute(CharTermAttribute.class); + t.getAttribute(CharTermAttribute.class); String s; while (true) { s = TestUtil.randomUnicodeString(random()); @@ -95,7 +95,7 @@ public class TestSimplePatternSplitTokenizer extends BaseTokenStreamTestCase { public void testSplitSingleCharWhitespace() throws Exception { Tokenizer t = new SimplePatternSplitTokenizer("[ \t\r\n]"); - CharTermAttribute termAtt = t.getAttribute(CharTermAttribute.class); + t.getAttribute(CharTermAttribute.class); t.setReader(new StringReader("a \tb c")); assertTokenStreamContents( t, new String[] {"a", "b", "c"}, new int[] {0, 3, 7}, new int[] {1, 4, 8}); @@ -103,7 +103,7 @@ public class TestSimplePatternSplitTokenizer extends BaseTokenStreamTestCase { public void testSplitMultiCharWhitespace() throws Exception { Tokenizer t = new SimplePatternSplitTokenizer("[ \t\r\n]*"); - CharTermAttribute termAtt = t.getAttribute(CharTermAttribute.class); + t.getAttribute(CharTermAttribute.class); t.setReader(new StringReader("a \tb c")); 
assertTokenStreamContents( t, new String[] {"a", "b", "c"}, new int[] {0, 3, 7}, new int[] {1, 4, 8}); @@ -111,21 +111,21 @@ public class TestSimplePatternSplitTokenizer extends BaseTokenStreamTestCase { public void testLeadingNonToken() throws Exception { Tokenizer t = new SimplePatternSplitTokenizer("[ \t\r\n]*"); - CharTermAttribute termAtt = t.getAttribute(CharTermAttribute.class); + t.getAttribute(CharTermAttribute.class); t.setReader(new StringReader(" a c")); assertTokenStreamContents(t, new String[] {"a", "c"}, new int[] {4, 6}, new int[] {5, 7}); } public void testTrailingNonToken() throws Exception { Tokenizer t = new SimplePatternSplitTokenizer("[ \t\r\n]*"); - CharTermAttribute termAtt = t.getAttribute(CharTermAttribute.class); + t.getAttribute(CharTermAttribute.class); t.setReader(new StringReader("a c ")); assertTokenStreamContents(t, new String[] {"a", "c"}, new int[] {0, 2}, new int[] {1, 3}); } public void testEmptyStringPatternOneMatch() throws Exception { Tokenizer t = new SimplePatternSplitTokenizer("a*"); - CharTermAttribute termAtt = t.getAttribute(CharTermAttribute.class); + t.getAttribute(CharTermAttribute.class); t.setReader(new StringReader("bbab")); assertTokenStreamContents(t, new String[] {"bb", "b"}, new int[] {0, 3}, new int[] {2, 4}); } diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/synonym/TestSynonymMapFilter.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/synonym/TestSynonymMapFilter.java index abd3b890f18..83e7f51f538 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/synonym/TestSynonymMapFilter.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/synonym/TestSynonymMapFilter.java @@ -62,14 +62,6 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase { b.add(inputCharsRef.get(), outputCharsRef.get(), keepOrig); } - private void assertEquals(CharTermAttribute term, String expected) { - assertEquals(expected.length(), term.length()); - final char[] buffer = term.buffer(); - for (int chIDX = 0; chIDX < expected.length(); chIDX++) { - assertEquals(expected.charAt(chIDX), buffer[chIDX]); - } - } - // For the output string: separate positions with a space, // and separate multiple tokens at each position with a // /. 
If a token should have end offset != the input diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/wikipedia/TestWikipediaTokenizerFactory.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/wikipedia/TestWikipediaTokenizerFactory.java index b654880ac0f..ae2491f68e4 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/wikipedia/TestWikipediaTokenizerFactory.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/wikipedia/TestWikipediaTokenizerFactory.java @@ -156,8 +156,7 @@ public class TestWikipediaTokenizerFactory extends BaseTokenStreamFactoryTestCas expectThrows( IllegalArgumentException.class, () -> { - Tokenizer tf = - tokenizerFactory(WIKIPEDIA, TOKEN_OUTPUT, "-1").create(newAttributeFactory()); + tokenizerFactory(WIKIPEDIA, TOKEN_OUTPUT, "-1").create(newAttributeFactory()); }); assertTrue( expected diff --git a/lucene/analysis/opennlp/src/java/org/apache/lucene/analysis/opennlp/OpenNLPTokenizer.java b/lucene/analysis/opennlp/src/java/org/apache/lucene/analysis/opennlp/OpenNLPTokenizer.java index d465766f946..134fa25d855 100644 --- a/lucene/analysis/opennlp/src/java/org/apache/lucene/analysis/opennlp/OpenNLPTokenizer.java +++ b/lucene/analysis/opennlp/src/java/org/apache/lucene/analysis/opennlp/OpenNLPTokenizer.java @@ -43,7 +43,6 @@ public final class OpenNLPTokenizer extends SegmentingTokenizerBase { private int termNum = 0; private int sentenceStart = 0; - private NLPSentenceDetectorOp sentenceOp = null; private NLPTokenizerOp tokenizerOp = null; public OpenNLPTokenizer( @@ -54,7 +53,6 @@ public final class OpenNLPTokenizer extends SegmentingTokenizerBase { throw new IllegalArgumentException( "OpenNLPTokenizer: both a Sentence Detector and a Tokenizer are required"); } - this.sentenceOp = sentenceOp; this.tokenizerOp = tokenizerOp; } diff --git a/lucene/analysis/opennlp/src/test/org/apache/lucene/analysis/opennlp/TestOpenNLPTokenizerFactory.java b/lucene/analysis/opennlp/src/test/org/apache/lucene/analysis/opennlp/TestOpenNLPTokenizerFactory.java index 4573ce7e960..315af01ee0e 100644 --- a/lucene/analysis/opennlp/src/test/org/apache/lucene/analysis/opennlp/TestOpenNLPTokenizerFactory.java +++ b/lucene/analysis/opennlp/src/test/org/apache/lucene/analysis/opennlp/TestOpenNLPTokenizerFactory.java @@ -82,10 +82,9 @@ public class TestOpenNLPTokenizerFactory extends BaseTokenStreamTestCase { expectThrows( IllegalArgumentException.class, () -> { - CustomAnalyzer analyzer = - CustomAnalyzer.builder(new ClasspathResourceLoader(getClass())) - .withTokenizer("opennlp", "tokenizerModel", "en-test-tokenizer.bin") - .build(); + CustomAnalyzer.builder(new ClasspathResourceLoader(getClass())) + .withTokenizer("opennlp", "tokenizerModel", "en-test-tokenizer.bin") + .build(); }); assertTrue( expected.getMessage().contains("Configuration Error: missing parameter 'sentenceModel'")); @@ -97,10 +96,9 @@ public class TestOpenNLPTokenizerFactory extends BaseTokenStreamTestCase { expectThrows( IllegalArgumentException.class, () -> { - CustomAnalyzer analyzer = - CustomAnalyzer.builder(new ClasspathResourceLoader(getClass())) - .withTokenizer("opennlp", "sentenceModel", "en-test-sent.bin") - .build(); + CustomAnalyzer.builder(new ClasspathResourceLoader(getClass())) + .withTokenizer("opennlp", "sentenceModel", "en-test-sent.bin") + .build(); }); assertTrue( expected.getMessage().contains("Configuration Error: missing parameter 'tokenizerModel'")); diff --git 
a/lucene/analysis/phonetic/src/java/org/apache/lucene/analysis/phonetic/DoubleMetaphoneFilter.java b/lucene/analysis/phonetic/src/java/org/apache/lucene/analysis/phonetic/DoubleMetaphoneFilter.java index 50ce6154e54..e1f267a5d66 100644 --- a/lucene/analysis/phonetic/src/java/org/apache/lucene/analysis/phonetic/DoubleMetaphoneFilter.java +++ b/lucene/analysis/phonetic/src/java/org/apache/lucene/analysis/phonetic/DoubleMetaphoneFilter.java @@ -27,8 +27,6 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; /** Filter for DoubleMetaphone (supporting secondary codes) */ public final class DoubleMetaphoneFilter extends TokenFilter { - private static final String TOKEN_TYPE = "DoubleMetaphone"; - private final LinkedList remainingTokens = new LinkedList<>(); private final DoubleMetaphone encoder = new DoubleMetaphone(); private final boolean inject; diff --git a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BigramDictionary.java b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BigramDictionary.java index 766ea5e3d3a..c0014b4b32f 100644 --- a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BigramDictionary.java +++ b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BigramDictionary.java @@ -53,8 +53,6 @@ class BigramDictionary extends AbstractDictionary { private int max = 0; - private int repeat = 0; - // static Logger log = Logger.getLogger(BigramDictionary.class); public static synchronized BigramDictionary getInstance() { @@ -143,7 +141,7 @@ class BigramDictionary extends AbstractDictionary { */ public void loadFromFile(String dctFilePath) throws IOException { - int i, cnt, length, total = 0; + int i, cnt, length; // The file only counted 6763 Chinese characters plus 5 reserved slots 3756~3760. // The 3756th is used (as a header) to store information. 
int[] buffer = new int[3]; @@ -163,7 +161,6 @@ class BigramDictionary extends AbstractDictionary { if (cnt <= 0) { continue; } - total += cnt; int j = 0; while (j < cnt) { dctFile.read(intBuffer); @@ -232,13 +229,11 @@ class BigramDictionary extends AbstractDictionary { if (hash2 < 0) hash2 = PRIME_BIGRAM_LENGTH + hash2; int index = hash1; int i = 1; - repeat++; while (bigramHashTable[index] != 0 && bigramHashTable[index] != hashId && i < PRIME_BIGRAM_LENGTH) { index = (hash1 + i * hash2) % PRIME_BIGRAM_LENGTH; i++; - repeat++; if (i > max) max = i; } // System.out.println(i - 1); diff --git a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Trie.java b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Trie.java index 471cb98aa10..f09656e27fb 100644 --- a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Trie.java +++ b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Trie.java @@ -228,7 +228,6 @@ public class Trie { int cmd = -1; StrEnum e = new StrEnum(key, forward); Character ch = null; - Character aux = null; for (int i = 0; i < key.length(); ) { ch = e.next(); @@ -243,7 +242,7 @@ public class Trie { for (int skip = c.skip; skip > 0; skip--) { if (i < key.length()) { - aux = e.next(); + e.next(); } else { return null; } diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70NormsProducer.java b/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70NormsProducer.java index 14461993acb..ca4c015e6a2 100644 --- a/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70NormsProducer.java +++ b/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70NormsProducer.java @@ -327,20 +327,6 @@ final class Lucene70NormsProducer extends NormsProducer implements Cloneable { }; } - private IndexInput getDisiInput2(FieldInfo field, NormsEntry entry) throws IOException { - IndexInput slice = null; - if (merging) { - slice = disiInputs.get(field.number); - } - if (slice == null) { - slice = data.slice("docs", entry.docsWithFieldOffset, entry.docsWithFieldLength); - if (merging) { - disiInputs.put(field.number, slice); - } - } - return slice; - } - @Override public NumericDocValues getNorms(FieldInfo field) throws IOException { final NormsEntry entry = norms.get(field.number); diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70SegmentInfoFormat.java b/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70SegmentInfoFormat.java index d67992d2c77..2d96dba8a53 100644 --- a/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70SegmentInfoFormat.java +++ b/lucene/backward-codecs/src/java/org/apache/lucene/backward_codecs/lucene70/Lucene70SegmentInfoFormat.java @@ -106,17 +106,14 @@ public class Lucene70SegmentInfoFormat extends SegmentInfoFormat { Throwable priorE = null; SegmentInfo si = null; try { - int format = - CodecUtil.checkIndexHeader( - input, - Lucene70SegmentInfoFormat.CODEC_NAME, - Lucene70SegmentInfoFormat.VERSION_START, - Lucene70SegmentInfoFormat.VERSION_CURRENT, - segmentID, - ""); - + CodecUtil.checkIndexHeader( + input, + Lucene70SegmentInfoFormat.CODEC_NAME, + Lucene70SegmentInfoFormat.VERSION_START, + Lucene70SegmentInfoFormat.VERSION_CURRENT, + segmentID, + ""); si = parseSegmentInfo(dir, input, segment, segmentID); - } catch (Throwable exception) { priorE = exception; } finally { diff --git 
a/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/TestIndexedDISI.java b/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/TestIndexedDISI.java index bf54bf317c9..de8b8f0fd15 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/TestIndexedDISI.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/TestIndexedDISI.java @@ -97,7 +97,6 @@ public class TestIndexedDISI extends LuceneTestCase { private void assertAdvanceBeyondEnd(BitSet set, Directory dir) throws IOException { final int cardinality = set.cardinality(); final byte denseRankPower = 9; // Not tested here so fixed to isolate factors - long length; int jumpTableentryCount; try (IndexOutput out = dir.createOutput("bar", IOContext.DEFAULT)) { jumpTableentryCount = @@ -434,9 +433,7 @@ public class TestIndexedDISI extends LuceneTestCase { length = out.getFilePointer(); } try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) { - IndexedDISI disi = - new IndexedDISI( - in, 0L, length, jumpTableEntryCount, denseRankPowerRead, set.cardinality()); + new IndexedDISI(in, 0L, length, jumpTableEntryCount, denseRankPowerRead, set.cardinality()); } // This tests the legality of the denseRankPower only, so we don't do anything with the disi } diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java index b7b693e204b..ec35782670f 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTokensTask.java @@ -97,12 +97,6 @@ public class ReadTokensTask extends PerfTask { int left; String s; - void init(String s) { - this.s = s; - left = s.length(); - this.upto = 0; - } - @Override public int read(char[] c) { return read(c, 0, c.length); diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchTravRetHighlightTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchTravRetHighlightTask.java index 54797f5907f..a8a92327cbb 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchTravRetHighlightTask.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchTravRetHighlightTask.java @@ -178,6 +178,7 @@ public class SearchTravRetHighlightTask extends SearchTravTask { void withTopDocs(IndexSearcher searcher, Query q, TopDocs hits) throws Exception; } + @SuppressWarnings("unused") private volatile int preventOptimizeAway = 0; private class StandardHLImpl implements HLImpl { diff --git a/lucene/classification/src/test/org/apache/lucene/classification/Test20NewsgroupsClassification.java b/lucene/classification/src/test/org/apache/lucene/classification/Test20NewsgroupsClassification.java index 06250239d41..778b0142bc7 100644 --- a/lucene/classification/src/test/org/apache/lucene/classification/Test20NewsgroupsClassification.java +++ b/lucene/classification/src/test/org/apache/lucene/classification/Test20NewsgroupsClassification.java @@ -437,7 +437,7 @@ public final class Test20NewsgroupsClassification extends LuceneTestCase { } } } - return new NewsPost(body.toString(), subject, groupName, number); + return new NewsPost(body.toString(), subject, groupName); } catch (Throwable e) { return null; } @@ -447,13 +447,11 @@ public final class Test20NewsgroupsClassification extends 
LuceneTestCase { private final String body; private final String subject; private final String group; - private final String number; - private NewsPost(String body, String subject, String group, String number) { + private NewsPost(String body, String subject, String group) { this.body = body; this.subject = subject; this.group = group; - this.number = number; } public String getBody() { @@ -467,9 +465,5 @@ public final class Test20NewsgroupsClassification extends LuceneTestCase { public String getGroup() { return group; } - - public String getNumber() { - return number; - } } } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java index a13522ba218..efea36df04b 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/DirectPostingsFormat.java @@ -1920,14 +1920,6 @@ public final class DirectPostingsFormat extends PostingsFormat { public HighFreqDocsEnum() {} - public int[] getDocIDs() { - return docIDs; - } - - public int[] getFreqs() { - return freqs; - } - public PostingsEnum reset(int[] docIDs, int[] freqs) { this.docIDs = docIDs; this.freqs = freqs; @@ -2106,18 +2098,6 @@ public final class DirectPostingsFormat extends PostingsFormat { posJump = hasOffsets ? 3 : 1; } - public int[] getDocIDs() { - return docIDs; - } - - public int[][] getPositions() { - return positions; - } - - public int getPosJump() { - return posJump; - } - public PostingsEnum reset(int[] docIDs, int[] freqs, int[][] positions, byte[][][] payloads) { this.docIDs = docIDs; this.freqs = freqs; diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java index abebcd9a487..abe075d3bfe 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/FSTTermsReader.java @@ -559,7 +559,7 @@ public class FSTTermsReader extends FieldsProducer { if (term == null) { return SeekStatus.END; } else { - return term.equals(target) ? SeekStatus.FOUND : SeekStatus.NOT_FOUND; + return term.get().equals(target) ? 
SeekStatus.FOUND : SeekStatus.NOT_FOUND; } } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java index c630e43659f..6c745674e8c 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDReader.java @@ -22,7 +22,6 @@ import static org.apache.lucene.codecs.simpletext.SimpleTextPointsWriter.BLOCK_V import java.io.IOException; import java.nio.charset.StandardCharsets; -import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.PointValues; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.Accountable; @@ -195,59 +194,6 @@ final class SimpleTextBKDReader extends PointValues implements Accountable { } } - private void visitCompressedDocValues( - int[] commonPrefixLengths, - byte[] scratchPackedValue, - IndexInput in, - int[] docIDs, - int count, - IntersectVisitor visitor, - int compressedDim) - throws IOException { - // the byte at `compressedByteOffset` is compressed using run-length compression, - // other suffix bytes are stored verbatim - final int compressedByteOffset = - compressedDim * bytesPerDim + commonPrefixLengths[compressedDim]; - commonPrefixLengths[compressedDim]++; - int i; - for (i = 0; i < count; ) { - scratchPackedValue[compressedByteOffset] = in.readByte(); - final int runLen = Byte.toUnsignedInt(in.readByte()); - for (int j = 0; j < runLen; ++j) { - for (int dim = 0; dim < numDims; dim++) { - int prefix = commonPrefixLengths[dim]; - in.readBytes(scratchPackedValue, dim * bytesPerDim + prefix, bytesPerDim - prefix); - } - visitor.visit(docIDs[i + j], scratchPackedValue); - } - i += runLen; - } - if (i != count) { - throw new CorruptIndexException( - "Sub blocks do not add up to the expected count: " + count + " != " + i, in); - } - } - - private int readCompressedDim(IndexInput in) throws IOException { - int compressedDim = in.readByte(); - if (compressedDim < -1 || compressedDim >= numIndexDims) { - throw new CorruptIndexException("Got compressedDim=" + compressedDim, in); - } - return compressedDim; - } - - private void readCommonPrefixes( - int[] commonPrefixLengths, byte[] scratchPackedValue, IndexInput in) throws IOException { - for (int dim = 0; dim < numDims; dim++) { - int prefix = in.readVInt(); - commonPrefixLengths[dim] = prefix; - if (prefix > 0) { - in.readBytes(scratchPackedValue, dim * bytesPerDim, prefix); - } - // System.out.println("R: " + dim + " of " + numDims + " prefix=" + prefix); - } - } - private void intersect( IntersectState state, int nodeID, byte[] cellMinPacked, byte[] cellMaxPacked) throws IOException { diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java index 211ef04eba9..d46bad23946 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextBKDWriter.java @@ -816,40 +816,6 @@ final class SimpleTextBKDWriter implements Closeable { } } - private void writeLeafBlockPackedValuesRange( - IndexOutput out, - int[] commonPrefixLengths, - int start, - int end, - IntFunction packedValues) - throws IOException { - for (int i = start; i < end; ++i) { - BytesRef ref = packedValues.apply(i); - assert ref.length == 
config.packedBytesLength; - - for (int dim = 0; dim < config.numDims; dim++) { - int prefix = commonPrefixLengths[dim]; - out.writeBytes( - ref.bytes, ref.offset + dim * config.bytesPerDim + prefix, config.bytesPerDim - prefix); - } - } - } - - private static int runLen( - IntFunction packedValues, int start, int end, int byteOffset) { - BytesRef first = packedValues.apply(start); - byte b = first.bytes[first.offset + byteOffset]; - for (int i = start + 1; i < end; ++i) { - BytesRef ref = packedValues.apply(i); - byte b2 = ref.bytes[ref.offset + byteOffset]; - assert Byte.toUnsignedInt(b2) >= Byte.toUnsignedInt(b); - if (b != b2) { - return i - start; - } - } - return end - start; - } - @Override public void close() throws IOException { if (tempInput != null) { diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java index faa90c1f0d9..4dab17d234c 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPointsWriter.java @@ -157,14 +157,6 @@ class SimpleTextPointsWriter extends PointsWriter { SimpleTextUtil.write(out, s, scratch); } - private void writeInt(IndexOutput out, int x) throws IOException { - SimpleTextUtil.write(out, Integer.toString(x), scratch); - } - - private void writeLong(IndexOutput out, long x) throws IOException { - SimpleTextUtil.write(out, Long.toString(x), scratch); - } - private void write(IndexOutput out, BytesRef b) throws IOException { SimpleTextUtil.write(out, b); } diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextVectorWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextVectorWriter.java index 16995372545..54948104f55 100644 --- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextVectorWriter.java +++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextVectorWriter.java @@ -74,8 +74,8 @@ public class SimpleTextVectorWriter extends VectorWriter { public void writeField(FieldInfo fieldInfo, VectorValues vectors) throws IOException { long vectorDataOffset = vectorData.getFilePointer(); List docIds = new ArrayList<>(); - int docV, ord = 0; - for (docV = vectors.nextDoc(); docV != NO_MORE_DOCS; docV = vectors.nextDoc(), ord++) { + int docV; + for (docV = vectors.nextDoc(); docV != NO_MORE_DOCS; docV = vectors.nextDoc()) { writeVectorValue(vectors); docIds.add(docV); } diff --git a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.java b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.java index 062911803ac..621c2f0a026 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.java +++ b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.java @@ -39,7 +39,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; *
*   <li>&lt;EMOJI&gt;: A sequence of Emoji characters
  • * */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) public final class StandardTokenizerImpl { diff --git a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex index e95a9b42811..702866d8dcf 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex +++ b/lucene/core/src/java/org/apache/lucene/analysis/standard/StandardTokenizerImpl.jflex @@ -37,7 +37,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; *
*   <li>&lt;EMOJI&gt;: A sequence of Emoji characters
  • * */ -@SuppressWarnings("fallthrough") +@SuppressWarnings({"unused","fallthrough"}) %% %unicode 9.0 diff --git a/lucene/core/src/java/org/apache/lucene/codecs/VectorWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/VectorWriter.java index fd3e90f9a8d..b5c968197a8 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/VectorWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/VectorWriter.java @@ -118,7 +118,6 @@ public abstract class VectorWriter implements Closeable { /** Tracks state of one sub-reader that we are merging */ private static class VectorValuesSub extends DocIDMerger.Sub { - final MergeState.DocMap docMap; final VectorValues values; final int segmentIndex; int count; @@ -127,7 +126,6 @@ public abstract class VectorWriter implements Closeable { super(docMap); this.values = values; this.segmentIndex = segmentIndex; - this.docMap = docMap; assert values.docID() == -1; } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java index 0f55de1793a..32a141b1e06 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java @@ -103,12 +103,9 @@ public class Lucene86SegmentInfoFormat extends SegmentInfoFormat { Throwable priorE = null; SegmentInfo si = null; try { - int format = - CodecUtil.checkIndexHeader( - input, CODEC_NAME, VERSION_START, VERSION_CURRENT, segmentID, ""); - + CodecUtil.checkIndexHeader( + input, CODEC_NAME, VERSION_START, VERSION_CURRENT, segmentID, ""); si = parseSegmentInfo(dir, input, segment, segmentID); - } catch (Throwable exception) { priorE = exception; } finally { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90FieldInfosFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90FieldInfosFormat.java index 43f421574a7..1ee61026468 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90FieldInfosFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90FieldInfosFormat.java @@ -125,14 +125,13 @@ public final class Lucene90FieldInfosFormat extends FieldInfosFormat { Throwable priorE = null; FieldInfo infos[] = null; try { - int version = - CodecUtil.checkIndexHeader( - input, - Lucene90FieldInfosFormat.CODEC_NAME, - Lucene90FieldInfosFormat.FORMAT_START, - Lucene90FieldInfosFormat.FORMAT_CURRENT, - segmentInfo.getId(), - segmentSuffix); + CodecUtil.checkIndexHeader( + input, + Lucene90FieldInfosFormat.CODEC_NAME, + Lucene90FieldInfosFormat.FORMAT_START, + Lucene90FieldInfosFormat.FORMAT_CURRENT, + segmentInfo.getId(), + segmentSuffix); final int size = input.readVInt(); // read in the size infos = new FieldInfo[size]; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90VectorReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90VectorReader.java index 58fc9196609..4dc25cb264a 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90VectorReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90VectorReader.java @@ -21,7 +21,6 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; import java.io.IOException; import java.nio.ByteBuffer; -import java.nio.FloatBuffer; import java.util.Arrays; import java.util.HashMap; import java.util.Map; @@ -324,7 +323,6 @@ public final class 
Lucene90VectorReader extends VectorReader { final BytesRef binaryValue; final ByteBuffer byteBuffer; - final FloatBuffer floatBuffer; final int byteSize; final float[] value; @@ -336,7 +334,6 @@ public final class Lucene90VectorReader extends VectorReader { this.dataIn = dataIn; byteSize = Float.BYTES * fieldEntry.dimension; byteBuffer = ByteBuffer.allocate(byteSize); - floatBuffer = byteBuffer.asFloatBuffer(); value = new float[fieldEntry.dimension]; binaryValue = new BytesRef(byteBuffer.array(), byteBuffer.arrayOffset(), byteSize); } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/IntersectTermsEnumFrame.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/IntersectTermsEnumFrame.java index ff7e58b11b4..eb60d7f3524 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/IntersectTermsEnumFrame.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/IntersectTermsEnumFrame.java @@ -97,14 +97,12 @@ final class IntersectTermsEnumFrame { int suffix; private final IntersectTermsEnum ite; - private final int version; public IntersectTermsEnumFrame(IntersectTermsEnum ite, int ord) throws IOException { this.ite = ite; this.ord = ord; this.termState = ite.fr.parent.postingsReader.newTermState(); this.termState.totalTermFreq = -1; - this.version = ite.fr.parent.version; suffixLengthBytes = new byte[32]; suffixLengthsReader = new ByteArrayDataInput(); } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java index 9e9e3a8d749..05549f8ebe8 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java @@ -271,13 +271,6 @@ public final class Lucene90BlockTreeTermsReader extends FieldsProducer { return bytes; } - /** Seek {@code input} to the directory offset. 
*/ - private static void seekDir(IndexInput input) throws IOException { - input.seek(input.length() - CodecUtil.footerLength() - 8); - long offset = input.readLong(); - input.seek(offset); - } - // for debugging // private static String toHex(int v) { // return "0x" + Integer.toHexString(v); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/SegmentTermsEnumFrame.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/SegmentTermsEnumFrame.java index 8c742bdd02a..48c4fd0a6d4 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/SegmentTermsEnumFrame.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/SegmentTermsEnumFrame.java @@ -94,14 +94,12 @@ final class SegmentTermsEnumFrame { final ByteArrayDataInput bytesReader = new ByteArrayDataInput(); private final SegmentTermsEnum ste; - private final int version; public SegmentTermsEnumFrame(SegmentTermsEnum ste, int ord) throws IOException { this.ste = ste; this.ord = ord; this.state = ste.fr.parent.postingsReader.newTermState(); this.state.totalTermFreq = -1; - this.version = ste.fr.parent.version; suffixLengthBytes = new byte[32]; suffixLengthsReader = new ByteArrayDataInput(); } diff --git a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java index cb946ea8492..8ce1cfefcb1 100644 --- a/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java +++ b/lucene/core/src/java/org/apache/lucene/index/CheckIndex.java @@ -64,8 +64,6 @@ import org.apache.lucene.util.LongBitSet; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.SuppressForbidden; import org.apache.lucene.util.Version; -import org.apache.lucene.util.automaton.Automata; -import org.apache.lucene.util.automaton.CompiledAutomaton; /** * Basic tool and API to check the health of an index and write a new segments file that removes @@ -1094,171 +1092,6 @@ public final class CheckIndex implements Closeable { return status; } - /** - * Visits all terms in the range minTerm (inclusive) to maxTerm (exclusive), marking all doc IDs - * encountered into allDocsSeen, and returning the total number of terms visited. 
- */ - private static long getDocsFromTermRange( - String field, - int maxDoc, - TermsEnum termsEnum, - FixedBitSet docsSeen, - BytesRef minTerm, - BytesRef maxTerm, - boolean isIntersect) - throws IOException { - docsSeen.clear(0, docsSeen.length()); - - long termCount = 0; - PostingsEnum postingsEnum = null; - BytesRefBuilder lastTerm = null; - while (true) { - BytesRef term; - - // Kinda messy: for intersect, we must first next(), but for "normal", we are already on our - // first term: - if (isIntersect || termCount != 0) { - term = termsEnum.next(); - } else { - term = termsEnum.term(); - } - - if (term == null) { - if (isIntersect == false) { - throw new RuntimeException("didn't see max term field=" + field + " term=" + maxTerm); - } - // System.out.println(" terms=" + termCount); - return termCount; - } - - assert term.isValid(); - - if (lastTerm == null) { - lastTerm = new BytesRefBuilder(); - lastTerm.copyBytes(term); - } else { - if (lastTerm.get().compareTo(term) >= 0) { - throw new RuntimeException( - "terms out of order: lastTerm=" + lastTerm.get() + " term=" + term); - } - lastTerm.copyBytes(term); - } - - // System.out.println(" term=" + term); - - // Caller already ensured terms enum positioned >= minTerm: - if (term.compareTo(minTerm) < 0) { - throw new RuntimeException("saw term before min term field=" + field + " term=" + minTerm); - } - - if (isIntersect == false) { - int cmp = term.compareTo(maxTerm); - if (cmp == 0) { - // Done! - // System.out.println(" terms=" + termCount); - return termCount; - } else if (cmp > 0) { - throw new RuntimeException("didn't see end term field=" + field + " term=" + maxTerm); - } - } - - postingsEnum = termsEnum.postings(postingsEnum, 0); - - int lastDoc = -1; - while (true) { - int doc = postingsEnum.nextDoc(); - if (doc == DocIdSetIterator.NO_MORE_DOCS) { - break; - } - if (doc <= lastDoc) { - throw new RuntimeException("term " + term + ": doc " + doc + " <= lastDoc " + lastDoc); - } - if (doc >= maxDoc) { - throw new RuntimeException("term " + term + ": doc " + doc + " >= maxDoc " + maxDoc); - } - - // System.out.println(" doc=" + doc); - docsSeen.set(doc); - - lastDoc = doc; - } - - termCount++; - } - } - - /** - * Test Terms.intersect on this range, and validates that it returns the same doc ids as using - * non-intersect TermsEnum. Returns true if any fake terms were seen. 
- */ - private static boolean checkSingleTermRange( - String field, - int maxDoc, - Terms terms, - BytesRef minTerm, - BytesRef maxTerm, - FixedBitSet normalDocs, - FixedBitSet intersectDocs) - throws IOException { - // System.out.println(" check minTerm=" + minTerm.utf8ToString() + " maxTerm=" + - // maxTerm.utf8ToString()); - assert minTerm.compareTo(maxTerm) <= 0; - - TermsEnum termsEnum = terms.iterator(); - TermsEnum.SeekStatus status = termsEnum.seekCeil(minTerm); - if (status != TermsEnum.SeekStatus.FOUND) { - throw new RuntimeException( - "failed to seek to existing term field=" + field + " term=" + minTerm); - } - - // Do "dumb" iteration to visit all terms in the range: - long normalTermCount = - getDocsFromTermRange(field, maxDoc, termsEnum, normalDocs, minTerm, maxTerm, false); - - // Now do the same operation using intersect: - long intersectTermCount = - getDocsFromTermRange( - field, - maxDoc, - terms.intersect( - new CompiledAutomaton( - Automata.makeBinaryInterval(minTerm, true, maxTerm, false), - true, - false, - Integer.MAX_VALUE, - true), - null), - intersectDocs, - minTerm, - maxTerm, - true); - - if (intersectTermCount > normalTermCount) { - throw new RuntimeException( - "intersect returned too many terms: field=" - + field - + " intersectTermCount=" - + intersectTermCount - + " normalTermCount=" - + normalTermCount); - } - - if (normalDocs.equals(intersectDocs) == false) { - throw new RuntimeException( - "intersect visited different docs than straight terms enum: " - + normalDocs.cardinality() - + " for straight enum, vs " - + intersectDocs.cardinality() - + " for intersect, minTerm=" - + minTerm - + " maxTerm=" - + maxTerm); - } - // System.out.println(" docs=" + normalTermCount); - // System.out.println(" " + intersectTermCount + " vs " + normalTermCount); - return intersectTermCount != normalTermCount; - } - /** * checks Fields api is consistent with itself. searcher is optional, to verify with queries. Can * be null. @@ -2553,7 +2386,6 @@ public final class CheckIndex implements Closeable { public static class VerifyPointsVisitor implements PointValues.IntersectVisitor { private long pointCountSeen; private int lastDocID = -1; - private final int maxDoc; private final FixedBitSet docsSeen; private final byte[] lastMinPackedValue; private final byte[] lastMaxPackedValue; @@ -2570,7 +2402,6 @@ public final class CheckIndex implements Closeable { /** Sole constructor */ public VerifyPointsVisitor(String fieldName, int maxDoc, PointValues values) throws IOException { - this.maxDoc = maxDoc; this.fieldName = fieldName; numDataDims = values.getNumDimensions(); numIndexDims = values.getNumIndexDimensions(); diff --git a/lucene/core/src/java/org/apache/lucene/index/OrdinalMap.java b/lucene/core/src/java/org/apache/lucene/index/OrdinalMap.java index 558449f8e9c..ae92991b005 100644 --- a/lucene/core/src/java/org/apache/lucene/index/OrdinalMap.java +++ b/lucene/core/src/java/org/apache/lucene/index/OrdinalMap.java @@ -49,7 +49,6 @@ public class OrdinalMap implements Accountable { // TODO: use more efficient packed ints structures? 
private static class TermsEnumIndex { - public static final TermsEnumIndex[] EMPTY_ARRAY = new TermsEnumIndex[0]; final int subIndex; final TermsEnum termsEnum; BytesRef currentTerm; diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java index f4108870916..b6382c76fc6 100644 --- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java +++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanScorer.java @@ -35,8 +35,6 @@ public class SpanScorer extends Scorer { /** accumulated sloppy freq (computed in setFreqCurrentDoc) */ private float freq; - /** number of matches (computed in setFreqCurrentDoc) */ - private int numMatches; private int lastScoredDoc = -1; // last doc we called setFreqCurrentDoc() for @@ -77,13 +75,12 @@ public class SpanScorer extends Scorer { } /** - * Sets {@link #freq} and {@link #numMatches} for the current document. + * Sets {@link #freq} for the current document. * *

    This will be called at most once per document. */ protected final void setFreqCurrentDoc() throws IOException { freq = 0.0f; - numMatches = 0; spans.doStartCurrentDoc(); @@ -102,7 +99,6 @@ public class SpanScorer extends Scorer { // assert (startPos != prevStartPos) || (endPos > prevEndPos) : "non increased // endPos="+endPos; assert (startPos != prevStartPos) || (endPos >= prevEndPos) : "decreased endPos=" + endPos; - numMatches++; if (docScorer == null) { // scores not required, break out here freq = 1; return; diff --git a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java index 32baa6cf089..049d5af4f64 100644 --- a/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java +++ b/lucene/core/src/java/org/apache/lucene/util/OfflineSorter.java @@ -374,7 +374,6 @@ public class OfflineSorter { /** Merge the most recent {@code maxTempFile} partitions into a new partition. */ void mergePartitions(Directory trackingDir, List> segments) throws IOException { - long start = System.currentTimeMillis(); List> segmentsToMerge; if (segments.size() > maxTempFiles) { segmentsToMerge = segments.subList(segments.size() - maxTempFiles, segments.size()); @@ -429,7 +428,6 @@ public class OfflineSorter { long start = System.currentTimeMillis(); SortableBytesRefArray buffer; boolean exhausted = false; - int count; if (valueLength != -1) { // fixed length case buffer = new FixedLengthBytesRefArray(valueLength); diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java index bd00302224f..6857f90e19b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java +++ b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java @@ -71,8 +71,6 @@ public final class FST implements Accountable { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FST.class); - private static final long ARC_SHALLOW_RAM_BYTES_USED = - RamUsageEstimator.shallowSizeOfInstance(Arc.class); private static final int BIT_FINAL_ARC = 1 << 0; static final int BIT_LAST_ARC = 1 << 1; diff --git a/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java b/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java index 9f872ff4d15..2def85613e5 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java +++ b/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java @@ -59,7 +59,6 @@ import org.apache.lucene.util.SparseFixedBitSet; public final class HnswGraph extends KnnGraphValues { private final int maxConn; - private final VectorValues.SearchStrategy searchStrategy; // Each entry lists the top maxConn neighbors of a node. The nodes correspond to vectors added to // HnswBuilder, and the @@ -70,13 +69,12 @@ public final class HnswGraph extends KnnGraphValues { private int upto; private NeighborArray cur; - HnswGraph(int maxConn, VectorValues.SearchStrategy searchStrategy) { + HnswGraph(int maxConn) { graph = new ArrayList<>(); // Typically with diversity criteria we see nodes not fully occupied; average fanout seems to be // about 1/2 maxConn. 
There is some indexing time penalty for under-allocating, but saves RAM graph.add(new NeighborArray(Math.max(32, maxConn / 4))); this.maxConn = maxConn; - this.searchStrategy = searchStrategy; } /** diff --git a/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraphBuilder.java b/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraphBuilder.java index f51a5df6a76..4dc4b15bbee 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraphBuilder.java +++ b/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraphBuilder.java @@ -99,7 +99,7 @@ public final class HnswGraphBuilder { } this.maxConn = maxConn; this.beamWidth = beamWidth; - this.hnsw = new HnswGraph(maxConn, searchStrategy); + this.hnsw = new HnswGraph(maxConn); bound = BoundsChecker.create(searchStrategy.reversed); random = new Random(seed); scratch = new NeighborArray(Math.max(beamWidth, maxConn + 1)); diff --git a/lucene/core/src/java/org/apache/lucene/util/hnsw/NeighborArray.java b/lucene/core/src/java/org/apache/lucene/util/hnsw/NeighborArray.java index b026d6c0891..9deaa64113c 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hnsw/NeighborArray.java +++ b/lucene/core/src/java/org/apache/lucene/util/hnsw/NeighborArray.java @@ -28,7 +28,6 @@ import org.apache.lucene.util.ArrayUtil; public class NeighborArray { private int size; - private int upto; float[] score; int[] node; diff --git a/lucene/core/src/java/org/apache/lucene/util/packed/gen_BulkOperation.py b/lucene/core/src/java/org/apache/lucene/util/packed/gen_BulkOperation.py index 16ed30add5d..ddb79cb2993 100644 --- a/lucene/core/src/java/org/apache/lucene/util/packed/gen_BulkOperation.py +++ b/lucene/core/src/java/org/apache/lucene/util/packed/gen_BulkOperation.py @@ -15,7 +15,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from fractions import gcd +try: + # python 3.9+ + from math import gcd +except ImportError: + # old python + from fractions import gcd """Code generation for bulk operations""" diff --git a/lucene/core/src/test/org/apache/lucene/analysis/TestStopFilter.java b/lucene/core/src/test/org/apache/lucene/analysis/TestStopFilter.java index 0172e67b04f..13b2ea9df7b 100644 --- a/lucene/core/src/test/org/apache/lucene/analysis/TestStopFilter.java +++ b/lucene/core/src/test/org/apache/lucene/analysis/TestStopFilter.java @@ -191,8 +191,7 @@ public class TestStopFilter extends BaseTokenStreamTestCase { StopFilter stopfilter, List stopwordPositions, final int numberOfTokens) throws IOException { CharTermAttribute termAtt = stopfilter.getAttribute(CharTermAttribute.class); - PositionIncrementAttribute posIncrAtt = - stopfilter.getAttribute(PositionIncrementAttribute.class); + stopfilter.getAttribute(PositionIncrementAttribute.class); stopfilter.reset(); log("Test stopwords positions:"); for (int i = 0; i < numberOfTokens; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestIndexedDISI.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestIndexedDISI.java index 97f22bdff38..8c21c747eea 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestIndexedDISI.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestIndexedDISI.java @@ -98,7 +98,6 @@ public class TestIndexedDISI extends LuceneTestCase { private void assertAdvanceBeyondEnd(BitSet set, Directory dir) throws IOException { final int cardinality = set.cardinality(); final byte denseRankPower = 9; // Not tested here so fixed to isolate factors - long length; int jumpTableentryCount; try (IndexOutput out = dir.createOutput("bar", IOContext.DEFAULT)) { jumpTableentryCount = @@ -435,9 +434,7 @@ public class TestIndexedDISI extends LuceneTestCase { length = out.getFilePointer(); } try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) { - IndexedDISI disi = - new IndexedDISI( - in, 0L, length, jumpTableEntryCount, denseRankPowerRead, set.cardinality()); + new IndexedDISI(in, 0L, length, jumpTableEntryCount, denseRankPowerRead, set.cardinality()); } // This tests the legality of the denseRankPower only, so we don't do anything with the disi } diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java b/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java index eb2513857ee..a91695cdac9 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java @@ -84,7 +84,6 @@ public class Test2BSortedDocValuesOrds extends LuceneTestCase { int counter = 0; for (LeafReaderContext context : r.leaves()) { LeafReader reader = context.reader(); - BytesRef scratch = new BytesRef(); BinaryDocValues dv = DocValues.getBinary(reader, "dv"); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(i, dv.nextDoc()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java index 7994171359d..2ad2d60cba9 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java @@ -341,13 +341,11 @@ public class TestCodecs extends LuceneTestCase { private static class Verify extends Thread { final Fields termsDict; final FieldData[] fields; - final SegmentInfo si; volatile boolean failed; Verify(final 
SegmentInfo si, final FieldData[] fields, final Fields termsDict) { this.fields = fields; this.termsDict = termsDict; - this.si = si; } @Override @@ -377,8 +375,6 @@ public class TestCodecs extends LuceneTestCase { assertEquals(DocIdSetIterator.NO_MORE_DOCS, postingsEnum.nextDoc()); } - byte[] data = new byte[10]; - private void verifyPositions(final PositionData[] positions, final PostingsEnum posEnum) throws Throwable { for (int i = 0; i < positions.length; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java index b2732df50fe..37b5388760a 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java @@ -95,7 +95,6 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { public final ReaderManager mgr; private final Directory indexDir; - private final Path root; private final Path segsPath; /** Which segments have been closed, but their parallel index is not yet not removed. */ @@ -119,8 +118,6 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { } public ReindexingReader(Path root) throws IOException { - this.root = root; - // Normal index is stored under "index": indexDir = openDirectory(root.resolve("index")); @@ -869,7 +866,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { for (int i = 0; i < maxDoc; i++) { // TODO: is this still O(blockSize^2)? assertEquals(i, oldValues.nextDoc()); - Document oldDoc = reader.document(i); + reader.document(i); Document newDoc = new Document(); newDoc.add(new NumericDocValuesField("number_" + newSchemaGen, oldValues.longValue())); w.addDocument(newDoc); @@ -996,7 +993,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { assertNotNull("oldSchemaGen=" + oldSchemaGen, oldValues); for (int i = 0; i < maxDoc; i++) { // TODO: is this still O(blockSize^2)? 
- Document oldDoc = reader.document(i); + reader.document(i); Document newDoc = new Document(); assertEquals(i, oldValues.nextDoc()); newDoc.add( @@ -1518,7 +1515,6 @@ public class TestDemoParallelLeafReader extends LuceneTestCase { NumericDocValues numbers = MultiDocValues.getNumericValues(r, fieldName); int maxDoc = r.maxDoc(); boolean failed = false; - long t0 = System.currentTimeMillis(); for (int i = 0; i < maxDoc; i++) { Document oldDoc = r.document(i); long value = multiplier * Long.parseLong(oldDoc.get("text").split(" ")[1]); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java index d062ee77086..ae635a3b584 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java @@ -238,7 +238,7 @@ public class TestDoc extends LuceneTestCase { new FieldInfos.FieldNumbers(null), context); - MergeState mergeState = merger.merge(); + merger.merge(); r1.close(); r2.close(); si.setFiles(new HashSet<>(trackingDir.getCreatedFiles())); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java index deb938de8b2..4bce7cef7c6 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java @@ -1720,7 +1720,6 @@ public class TestIndexSorting extends LuceneTestCase { } public void testRandom1() throws IOException { - boolean withDeletes = random().nextBoolean(); Directory dir = newDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())); Sort indexSort = new Sort(new SortField("foo", SortField.Type.LONG)); @@ -1791,7 +1790,6 @@ public class TestIndexSorting extends LuceneTestCase { } public void testMultiValuedRandom1() throws IOException { - boolean withDeletes = random().nextBoolean(); Directory dir = newDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())); Sort indexSort = new Sort(new SortedNumericSortField("foo", SortField.Type.LONG)); @@ -2412,7 +2410,6 @@ public class TestIndexSorting extends LuceneTestCase { } private static final class RandomDoc { - public final int id; public final int intValue; public final int[] intValues; public final long longValue; @@ -2425,7 +2422,6 @@ public class TestIndexSorting extends LuceneTestCase { public final byte[][] bytesValues; public RandomDoc(int id) { - this.id = id; intValue = random().nextInt(); longValue = random().nextLong(); floatValue = random().nextFloat(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java index 1ee58132a52..bd496ade28b 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java @@ -2967,7 +2967,6 @@ public class TestIndexWriter extends LuceneTestCase { // Use WindowsFS to prevent open files from being deleted: FileSystem fs = new WindowsFS(path.getFileSystem()).getFileSystem(URI.create("file:///")); Path root = new FilterPath(path, fs); - DirectoryReader reader; // MMapDirectory doesn't work because it closes its file handles after mapping! 
try (FSDirectory dir = new NIOFSDirectory(root)) { IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java index 489a8c55eee..c18d5fc905a 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java @@ -182,8 +182,6 @@ public class TestIndexWriterCommit extends LuceneTestCase { // sum because the merged FST may use array encoding for // some arcs (which uses more space): - final String idFormat = TestUtil.getPostingsFormat("id"); - final String contentFormat = TestUtil.getPostingsFormat("content"); MockDirectoryWrapper dir = newMockDirectory(); Analyzer analyzer; if (random().nextBoolean()) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java index dcb605c72d3..a9c6d8774c9 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java @@ -1430,7 +1430,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { w.close(); IndexReader reader = DirectoryReader.open(dir); assertTrue(reader.numDocs() > 0); - SegmentInfos sis = SegmentInfos.readLatestCommit(dir); + SegmentInfos.readLatestCommit(dir); for (LeafReaderContext context : reader.leaves()) { assertFalse(context.reader().getFieldInfos().hasVectors()); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java index 8f1d114fdf6..bec09c54ce5 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java @@ -159,9 +159,6 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase { // sum because the merged FST may use array encoding for // some arcs (which uses more space): - final String idFormat = TestUtil.getPostingsFormat("id"); - final String contentFormat = TestUtil.getPostingsFormat("content"); - int START_COUNT = 57; int NUM_DIR = TEST_NIGHTLY ? 50 : 5; int END_COUNT = START_COUNT + NUM_DIR * (TEST_NIGHTLY ? 
25 : 5); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java index 710f2b0729f..4fddce3691e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java @@ -424,7 +424,6 @@ public class TestIndexWriterReader extends LuceneTestCase { IndexWriter mainWriter; final List failures = new ArrayList<>(); DirectoryReader[] readers; - boolean didClose = false; AtomicInteger count = new AtomicInteger(0); AtomicInteger numaddIndexes = new AtomicInteger(0); @@ -460,7 +459,6 @@ public class TestIndexWriterReader extends LuceneTestCase { } void close(boolean doWait) throws Throwable { - didClose = true; if (doWait) { mainWriter.close(); } else { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java index 102fcc318c8..b989e8cf851 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterThreadsToSegments.java @@ -112,7 +112,6 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase { private static final int MAX_THREADS_AT_ONCE = 10; static class CheckSegmentCount implements Runnable, Closeable { - private final IndexWriter w; private final AtomicInteger maxThreadCountPerIter; private final AtomicInteger indexingCount; private DirectoryReader r; @@ -120,7 +119,6 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase { public CheckSegmentCount( IndexWriter w, AtomicInteger maxThreadCountPerIter, AtomicInteger indexingCount) throws IOException { - this.w = w; this.maxThreadCountPerIter = maxThreadCountPerIter; this.indexingCount = indexingCount; r = DirectoryReader.open(w); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java index 2628ed1bfa2..fb4052b46e8 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java @@ -54,7 +54,6 @@ public class TestIndexWriterWithThreads extends LuceneTestCase { private static class IndexerThread extends Thread { private final CyclicBarrier syncStart; - boolean diskFull; Throwable error; IndexWriter writer; boolean noErrors; @@ -100,7 +99,6 @@ public class TestIndexWriterWithThreads extends LuceneTestCase { // ioe.printStackTrace(System.out); if (ioe.getMessage().startsWith("fake disk full at") || ioe.getMessage().equals("now failing on purpose")) { - diskFull = true; try { Thread.sleep(1); } catch (InterruptedException ie) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java index 0d541805a9a..ed6d9aa6ae4 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java @@ -138,9 +138,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase { writer.addDocument(doc(i, val)); } - int numDocUpdates = 0; - int numValueUpdates = 0; - for (int i = 0; i < numOperations; i++) { final int op = TestUtil.nextInt(random(), 1, 100); final long val = 
random().nextLong(); @@ -152,10 +149,8 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase { final int id = TestUtil.nextInt(random(), 0, expected.size() - 1); expected.put(id, val); if (op <= UPD_CUTOFF) { - numDocUpdates++; writer.updateDocument(new Term("id", "doc-" + id), doc(id, val)); } else { - numValueUpdates++; writer.updateNumericDocValue(new Term("id", "doc-" + id), "val", val); } } @@ -832,7 +827,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase { int refreshChance = TestUtil.nextInt(random(), 5, 200); int deleteChance = TestUtil.nextInt(random(), 2, 100); - int idUpto = 0; int deletedCount = 0; List docs = new ArrayList<>(); @@ -1600,7 +1594,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase { // update all doc values long value = random().nextInt(); - NumericDocValuesField[] update = new NumericDocValuesField[numDocs]; for (int i = 0; i < numDocs; i++) { Term term = new Term("id", new BytesRef(Integer.toString(i))); writer.updateDocValues(term, new NumericDocValuesField("ndv", value)); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java index 7ae8908e87a..ec25b96e329 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java @@ -365,11 +365,6 @@ public class TestPayloads extends LuceneTestCase { super(PER_FIELD_REUSE_STRATEGY); } - public PayloadAnalyzer(String field, byte[] data, int offset, int length) { - super(PER_FIELD_REUSE_STRATEGY); - setPayloadData(field, data, offset, length); - } - void setPayloadData(String field, byte[] data, int offset, int length) { fieldToData.put(field, new PayloadData(data, offset, length)); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java b/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java index d1c2395c185..c65b1f3087e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing.java @@ -19,14 +19,12 @@ package org.apache.lucene.index; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.*; import org.apache.lucene.index.IndexWriterConfig.OpenMode; -import org.apache.lucene.search.*; import org.apache.lucene.store.*; import org.apache.lucene.util.*; public class TestStressIndexing extends LuceneTestCase { private abstract static class TimedThread extends Thread { volatile boolean failed; - int count; private static int RUN_TIME_MSEC = atLeast(1000); private TimedThread[] allThreads; @@ -40,13 +38,10 @@ public class TestStressIndexing extends LuceneTestCase { public void run() { final long stopTime = System.currentTimeMillis() + RUN_TIME_MSEC; - count = 0; - try { do { if (anyErrors()) break; doWork(); - count++; } while (System.currentTimeMillis() < stopTime); } catch (Throwable e) { System.out.println(Thread.currentThread() + ": exc"); @@ -103,10 +98,9 @@ public class TestStressIndexing extends LuceneTestCase { public void doWork() throws Throwable { for (int i = 0; i < 100; i++) { IndexReader ir = DirectoryReader.open(directory); - IndexSearcher is = newSearcher(ir); + newSearcher(ir); ir.close(); } - count += 100; } } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTerm.java b/lucene/core/src/test/org/apache/lucene/index/TestTerm.java index 438f4148acf..1c2b3db9631 100644 --- 
a/lucene/core/src/test/org/apache/lucene/index/TestTerm.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTerm.java @@ -20,6 +20,7 @@ import org.apache.lucene.util.LuceneTestCase; public class TestTerm extends LuceneTestCase { + @SuppressWarnings("unlikely-arg-type") public void testEquals() { final Term base = new Term("same", "same"); final Term same = new Term("same", "same"); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java index 3feeec53ed8..3ab1d5bba32 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java @@ -249,7 +249,6 @@ public class TestTermVectorsReader extends LuceneTestCase { Codec.getDefault() .termVectorsFormat() .vectorsReader(dir, seg.info, fieldInfos, newIOContext(random())); - BytesRef[] terms; Terms vector = reader.get(0).terms(testFields[0]); assertNotNull(vector); assertEquals(testTerms.length, vector.size()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermsHashPerField.java b/lucene/core/src/test/org/apache/lucene/index/TestTermsHashPerField.java index 971cf4c5178..823dbb9aa84 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTermsHashPerField.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermsHashPerField.java @@ -263,7 +263,7 @@ public class TestTermsHashPerField extends LuceneTestCase { for (int i = 0; i < numDocs; i++) { int numTerms = 1 + random().nextInt(200); int doc = i; - for (int j = 0; i < numTerms; i++) { + for (int j = 0; j < numTerms; j++) { BytesRef ref = RandomPicks.randomFrom(random(), bytesRefs); Posting posting = postingMap.get(ref); if (posting.termId == -1) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTwoPhaseCommitTool.java b/lucene/core/src/test/org/apache/lucene/index/TestTwoPhaseCommitTool.java index f4777153e79..e8f76151ebb 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTwoPhaseCommitTool.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTwoPhaseCommitTool.java @@ -28,8 +28,6 @@ public class TestTwoPhaseCommitTool extends LuceneTestCase { final boolean failOnCommit; final boolean failOnRollback; boolean rollbackCalled = false; - Map prepareCommitData = null; - Map commitData = null; public TwoPhaseCommitImpl(boolean failOnPrepare, boolean failOnCommit, boolean failOnRollback) { this.failOnPrepare = failOnPrepare; @@ -43,7 +41,6 @@ public class TestTwoPhaseCommitTool extends LuceneTestCase { } public long prepareCommit(Map commitData) throws IOException { - this.prepareCommitData = commitData; assertFalse("commit should not have been called before all prepareCommit were", commitCalled); if (failOnPrepare) { throw new IOException("failOnPrepare"); @@ -57,7 +54,6 @@ public class TestTwoPhaseCommitTool extends LuceneTestCase { } public long commit(Map commitData) throws IOException { - this.commitData = commitData; commitCalled = true; if (failOnCommit) { throw new RuntimeException("failOnCommit"); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java index 8e0136d1ea0..5181c67a930 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java @@ -356,8 +356,6 @@ public class TestBoolean2 extends LuceneTestCase { public void testRandomQueries() throws Exception { 
String[] vals = {"w1", "w2", "w3", "w4", "w5", "xx", "yy", "zzz"}; - int tot = 0; - BooleanQuery q1 = null; try { @@ -395,7 +393,6 @@ public class TestBoolean2 extends LuceneTestCase { collector = TopFieldCollector.create(sort, 1000, 1); searcher.search(q1, collector); ScoreDoc[] hits2 = collector.topDocs().scoreDocs; - tot += hits2.length; CheckHits.checkEqual(q1, hits1, hits2); BooleanQuery.Builder q3 = new BooleanQuery.Builder(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java b/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java index 9dcdcd8227f..c1882e0683a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java @@ -585,7 +585,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc nrtDeletesThread.setDaemon(true); nrtDeletesThread.start(); - long gen1 = w.addDocument(new Document()); + w.addDocument(new Document()); long gen2 = w.deleteAll(); nrtDeletesThread.waitForGeneration(gen2); IOUtils.close(nrtDeletesThread, nrtDeletes, w, dir); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLongValuesSource.java b/lucene/core/src/test/org/apache/lucene/search/TestLongValuesSource.java index 4d19a82d958..be8ce3cad83 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestLongValuesSource.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestLongValuesSource.java @@ -46,7 +46,6 @@ public class TestLongValuesSource extends LuceneTestCase { dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir); int numDocs = TestUtil.nextInt(random(), 2049, 4000); - int leastValue = 45; for (int i = 0; i < numDocs; i++) { Document document = new Document(); document.add(newTextField("english", English.intToEnglish(i), Field.Store.NO)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMatchesIterator.java b/lucene/core/src/test/org/apache/lucene/search/TestMatchesIterator.java index c8888becea3..4c4222e586b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMatchesIterator.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMatchesIterator.java @@ -646,8 +646,6 @@ public class TestMatchesIterator extends LuceneTestCase { // "a phrase sentence with many phrase sentence iterations of a phrase sentence", public void testSloppyPhraseQueryWithRepeats() throws IOException { - Term p = new Term(FIELD_WITH_OFFSETS, "phrase"); - Term s = new Term(FIELD_WITH_OFFSETS, "sentence"); PhraseQuery pq = new PhraseQuery(10, FIELD_WITH_OFFSETS, "phrase", "sentence", "sentence"); checkMatches( pq, diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java b/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java index d0568868715..b6cac827181 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java @@ -47,6 +47,7 @@ public class TestSortedNumericSortField extends LuceneTestCase { } } + @SuppressWarnings("unlikely-arg-type") public void testEquals() throws Exception { SortField sf = new SortedNumericSortField("a", SortField.Type.LONG); assertFalse(sf.equals(null)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java b/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java 
index 66e8be680e2..b338d5c659c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java @@ -47,6 +47,7 @@ public class TestSortedSetSortField extends LuceneTestCase { } } + @SuppressWarnings("unlikely-arg-type") public void testEquals() throws Exception { SortField sf = new SortedSetSortField("a", false); assertFalse(sf.equals(null)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java index cd7f35c8407..32437a17a48 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java @@ -126,9 +126,6 @@ public class TestTermQuery extends LuceneTestCase { w.addDocument(new Document()); DirectoryReader reader = w.getReader(); - FilterDirectoryReader noSeekReader = new NoSeekDirectoryReader(reader); - IndexSearcher noSeekSearcher = new IndexSearcher(noSeekReader); - Query query = new TermQuery(new Term("foo", "bar")); TermQuery queryWithContext = new TermQuery( new Term("foo", "bar"), diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTotalHits.java b/lucene/core/src/test/org/apache/lucene/search/TestTotalHits.java index f975f4114e3..4bd74bf06a0 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTotalHits.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTotalHits.java @@ -22,6 +22,7 @@ import org.apache.lucene.util.LuceneTestCase; public class TestTotalHits extends LuceneTestCase { + @SuppressWarnings("unlikely-arg-type") public void testEqualsAndHashcode() { TotalHits totalHits1 = randomTotalHits(); assertFalse(totalHits1.equals(null)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java b/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java index 69df6523e30..b8918268d52 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java @@ -32,6 +32,7 @@ import org.apache.lucene.util.LuceneTestCase; /** TestWildcard tests the '*' and '?' wildcard characters. */ public class TestWildcard extends LuceneTestCase { + @SuppressWarnings("unlikely-arg-type") public void testEquals() { WildcardQuery wq1 = new WildcardQuery(new Term("field", "b*a")); WildcardQuery wq2 = new WildcardQuery(new Term("field", "b*a")); diff --git a/lucene/core/src/test/org/apache/lucene/store/TestMultiMMap.java b/lucene/core/src/test/org/apache/lucene/store/TestMultiMMap.java index b664a458a70..e34cf7860f3 100644 --- a/lucene/core/src/test/org/apache/lucene/store/TestMultiMMap.java +++ b/lucene/core/src/test/org/apache/lucene/store/TestMultiMMap.java @@ -181,7 +181,7 @@ public class TestMultiMMap extends BaseDirectoryTestCase { public void testSeekSliceZero() throws Exception { int upto = TEST_NIGHTLY ? 
31 : 3; - for (int i = 0; i < 3; i++) { + for (int i = 0; i < upto; i++) { MMapDirectory mmapDir = new MMapDirectory(createTempDir("testSeekSliceZero"), 1 << i); IndexOutput io = mmapDir.createOutput("zeroBytes", newIOContext(random())); io.close(); diff --git a/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java index d530310b038..ee40c3aef61 100644 --- a/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java +++ b/lucene/core/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java @@ -30,7 +30,6 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopDocs; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.TestUtil; @@ -82,7 +81,7 @@ public class TestNRTCachingDirectory extends BaseDirectoryTestCase { final IndexSearcher s = newSearcher(r); // Just make sure search can run; we can't assert // totHits since it could be 0 - TopDocs hits = s.search(new TermQuery(new Term("body", "the")), 10); + s.search(new TermQuery(new Term("body", "the")), 10); // System.out.println("tot hits " + hits.totalHits); } } diff --git a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java index 6e5c600d718..37a6cf65f2d 100644 --- a/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java +++ b/lucene/core/src/test/org/apache/lucene/util/bkd/TestBKD.java @@ -1723,7 +1723,6 @@ public class TestBKD extends LuceneTestCase { public void testTooManyPoints() throws Exception { Directory dir = newDirectory(); final int numValues = 10; - final int numPointsAdded = 50; // exceeds totalPointCount final int numBytesPerDim = TestUtil.nextInt(random(), 1, 4); final byte[] pointValue = new byte[numBytesPerDim]; BKDWriter w = @@ -1755,7 +1754,6 @@ public class TestBKD extends LuceneTestCase { public void testTooManyPoints1D() throws Exception { Directory dir = newDirectory(); final int numValues = 10; - final int numPointsAdded = 50; // exceeds totalPointCount final int numBytesPerDim = TestUtil.nextInt(random(), 1, 4); final byte[][] pointValue = new byte[11][numBytesPerDim]; BKDWriter w = diff --git a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java index a437f068ed7..036b7b06e41 100644 --- a/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java +++ b/lucene/core/src/test/org/apache/lucene/util/fst/TestFSTs.java @@ -294,7 +294,6 @@ public class TestFSTs extends LuceneTestCase { for (int inputMode = 0; inputMode < 2; inputMode++) { final int numWords = random.nextInt(maxNumWords + 1); Set termsSet = new HashSet<>(); - IntsRef[] terms = new IntsRef[numWords]; while (termsSet.size() < numWords) { final String term = getRandomString(random); termsSet.add(toIntsRef(term, inputMode)); @@ -527,7 +526,7 @@ public class TestFSTs extends LuceneTestCase { BufferedReader is = Files.newBufferedReader(wordsFileIn, StandardCharsets.UTF_8); try { - final IntsRefBuilder intsRef = new IntsRefBuilder(); + final IntsRefBuilder intsRefBuilder = new IntsRefBuilder(); long tStart = System.currentTimeMillis(); int ord = 0; while (true) { @@ -535,8 +534,8 @@ public class TestFSTs extends LuceneTestCase { if (w == null) { break; } - toIntsRef(w, 
inputMode, intsRef); - fstCompiler.add(intsRef.get(), getOutput(intsRef.get(), ord)); + toIntsRef(w, inputMode, intsRefBuilder); + fstCompiler.add(intsRefBuilder.get(), getOutput(intsRefBuilder.get(), ord)); ord++; if (ord % 500000 == 0) { @@ -613,10 +612,10 @@ public class TestFSTs extends LuceneTestCase { if (w == null) { break; } - toIntsRef(w, inputMode, intsRef); + toIntsRef(w, inputMode, intsRefBuilder); if (iter == 0) { - T expected = getOutput(intsRef.get(), ord); - T actual = Util.get(fst, intsRef.get()); + T expected = getOutput(intsRefBuilder.get(), ord); + T actual = Util.get(fst, intsRefBuilder.get()); if (actual == null) { throw new RuntimeException("unexpected null output on input=" + w); } @@ -631,18 +630,18 @@ public class TestFSTs extends LuceneTestCase { } } else { // Get by output - final Long output = (Long) getOutput(intsRef.get(), ord); + final Long output = (Long) getOutput(intsRefBuilder.get(), ord); @SuppressWarnings({"unchecked", "deprecation"}) final IntsRef actual = Util.getByOutput((FST) fst, output.longValue()); if (actual == null) { throw new RuntimeException("unexpected null input from output=" + output); } - if (!actual.equals(intsRef)) { + if (!actual.equals(intsRefBuilder.get())) { throw new RuntimeException( "wrong input (got " + actual + " but expected " - + intsRef + + intsRefBuilder + " from output=" + output); } diff --git a/lucene/core/src/test/org/apache/lucene/util/fst/TestUtil.java b/lucene/core/src/test/org/apache/lucene/util/fst/TestUtil.java index d0c66ce18f0..17295c55846 100644 --- a/lucene/core/src/test/org/apache/lucene/util/fst/TestUtil.java +++ b/lucene/core/src/test/org/apache/lucene/util/fst/TestUtil.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.util.fst; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.lucene.util.BytesRef; @@ -99,26 +98,4 @@ public class TestUtil extends LuceneTestCase { } return fstCompiler.compile(); } - - private List createRandomDictionary(int width, int depth) { - return createRandomDictionary(new ArrayList<>(), new StringBuilder(), width, depth); - } - - private List createRandomDictionary( - List dict, StringBuilder buf, int width, int depth) { - char c = (char) random().nextInt(128); - assert width < Character.MIN_SURROGATE / 8 - 128; // avoid surrogate chars - int len = buf.length(); - for (int i = 0; i < width; i++) { - buf.append(c); - if (depth > 0) { - createRandomDictionary(dict, buf, width, depth - 1); - } else { - dict.add(buf.toString()); - } - c += random().nextInt(8); - buf.setLength(len); - } - return dict; - } } diff --git a/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionRescorer.java b/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionRescorer.java index 81198813d0c..3634e28dc9c 100644 --- a/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionRescorer.java +++ b/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionRescorer.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.util.List; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; -import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Rescorer; @@ -47,21 +46,6 @@ class ExpressionRescorer extends SortRescorer { this.bindings = bindings; } - private static DoubleValues scores(int doc, float score) { - return new DoubleValues() { - @Override - public double 
doubleValue() throws IOException { - return score; - } - - @Override - public boolean advanceExact(int target) throws IOException { - assert doc == target; - return true; - } - }; - } - @Override public Explanation explain(IndexSearcher searcher, Explanation firstPassExplanation, int docID) throws IOException { diff --git a/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java b/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java index 5bb7d2e0c30..d7663d29431 100644 --- a/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java +++ b/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java @@ -166,7 +166,7 @@ public final class JavascriptCompiler { @SuppressWarnings({"unused", "null"}) private static void unusedTestCompile() throws IOException { DoubleValues f = null; - double ret = f.doubleValue(); + f.doubleValue(); } /** diff --git a/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java b/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java index 0472b8f8948..2d60d9828c7 100644 --- a/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java +++ b/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionValueSource.java @@ -94,6 +94,7 @@ public class TestExpressionValueSource extends LuceneTestCase { assertEquals(4, values.doubleValue(), 0); } + @SuppressWarnings("unlikely-arg-type") public void testDoubleValuesSourceEquals() throws Exception { Expression expr = JavascriptCompiler.compile("sqrt(a) + ln(b)"); diff --git a/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java b/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java index f4f396e8614..4d4fedd8c80 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/FacetsConfig.java @@ -441,15 +441,7 @@ public class FacetsConfig { System.arraycopy(field.assoc.bytes, field.assoc.offset, bytes, upto, field.assoc.length); upto += field.assoc.length; - FacetsConfig.DimConfig ft = getDimConfig(field.dim); - // Drill down: - int start; - if (ft.requireDimensionDrillDown) { - start = 1; - } else { - start = 2; - } for (int i = 1; i <= label.length; i++) { doc.add( new StringField(indexFieldName, pathToString(label.components, i), Field.Store.NO)); diff --git a/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java index 07853c8ec32..4379ab72f7c 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java @@ -218,8 +218,7 @@ public class LongValueFacetCounts extends Facets { } private void countAllOneSegment(NumericDocValues values) throws IOException { - int doc; - while ((doc = values.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + while (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { totCount++; increment(values.longValue()); } @@ -255,8 +254,7 @@ public class LongValueFacetCounts extends Facets { if (singleValues != null) { countAllOneSegment(singleValues); } else { - int doc; - while ((doc = values.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + while (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { int limit = values.docValueCount(); totCount += limit; for (int i = 0; i < limit; i++) { diff --git 
a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacetLabels.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacetLabels.java index 7e121894689..f0c66ba613c 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacetLabels.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacetLabels.java @@ -31,9 +31,6 @@ import org.apache.lucene.util.IntsRef; */ public class TaxonomyFacetLabels { - /** Index field name provided to the constructor */ - private final String indexFieldName; - /** {@code TaxonomyReader} provided to the constructor */ private final TaxonomyReader taxoReader; @@ -49,7 +46,6 @@ public class TaxonomyFacetLabels { */ public TaxonomyFacetLabels(TaxonomyReader taxoReader, String indexFieldName) throws IOException { this.taxoReader = taxoReader; - this.indexFieldName = indexFieldName; this.ordsReader = new DocValuesOrdinalsReader(indexFieldName); } diff --git a/lucene/facet/src/test/org/apache/lucene/facet/TestLongValueFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/TestLongValueFacetCounts.java index 0e17bd25807..1c4b5eb943b 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/TestLongValueFacetCounts.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/TestLongValueFacetCounts.java @@ -385,7 +385,6 @@ public class TestLongValueFacetCounts extends LuceneTestCase { } long[][] values = new long[valueCount][]; - int missingCount = 0; for (int i = 0; i < valueCount; i++) { Document doc = new Document(); doc.add(new IntPoint("id", i)); @@ -407,8 +406,6 @@ public class TestLongValueFacetCounts extends LuceneTestCase { } } else { - missingCount++; - if (VERBOSE) { System.out.println(" doc=" + i + " missing values"); } diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestFacetLabel.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestFacetLabel.java index f6f21d3cf13..f1aa8c33638 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestFacetLabel.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestFacetLabel.java @@ -92,6 +92,7 @@ public class TestFacetLabel extends FacetTestCase { } } + @SuppressWarnings("unlikely-arg-type") @Test public void testEquals() { assertEquals(new FacetLabel(), new FacetLabel()); diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java index ee3972c5ed0..2bb98f684bb 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestSearcherTaxonomyManager.java @@ -31,18 +31,14 @@ import org.apache.lucene.facet.Facets; import org.apache.lucene.facet.FacetsCollector; import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager.SearcherAndTaxonomy; -import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader; import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter; -import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexNotFoundException; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.MatchAllDocsQuery; import 
org.apache.lucene.search.ReferenceManager; -import org.apache.lucene.search.SearcherFactory; import org.apache.lucene.store.Directory; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; @@ -360,15 +356,4 @@ public class TestSearcherTaxonomyManager extends FacetTestCase { expectThrows(IndexNotFoundException.class, mgr::maybeRefreshBlocking); IOUtils.close(w, tw, mgr, indexDir, taxoDir); } - - private SearcherTaxonomyManager getSearcherTaxonomyManager( - Directory indexDir, Directory taxoDir, SearcherFactory searcherFactory) throws IOException { - if (random().nextBoolean()) { - return new SearcherTaxonomyManager(indexDir, taxoDir, searcherFactory); - } else { - IndexReader reader = DirectoryReader.open(indexDir); - DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir); - return new SearcherTaxonomyManager(reader, taxoReader, searcherFactory); - } - } } diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java index 47e42dc8e23..bb34dee639b 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java @@ -545,7 +545,6 @@ public class TestDirectoryTaxonomyReader extends FacetTestCase { Directory dir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir); int numCategories = atLeast(10); - int numA = 0, numB = 0; Random random = random(); // add the two categories for which we'll also add children (so asserts are simpler) taxoWriter.addCategory(new FacetLabel("a")); @@ -553,10 +552,8 @@ public class TestDirectoryTaxonomyReader extends FacetTestCase { for (int i = 0; i < numCategories; i++) { if (random.nextBoolean()) { taxoWriter.addCategory(new FacetLabel("a", Integer.toString(i))); - ++numA; } else { taxoWriter.addCategory(new FacetLabel("b", Integer.toString(i))); - ++numB; } } // add category with no children diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestAllGroupHeadsCollector.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestAllGroupHeadsCollector.java index 9b7baae65a2..1e4553c5380 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestAllGroupHeadsCollector.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestAllGroupHeadsCollector.java @@ -228,7 +228,6 @@ public class TestAllGroupHeadsCollector extends LuceneTestCase { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); - DocValuesType valueType = DocValuesType.SORTED; Document doc = new Document(); Document docNoGroup = new Document(); diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java index df53dff0016..3cbf73554e5 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java @@ -1371,7 +1371,6 @@ public class TestGrouping extends LuceneTestCase { final List>> shardGroups = new ArrayList<>(); List> firstPassGroupingCollectors = new ArrayList<>(); FirstPassGroupingCollector firstPassCollector = null; - boolean shardsCanUseIDV = canUseIDV; String 
groupField = "group"; diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java index eb903fc9beb..96384abc34b 100644 --- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java +++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java @@ -36,11 +36,10 @@ public class TokenGroup { private int matchEndOffset; private OffsetAttribute offsetAtt; - private CharTermAttribute termAtt; public TokenGroup(TokenStream tokenStream) { offsetAtt = tokenStream.addAttribute(OffsetAttribute.class); - termAtt = tokenStream.addAttribute(CharTermAttribute.class); + tokenStream.addAttribute(CharTermAttribute.class); } void addToken(float score) { diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java b/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java index f70ce3e87f2..2eebfd98962 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/uhighlight/TestUnifiedHighlighterMTQ.java @@ -911,7 +911,7 @@ public class TestUnifiedHighlighterMTQ extends LuceneTestCase { BooleanQuery query = queryBuilder.build(); TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); try { - String snippets[] = highlighter.highlight("body", query, topDocs, 2); + highlighter.highlight("body", query, topDocs, 2); // don't even care what the results are; just want to test exception behavior if (fieldType == UHTestHelper.reanalysisType) { fail("Expecting EXPECTED IOException"); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java index f6a5e765985..61805873dab 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java @@ -1665,7 +1665,7 @@ public class TestJoinUtil extends LuceneTestCase { multipleValuesPerDocument ? Math.min(2 + random.nextInt(10), context.randomUniqueValues.length) : 1; - docs[i] = new RandomDoc(id, numberOfLinkValues, value, from); + docs[i] = new RandomDoc(id, numberOfLinkValues); if (globalOrdinalJoin) { document.add(newStringField("type", from ? 
"from" : "to", Field.Store.NO)); } @@ -2061,14 +2061,10 @@ public class TestJoinUtil extends LuceneTestCase { final String id; final List linkValues; - final String value; - final boolean from; - private RandomDoc(String id, int numberOfLinkValues, String value, boolean from) { + private RandomDoc(String id, int numberOfLinkValues) { this.id = id; - this.from = from; linkValues = new ArrayList<>(numberOfLinkValues); - this.value = value; } } diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/AnalysisPanelProvider.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/AnalysisPanelProvider.java index b0314378585..b21e9c5626a 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/AnalysisPanelProvider.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/AnalysisPanelProvider.java @@ -407,7 +407,5 @@ public final class AnalysisPanelProvider implements AnalysisTabOperator { AnalysisPanelProvider.this.executeAnalysis(); } } - - void executeAnalysisStepByStep(ActionEvent e) {} } } diff --git a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/OptimizeIndexDialogFactory.java b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/OptimizeIndexDialogFactory.java index 238961321b6..2a463f895b9 100644 --- a/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/OptimizeIndexDialogFactory.java +++ b/lucene/luke/src/java/org/apache/lucene/luke/app/desktop/components/dialog/menubar/OptimizeIndexDialogFactory.java @@ -26,7 +26,6 @@ import java.awt.Insets; import java.awt.Window; import java.awt.event.ActionEvent; import java.io.IOException; -import java.lang.invoke.MethodHandles; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import javax.swing.BorderFactory; @@ -42,7 +41,6 @@ import javax.swing.JSpinner; import javax.swing.JTextArea; import javax.swing.SpinnerNumberModel; import javax.swing.SwingWorker; -import org.apache.logging.log4j.Logger; import org.apache.lucene.luke.app.IndexHandler; import org.apache.lucene.luke.app.IndexObserver; import org.apache.lucene.luke.app.LukeState; @@ -56,14 +54,11 @@ import org.apache.lucene.luke.app.desktop.util.StyleConstants; import org.apache.lucene.luke.app.desktop.util.TextAreaPrintStream; import org.apache.lucene.luke.models.tools.IndexTools; import org.apache.lucene.luke.models.tools.IndexToolsFactory; -import org.apache.lucene.luke.util.LoggerFactory; import org.apache.lucene.util.NamedThreadFactory; /** Factory of optimize index dialog */ public final class OptimizeIndexDialogFactory implements DialogOpener.DialogFactory { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static OptimizeIndexDialogFactory instance; private final Preferences prefs; diff --git a/lucene/luke/src/test/org/apache/lucene/luke/models/documents/TestDocumentsImpl.java b/lucene/luke/src/test/org/apache/lucene/luke/models/documents/TestDocumentsImpl.java index cf08f1f56d4..ddddbef8fc0 100644 --- a/lucene/luke/src/test/org/apache/lucene/luke/models/documents/TestDocumentsImpl.java +++ b/lucene/luke/src/test/org/apache/lucene/luke/models/documents/TestDocumentsImpl.java @@ -150,7 +150,7 @@ public class TestDocumentsImpl extends DocumentsTestBase { assertEquals("adventures", term.text()); while (documents.nextTerm().isPresent()) { - Integer freq = documents.getDocFreq().orElseThrow(IllegalStateException::new); + 
documents.getDocFreq().orElseThrow(IllegalStateException::new); } } @@ -208,16 +208,16 @@ public class TestDocumentsImpl extends DocumentsTestBase { @Test public void testNextTermDoc_unPositioned() { DocumentsImpl documents = new DocumentsImpl(reader); - Term term = documents.firstTerm("title").orElseThrow(IllegalStateException::new); + documents.firstTerm("title").orElseThrow(IllegalStateException::new); assertFalse(documents.nextTermDoc().isPresent()); } @Test public void testTermPositions() { DocumentsImpl documents = new DocumentsImpl(reader); - Term term = documents.firstTerm("author").orElseThrow(IllegalStateException::new); - term = documents.seekTerm("carroll").orElseThrow(IllegalStateException::new); - int docid = documents.firstTermDoc().orElseThrow(IllegalStateException::new); + documents.firstTerm("author").orElseThrow(IllegalStateException::new); + documents.seekTerm("carroll").orElseThrow(IllegalStateException::new); + documents.firstTermDoc().orElseThrow(IllegalStateException::new); List postings = documents.getTermPositions(); assertEquals(1, postings.size()); assertEquals(1, postings.get(0).getPosition()); @@ -228,21 +228,21 @@ public class TestDocumentsImpl extends DocumentsTestBase { @Test public void testTermPositions_unPositioned() { DocumentsImpl documents = new DocumentsImpl(reader); - Term term = documents.firstTerm("author").orElseThrow(IllegalStateException::new); + documents.firstTerm("author").orElseThrow(IllegalStateException::new); assertEquals(0, documents.getTermPositions().size()); } @Test public void testTermPositions_noPositions() { DocumentsImpl documents = new DocumentsImpl(reader); - Term term = documents.firstTerm("title").orElseThrow(IllegalStateException::new); - int docid = documents.firstTermDoc().orElseThrow(IllegalStateException::new); + documents.firstTerm("title").orElseThrow(IllegalStateException::new); + documents.firstTermDoc().orElseThrow(IllegalStateException::new); assertEquals(0, documents.getTermPositions().size()); } @Test(expected = AlreadyClosedException.class) public void testClose() throws Exception { - DocumentsImpl documents = new DocumentsImpl(reader); + new DocumentsImpl(reader); reader.close(); IndexUtils.getFieldNames(reader); } diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java index 6dcf8d04254..9c3aad2dfdb 100644 --- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java +++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java @@ -1440,11 +1440,8 @@ public class MemoryIndex { @Override public int size() { - int size = 0; - for (String fieldName : this) { - size++; - } - return size; + return Math.toIntExact( + fields.entrySet().stream().filter(e -> e.getValue().numTokens > 0).count()); } } @@ -1573,7 +1570,6 @@ public class MemoryIndex { private boolean hasNext; private int doc = -1; private int freq; - private int pos; private int startOffset; private int endOffset; private int payloadIndex; @@ -1600,7 +1596,6 @@ public class MemoryIndex { @Override public int nextDoc() { - pos = -1; if (hasNext) { hasNext = false; return doc = 0; diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstDirectory.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstDirectory.java index cea0a9d4cef..c66e4a9f238 100644 --- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstDirectory.java +++ 
b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstDirectory.java @@ -67,12 +67,9 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.ByteBlockPool; -import org.apache.lucene.util.ByteBlockPool.Allocator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LineFileDocs; -import org.apache.lucene.util.RecyclingByteBlockAllocator; import org.apache.lucene.util.TestUtil; /** @@ -355,14 +352,6 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase { reader.close(); } - private Allocator randomByteBlockAllocator() { - if (random().nextBoolean()) { - return new RecyclingByteBlockAllocator(); - } else { - return new ByteBlockPool.DirectAllocator(); - } - } - private MemoryIndex randomMemoryIndex() { return new MemoryIndex( random().nextBoolean(), random().nextBoolean(), random().nextInt(50) * 1024 * 1024); diff --git a/lucene/misc/src/test/org/apache/lucene/misc/util/fst/TestFSTsMisc.java b/lucene/misc/src/test/org/apache/lucene/misc/util/fst/TestFSTsMisc.java index 540ce259488..55c1675a3d0 100644 --- a/lucene/misc/src/test/org/apache/lucene/misc/util/fst/TestFSTsMisc.java +++ b/lucene/misc/src/test/org/apache/lucene/misc/util/fst/TestFSTsMisc.java @@ -67,7 +67,6 @@ public class TestFSTsMisc extends LuceneTestCase { for (int inputMode = 0; inputMode < 2; inputMode++) { final int numWords = random.nextInt(maxNumWords + 1); Set termsSet = new HashSet<>(); - IntsRef[] terms = new IntsRef[numWords]; while (termsSet.size() < numWords) { final String term = FSTTester.getRandomString(random); termsSet.add(FSTTester.toIntsRef(term, inputMode)); diff --git a/lucene/monitor/src/test/org/apache/lucene/monitor/TestBooleanTermExtractor.java b/lucene/monitor/src/test/org/apache/lucene/monitor/TestBooleanTermExtractor.java index d1b1f9dd58d..ce82237d9ce 100644 --- a/lucene/monitor/src/test/org/apache/lucene/monitor/TestBooleanTermExtractor.java +++ b/lucene/monitor/src/test/org/apache/lucene/monitor/TestBooleanTermExtractor.java @@ -31,7 +31,6 @@ import org.apache.lucene.util.LuceneTestCase; public class TestBooleanTermExtractor extends LuceneTestCase { private static final QueryAnalyzer treeBuilder = new QueryAnalyzer(); - private static final TermWeightor WEIGHTOR = TermWeightor.DEFAULT; private Set collectTerms(Query query) { Set terms = new HashSet<>(); diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java index 8d80677664a..01a3723f411 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/EnumFieldSource.java @@ -102,8 +102,6 @@ public class EnumFieldSource extends FieldCacheSource { final NumericDocValues arr = DocValues.getNumeric(readerContext.reader(), field); return new IntDocValues(this) { - final MutableValueInt val = new MutableValueInt(); - int lastDocID; private int getValueForDoc(int doc) throws IOException { diff --git a/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalQuery.java index 2ffa781c892..239ec6a3253 100644 --- 
a/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalQuery.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalQuery.java @@ -121,7 +121,7 @@ public final class IntervalQuery extends Query { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { - return new IntervalWeight(this, boost, scoreMode); + return new IntervalWeight(this, boost); } @Override @@ -147,12 +147,10 @@ public final class IntervalQuery extends Query { private class IntervalWeight extends Weight { - final ScoreMode scoreMode; final float boost; - public IntervalWeight(Query query, float boost, ScoreMode scoreMode) { + public IntervalWeight(Query query, float boost) { super(query); - this.scoreMode = scoreMode; this.boost = boost; } diff --git a/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java b/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java index dba7840182e..283a1b6199e 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java @@ -659,12 +659,12 @@ public final class MoreLikeThis { if (queue.size() < limit) { // there is still space in the queue - queue.add(new ScoreTerm(word, fieldName, score, idf, docFreq, tf)); + queue.add(new ScoreTerm(word, fieldName, score)); } else { ScoreTerm term = queue.top(); // update the smallest in the queue in place and update the queue. if (term.score < score) { - term.update(word, fieldName, score, idf, docFreq, tf); + term.update(word, fieldName, score); queue.updateTop(); } } @@ -935,26 +935,17 @@ public final class MoreLikeThis { String word; String topField; float score; - float idf; - int docFreq; - int tf; - ScoreTerm(String word, String topField, float score, float idf, int docFreq, int tf) { + ScoreTerm(String word, String topField, float score) { this.word = word; this.topField = topField; this.score = score; - this.idf = idf; - this.docFreq = docFreq; - this.tf = tf; } - void update(String word, String topField, float score, float idf, int docFreq, int tf) { + void update(String word, String topField, float score) { this.word = word; this.topField = topField; this.score = score; - this.idf = idf; - this.docFreq = docFreq; - this.tf = tf; } } diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java index dd03bacb1be..a0f58aeef80 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java @@ -81,7 +81,7 @@ import org.apache.lucene.queryparser.charstream.FastCharStream; * the same syntax as this class, but is more modular, * enabling substantial customization to how a query is created. */ -public class QueryParser extends QueryParserBase implements QueryParserConstants { +@SuppressWarnings("unused") public class QueryParser extends QueryParserBase implements QueryParserConstants { /** The default operator for parsing queries. * Use {@link QueryParserBase#setDefaultOperator} to change it. 
*/ diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserTokenManager.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserTokenManager.java index 10d09505d25..ca89bf5d103 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserTokenManager.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserTokenManager.java @@ -18,7 +18,7 @@ package org.apache.lucene.queryparser.classic; /** Token Manager. */ -public class QueryParserTokenManager implements QueryParserConstants { +@SuppressWarnings("unused") public class QueryParserTokenManager implements QueryParserConstants { /** Debug output. */ // (debugStream omitted). diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java index 3462a066e23..326da0bd767 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java @@ -143,10 +143,7 @@ public class NLS { ResourceBundle resourceBundle = ResourceBundle.getBundle(clazz.getName(), Locale.getDefault()); if (resourceBundle != null) { - Object obj = resourceBundle.getObject(key); - // if (obj == null) - // System.err.println("WARN: Message with key:" + key + " and locale: " - // + Locale.getDefault() + " not found."); + resourceBundle.getObject(key); } } catch (MissingResourceException e) { // System.err.println("WARN: Message with key:" + key + " and locale: " diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/ParseException.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/ParseException.java index a782e8b234a..65f22a08935 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/ParseException.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/ParseException.java @@ -1,11 +1,11 @@ /* Generated By:JavaCC: Do not edit this line. ParseException.java Version 7.0 */ /* JavaCCOptions:KEEP_LINE_COLUMN=true */ - package org.apache.lucene.queryparser.flexible.standard.parser; - - import org.apache.lucene.queryparser.flexible.messages.*; - import org.apache.lucene.queryparser.flexible.core.*; - import org.apache.lucene.queryparser.flexible.core.messages.*; - + package org.apache.lucene.queryparser.flexible.standard.parser; + + import org.apache.lucene.queryparser.flexible.messages.*; + import org.apache.lucene.queryparser.flexible.core.*; + import org.apache.lucene.queryparser.flexible.core.messages.*; + /** * This exception is thrown when parse errors are encountered. @@ -36,16 +36,16 @@ public class ParseException extends QueryNodeParseException { * a new object of this type with the fields "currentToken", * "expectedTokenSequences", and "tokenImage" set. 
*/ - public ParseException(Token currentTokenVal, - int[][] expectedTokenSequencesVal, String[] tokenImageVal) - { - super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, initialise( - currentTokenVal, expectedTokenSequencesVal, tokenImageVal))); - this.currentToken = currentTokenVal; - this.expectedTokenSequences = expectedTokenSequencesVal; - this.tokenImage = tokenImageVal; - } - + public ParseException(Token currentTokenVal, + int[][] expectedTokenSequencesVal, String[] tokenImageVal) + { + super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, initialise( + currentTokenVal, expectedTokenSequencesVal, tokenImageVal))); + this.currentToken = currentTokenVal; + this.expectedTokenSequences = expectedTokenSequencesVal; + this.tokenImage = tokenImageVal; + } + /** * The following constructors are for use by you for whatever @@ -57,18 +57,18 @@ public class ParseException extends QueryNodeParseException { * these constructors. */ - public ParseException() - { - super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, "Error")); - } - + public ParseException() + { + super(new MessageImpl(QueryParserMessages.INVALID_SYNTAX, "Error")); + } + /** Constructor with message. */ - public ParseException(Message message) - { - super(message); - } - + public ParseException(Message message) + { + super(message); + } + /** diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java index 61a323851f0..d509a0daf81 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParser.java @@ -50,7 +50,7 @@ import static org.apache.lucene.queryparser.flexible.standard.parser.EscapeQuery /** * Parser for the standard Lucene syntax */ -public class StandardSyntaxParser implements SyntaxParser, StandardSyntaxParserConstants { +@SuppressWarnings("unused") public class StandardSyntaxParser implements SyntaxParser, StandardSyntaxParserConstants { public StandardSyntaxParser() { this(new FastCharStream(Reader.nullReader())); } diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParserTokenManager.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParserTokenManager.java index b0ef7fdbb63..3dc6507c591 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParserTokenManager.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/StandardSyntaxParserTokenManager.java @@ -46,7 +46,7 @@ package org.apache.lucene.queryparser.flexible.standard.parser; /** Token Manager. */ -public class StandardSyntaxParserTokenManager implements StandardSyntaxParserConstants { +@SuppressWarnings("unused") public class StandardSyntaxParserTokenManager implements StandardSyntaxParserConstants { /** Debug output. */ // (debugStream omitted). 
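
The hunks above add a class-level @SuppressWarnings("unused") to the JavaCC-generated parsers and token managers rather than editing the generated sources line by line, presumably so the stricter unused-member and unused-local checks do not fail on code that the regeneration step would overwrite anyway. The following is a minimal illustrative sketch, not part of the patch: the class name and members are hypothetical, and it only shows that a single class-level annotation covers every otherwise-flagged member and local in the class.

// Hypothetical example (not from the patch): one class-level annotation
// suppresses unused warnings for all members and locals in the class,
// mirroring how the generated parsers above are annotated.
@SuppressWarnings("unused")
public class GeneratedParserExample {
  // Without the annotation, a strict unused-private-member check would flag this field.
  private int jjUnusedState;

  public int parse() {
    // Without the annotation, a strict unused-local-variable check would flag this local.
    int unusedLocal = 42;
    return 0;
  }
}
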
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java index aac680f474a..c90c362e7f2 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParser.java @@ -46,7 +46,7 @@ import org.apache.lucene.queryparser.charstream.FastCharStream; * to two terms may appear between a and b.

    */ -public class QueryParser implements QueryParserConstants { +@SuppressWarnings("unused") public class QueryParser implements QueryParserConstants { static final int MINIMUM_PREFIX_LENGTH = 3; static final int MINIMUM_CHARS_IN_TRUNC = 3; static final String TRUNCATION_ERROR_MESSAGE = "Too unrestrictive truncation: "; diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParserTokenManager.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParserTokenManager.java index d12f2074387..f8fef600a20 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParserTokenManager.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/surround/parser/QueryParserTokenManager.java @@ -19,7 +19,7 @@ package org.apache.lucene.queryparser.surround.parser; /** Token Manager. */ -public class QueryParserTokenManager implements QueryParserConstants { +@SuppressWarnings("unused") public class QueryParserTokenManager implements QueryParserConstants { /** Debug output. */ // (debugStream omitted). diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java index d36234045fb..cbb4affde86 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java @@ -63,7 +63,6 @@ public class TestMultiFieldQPHelper extends LuceneTestCase { // verify parsing of query using a stopping analyzer private void assertStopQueryIsMatchNoDocsQuery(String qtxt) throws Exception { String[] fields = {"b", "t"}; - Occur occur[] = {Occur.SHOULD, Occur.SHOULD}; TestQPHelper.QPTestAnalyzer a = new TestQPHelper.QPTestAnalyzer(); StandardQueryParser mfqp = new StandardQueryParser(); mfqp.setMultiFields(fields); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java index 787c336e069..dca7f39656a 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java @@ -70,7 +70,6 @@ import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; -import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; @@ -1255,17 +1254,6 @@ public class TestQPHelper extends LuceneTestCase { } } - private void assertHits(int expected, String query, IndexSearcher is) - throws IOException, QueryNodeException { - StandardQueryParser qp = new StandardQueryParser(); - qp.setAnalyzer(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)); - qp.setLocale(Locale.ENGLISH); - - Query q = qp.parse(query, "date"); - ScoreDoc[] hits = is.search(q, 1000).scoreDocs; - assertEquals(expected, hits.length); - } - @Override public void tearDown() throws Exception { IndexSearcher.setMaxClauseCount(originalMaxClauses); diff --git a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java 
b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java index 29088a867e9..2e616a8717c 100644 --- a/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java +++ b/lucene/replicator/src/java/org/apache/lucene/replicator/nrt/ReplicaNode.java @@ -271,7 +271,6 @@ public abstract class ReplicaNode extends Node { } lastPrimaryGen = job.getCopyState().primaryGen; - byte[] infosBytes = job.getCopyState().infosBytes; SegmentInfos syncInfos = SegmentInfos.readCommit( @@ -437,7 +436,6 @@ public abstract class ReplicaNode extends Node { job.finish(); // Turn byte[] back to SegmentInfos: - byte[] infosBytes = copyState.infosBytes; SegmentInfos infos = SegmentInfos.readCommit(dir, toIndexInput(copyState.infosBytes), copyState.gen); assert infos.getVersion() == copyState.version; diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/TestIndexAndTaxonomyReplicationClient.java b/lucene/replicator/src/test/org/apache/lucene/replicator/TestIndexAndTaxonomyReplicationClient.java index 4e80e308c87..d9ae94b1637 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/TestIndexAndTaxonomyReplicationClient.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/TestIndexAndTaxonomyReplicationClient.java @@ -419,13 +419,12 @@ public class TestIndexAndTaxonomyReplicationClient extends ReplicatorTestCase { // verify taxonomy index is fully consistent (since we only add one // category to all documents, there's nothing much more to validate. ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); - CheckIndex.Status indexStatus = null; try (CheckIndex checker = new CheckIndex(handlerTaxoDir.getDelegate())) { checker.setFailFast(true); checker.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8), false); try { - indexStatus = checker.checkIndex(null); + checker.checkIndex(null); } catch (IOException | RuntimeException ioe) { // ok: we fallback below } diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java index 51f1fc2f9dc..30aaba4592d 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimplePrimaryNode.java @@ -196,9 +196,6 @@ class SimplePrimaryNode extends PrimaryNode { warmingSegments.add(preCopy); try { - - Set fileNames = files.keySet(); - // Ask all currently known replicas to pre-copy this newly merged segment's files: for (int replicaTCPPort : replicaTCPPorts) { try { diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java index 17f6fd17771..d32dcd6455f 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/SimpleReplicaNode.java @@ -332,7 +332,7 @@ class SimpleReplicaNode extends ReplicaNode { Map files = SimpleServer.readFilesMetaData(in); message("done reading files to copy files=" + files.keySet()); AtomicBoolean finished = new AtomicBoolean(); - CopyJob job = launchPreCopyMerge(finished, newPrimaryGen, files); + launchPreCopyMerge(finished, newPrimaryGen, files); message("done launching copy job files=" + files.keySet()); // Silly keep alive mechanism, else if e.g. 
we (replica node) crash, the primary diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java index 1c42d07b1fc..d02bd410592 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestNRTReplication.java @@ -125,7 +125,6 @@ public class TestNRTReplication extends LuceneTestCase { long initCommitVersion = -1; long initInfosVersion = -1; Pattern logTimeStart = Pattern.compile("^[0-9\\.]+s .*"); - boolean sawExistingSegmentsFile = false; while (true) { String l = r.readLine(); @@ -159,7 +158,6 @@ public class TestNRTReplication extends LuceneTestCase { } else if (l.startsWith("NODE STARTED")) { break; } else if (l.contains("replica cannot start: existing segments file=")) { - sawExistingSegmentsFile = true; } } diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java index fd058213ae9..09aaf8f3ad6 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/nrt/TestStressNRTReplication.java @@ -44,9 +44,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LineFileDocs; @@ -444,7 +441,6 @@ public class TestStressNRTReplication extends LuceneTestCase { return; } - int id = replicaToPromote.id; message("top: now startPrimary " + replicaToPromote); startPrimary(replicaToPromote.id); } @@ -1005,9 +1001,6 @@ public class TestStressNRTReplication extends LuceneTestCase { @Override public void run() { - // Maps version to number of hits for silly 'the' TermQuery: - Query theQuery = new TermQuery(new Term("body", "the")); - // Persists connections Map connections = new HashMap<>(); @@ -1221,8 +1214,6 @@ public class TestStressNRTReplication extends LuceneTestCase { message("top: indexer: updatePct=" + updatePct + " sleepChance=" + sleepChance); - long lastTransLogLoc = transLog.getNextLocation(); - NodeProcess curPrimary = null; Connection c = null; diff --git a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java index be8688ee425..fa1f22b9323 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java @@ -429,11 +429,7 @@ public class TermAutomatonQuery extends Query implements Accountable { if (any) { return new TermAutomatonScorer( - this, - enums, - anyTermID, - idToTerm, - new LeafSimScorer(stats, context.reader(), field, true)); + this, enums, anyTermID, new LeafSimScorer(stats, context.reader(), field, true)); } else { return null; } diff --git a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonScorer.java b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonScorer.java index 8567ce0747a..17c8e58d239 100644 
--- a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonScorer.java +++ b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonScorer.java @@ -17,14 +17,12 @@ package org.apache.lucene.sandbox.search; import java.io.IOException; -import java.util.Map; import org.apache.lucene.sandbox.search.TermAutomatonQuery.EnumAndScorer; import org.apache.lucene.sandbox.search.TermAutomatonQuery.TermAutomatonWeight; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafSimScorer; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.automaton.Automaton; @@ -32,12 +30,10 @@ import org.apache.lucene.util.automaton.RunAutomaton; // TODO: add two-phase and needsScores support. maybe use conjunctionDISI internally? class TermAutomatonScorer extends Scorer { - private final EnumAndScorer[] subs; private final EnumAndScorer[] subsOnDoc; private final PriorityQueue docIDQueue; private final PriorityQueue posQueue; private final RunAutomaton runAutomaton; - private final Map idToTerm; // We reuse this array to check for matches starting from an initial // position; we increase posShift every time we move to a new possible @@ -58,18 +54,12 @@ class TermAutomatonScorer extends Scorer { private int freq; public TermAutomatonScorer( - TermAutomatonWeight weight, - EnumAndScorer[] subs, - int anyTermID, - Map idToTerm, - LeafSimScorer docScorer) + TermAutomatonWeight weight, EnumAndScorer[] subs, int anyTermID, LeafSimScorer docScorer) throws IOException { super(weight); // System.out.println(" automaton:\n" + weight.automaton.toDot()); this.runAutomaton = new TermRunAutomaton(weight.automaton, subs.length); this.docScorer = docScorer; - this.idToTerm = idToTerm; - this.subs = subs; this.docIDQueue = new DocIDQueue(subs.length); this.posQueue = new PositionQueue(subs.length); this.anyTermID = anyTermID; diff --git a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TokenStreamToTermAutomatonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TokenStreamToTermAutomatonQuery.java index 1be61b98380..365b85b517c 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TokenStreamToTermAutomatonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TokenStreamToTermAutomatonQuery.java @@ -65,7 +65,6 @@ public class TokenStreamToTermAutomatonQuery { TermAutomatonQuery query = new TermAutomatonQuery(field); int pos = -1; - int lastPos = 0; int maxOffset = 0; int maxPos = -1; int state = -1; diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/codecs/idversion/TestIDVersionPostingsFormat.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/codecs/idversion/TestIDVersionPostingsFormat.java index 683f329be30..c6c805af77e 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/codecs/idversion/TestIDVersionPostingsFormat.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/codecs/idversion/TestIDVersionPostingsFormat.java @@ -141,8 +141,6 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { } ids = new IDSource() { - final int radix = - TestUtil.nextInt(random(), Character.MIN_RADIX, Character.MAX_RADIX); final String zeroPad = String.format(Locale.ROOT, "%0" + TestUtil.nextInt(random(), 5, 20) + "d", 0); int upto; @@ -163,7 +161,6 @@ public class 
TestIDVersionPostingsFormat extends LuceneTestCase { new IDSource() { final int radix = TestUtil.nextInt(random(), Character.MIN_RADIX, Character.MAX_RADIX); - int upto; @Override public String next() { @@ -180,8 +177,6 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { new IDSource() { final int radix = TestUtil.nextInt(random(), Character.MIN_RADIX, Character.MAX_RADIX); - final String zeroPad = String.format(Locale.ROOT, "%015d", 0); - int upto; @Override public String next() { @@ -571,7 +566,6 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { payload.length = 8; IDVersionPostingsFormat.longToBytes(17, payload); ts.setValue("foo", payload); - Field field = new Field("id", ts, ft); doc.add(new Field("id", ts, ft)); expectThrows( IllegalArgumentException.class, diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/search/TestTermAutomatonQuery.java index 3c2c83b6f8e..7ca592e5525 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/search/TestTermAutomatonQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/search/TestTermAutomatonQuery.java @@ -775,7 +775,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase { public void testRewriteNoMatch() throws Exception { TermAutomatonQuery q = new TermAutomatonQuery("field"); - int initState = q.createState(); + q.createState(); // initState q.finish(); Directory dir = newDirectory(); diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java index 72ee20b5258..c1facb85d1a 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/DateRangePrefixTree.java @@ -446,7 +446,6 @@ public class DateRangePrefixTree extends NumberRangePrefixTree { private void appendPadded(StringBuilder builder, int integer, short positions) { assert integer >= 0 && positions >= 1 && positions <= 4; - int preBuilderLen = builder.length(); int intStrLen; if (integer > 999) { intStrLen = 4; diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java index 3a3e69a0b6f..45586ec7b98 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/SpatialTestCase.java @@ -161,11 +161,6 @@ public abstract class SpatialTestCase extends LuceneTestCase { } } - private double randomGaussianMinMeanMax(double min, double mean, double max) { - assert mean > min; - return randomGaussianMeanMax(mean - min, max - min) + min; - } - /** * Within one standard deviation (68% of the time) the result is "close" to mean. 
By "close": when * greater than mean, it's the lesser of 2*mean or half way to max, when lesser than mean, it's diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java index 0df4883d8a2..b9d33847a96 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java @@ -170,7 +170,6 @@ public class TestBBoxStrategy extends RandomSpatialOpStrategyTestCase { final Rectangle indexedShape = ctx.getShapeFactory().rect(180, 180, -10, 10); final Rectangle queryShape = ctx.getShapeFactory().rect(-180, -180, -20, 20); final SpatialOperation operation = SpatialOperation.IsWithin; - final boolean match = true; // yes it is within // the rest is super.testOperation without leading assert: @@ -179,7 +178,7 @@ public class TestBBoxStrategy extends RandomSpatialOpStrategyTestCase { Query query = strategy.makeQuery(new SpatialArgs(operation, queryShape)); SearchResults got = executeQuery(query, 1); assert got.numFound <= 1 : "unclean test env"; - if ((got.numFound == 1) != match) fail(operation + " I:" + indexedShape + " Q:" + queryShape); + if (got.numFound != 1) fail(operation + " I:" + indexedShape + " Q:" + queryShape); deleteAll(); // clean up after ourselves } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRandomSpatialOpFuzzyPrefixTree.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRandomSpatialOpFuzzyPrefixTree.java index 1556370f599..6fae9796a28 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRandomSpatialOpFuzzyPrefixTree.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestRandomSpatialOpFuzzyPrefixTree.java @@ -292,7 +292,6 @@ public class TestRandomSpatialOpFuzzyPrefixTree extends StrategyTestCase { Map indexedShapes = new LinkedHashMap<>(); Map indexedShapesGS = new LinkedHashMap<>(); // grid snapped final int numIndexedShapes = randomIntBetween(1, 6); - boolean indexedAtLeastOneShapePair = false; final boolean pointsOnly = ((PrefixTreeStrategy) strategy).isPointsOnly(); for (int i = 0; i < numIndexedShapes; i++) { String id = "" + i; @@ -305,7 +304,6 @@ public class TestRandomSpatialOpFuzzyPrefixTree extends StrategyTestCase { } else if (R <= 4) { // 3 in 12 // comprised of more than one shape indexedShape = randomShapePairRect(biasContains); - indexedAtLeastOneShapePair = true; } else { indexedShape = randomRectangle(); // just one rect } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java index 306c9f8f19f..5f0d8b30bbb 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/TestTermQueryPrefixGridStrategy.java @@ -24,7 +24,6 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.document.StringField; import org.apache.lucene.spatial.SpatialTestCase; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; -import org.apache.lucene.spatial.query.SpatialArgsParser; import org.junit.Test; import org.locationtech.spatial4j.context.SpatialContext; import org.locationtech.spatial4j.shape.Shape; @@ -51,7 +50,7 @@ public 
class TestTermQueryPrefixGridStrategy extends SpatialTestCase { addDocumentsAndCommit(Arrays.asList(losAngeles)); // This won't work with simple spatial context... - SpatialArgsParser spatialArgsParser = new SpatialArgsParser(); + // SpatialArgsParser spatialArgsParser = new SpatialArgsParser(); // TODO... use a non polygon query // SpatialArgs spatialArgs = spatialArgsParser.parse( // "Intersects(POLYGON((-127.00390625 39.8125,-112.765625 39.98828125,-111.53515625 diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/TestDateRangePrefixTree.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/TestDateRangePrefixTree.java index 44bc6b04ed4..edc862c2f8a 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/TestDateRangePrefixTree.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/tree/TestDateRangePrefixTree.java @@ -184,7 +184,6 @@ public class TestDateRangePrefixTree extends LuceneTestCase { private void roundTrip(Calendar calOrig) throws ParseException { Calendar cal = (Calendar) calOrig.clone(); - String lastString = null; while (true) { String calString; { @@ -231,7 +230,6 @@ public class TestDateRangePrefixTree extends LuceneTestCase { if (e.getMessage().equals("Calendar underflow")) return; throw e; } - lastString = calString; } } diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/TestGeo3dShapeWGS84ModelRectRelation.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/TestGeo3dShapeWGS84ModelRectRelation.java index 6376fc0b7f7..45e7539cd62 100644 --- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/TestGeo3dShapeWGS84ModelRectRelation.java +++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/spatial4j/TestGeo3dShapeWGS84ModelRectRelation.java @@ -112,8 +112,6 @@ public class TestGeo3dShapeWGS84ModelRectRelation extends ShapeRectRelationTestC 16 * RADIANS_PER_DEGREE, 4 * RADIANS_PER_DEGREE, 36 * RADIANS_PER_DEGREE); - final GeoPoint pt = - new GeoPoint(planetModel, 16 * RADIANS_PER_DEGREE, 23.81626064835212 * RADIANS_PER_DEGREE); final GeoPoint[] pathPoints = new GeoPoint[] { new GeoPoint( diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java index cf46234b03a..e72b70ba5c1 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java @@ -113,12 +113,10 @@ class GeoComplexPolygon extends GeoBasePolygon { } if (lastEdge != null) { lastEdge.next = edge; - edge.previous = lastEdge; } lastEdge = edge; lastGeoPoint = thisGeoPoint; } - firstEdge.previous = lastEdge; lastEdge.next = firstEdge; shapeStartEdges[edgePointIndex] = firstEdge; edgePointIndex++; @@ -920,7 +918,6 @@ class GeoComplexPolygon extends GeoBasePolygon { public final SidedPlane backingPlane; public final Plane plane; public final XYZBounds planeBounds; - public Edge previous = null; public Edge next = null; public Edge(final PlanetModel pm, final GeoPoint startPoint, final GeoPoint endPoint) { @@ -1190,8 +1187,6 @@ class GeoComplexPolygon extends GeoBasePolygon { private abstract static class Tree { private final Node rootNode; - protected static final Edge[] EMPTY_ARRAY = new Edge[0]; - /** * Constructor. * @@ -1283,7 +1278,6 @@ class GeoComplexPolygon extends GeoBasePolygon { /** This is the z-tree. 
*/ private static class ZTree extends Tree { - public Node rootNode = null; public ZTree(final List allEdges) { super(allEdges); @@ -1444,7 +1438,6 @@ class GeoComplexPolygon extends GeoBasePolygon { /** Count the number of verifiable edge crossings for a full 1/2 a world. */ private class FullLinearCrossingEdgeIterator implements CountingEdgeIterator { - private final GeoPoint testPoint; private final Plane plane; private final Plane abovePlane; private final Plane belowPlane; @@ -1468,7 +1461,6 @@ class GeoComplexPolygon extends GeoBasePolygon { assert plane.evaluateIsZero(thePointX, thePointY, thePointZ) : "Check point is not on travel plane"; assert plane.evaluateIsZero(testPoint) : "Test point is not on travel plane"; - this.testPoint = testPoint; this.plane = plane; this.abovePlane = abovePlane; this.belowPlane = belowPlane; @@ -1573,7 +1565,6 @@ class GeoComplexPolygon extends GeoBasePolygon { /** Count the number of verifiable edge crossings for less than 1/2 a world. */ private class SectorLinearCrossingEdgeIterator implements CountingEdgeIterator { - private final GeoPoint testPoint; private final Plane plane; private final Plane abovePlane; private final Plane belowPlane; @@ -1598,7 +1589,6 @@ class GeoComplexPolygon extends GeoBasePolygon { assert plane.evaluateIsZero(thePointX, thePointY, thePointZ) : "Check point is not on travel plane"; assert plane.evaluateIsZero(testPoint) : "Test point is not on travel plane"; - this.testPoint = testPoint; this.plane = plane; this.abovePlane = abovePlane; this.belowPlane = belowPlane; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegeneratePath.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegeneratePath.java index a6361cc3087..51d81520bf6 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegeneratePath.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoDegeneratePath.java @@ -404,10 +404,6 @@ class GeoDegeneratePath extends GeoBasePath { public final GeoPoint point; /** Pertinent cutoff planes from adjoining segments */ public final Membership[] cutoffPlanes; - /** Notable points for this segment endpoint */ - public final GeoPoint[] notablePoints; - /** No notable points from the circle itself */ - public static final GeoPoint[] circlePoints = new GeoPoint[0]; /** Null membership */ public static final Membership[] NO_MEMBERSHIP = new Membership[0]; @@ -419,7 +415,6 @@ class GeoDegeneratePath extends GeoBasePath { public SegmentEndpoint(final GeoPoint point) { this.point = point; this.cutoffPlanes = NO_MEMBERSHIP; - this.notablePoints = circlePoints; } /** @@ -433,7 +428,6 @@ class GeoDegeneratePath extends GeoBasePath { public SegmentEndpoint(final GeoPoint point, final SidedPlane cutoffPlane) { this.point = point; this.cutoffPlanes = new Membership[] {new SidedPlane(cutoffPlane)}; - this.notablePoints = new GeoPoint[] {point}; } /** @@ -448,17 +442,6 @@ class GeoDegeneratePath extends GeoBasePath { this.point = point; this.cutoffPlanes = new Membership[] {new SidedPlane(cutoffPlane1), new SidedPlane(cutoffPlane2)}; - this.notablePoints = new GeoPoint[] {point}; - } - - /** - * Check if point is within this endpoint. - * - * @param point is the point. - * @return true of within. 
- */ - public boolean isWithin(final Vector point) { - return this.point.isIdentical(point.x, point.y, point.z); } /** @@ -490,26 +473,6 @@ class GeoDegeneratePath extends GeoBasePath { return distanceStyle.toAggregationForm(distanceStyle.computeDistance(this.point, x, y, z)); } - /** - * Compute nearest path distance. - * - * @param distanceStyle is the distance style. - * @param x is the point x. - * @param y is the point y. - * @param z is the point z. - * @return the distance metric (always value zero), in aggregation form, or POSITIVE_INFINITY if - * the point is not within the bounds of the endpoint. - */ - public double nearestPathDistance( - final DistanceStyle distanceStyle, final double x, final double y, final double z) { - for (final Membership m : cutoffPlanes) { - if (!m.isWithin(x, y, z)) { - return Double.POSITIVE_INFINITY; - } - } - return distanceStyle.toAggregationForm(0.0); - } - /** * Compute path center distance. * @@ -670,18 +633,6 @@ class GeoDegeneratePath extends GeoBasePath { } } - /** - * Check if point is within this segment. - * - * @param point is the point. - * @return true of within. - */ - public boolean isWithin(final Vector point) { - return startCutoffPlane.isWithin(point) - && endCutoffPlane.isWithin(point) - && normalizedConnectingPlane.evaluateIsZero(point); - } - /** * Check if point is within this segment. * diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygonFactory.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygonFactory.java index 1b834a8dee5..29f097c6598 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygonFactory.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoPolygonFactory.java @@ -2022,8 +2022,6 @@ public class GeoPolygonFactory { */ private static class SafePath { public final GeoPoint lastPoint; - public final int lastPointIndex; - public final Plane lastPlane; public final SafePath previous; /** Create a new safe end point. */ @@ -2033,8 +2031,6 @@ public class GeoPolygonFactory { final int lastPointIndex, final Plane lastPlane) { this.lastPoint = lastPoint; - this.lastPointIndex = lastPointIndex; - this.lastPlane = lastPlane; this.previous = previous; } diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java index b49588d23ac..227f2e73aa5 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java @@ -745,14 +745,6 @@ class GeoStandardPath extends GeoBasePath { } } - /** Simplest possible implementation of segment endpoint: a single point. */ - private static class DegenerateSegmentEndpoint extends BaseSegmentEndpoint { - - public DegenerateSegmentEndpoint(final GeoPoint point) { - super(point); - } - } - /** Endpoint that's a simple circle. 
*/ private static class CircleSegmentEndpoint extends BaseSegmentEndpoint { /** A plane describing the circle */ @@ -1104,10 +1096,6 @@ class GeoStandardPath extends GeoBasePath { public final GeoPoint[] upperConnectingPlanePoints; /** Notable points for the lower connecting plane */ public final GeoPoint[] lowerConnectingPlanePoints; - /** Notable points for the start cutoff plane */ - public final GeoPoint[] startCutoffPlanePoints; - /** Notable points for the end cutoff plane */ - public final GeoPoint[] endCutoffPlanePoints; /** * Construct a path segment. @@ -1181,8 +1169,6 @@ class GeoStandardPath extends GeoBasePath { this.LRHC = points[0]; upperConnectingPlanePoints = new GeoPoint[] {ULHC, URHC}; lowerConnectingPlanePoints = new GeoPoint[] {LLHC, LRHC}; - startCutoffPlanePoints = new GeoPoint[] {ULHC, LLHC}; - endCutoffPlanePoints = new GeoPoint[] {URHC, LRHC}; } /** @@ -1204,19 +1190,6 @@ class GeoStandardPath extends GeoBasePath { } } - /** - * Check if point is within this segment. - * - * @param point is the point. - * @return true of within. - */ - public boolean isWithin(final Vector point) { - return startCutoffPlane.isWithin(point) - && endCutoffPlane.isWithin(point) - && upperConnectingPlane.isWithin(point) - && lowerConnectingPlane.isWithin(point); - } - /** * Check if point is within this segment. * diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZBounds.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZBounds.java index c675de11f1a..6f458b54342 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZBounds.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/XYZBounds.java @@ -45,13 +45,6 @@ public class XYZBounds implements Bounds { /** Maximum z */ private Double maxZ = null; - /** Set to true if no longitude bounds can be stated */ - private boolean noLongitudeBound = false; - /** Set to true if no top latitude bound can be stated */ - private boolean noTopLatitudeBound = false; - /** Set to true if no bottom latitude bound can be stated */ - private boolean noBottomLatitudeBound = false; - /** Construct an empty bounds object */ public XYZBounds() {} diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java index d4616e1d6c5..6589e667b2c 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java @@ -1180,12 +1180,12 @@ public class TestGeo3DPoint extends LuceneTestCase { int iters = atLeast(100); for (int i = 0; i < iters; i++) { // Create a polygon that's less than 180 degrees - final Polygon clockWise = makePoly(pm, randomPole, true, true); + makePoly(pm, randomPole, true, true); } iters = atLeast(100); for (int i = 0; i < iters; i++) { // Create a polygon that's greater than 180 degrees - final Polygon counterClockWise = makePoly(pm, randomPole, false, true); + makePoly(pm, randomPole, false, true); } } @@ -1260,12 +1260,12 @@ public class TestGeo3DPoint extends LuceneTestCase { // the polygon, so we're going to use Geo3D to help us select those given the points we just // made. - final int holeCount = createHoles ? TestUtil.nextInt(random(), 0, 2) : 0; - final List holeList = new ArrayList<>(); /* Hole logic is broken and needs rethinking + final int holeCount = createHoles ? TestUtil.nextInt(random(), 0, 2) : 0; + // Create the geo3d polygon, so we can test out our poles. 
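
Note: the test hunks around this point all follow the shape that an unused-local check drives. When the right-hand side of an unused assignment still needs to run, for example the makePoly(...) calls just above, only the binding is dropped and the call itself is kept. A minimal compilable sketch of that pattern, using hypothetical names rather than anything from the Lucene sources:

    class UnusedLocalPattern {
      static String build() {       // stands in for helpers such as makePoly(...)
        return "shape";
      }

      void before() {
        String shape = build();     // flagged once unused locals are treated as errors
      }

      void after() {
        build();                    // the call (and any exception it may throw) still runs; only the dead binding is gone
      }
    }
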
final GeoPolygon poly; try { diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoBBox.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoBBox.java index 749c0923a3e..5cb74fc274c 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoBBox.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoBBox.java @@ -31,7 +31,6 @@ public class TestGeoBBox { @Test public void testBBoxDegenerate() { GeoBBox box; - GeoConvexPolygon cp; int relationship; List points = new ArrayList(); points.add( diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoExactCircle.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoExactCircle.java index d3e354d14d8..1ab6261740a 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoExactCircle.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoExactCircle.java @@ -146,18 +146,16 @@ public class TestGeoExactCircle extends RandomGeo3dShapeGenerator { public void exactCircleLargeTest() { boolean success = true; try { - GeoCircle circle = - GeoCircleFactory.makeExactGeoCircle( - new PlanetModel(0.99, 1.05), 0.25 * Math.PI, 0, 0.35 * Math.PI, 1e-12); + GeoCircleFactory.makeExactGeoCircle( + new PlanetModel(0.99, 1.05), 0.25 * Math.PI, 0, 0.35 * Math.PI, 1e-12); } catch (IllegalArgumentException e) { success = false; } assertTrue(success); success = false; try { - GeoCircle circle = - GeoCircleFactory.makeExactGeoCircle( - PlanetModel.WGS84, 0.25 * Math.PI, 0, 0.9996 * Math.PI, 1e-12); + GeoCircleFactory.makeExactGeoCircle( + PlanetModel.WGS84, 0.25 * Math.PI, 0, 0.9996 * Math.PI, 1e-12); } catch (IllegalArgumentException e) { success = true; } @@ -168,13 +166,8 @@ public class TestGeoExactCircle extends RandomGeo3dShapeGenerator { public void testExactCircleDoesNotFit() { boolean exception = false; try { - GeoCircle circle = - GeoCircleFactory.makeExactGeoCircle( - PlanetModel.WGS84, - 1.5633796542562415, - -1.0387149580695152, - 3.1409865861032844, - 1e-12); + GeoCircleFactory.makeExactGeoCircle( + PlanetModel.WGS84, 1.5633796542562415, -1.0387149580695152, 3.1409865861032844, 1e-12); } catch (IllegalArgumentException e) { exception = true; } @@ -315,9 +308,8 @@ public class TestGeoExactCircle extends RandomGeo3dShapeGenerator { PlanetModel planetModel = new PlanetModel(1.6304230055804751, 1.0199671157571204); boolean fail = false; try { - GeoCircle circle = - GeoCircleFactory.makeExactGeoCircle( - planetModel, 0.8853814403571284, 0.9784990176851283, 0.9071033527030907, 1e-11); + GeoCircleFactory.makeExactGeoCircle( + planetModel, 0.8853814403571284, 0.9784990176851283, 0.9071033527030907, 1e-11); } catch (IllegalArgumentException e) { fail = true; } diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPath.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPath.java index 262a622fc94..59c1242fcb3 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPath.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPath.java @@ -130,9 +130,6 @@ public class TestGeoPath extends LuceneTestCase { GeoStandardPath p; GeoStandardPath c; GeoPoint point; - GeoPoint pointApprox; - int relationship; - GeoArea area; PlanetModel planetModel; planetModel = new PlanetModel(1.151145876105594, 0.8488541238944061); @@ -141,16 +138,14 @@ public class TestGeoPath extends LuceneTestCase { c.addPoint(0.27828548161836364, 
0.6785795524104564); c.done(); point = new GeoPoint(planetModel, -0.49298555067758226, 0.9892440995026406); - pointApprox = new GeoPoint(0.5110940362119821, 0.7774603209946239, -0.49984312299556544); - area = - GeoAreaFactory.makeGeoArea( - planetModel, - 0.49937141144985997, - 0.5161765426256085, - 0.3337218719537796, - 0.8544419570901649, - -0.6347692823688085, - 0.3069696588119369); + GeoAreaFactory.makeGeoArea( + planetModel, + 0.49937141144985997, + 0.5161765426256085, + 0.3337218719537796, + 0.8544419570901649, + -0.6347692823688085, + 0.3069696588119369); assertTrue(!c.isWithin(point)); // Start by testing the basic kinds of relationship, increasing in order of difficulty. diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPolygon.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPolygon.java index 95164910fc3..4fdfe878de7 100755 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPolygon.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestGeoPolygon.java @@ -465,7 +465,6 @@ public class TestGeoPolygon extends LuceneTestCase { @Test public void testPolygonBoundsCase1() { GeoPolygon c; - LatLonBounds b; List points; XYZBounds xyzb; GeoPoint point1; @@ -608,10 +607,6 @@ public class TestGeoPolygon extends LuceneTestCase { c.addShape(new GeoConcavePolygon(pm, points2, p2bits, false)); // System.out.println(zScaling); - GeoPoint point = new GeoPoint(pm, -0.9825762558001477, 2.4832136904725273); - GeoPoint quantizedPoint = - new GeoPoint(-0.4505446160475436, 0.34850109186970535, -0.8539966368663765); - GeoArea xyzSolid = GeoAreaFactory.makeGeoArea( pm, @@ -666,7 +661,7 @@ public class TestGeoPolygon extends LuceneTestCase { boolean illegalArgumentException = false; try { - final GeoPolygon p = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, null); + GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, null); } catch (IllegalArgumentException e) { illegalArgumentException = true; } @@ -699,7 +694,7 @@ public class TestGeoPolygon extends LuceneTestCase { boolean illegalArgumentException = false; try { - final GeoPolygon p = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, null); + GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, null); } catch (IllegalArgumentException e) { illegalArgumentException = true; } @@ -729,18 +724,17 @@ public class TestGeoPolygon extends LuceneTestCase { final GeoCompositePolygon rval = new GeoCompositePolygon(PlanetModel.WGS84); final GeoPolygonFactory.MutableBoolean mutableBoolean = new GeoPolygonFactory.MutableBoolean(); - boolean result = - GeoPolygonFactory.buildPolygonShape( - rval, - mutableBoolean, - PlanetModel.WGS84, - points, - internal, - 0, - 1, - new SidedPlane(p1, p3, p2), - new ArrayList(), - null); + GeoPolygonFactory.buildPolygonShape( + rval, + mutableBoolean, + PlanetModel.WGS84, + points, + internal, + 0, + 1, + new SidedPlane(p1, p3, p2), + new ArrayList(), + null); assertFalse(mutableBoolean.value); } @@ -770,7 +764,7 @@ public class TestGeoPolygon extends LuceneTestCase { shapeList.add(desc); - GeoPolygon p = GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.WGS84, shapeList); + GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.WGS84, shapeList); } @Test @@ -854,8 +848,6 @@ public class TestGeoPolygon extends LuceneTestCase { */ final GeoPoint point = new GeoPoint(PlanetModel.WGS84, -0.41518838180529244, 3.141592653589793); - final GeoPoint encodedPoint = - new GeoPoint(-0.9155623168963972, 
2.3309121299774915E-10, -0.40359240449795253); assertTrue(p.isWithin(point) ? solid.isWithin(point) : true); } @@ -1040,7 +1032,7 @@ public class TestGeoPolygon extends LuceneTestCase { boolean result; try { - final GeoConvexPolygon poly2 = new GeoConvexPolygon(PlanetModel.WGS84, poly2List); + new GeoConvexPolygon(PlanetModel.WGS84, poly2List); result = true; } catch (IllegalArgumentException e) { result = false; @@ -1397,9 +1389,9 @@ public class TestGeoPolygon extends LuceneTestCase { points.add( new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(64.53775), Geo3DUtil.fromDegrees(-52.19148))); - GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); Collections.reverse(points); - polygon = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); } @Test @@ -1495,9 +1487,9 @@ public class TestGeoPolygon extends LuceneTestCase { points.add( new GeoPoint( PlanetModel.SPHERE, Geo3DUtil.fromDegrees(50.455467), Geo3DUtil.fromDegrees(-3.48905))); - GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); Collections.reverse(points); - polygon = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); } /* @@ -1582,7 +1574,6 @@ public class TestGeoPolygon extends LuceneTestCase { // Is the north pole in set, or out of set? final GeoPoint northPole = new GeoPoint(PlanetModel.WGS84, Math.PI * 0.5, 0.0); final GeoPoint negativeX = new GeoPoint(PlanetModel.WGS84, 0.0, Math.PI); - final GeoPoint negativeY = new GeoPoint(PlanetModel.WGS84, 0.0, -Math.PI * 0.5); final GeoPoint positiveY = new GeoPoint(PlanetModel.WGS84, 0.0, Math.PI * 0.5); final GeoPoint testPoint = new GeoPoint(-0.074161727332972, 0.5686488061123504, 0.8178445379383386); @@ -1729,11 +1720,6 @@ public class TestGeoPolygon extends LuceneTestCase { // These are too close to parallel. The only solution is to prevent the poly from being // created. Let's see if Geo3d thinks they are parallel. 
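
Note: several hunks above keep constructor and factory calls such as new GeoConvexPolygon(...) purely for their validation side effect; the test only cares whether an IllegalArgumentException escapes. A compilable sketch of that idiom with hypothetical names (Validated is not a Lucene class):

    class Validated {
      Validated(double value) {
        if (value < 0) {
          throw new IllegalArgumentException("negative");
        }
      }

      static boolean constructionFails(double value) {
        try {
          new Validated(value);     // result deliberately discarded; only the exception matters
          return false;
        } catch (IllegalArgumentException e) {
          return true;
        }
      }
    }
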
- final Plane p1 = new Plane(-1.224646799147353E-16, -1.0, -7.498798913309287E-33, 0.0); - final Plane p2 = - new Plane(-3.0261581679831E-12, -0.9999999999999999, -1.8529874570670608E-28, 0.0); - final Plane p3 = new Plane(4.234084035470679E-12, 1.0, -1.5172037954732973E-12, 0.0); - assertFalse(shape.isWithin(unquantized)); // This point is indeed outside the shape but it doesn't matter @@ -2704,7 +2690,6 @@ public class TestGeoPolygon extends LuceneTestCase { final GeoPolygonFactory.PolygonDescription description = new GeoPolygonFactory.PolygonDescription(points); - final GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, description); final GeoPolygon largePolygon = GeoPolygonFactory.makeLargeGeoPolygon( PlanetModel.WGS84, Collections.singletonList(description)); diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestRandomGeoPolygon.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestRandomGeoPolygon.java index 74fcfee158e..ac5fd701f6a 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestRandomGeoPolygon.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/TestRandomGeoPolygon.java @@ -61,7 +61,7 @@ public class TestRandomGeoPolygon extends RandomGeo3dShapeGenerator { points.add(point3); points.add(point4); try { - GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); } catch (Exception e) { fail(points.toString()); } @@ -288,18 +288,4 @@ public class TestRandomGeoPolygon extends RandomGeo3dShapeGenerator { } return false; } - - private GeoPoint getCenterOfMass(final PlanetModel planetModel, final List points) { - double x = 0; - double y = 0; - double z = 0; - // get center of mass - for (final GeoPoint point : points) { - x += point.x; - y += point.y; - z += point.z; - } - // Normalization is not needed because createSurfacePoint does the scaling anyway. 
- return planetModel.createSurfacePoint(x, y, z); - } } diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java index 47a80350aa2..f06f5f54859 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/NRTSuggester.java @@ -364,7 +364,7 @@ public final class NRTSuggester implements Accountable { * Label used to denote the end of an input in the FST and * the beginning of dedup bytes */ - int endByte = input.readVInt(); + input.readVInt(); // endByte int payloadSep = input.readVInt(); return new NRTSuggester(fst, maxAnalyzedPathsPerOutput, payloadSep); } diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java index 3f5d7468904..92beb054ab9 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java @@ -28,14 +28,12 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.Sort; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.store.Directory; @@ -158,23 +156,6 @@ public abstract class CollationTestBase extends LuceneTestCase { farsiIndex.close(); } - // Make sure the documents returned by the search match the expected list - // Copied from TestSort.java - private void assertMatches(IndexSearcher searcher, Query query, Sort sort, String expectedResult) - throws IOException { - ScoreDoc[] result = searcher.search(query, 1000, sort).scoreDocs; - StringBuilder buff = new StringBuilder(10); - int n = result.length; - for (int i = 0; i < n; ++i) { - Document doc = searcher.doc(result[i].doc); - IndexableField[] v = doc.getFields("tracer"); - for (int j = 0; j < v.length; ++j) { - buff.append(v[j].stringValue()); - } - } - assertEquals(expectedResult, buff.toString()); - } - public void assertThreadSafe(final Analyzer analyzer) throws Exception { int numTestPoints = 100; int numThreads = TestUtil.nextInt(random(), 3, 5); diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingDocValuesFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingDocValuesFormat.java index 2b85a9a095c..27cedfea149 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingDocValuesFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingDocValuesFormat.java @@ -82,7 +82,7 @@ public class AssertingDocValuesFormat extends DocValuesFormat { assert docID >= 0 && docID < maxDoc; assert docID > lastDocID; lastDocID = docID; - long value = values.longValue(); + values.longValue(); } in.addNumericField(field, valuesProducer); @@ -146,7 +146,6 @@ public class AssertingDocValuesFormat extends 
DocValuesFormat { throws IOException { SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); - long valueCount = 0; int lastDocID = -1; while (true) { int docID = values.nextDoc(); @@ -157,7 +156,6 @@ public class AssertingDocValuesFormat extends DocValuesFormat { lastDocID = values.docID(); int count = values.docValueCount(); assert count > 0; - valueCount += count; long previous = Long.MIN_VALUE; for (int i = 0; i < count; i++) { long nextValue = values.nextValue(); @@ -185,14 +183,12 @@ public class AssertingDocValuesFormat extends DocValuesFormat { lastValue = BytesRef.deepCopyOf(b); } - int docCount = 0; LongBitSet seenOrds = new LongBitSet(valueCount); while (true) { int docID = values.nextDoc(); if (docID == NO_MORE_DOCS) { break; } - docCount++; long lastOrd = -1; while (true) { diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingNormsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingNormsFormat.java index b6683d72b1c..ae10d8465d7 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingNormsFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/asserting/AssertingNormsFormat.java @@ -69,7 +69,7 @@ public class AssertingNormsFormat extends NormsFormat { assert docID >= 0 && docID < maxDoc; assert docID > lastDocID; lastDocID = docID; - long value = values.longValue(); + values.longValue(); } in.addNormsField(field, valuesProducer); diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java index a95c3468683..95e9b88932e 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java @@ -357,7 +357,6 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest public void testLevel2Ghosts() throws Exception { Directory dir = newDirectory(); - Analyzer analyzer = new MockAnalyzer(random()); IndexWriterConfig iwc = newIndexWriterConfig(null); iwc.setCodec(getCodec()); iwc.setMergePolicy(newLogMergePolicy()); diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java index 5f958cf3eb7..0d0604e4531 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java @@ -428,7 +428,6 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT // to test reuse private final ThreadLocal docsEnum = new ThreadLocal<>(); - private final ThreadLocal docsAndPositionsEnum = new ThreadLocal<>(); protected void assertEquals(RandomTokenStream tk, FieldType ft, Terms terms) throws IOException { assertEquals(1, terms.getDocCount()); diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java index 2ecfe45fcef..275cbeea2aa 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomCodec.java @@ -298,28 +298,6 @@ public class RandomCodec extends AssertingCodec { this.random = new Random(randomSeed); } - private 
static boolean getRandomSingleValuePerDoc(boolean singleValuePerDoc, int randomSeed) { - // If we are single valued, sometimes pretend we aren't: - return singleValuePerDoc && (new Random(randomSeed).nextBoolean()); - } - - private static boolean getRandomLongOrds( - long totalPointCount, boolean singleValuePerDoc, int randomSeed) { - // Always use long ords if we have too many points, but sometimes randomly use it anyway when - // singleValuePerDoc is false: - return totalPointCount > Integer.MAX_VALUE - || (getRandomSingleValuePerDoc(singleValuePerDoc, randomSeed) == false - && new Random(randomSeed).nextBoolean()); - } - - private static long getRandomOfflineSorterBufferMB(int randomSeed) { - return TestUtil.nextInt(new Random(randomSeed), 1, 8); - } - - private static int getRandomOfflineSorterMaxTempFiles(int randomSeed) { - return TestUtil.nextInt(new Random(randomSeed), 2, 20); - } - @Override protected int split(byte[] minPackedValue, byte[] maxPackedValue, int[] parentDims) { // BKD normally defaults by the widest dimension, to try to make as squarish cells as diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java index cc8a7a78518..6883e8248d5 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomPostingsTester.java @@ -65,7 +65,6 @@ import org.apache.lucene.util.automaton.CompiledAutomaton; public class RandomPostingsTester { private static final IntToLongFunction DOC_TO_NORM = doc -> 1 + (doc & 0x0f); - private static final long MAX_NORM = 0x10; /** Which features to test. */ public enum Option { diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java index 08d18bc704c..cd4c03c1e4c 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java @@ -2172,29 +2172,6 @@ public abstract class LuceneTestCase extends Assert { } } - private static class RandomBits implements Bits { - FixedBitSet bits; - - RandomBits(int maxDoc, double pctLive, Random random) { - bits = new FixedBitSet(maxDoc); - for (int i = 0; i < maxDoc; i++) { - if (random.nextDouble() <= pctLive) { - bits.set(i); - } - } - } - - @Override - public boolean get(int index) { - return bits.get(index); - } - - @Override - public int length() { - return bits.length(); - } - } - /** * checks the terms enum sequentially if deep is false, it does a 'shallow' test that doesnt go * down to the docsenums diff --git a/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java b/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java index d29b28ce6af..6b4a597e01d 100644 --- a/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java +++ b/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java @@ -55,12 +55,11 @@ public class TestExpectThrows extends LuceneTestCase { final AtomicBoolean ran = new AtomicBoolean(false); AssertionError caught = null; try { - final IOException returned = - expectThrows( - IOException.class, - () -> { - ran.getAndSet(true); - }); + expectThrows( + IOException.class, + () -> { + ran.getAndSet(true); + }); fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows } catch 
(AssertionError ae) { caught = ae; @@ -78,13 +77,12 @@ public class TestExpectThrows extends LuceneTestCase { final AtomicBoolean ran = new AtomicBoolean(false); AssertionError caught = null; try { - final IOException returned = - expectThrows( - IOException.class, - () -> { - ran.getAndSet(true); - fail("this failure should propogate"); - }); + expectThrows( + IOException.class, + () -> { + ran.getAndSet(true); + fail("this failure should propogate"); + }); fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows } catch (AssertionError ae) { caught = ae; @@ -103,13 +101,12 @@ public class TestExpectThrows extends LuceneTestCase { final AtomicBoolean ran = new AtomicBoolean(false); AssumptionViolatedException caught = null; try { - final IOException returned = - expectThrows( - IOException.class, - () -> { - ran.getAndSet(true); - assumeTrue("this assumption should propogate", false); - }); + expectThrows( + IOException.class, + () -> { + ran.getAndSet(true); + assumeTrue("this assumption should propogate", false); + }); fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows } catch (AssumptionViolatedException ave) { caught = ave;
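
Note: where a variable has to stay bound even though nothing reads it, the check can be silenced with @SuppressWarnings("unused") instead of deleting code. A sketch of that escape hatch; GeneratedScanner and nextToken are hypothetical names, and method-level placement is one option the annotation supports:

    class GeneratedScanner {
      @SuppressWarnings("unused")   // silences the unused check for the intentionally kept local below
      void scan() {
        int token = nextToken();    // retained on purpose rather than removed
      }

      private int nextToken() {
        return 42;
      }
    }
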