diff --git a/build.xml b/build.xml index 2346d540587..a5f09e48711 100755 --- a/build.xml +++ b/build.xml @@ -125,177 +125,7 @@ - - - - task.log(name + ': ' + f.toString().substring(baseDirLen).replace(File.separatorChar, (char)'/'), Project.MSG_ERR); - violations.add(name); - found++; - } - - def javadocsPattern = ~$/(?sm)^\Q/**\E(.*?)\Q*/\E/$; - def javaCommentPattern = ~$/(?sm)^\Q/*\E(.*?)\Q*/\E/$; - def xmlCommentPattern = ~$/(?sm)\Q\E/$; - def lineSplitter = ~$/[\r\n]+/$; - def singleLineSplitter = ~$/\n\r?/$; - def licenseMatcher = Defaults.createDefaultMatcher(); - def validLoggerPattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+\p{javaJavaIdentifierStart}+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$; - def packagePattern = ~$/(?m)^\s*package\s+org\.apache.*;/$; - def xmlTagPattern = ~$/(?m)\s*<[a-zA-Z].*/$; - def sourceHeaderPattern = ~$/\[source\b.*/$; - def blockBoundaryPattern = ~$/----\s*/$; - def blockTitlePattern = ~$/\..*/$; - def unescapedSymbolPattern = ~$/(?<=[^\\]|^)([-=]>|<[-=])/$; // SOLR-10883 - - def isLicense = { matcher, ratDocument -> - licenseMatcher.reset(); - return lineSplitter.split(matcher.group(1)).any{ licenseMatcher.match(ratDocument, it) }; - } - - def checkLicenseHeaderPrecedes = { f, description, contentPattern, commentPattern, text, ratDocument -> - def contentMatcher = contentPattern.matcher(text); - if (contentMatcher.find()) { - def contentStartPos = contentMatcher.start(); - def commentMatcher = commentPattern.matcher(text); - while (commentMatcher.find()) { - if (isLicense(commentMatcher, ratDocument)) { - if (commentMatcher.start() < contentStartPos) { - break; // This file is all good, so break loop: license header precedes 'description' definition - } else { - reportViolation(f, description+' declaration precedes license header'); - } - } - } - } - } - - def checkMockitoAssume = { f, text -> - if (text.contains("mockito") && !text.contains("assumeWorkingMockito()")) { - reportViolation(f, 'File uses Mockito but has no assumeWorkingMockito() call'); - } - } - - def checkForUnescapedSymbolSubstitutions = { f, text -> - def inCodeBlock = false; - def underSourceHeader = false; - def lineNumber = 0; - singleLineSplitter.split(text).each { - ++lineNumber; - if (underSourceHeader) { // This line is either a single source line, or the boundary of a code block - inCodeBlock = blockBoundaryPattern.matcher(it).matches(); - if ( ! blockTitlePattern.matcher(it).matches()) { - underSourceHeader = false; - } - } else { - if (inCodeBlock) { - inCodeBlock = ! blockBoundaryPattern.matcher(it).matches(); - } else { - underSourceHeader = sourceHeaderPattern.matcher(it).lookingAt(); - if ( ! underSourceHeader) { - def unescapedSymbolMatcher = unescapedSymbolPattern.matcher(it); - if (unescapedSymbolMatcher.find()) { - reportViolation(f, 'Unescaped symbol "' + unescapedSymbolMatcher.group(1) + '" on line #' + lineNumber); - } - } - } - } - } - } - - ant.fileScanner{ - fileset(dir: baseDir){ - extensions.each{ - include(name: 'lucene/**/*.' + it) - include(name: 'solr/**/*.' + it) - include(name: 'dev-tools/**/*.' + it) - include(name: '*.' 
+ it) - } - // TODO: For now we don't scan txt files, so we - // check licenses in top-level folders separately: - include(name: '*.txt') - include(name: '*/*.txt') - // excludes: - exclude(name: '**/build/**') - exclude(name: '**/dist/**') - exclude(name: 'lucene/benchmark/work/**') - exclude(name: 'lucene/benchmark/temp/**') - exclude(name: '**/CheckLoggingConfiguration.java') - exclude(name: 'build.xml') // ourselves :-) - } - }.each{ f -> - task.log('Scanning file: ' + f, Project.MSG_VERBOSE); - def text = f.getText('UTF-8'); - invalidPatterns.each{ pattern,name -> - if (pattern.matcher(text).find()) { - reportViolation(f, name); - } - } - def javadocsMatcher = javadocsPattern.matcher(text); - def ratDocument = new FileDocument(f); - while (javadocsMatcher.find()) { - if (isLicense(javadocsMatcher, ratDocument)) { - reportViolation(f, String.format(Locale.ENGLISH, 'javadoc-style license header [%s]', - ratDocument.getMetaData().value(MetaData.RAT_URL_LICENSE_FAMILY_NAME))); - } - } - if (f.name.endsWith('.java')) { - if (text.contains('org.slf4j.LoggerFactory')) { - if (!validLoggerPattern.matcher(text).find()) { - reportViolation(f, 'invalid logging pattern [not private static final, uses static class name]'); - } - } - checkLicenseHeaderPrecedes(f, 'package', packagePattern, javaCommentPattern, text, ratDocument); - if (f.name.contains("Test")) { - checkMockitoAssume(f, text); - } - } - if (f.name.endsWith('.xml') || f.name.endsWith('.xml.template')) { - checkLicenseHeaderPrecedes(f, '', xmlTagPattern, xmlCommentPattern, text, ratDocument); - } - if (f.name.endsWith('.adoc')) { - checkForUnescapedSymbolSubstitutions(f, text); - } - }; - - if (found) { - throw new BuildException(String.format(Locale.ENGLISH, 'Found %d violations in source files (%s).', - found, violations.join(', '))); - } - ]]> + @@ -410,31 +240,7 @@ - k.startsWith(propPrefix) } - .collectEntries{ k, v -> [k.substring(propPrefixLen), v] }; - cmdlineProps << project.userProperties.findAll{ k, v -> !k.startsWith('ant.') }; - def artifact = NamespaceBuilder.newInstance(ant, 'antlib:org.apache.maven.artifact.ant'); - task.log('Running Maven with props: ' + cmdlineProps.toString(), Project.MSG_INFO); - artifact.mvn(pom: properties['maven-build-dir']+'/pom.xml', mavenVersion: properties['maven-version'], failonerror: true, fork: true) { - cmdlineProps.each{ k, v -> arg(value: '-D' + k + '=' + v) }; - arg(value: '-fae'); - arg(value: 'install'); - }; - ]]> + @@ -667,48 +473,7 @@ File | Project Structure | Platform Settings | SDKs): - - if (val instanceof Set) { - if (prop in ['untracked', 'untrackedFolders', 'missing']) { - unversioned.addAll(val); - } else if (prop != 'ignoredNotInIndex') { - modified.addAll(val); - } - } - }; - setProjectPropertyFromSet('wc.unversioned.files', unversioned); - setProjectPropertyFromSet('wc.modified.files', modified); - } - } catch (RepositoryNotFoundException | NoWorkTreeException | NotSupportedException e) { - task.log('WARNING: Development directory is not a valid GIT checkout! Disabling checks...', Project.MSG_WARN); - } - ]]> + @@ -726,7 +491,7 @@ File | Project Structure | Platform Settings | SDKs): - + @@ -759,14 +524,21 @@ File | Project Structure | Platform Settings | SDKs): + + + + + + + You can find the merged Lucene/Solr Clover report in '${clover.report.dir}'. diff --git a/dev-tools/maven/README.maven b/dev-tools/maven/README.maven index 390177172cd..2a741c136e2 100644 --- a/dev-tools/maven/README.maven +++ b/dev-tools/maven/README.maven @@ -116,6 +116,13 @@ D. 
How to use Maven to build Lucene/Solr ant clean-maven-build + 5. Please keep in mind that this is just a minimal Maven build. The resulting + artifacts are not the same as those created by the native Ant-based build. + It should be fine to enable Lucene builds in several Maven-based IDEs, + but should never be used for Lucene/Solr production usage, as they may lack + optimized class files (e.g., Java 9 MR-JAR support). To install Lucene/Solr + in your local repository, see instructions above. + Some example Maven commands you can use after you perform the above preparatory steps: @@ -127,6 +134,11 @@ D. How to use Maven to build Lucene/Solr After compiling and packaging, but before installing each module's artifact, the above command will also run all the module's tests. + + The resulting artifacts are not the same as those created by the native + Ant-based build. They should never be used for Lucene/Solr production + usage, as they may lack optimized class files (e.g., Java 9 MR-JAR + support). - Compile, package, and install all binary artifacts to your local repository, without running any tests: diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index c35d692ceca..7e0033a1e32 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -141,6 +141,15 @@ Improvements classes to use the optimized variants through the MR-JAR mechanism. (Uwe Schindler, Robert Muir, Adrien Grand, Mike McCandless) +* LUCENE-8127: Speed up rewriteNoScoring when there are no MUST clauses. + (Michael Braun via Adrien Grand) + +* LUCENE-8152: Improve consumption of doc-value iterators. (Horatiu Lazu via + Adrien Grand) + +* LUCENE-8033: FieldInfos now always use a dense encoding. (Mayya Sharipova + via Adrien Grand) + Bug Fixes * LUCENE-8077: Fixed bug in how CheckIndex verifies doc-value iterators. @@ -189,6 +198,17 @@ Other * LUCENE-8155: Add back support in smoke tester to run against later Java versions. (Uwe Schindler) +* LUCENE-8169: Migrated build to use OpenClover 4.2.1 for checking code coverage. + (Uwe Schindler) + +* LUCENE-8170: Improve OpenClover reports (separate test from production code); + enable coverage reports inside test-frameworks. (Uwe Schindler) + +Build + +* LUCENE-8168: Moved Groovy scripts in build files to separate files. + Update Groovy to 2.4.13. (Uwe Schindler) + ======================= Lucene 7.2.1 ======================= Bug Fixes diff --git a/lucene/benchmark/src/test/conf/ConfLoader.java b/lucene/benchmark/src/test/conf/ConfLoader.java deleted file mode 100644 index 97c02ac5a59..00000000000 --- a/lucene/benchmark/src/test/conf/ConfLoader.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package conf; - -public class ConfLoader { - // don't mind me, I load .alg files -} diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java index 90158dae68f..04e15cab760 100644 --- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java +++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksParse.java @@ -38,8 +38,6 @@ import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks; -import conf.ConfLoader; - /** Test very simply that perf tasks are parses as expected. */ @SuppressSysoutChecks(bugUrl = "very noisy") public class TestPerfTasksParse extends LuceneTestCase { @@ -114,7 +112,7 @@ public class TestPerfTasksParse extends LuceneTestCase { public void testParseExamples() throws Exception { // hackedy-hack-hack boolean foundFiles = false; - final Path examplesDir = Paths.get(ConfLoader.class.getResource(".").toURI()); + final Path examplesDir = Paths.get(getClass().getResource("/conf").toURI()); try (DirectoryStream stream = Files.newDirectoryStream(examplesDir, "*.alg")) { for (Path path : stream) { Config config = new Config(Files.newBufferedReader(path, StandardCharsets.UTF_8)); diff --git a/lucene/build.xml b/lucene/build.xml index f2ce68dae2c..0061d29cb81 100644 --- a/lucene/build.xml +++ b/lucene/build.xml @@ -353,8 +353,6 @@ - - diff --git a/lucene/common-build.xml b/lucene/common-build.xml index e63b116cabc..4fa59ac9364 100644 --- a/lucene/common-build.xml +++ b/lucene/common-build.xml @@ -139,11 +139,11 @@ - + - + @@ -234,7 +234,6 @@ - @@ -266,7 +265,7 @@ @@ -568,12 +567,12 @@ - + - + @@ -1568,58 +1567,7 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites - 1."); - } - - def antcall = project.createTask('antcall'); - antcall.with { - target = '-test'; - inheritAll = true; - inheritRefs = true; - createParam().with { - name = "tests.isbeasting"; - value = "true"; - }; - }; - - (1..iters).each { i -> - task.log('Beast round: ' + i, Project.MSG_INFO); - try { - // disable verbose build logging: - project.buildListeners.each { listener -> - if (listener instanceof BuildLogger) { - listener.messageOutputLevel = Project.MSG_WARN; - } - }; - - antcall.execute(); - - } catch (BuildException be) { - def logFile = new File(properties["junit.output.dir"], "tests-failures.txt"); - if (logFile.exists()) { - logFile.eachLine("UTF-8", { line -> - task.log(line, Project.MSG_ERR); - }); - } - throw be; - } finally { - // restore build logging (unfortunately there is no way to get the original logging level (write-only property): - project.buildListeners.each { listener -> - if (listener instanceof BuildLogger) { - listener.messageOutputLevel = Project.MSG_INFO; - } - }; - } - }; - task.log('Beasting finished.', Project.MSG_INFO); - ]]> + @@ -1663,27 +1611,8 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites - - - Code coverage with Atlassian Clover enabled. - Code coverage with OpenClover enabled. 
+ @@ -1700,11 +1629,9 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites - - - + - + @@ -2484,7 +2411,7 @@ ${ant.project.name}.test.dependencies=${test.classpath.list} - - \n\n'); - CharSequence title = parsed.getFirstChildAny(Heading.class)?.getText(); - if (title != null) { - html.append('').append(Escaping.escapeHtml(title, false)).append('\n'); - } - html.append('\n') - .append('\n\n'); - HtmlRenderer.builder(options).build().render(parsed, html); - html.append('\n\n'); - return html; - } - } - - AntTypeDefinition t = new AntTypeDefinition(); - t.setName('markdownfilter'); - t.setClass(MarkdownFilter.class); - ComponentHelper.getComponentHelper(project).addDataTypeDefinition(t); - ]]> + diff --git a/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java b/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java index 3e68693253a..9666fd9f356 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java +++ b/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java @@ -25,8 +25,8 @@ import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; +import java.util.Arrays; +import java.util.List; import org.apache.lucene.util.ArrayUtil; @@ -45,8 +45,7 @@ public class FieldInfos implements Iterable { private final boolean hasPointValues; // used only by fieldInfo(int) - private final FieldInfo[] byNumberTable; // contiguous - private final SortedMap byNumberMap; // sparse + private final FieldInfo[] byNumber; private final HashMap byName = new HashMap<>(); private final Collection values; // for an unmodifiable iterator @@ -63,21 +62,28 @@ public class FieldInfos implements Iterable { boolean hasNorms = false; boolean hasDocValues = false; boolean hasPointValues = false; - - TreeMap byNumber = new TreeMap<>(); + + int size = 0; // number of elements in byNumberTemp, number of used array slots + FieldInfo[] byNumberTemp = new FieldInfo[10]; // initial array capacity of 10 for (FieldInfo info : infos) { if (info.number < 0) { throw new IllegalArgumentException("illegal field number: " + info.number + " for field " + info.name); } - FieldInfo previous = byNumber.put(info.number, info); + size = info.number >= size ? info.number+1 : size; + if (info.number >= byNumberTemp.length){ //grow array + byNumberTemp = ArrayUtil.grow(byNumberTemp, info.number + 1); + } + FieldInfo previous = byNumberTemp[info.number]; if (previous != null) { throw new IllegalArgumentException("duplicate field numbers: " + previous.name + " and " + info.name + " have: " + info.number); } + byNumberTemp[info.number] = info; + previous = byName.put(info.name, info); if (previous != null) { throw new IllegalArgumentException("duplicate field names: " + previous.number + " and " + info.number + " have: " + info.name); } - + hasVectors |= info.hasVectors(); hasProx |= info.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; hasFreq |= info.getIndexOptions() != IndexOptions.DOCS; @@ -96,25 +102,16 @@ public class FieldInfos implements Iterable { this.hasNorms = hasNorms; this.hasDocValues = hasDocValues; this.hasPointValues = hasPointValues; - Integer max = byNumber.isEmpty() ? 
null : byNumber.lastKey(); - - // Only usee TreeMap in the very sparse case (< 1/16th of the numbers are used), - // because TreeMap uses ~ 64 (32 bit JVM) or 120 (64 bit JVM w/o compressed oops) - // overall bytes per entry, but array uses 4 (32 bit JMV) or 8 - // (64 bit JVM w/o compressed oops): - if (max != null && max < ArrayUtil.MAX_ARRAY_LENGTH && max < 16L*byNumber.size()) { - // Pull infos into an arraylist to avoid holding a reference to the TreeMap - values = Collections.unmodifiableCollection(new ArrayList<>(byNumber.values())); - byNumberMap = null; - byNumberTable = new FieldInfo[max+1]; - for (Map.Entry entry : byNumber.entrySet()) { - byNumberTable[entry.getKey()] = entry.getValue(); + + List valuesTemp = new ArrayList<>(); + byNumber = new FieldInfo[size]; + for(int i=0; i { if (fieldNumber < 0) { throw new IllegalArgumentException("Illegal field number: " + fieldNumber); } - if (byNumberTable != null) { - if (fieldNumber >= byNumberTable.length) { - return null; - } - return byNumberTable[fieldNumber]; - } else { - return byNumberMap.get(fieldNumber); + if (fieldNumber >= byNumber.length) { + return null; } + return byNumber[fieldNumber]; } static final class FieldDimensions { diff --git a/lucene/core/src/java/org/apache/lucene/index/FilteredTermsEnum.java b/lucene/core/src/java/org/apache/lucene/index/FilteredTermsEnum.java index 411b435f2de..61392c3a2c5 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FilteredTermsEnum.java +++ b/lucene/core/src/java/org/apache/lucene/index/FilteredTermsEnum.java @@ -249,7 +249,9 @@ public abstract class FilteredTermsEnum extends TermsEnum { case END: // we are supposed to end the enum return null; - // NO: we just fall through and iterate again + case NO: + // we just iterate again + break; } } } diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java index 8e6df1600cf..f52df9fb9cd 100644 --- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java @@ -183,6 +183,9 @@ public class BooleanQuery extends Query implements Iterable { } private BooleanQuery rewriteNoScoring() { + if (clauseSets.get(Occur.MUST).size() == 0) { + return this; + } BooleanQuery.Builder newQuery = new BooleanQuery.Builder(); newQuery.setMinimumNumberShouldMatch(getMinimumNumberShouldMatch()); for (BooleanClause clause : clauses) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java b/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java new file mode 100644 index 00000000000..308e11ebe96 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/index/TestFieldInfos.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.index; + + +import java.util.Iterator; + +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StringField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.LuceneTestCase; + +public class TestFieldInfos extends LuceneTestCase { + + public void testFieldInfos() throws Exception{ + Directory dir = newDirectory(); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); + + Document d1 = new Document(); + for (int i = 0; i < 15; i++) { + d1.add(new StringField("f" + i, "v" + i, Field.Store.YES)); + } + writer.addDocument(d1); + writer.commit(); + + Document d2 = new Document(); + d2.add(new StringField("f0", "v0", Field.Store.YES)); + d2.add(new StringField("f15", "v15", Field.Store.YES)); + d2.add(new StringField("f16", "v16", Field.Store.YES)); + writer.addDocument(d2); + writer.commit(); + + Document d3 = new Document(); + writer.addDocument(d3); + writer.close(); + + SegmentInfos sis = SegmentInfos.readLatestCommit(dir); + assertEquals(3, sis.size()); + + FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0)); + FieldInfos fis2 = IndexWriter.readFieldInfos(sis.info(1)); + FieldInfos fis3 = IndexWriter.readFieldInfos(sis.info(2)); + + // testing dense FieldInfos + Iterator it = fis1.iterator(); + int i = 0; + while(it.hasNext()) { + FieldInfo fi = it.next(); + assertEquals(i, fi.number); + assertEquals("f" + i , fi.name); + assertEquals("f" + i, fis1.fieldInfo(i).name); //lookup by number + assertEquals("f" + i, fis1.fieldInfo("f" + i).name); //lookup by name + i++; + } + + // testing sparse FieldInfos + assertEquals("f0", fis2.fieldInfo(0).name); //lookup by number + assertEquals("f0", fis2.fieldInfo("f0").name); //lookup by name + assertNull(fis2.fieldInfo(1)); + assertNull(fis2.fieldInfo("f1")); + assertEquals("f15", fis2.fieldInfo(15).name); + assertEquals("f15", fis2.fieldInfo("f15").name); + assertEquals("f16", fis2.fieldInfo(16).name); + assertEquals("f16", fis2.fieldInfo("f16").name); + + // testing empty FieldInfos + assertNull(fis3.fieldInfo(0)); //lookup by number + assertNull(fis3.fieldInfo("f0")); //lookup by name + assertEquals(0, fis3.size()); + Iterator it3 = fis3.iterator(); + assertFalse(it3.hasNext()); + dir.close(); + } + +} diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsCollector.java index 7b49f80bbe9..15bc4b4a5b3 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsCollector.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsCollector.java @@ -77,10 +77,7 @@ final class GlobalOrdinalsCollector implements Collector { @Override public void collect(int doc) throws IOException { - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); - } - if (doc == docTermOrds.docID()) { + if (docTermOrds.advanceExact(doc)) { long segmentOrd = docTermOrds.ordValue(); long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd); collectedOrds.set(globalOrd); @@ -102,10 +99,7 @@ final class GlobalOrdinalsCollector implements Collector { @Override public void collect(int doc) throws IOException { - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); - } - 
if (doc == docTermOrds.docID()) { + if (docTermOrds.advanceExact(doc)) { collectedOrds.set(docTermOrds.ordValue()); } } diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java index 6aaa785bb14..8247f81352a 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsQuery.java @@ -182,11 +182,7 @@ final class GlobalOrdinalsQuery extends Query { @Override public boolean matches() throws IOException { - int docID = approximation.docID(); - if (docID > values.docID()) { - values.advance(docID); - } - if (docID == values.docID()) { + if (values.advanceExact(approximation.docID())) { final long segmentOrd = values.ordValue(); final long globalOrd = segmentOrdToGlobalOrdLookup.get(segmentOrd); if (foundOrds.get(globalOrd)) { @@ -220,14 +216,8 @@ final class GlobalOrdinalsQuery extends Query { @Override public boolean matches() throws IOException { - int docID = approximation.docID(); - if (docID > values.docID()) { - values.advance(docID); - } - if (docID == values.docID()) { - if (foundOrds.get(values.ordValue())) { - return true; - } + if (values.advanceExact(approximation.docID()) && foundOrds.get(values.ordValue())) { + return true; } return false; } diff --git a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreCollector.java index fad3f0eb6cc..fdf014b2940 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreCollector.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreCollector.java @@ -113,10 +113,7 @@ abstract class GlobalOrdinalsWithScoreCollector implements Collector { @Override public void collect(int doc) throws IOException { - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); - } - if (doc == docTermOrds.docID()) { + if (docTermOrds.advanceExact(doc)) { final int globalOrd = (int) segmentOrdToGlobalOrdLookup.get(docTermOrds.ordValue()); collectedOrds.set(globalOrd); float existingScore = scores.getScore(globalOrd); @@ -145,10 +142,7 @@ abstract class GlobalOrdinalsWithScoreCollector implements Collector { @Override public void collect(int doc) throws IOException { - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); - } - if (doc == docTermOrds.docID()) { + if (docTermOrds.advanceExact(doc)) { int segmentOrd = docTermOrds.ordValue(); collectedOrds.set(segmentOrd); float existingScore = scores.getScore(segmentOrd); @@ -258,10 +252,7 @@ abstract class GlobalOrdinalsWithScoreCollector implements Collector { @Override public void collect(int doc) throws IOException { - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); - } - if (doc == docTermOrds.docID()) { + if (docTermOrds.advanceExact(doc)) { final int globalOrd = (int) segmentOrdToGlobalOrdLookup.get(docTermOrds.ordValue()); collectedOrds.set(globalOrd); occurrences.increment(globalOrd); @@ -276,10 +267,7 @@ abstract class GlobalOrdinalsWithScoreCollector implements Collector { @Override public void collect(int doc) throws IOException { - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); - } - if (doc == docTermOrds.docID()) { + if (docTermOrds.advanceExact(doc)) { int segmentOrd = docTermOrds.ordValue(); collectedOrds.set(segmentOrd); occurrences.increment(segmentOrd); diff --git 
a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java index cf83df4ed84..cdcf070cc7c 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/GlobalOrdinalsWithScoreQuery.java @@ -191,11 +191,7 @@ final class GlobalOrdinalsWithScoreQuery extends Query { @Override public boolean matches() throws IOException { - int docID = approximation.docID(); - if (docID > values.docID()) { - values.advance(docID); - } - if (docID == values.docID()) { + if (values.advanceExact(approximation.docID())) { final long segmentOrd = values.ordValue(); final int globalOrd = (int) segmentOrdToGlobalOrdLookup.get(segmentOrd); if (collector.match(globalOrd)) { @@ -229,11 +225,7 @@ final class GlobalOrdinalsWithScoreQuery extends Query { @Override public boolean matches() throws IOException { - int docID = approximation.docID(); - if (docID > values.docID()) { - values.advance(docID); - } - if (docID == values.docID()) { + if (values.advanceExact(approximation.docID())) { final int segmentOrd = values.ordValue(); if (collector.match(segmentOrd)) { score = collector.score(segmentOrd); diff --git a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java index d42cd2d4595..cabe7fa5112 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java @@ -199,10 +199,7 @@ public final class JoinUtil { @Override public void collect(int doc) throws IOException { - if (doc > sortedNumericDocValues.docID()) { - sortedNumericDocValues.advance(doc); - } - if (doc == sortedNumericDocValues.docID()) { + if (sortedNumericDocValues.advanceExact(doc)) { for (int i = 0; i < sortedNumericDocValues.docValueCount(); i++) { long value = sortedNumericDocValues.nextValue(); joinValues.add(value); @@ -246,15 +243,9 @@ public final class JoinUtil { @Override public void collect(int doc) throws IOException { assert docsInOrder(doc); - int dvDocID = numericDocValues.docID(); - if (dvDocID < doc) { - dvDocID = numericDocValues.advance(doc); - } - long value; - if (dvDocID == doc) { + long value = 0; + if (numericDocValues.advanceExact(doc)) { value = numericDocValues.longValue(); - } else { - value = 0; } joinValues.add(value); if (needsScore) { diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java index 0205ca35e54..9299638525e 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java @@ -84,11 +84,8 @@ abstract class TermsCollector extends DocValuesTermsCollector { @Override public void collect(int doc) throws IOException { - if (docValues.docID() < doc) { - docValues.advance(doc); - } BytesRef term; - if (docValues.docID() == doc) { + if (docValues.advanceExact(doc)) { term = docValues.binaryValue(); } else { term = new BytesRef(BytesRef.EMPTY_BYTES); diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java index cb2c62baa02..6cd24bcba21 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java +++ 
b/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java @@ -96,11 +96,8 @@ abstract class TermsWithScoreCollector extends DocValuesTermsCollector @Override public void collect(int doc) throws IOException { - if (docValues.docID() < doc) { - docValues.advance(doc); - } BytesRef value; - if (docValues.docID() == doc) { + if (docValues.advanceExact(doc)) { value = docValues.binaryValue(); } else { value = new BytesRef(BytesRef.EMPTY_BYTES); @@ -155,11 +152,8 @@ abstract class TermsWithScoreCollector extends DocValuesTermsCollector @Override public void collect(int doc) throws IOException { - if (docValues.docID() < doc) { - docValues.advance(doc); - } BytesRef value; - if (docValues.docID() == doc) { + if (docValues.advanceExact(doc)) { value = docValues.binaryValue(); } else { value = new BytesRef(BytesRef.EMPTY_BYTES); @@ -207,10 +201,7 @@ abstract class TermsWithScoreCollector extends DocValuesTermsCollector @Override public void collect(int doc) throws IOException { - if (doc > docValues.docID()) { - docValues.advance(doc); - } - if (doc == docValues.docID()) { + if (docValues.advanceExact(doc)) { long ord; while ((ord = docValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { int termID = collectedTerms.add(docValues.lookupOrd(ord)); @@ -255,10 +246,7 @@ abstract class TermsWithScoreCollector extends DocValuesTermsCollector @Override public void collect(int doc) throws IOException { - if (doc > docValues.docID()) { - docValues.advance(doc); - } - if (doc == docValues.docID()) { + if (docValues.advanceExact(doc)) { long ord; while ((ord = docValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { int termID = collectedTerms.add(docValues.lookupOrd(ord)); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java index 088bb87b7cd..405e4814ebe 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java @@ -1471,11 +1471,8 @@ public class TestJoinUtil extends LuceneTestCase { @Override public void collect(int doc) throws IOException { - if (doc > terms.docID()) { - terms.advance(doc); - } final BytesRef joinValue; - if (doc == terms.docID()) { + if (terms.advanceExact(doc)) { joinValue = terms.binaryValue(); } else { // missing; @@ -1540,11 +1537,8 @@ public class TestJoinUtil extends LuceneTestCase { @Override public void collect(int doc) throws IOException { - if (doc > terms.docID()) { - terms.advance(doc); - } final BytesRef joinValue; - if (doc == terms.docID()) { + if (terms.advanceExact(doc)) { joinValue = terms.binaryValue(); } else { // missing; diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java index 479ac3cd121..f3946cb6f75 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java @@ -92,6 +92,8 @@ public class TestDocValuesFieldSources extends LuceneTestCase { vals[i] = (long) random().nextInt((int) PackedInts.maxValue(bitsPerValue)); f.setLongValue((Long) vals[i]); break; + default: + throw new AssertionError(); } iw.addDocument(document); if (random().nextBoolean() && i % 10 == 9) { @@ -150,6 +152,8 @@ public class TestDocValuesFieldSources extends LuceneTestCase { case 
SORTED_NUMERIC: assertEquals(((Number) expected).longValue(), values.longVal(i)); break; + default: + throw new AssertionError(); } } } diff --git a/lucene/tools/build.xml b/lucene/tools/build.xml index 4f4ed51b247..4736ea59cb2 100644 --- a/lucene/tools/build.xml +++ b/lucene/tools/build.xml @@ -40,6 +40,9 @@ + + + \E/$; +def lineSplitter = ~$/[\r\n]+/$; +def singleLineSplitter = ~$/\n\r?/$; +def licenseMatcher = Defaults.createDefaultMatcher(); +def validLoggerPattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+\p{javaJavaIdentifierStart}+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$; +def packagePattern = ~$/(?m)^\s*package\s+org\.apache.*;/$; +def xmlTagPattern = ~$/(?m)\s*<[a-zA-Z].*/$; +def sourceHeaderPattern = ~$/\[source\b.*/$; +def blockBoundaryPattern = ~$/----\s*/$; +def blockTitlePattern = ~$/\..*/$; +def unescapedSymbolPattern = ~$/(?<=[^\\]|^)([-=]>|<[-=])/$; // SOLR-10883 + +def isLicense = { matcher, ratDocument -> + licenseMatcher.reset(); + return lineSplitter.split(matcher.group(1)).any{ licenseMatcher.match(ratDocument, it) }; +} + +def checkLicenseHeaderPrecedes = { f, description, contentPattern, commentPattern, text, ratDocument -> + def contentMatcher = contentPattern.matcher(text); + if (contentMatcher.find()) { + def contentStartPos = contentMatcher.start(); + def commentMatcher = commentPattern.matcher(text); + while (commentMatcher.find()) { + if (isLicense(commentMatcher, ratDocument)) { + if (commentMatcher.start() < contentStartPos) { + break; // This file is all good, so break loop: license header precedes 'description' definition + } else { + reportViolation(f, description+' declaration precedes license header'); + } + } + } + } +} + +def checkMockitoAssume = { f, text -> + if (text.contains("mockito") && !text.contains("assumeWorkingMockito()")) { + reportViolation(f, 'File uses Mockito but has no assumeWorkingMockito() call'); + } +} + +def checkForUnescapedSymbolSubstitutions = { f, text -> + def inCodeBlock = false; + def underSourceHeader = false; + def lineNumber = 0; + singleLineSplitter.split(text).each { + ++lineNumber; + if (underSourceHeader) { // This line is either a single source line, or the boundary of a code block + inCodeBlock = blockBoundaryPattern.matcher(it).matches(); + if ( ! blockTitlePattern.matcher(it).matches()) { + underSourceHeader = false; + } + } else { + if (inCodeBlock) { + inCodeBlock = ! blockBoundaryPattern.matcher(it).matches(); + } else { + underSourceHeader = sourceHeaderPattern.matcher(it).lookingAt(); + if ( ! underSourceHeader) { + def unescapedSymbolMatcher = unescapedSymbolPattern.matcher(it); + if (unescapedSymbolMatcher.find()) { + reportViolation(f, 'Unescaped symbol "' + unescapedSymbolMatcher.group(1) + '" on line #' + lineNumber); + } + } + } + } + } +} + +ant.fileScanner{ + fileset(dir: baseDir){ + extensions.each{ + include(name: 'lucene/**/*.' + it) + include(name: 'solr/**/*.' + it) + include(name: 'dev-tools/**/*.' + it) + include(name: '*.' 
+ it) + } + // TODO: For now we don't scan txt files, so we + // check licenses in top-level folders separately: + include(name: '*.txt') + include(name: '*/*.txt') + // excludes: + exclude(name: '**/build/**') + exclude(name: '**/dist/**') + exclude(name: 'lucene/benchmark/work/**') + exclude(name: 'lucene/benchmark/temp/**') + exclude(name: '**/CheckLoggingConfiguration.java') + exclude(name: 'lucene/tools/src/groovy/check-source-patterns.groovy') // ourselves :-) + } +}.each{ f -> + task.log('Scanning file: ' + f, Project.MSG_VERBOSE); + def text = f.getText('UTF-8'); + invalidPatterns.each{ pattern,name -> + if (pattern.matcher(text).find()) { + reportViolation(f, name); + } + } + def javadocsMatcher = javadocsPattern.matcher(text); + def ratDocument = new FileDocument(f); + while (javadocsMatcher.find()) { + if (isLicense(javadocsMatcher, ratDocument)) { + reportViolation(f, String.format(Locale.ENGLISH, 'javadoc-style license header [%s]', + ratDocument.getMetaData().value(MetaData.RAT_URL_LICENSE_FAMILY_NAME))); + } + } + if (f.name.endsWith('.java')) { + if (text.contains('org.slf4j.LoggerFactory')) { + if (!validLoggerPattern.matcher(text).find()) { + reportViolation(f, 'invalid logging pattern [not private static final, uses static class name]'); + } + } + checkLicenseHeaderPrecedes(f, 'package', packagePattern, javaCommentPattern, text, ratDocument); + if (f.name.contains("Test")) { + checkMockitoAssume(f, text); + } + } + if (f.name.endsWith('.xml') || f.name.endsWith('.xml.template')) { + checkLicenseHeaderPrecedes(f, '', xmlTagPattern, xmlCommentPattern, text, ratDocument); + } + if (f.name.endsWith('.adoc')) { + checkForUnescapedSymbolSubstitutions(f, text); + } +}; + +if (found) { + throw new BuildException(String.format(Locale.ENGLISH, 'Found %d violations in source files (%s).', + found, violations.join(', '))); +} diff --git a/lucene/tools/src/groovy/check-working-copy.groovy b/lucene/tools/src/groovy/check-working-copy.groovy new file mode 100644 index 00000000000..079a18b967d --- /dev/null +++ b/lucene/tools/src/groovy/check-working-copy.groovy @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Task script that is called by Ant's build.xml file: + * Checks GIT working copy for unversioned or modified files. 
+ */ + +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.Project; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.Status; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.storage.file.FileRepositoryBuilder; +import org.eclipse.jgit.errors.*; + +def setProjectPropertyFromSet = { prop, set -> + if (set) { + properties[prop] = '* ' + set.join(properties['line.separator'] + '* '); + } +}; + +try { + task.log('Initializing working copy...', Project.MSG_INFO); + final Repository repository = new FileRepositoryBuilder() + .setWorkTree(project.getBaseDir()) + .setMustExist(true) + .build(); + + task.log('Checking working copy status...', Project.MSG_INFO); + final Status status = new Git(repository).status().call(); + if (!status.isClean()) { + final SortedSet unversioned = new TreeSet(), modified = new TreeSet(); + status.properties.each{ prop, val -> + if (val instanceof Set) { + if (prop in ['untracked', 'untrackedFolders', 'missing']) { + unversioned.addAll(val); + } else if (prop != 'ignoredNotInIndex') { + modified.addAll(val); + } + } + }; + setProjectPropertyFromSet('wc.unversioned.files', unversioned); + setProjectPropertyFromSet('wc.modified.files', modified); + } +} catch (RepositoryNotFoundException | NoWorkTreeException | NotSupportedException e) { + task.log('WARNING: Development directory is not a valid GIT checkout! Disabling checks...', Project.MSG_WARN); +} diff --git a/lucene/tools/src/groovy/install-markdown-filter.groovy b/lucene/tools/src/groovy/install-markdown-filter.groovy new file mode 100644 index 00000000000..2b5544b99e6 --- /dev/null +++ b/lucene/tools/src/groovy/install-markdown-filter.groovy @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Task script that is called by Ant's common-build.xml file: + * Installs markdown filter into Ant. 
+ */ + +import org.apache.tools.ant.AntTypeDefinition; +import org.apache.tools.ant.ComponentHelper; +import org.apache.tools.ant.filters.TokenFilter.ChainableReaderFilter; +import com.vladsch.flexmark.ast.Node; +import com.vladsch.flexmark.ast.Heading; +import com.vladsch.flexmark.html.HtmlRenderer; +import com.vladsch.flexmark.parser.Parser; +import com.vladsch.flexmark.parser.ParserEmulationProfile; +import com.vladsch.flexmark.util.html.Escaping; +import com.vladsch.flexmark.util.options.MutableDataSet; +import com.vladsch.flexmark.ext.abbreviation.AbbreviationExtension; +import com.vladsch.flexmark.ext.autolink.AutolinkExtension; + +public final class MarkdownFilter extends ChainableReaderFilter { + @Override + public String filter(String markdownSource) { + MutableDataSet options = new MutableDataSet(); + options.setFrom(ParserEmulationProfile.MARKDOWN); + options.set(Parser.EXTENSIONS, [ AbbreviationExtension.create(), AutolinkExtension.create() ]); + options.set(HtmlRenderer.RENDER_HEADER_ID, true); + options.set(HtmlRenderer.MAX_TRAILING_BLANK_LINES, 0); + Node parsed = Parser.builder(options).build().parse(markdownSource); + + StringBuilder html = new StringBuilder('\n\n'); + CharSequence title = parsed.getFirstChildAny(Heading.class)?.getText(); + if (title != null) { + html.append('').append(Escaping.escapeHtml(title, false)).append('\n'); + } + html.append('\n') + .append('\n\n'); + HtmlRenderer.builder(options).build().render(parsed, html); + html.append('\n\n'); + return html; + } +} + +AntTypeDefinition t = new AntTypeDefinition(); +t.setName('markdownfilter'); +t.setClass(MarkdownFilter.class); +ComponentHelper.getComponentHelper(project).addDataTypeDefinition(t); diff --git a/lucene/tools/src/groovy/patch-mrjar-classes.groovy b/lucene/tools/src/groovy/patch-mrjar-classes.groovy index d1699970eec..1ba6a983691 100644 --- a/lucene/tools/src/groovy/patch-mrjar-classes.groovy +++ b/lucene/tools/src/groovy/patch-mrjar-classes.groovy @@ -15,6 +15,11 @@ * limitations under the License. */ +/** Task script that is called by Ant's common-build.xml file: + * Patches Java 8 class files to replace method signatures by + * native Java 9 optimized ones (to be placed in MR-JAR). + */ + import org.apache.tools.ant.Project; import org.objectweb.asm.ClassReader; @@ -27,6 +32,11 @@ def mappings = [ 'org/apache/lucene/util/FutureArrays': 'java/util/Arrays', ]; +if (properties['run.clover'] != null) { + task.log("Disabled class file remapping for Java 9, because Clover code coverage is enabled.", Project.MSG_INFO); + return; +} + File inputDir = new File(properties['build.dir'], 'classes/java'); File outputDir = new File(properties['build.dir'], 'classes/java9'); diff --git a/lucene/tools/src/groovy/run-beaster.groovy b/lucene/tools/src/groovy/run-beaster.groovy new file mode 100644 index 00000000000..f94a4560f1c --- /dev/null +++ b/lucene/tools/src/groovy/run-beaster.groovy @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Task script that is called by Ant's common-build.xml file: + * Runs test beaster. + */ + +import org.apache.tools.ant.BuildException; +import org.apache.tools.ant.BuildLogger; +import org.apache.tools.ant.Project; + +int iters = (properties['beast.iters'] ?: '1') as int; +if (iters <= 1) { + throw new BuildException("Please give -Dbeast.iters with an int value > 1."); +} + +def antcall = project.createTask('antcall'); +antcall.with { + target = '-test'; + inheritAll = true; + inheritRefs = true; + createParam().with { + name = "tests.isbeasting"; + value = "true"; + }; +}; + +(1..iters).each { i -> + task.log('Beast round: ' + i, Project.MSG_INFO); + try { + // disable verbose build logging: + project.buildListeners.each { listener -> + if (listener instanceof BuildLogger) { + listener.messageOutputLevel = Project.MSG_WARN; + } + }; + + antcall.execute(); + + } catch (BuildException be) { + def logFile = new File(properties["junit.output.dir"], "tests-failures.txt"); + if (logFile.exists()) { + logFile.eachLine("UTF-8", { line -> + task.log(line, Project.MSG_ERR); + }); + } + throw be; + } finally { + // restore build logging (unfortunately there is no way to get the original logging level (write-only property): + project.buildListeners.each { listener -> + if (listener instanceof BuildLogger) { + listener.messageOutputLevel = Project.MSG_INFO; + } + }; + } +}; +task.log('Beasting finished.', Project.MSG_INFO); diff --git a/lucene/tools/src/groovy/run-maven-build.groovy b/lucene/tools/src/groovy/run-maven-build.groovy new file mode 100644 index 00000000000..c26c7bf865a --- /dev/null +++ b/lucene/tools/src/groovy/run-maven-build.groovy @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** Task script that is called by Ant's build.xml file: + * Runs maven build from within Ant after creating POMs. 
+ */ + +import groovy.xml.NamespaceBuilder; +import org.apache.tools.ant.Project; + +def userHome = properties['user.home'], commonDir = properties['common.dir']; +def propPrefix = '-mvn.inject.'; int propPrefixLen = propPrefix.length(); + +def subProject = project.createSubProject(); +project.copyUserProperties(subProject); +subProject.initProperties(); +new AntBuilder(subProject).sequential{ + property(file: userHome+'/lucene.build.properties', prefix: propPrefix); + property(file: userHome+'/build.properties', prefix: propPrefix); + property(file: commonDir+'/build.properties', prefix: propPrefix); +}; + +def cmdlineProps = subProject.properties + .findAll{ k, v -> k.startsWith(propPrefix) } + .collectEntries{ k, v -> [k.substring(propPrefixLen), v] }; +cmdlineProps << project.userProperties.findAll{ k, v -> !k.startsWith('ant.') }; + +def artifact = NamespaceBuilder.newInstance(ant, 'antlib:org.apache.maven.artifact.ant'); + +task.log('Running Maven with props: ' + cmdlineProps.toString(), Project.MSG_INFO); +artifact.mvn(pom: properties['maven-build-dir']+'/pom.xml', mavenVersion: properties['maven-version'], failonerror: true, fork: true) { + cmdlineProps.each{ k, v -> arg(value: '-D' + k + '=' + v) }; + arg(value: '-fae'); + arg(value: 'install'); +}; diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 6bcb5e04f20..702de61cf35 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -197,6 +197,14 @@ Bug Fixes * SOLR-11931: Fix contrib/ltr custom inner class feature/normaliser/model persistence. (Christine Poerschke) +* SOLR-10261: In case of in-place updates, failure in leader to follower replica update request now throws the + follower replica in leader-initiated-recovery (Ishan Chattopadhyaya, Steve Rowe) + +* SOLR-11898: ConcurrentModificationException when calling org.apache.solr.core.SolrInfoBean.getMetricsSnapshot + (Jeff Miller via Erick Erickson) + +* SOLR-11950: Allow CLUSTERSTATUS "shard" parameter to accept comma (,) delimited list (Chris Ulicny via Jason Gerlowski) + Optimizations ---------------------- diff --git a/solr/build.xml b/solr/build.xml index 9b0bdabd416..400c71180d5 100644 --- a/solr/build.xml +++ b/solr/build.xml @@ -488,8 +488,6 @@ - - - + @@ -170,7 +170,8 @@ diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java index 63c1a078fd2..0f159544f0f 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java @@ -115,7 +115,10 @@ public class ClusterStatus { } } if (shard != null) { - requestedShards.add(shard); + String[] paramShards = shard.split(","); + for(String paramShard : paramShards){ + requestedShards.add(paramShard); + } } if (clusterStateCollection.getStateFormat() > 1) { diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java index 574ba72805b..09b4b364335 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java @@ -591,10 +591,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia ord = -1; } } else { - if (globalDoc > docValues.docID()) { - docValues.advance(globalDoc); - } - if (globalDoc == docValues.docID()) { + if (docValues.advanceExact(globalDoc)) { ord = docValues.ordValue(); } else { ord = -1; @@ -664,12 
+661,8 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia @Override public void collect(int docId) throws IOException { - int valuesDocID = docValues.docID(); - if (valuesDocID < docId) { - valuesDocID = docValues.advance(docId); - } long value; - if (valuesDocID == docId) { + if (docValues.advanceExact(docId)) { value = docValues.longValue(); } else { value = 0; @@ -739,6 +732,8 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia return Float.toString(Float.intBitsToFloat((int)val)); case DOUBLE: return Double.toString(Double.longBitsToDouble(val)); + case DATE: + break; } } throw new IllegalArgumentException("FieldType must be INT,LONG,FLOAT,DOUBLE found " + fieldType); diff --git a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java index f8ab7b71811..6f2fc266c76 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java +++ b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java @@ -196,6 +196,9 @@ public class RangeFacetProcessor extends SimpleFacets { intervals.set(includeBefore ? 1 : 0, new IntervalFacets.FacetInterval(sf, range.lower, range.upper, range.includeLower, range.includeUpper, FacetRangeOther.BETWEEN.toString())); break; + case ALL: + case NONE: + break; } } continue; diff --git a/solr/core/src/java/org/apache/solr/schema/SortableTextField.java b/solr/core/src/java/org/apache/solr/schema/SortableTextField.java index c30b1b1216f..1d2c21de7a8 100644 --- a/solr/core/src/java/org/apache/solr/schema/SortableTextField.java +++ b/solr/core/src/java/org/apache/solr/schema/SortableTextField.java @@ -80,10 +80,10 @@ public class SortableTextField extends TextField { } } - // by the time our init() is called, super.setArgs has already removed & procesesd any explicit + // by the time our init() is called, super.setArgs has already removed & processed any explicit // "docValues=foo" or useDocValuesAsStored=bar args... // - If the user explicitly said docValues=false, we want to respect that and not change it. - // - if the user didn't explicit specify anything, then we want to implicitly *default* docValues=true + // - if the user didn't explicitly specify anything, then we want to implicitly *default* docValues=true // - The inverse is true for useDocValuesAsStored=true: // - if explict, then respect it; else implicitly default to useDocValuesAsStored=false // ...lucky for us, setArgs preserved info about explicitly set true|false properties... 
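The doc-values changes in this patch (the join collectors and ExpandComponent hunks above, and the CollapsingQParserPlugin and IGainTermsQParserPlugin hunks below) all collapse the manual docID()/advance() bookkeeping into a single advanceExact(target) call, per LUCENE-8152. A minimal illustrative sketch of the before/after consumption pattern, using NumericDocValues and a made-up helper class that is not part of the patch:

    import java.io.IOException;
    import org.apache.lucene.index.NumericDocValues;

    // Hypothetical helper, for illustration only; not part of this patch.
    final class DocValueConsumption {

      // Old pattern removed by this patch: advance by hand, then compare docIDs.
      static long oldStyle(NumericDocValues values, int doc) throws IOException {
        if (doc > values.docID()) {
          values.advance(doc);
        }
        return doc == values.docID() ? values.longValue() : 0L; // 0 when the doc has no value
      }

      // New pattern: advanceExact() positions the iterator on the target and
      // reports whether that document has a value.
      static long newStyle(NumericDocValues values, int doc) throws IOException {
        return values.advanceExact(doc) ? values.longValue() : 0L;
      }
    }

advanceExact(target) returns true only when the target document actually has a value, which is why the existing fallback branches in these hunks (value = 0, ord = -1, empty BytesRef) are kept unchanged.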
diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java index 974df9081b5..9af1c8957dc 100644 --- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java @@ -560,20 +560,14 @@ public class CollapsingQParserPlugin extends QParserPlugin { int ord = -1; if(this.ordinalMap != null) { //Handle ordinalMapping case - if (contextDoc > segmentValues.docID()) { - segmentValues.advance(contextDoc); - } - if (contextDoc == segmentValues.docID()) { + if (segmentValues.advanceExact(contextDoc)) { ord = (int)segmentOrdinalMap.get(segmentValues.ordValue()); } else { ord = -1; } } else { //Handle top Level FieldCache or Single Segment Case - if (globalDoc > segmentValues.docID()) { - segmentValues.advance(globalDoc); - } - if (globalDoc == segmentValues.docID()) { + if (segmentValues.advanceExact(globalDoc)) { ord = segmentValues.ordValue(); } else { ord = -1; @@ -680,18 +674,12 @@ public class CollapsingQParserPlugin extends QParserPlugin { int ord = -1; if(this.ordinalMap != null) { //Handle ordinalMapping case - if (contextDoc > segmentValues.docID()) { - segmentValues.advance(contextDoc); - } - if (contextDoc == segmentValues.docID()) { + if (segmentValues.advanceExact(contextDoc)) { ord = (int)segmentOrdinalMap.get(segmentValues.ordValue()); } } else { //Handle top Level FieldCache or Single Segment Case - if (docId > segmentValues.docID()) { - segmentValues.advance(docId); - } - if (docId == segmentValues.docID()) { + if (segmentValues.advanceExact(docId)) { ord = segmentValues.ordValue(); } } @@ -786,14 +774,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { @Override public void collect(int contextDoc) throws IOException { - - int collapseDocID = collapseValues.docID(); - if (collapseDocID < contextDoc) { - collapseDocID = collapseValues.advance(contextDoc); - } - int collapseValue; - if (collapseDocID == contextDoc) { + if (collapseValues.advanceExact(contextDoc)) { collapseValue = (int) collapseValues.longValue(); } else { collapseValue = 0; @@ -889,12 +871,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { } int contextDoc = globalDoc-currentDocBase; - int valuesDocID = collapseValues.docID(); - if (valuesDocID < contextDoc) { - valuesDocID = collapseValues.advance(contextDoc); - } int collapseValue; - if (valuesDocID == contextDoc) { + if (collapseValues.advanceExact(contextDoc)) { collapseValue = (int) collapseValues.longValue(); } else { collapseValue = 0; @@ -1015,17 +993,11 @@ public class CollapsingQParserPlugin extends QParserPlugin { int globalDoc = contextDoc+this.docBase; int ord = -1; if(this.ordinalMap != null) { - if (contextDoc > segmentValues.docID()) { - segmentValues.advance(contextDoc); - } - if (contextDoc == segmentValues.docID()) { + if (segmentValues.advanceExact(contextDoc)) { ord = (int)segmentOrdinalMap.get(segmentValues.ordValue()); } } else { - if (globalDoc > segmentValues.docID()) { - segmentValues.advance(globalDoc); - } - if (globalDoc == segmentValues.docID()) { + if (segmentValues.advanceExact(globalDoc)) { ord = segmentValues.ordValue(); } } @@ -1085,18 +1057,12 @@ public class CollapsingQParserPlugin extends QParserPlugin { int ord = -1; if(this.ordinalMap != null) { //Handle ordinalMapping case - if (contextDoc > segmentValues.docID()) { - segmentValues.advance(contextDoc); - } - if (contextDoc == segmentValues.docID()) { + if 
(segmentValues.advanceExact(contextDoc)) { ord = (int) segmentOrdinalMap.get(segmentValues.ordValue()); } } else { //Handle top Level FieldCache or Single Segment Case - if (globalDoc > segmentValues.docID()) { - segmentValues.advance(globalDoc); - } - if (globalDoc == segmentValues.docID()) { + if (segmentValues.advanceExact(globalDoc)) { ord = segmentValues.ordValue(); } } @@ -1197,13 +1163,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { } public void collect(int contextDoc) throws IOException { - int collapseDocID = collapseValues.docID(); - if (collapseDocID < contextDoc) { - collapseDocID = collapseValues.advance(contextDoc); - } - int collapseKey; - if (collapseDocID == contextDoc) { + if (collapseValues.advanceExact(contextDoc)) { collapseKey = (int) collapseValues.longValue(); } else { collapseKey = 0; @@ -1249,13 +1210,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { int contextDoc = globalDoc-currentDocBase; if(this.needsScores){ - int collapseDocID = collapseValues.docID(); - if (collapseDocID < contextDoc) { - collapseDocID = collapseValues.advance(contextDoc); - } - int collapseValue; - if (collapseDocID == contextDoc) { + if (collapseValues.advanceExact(contextDoc)) { collapseValue = (int) collapseValues.longValue(); } else { collapseValue = 0; @@ -1637,13 +1593,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { return; } - int valuesDocID = minMaxValues.docID(); - if (valuesDocID < contextDoc) { - valuesDocID = minMaxValues.advance(contextDoc); - } - int currentVal; - if (valuesDocID == contextDoc) { + if (minMaxValues.advanceExact(contextDoc)) { currentVal = (int) minMaxValues.longValue(); } else { currentVal = 0; @@ -1729,13 +1680,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { return; } - int valuesDocID = minMaxValues.docID(); - if (valuesDocID < contextDoc) { - valuesDocID = minMaxValues.advance(contextDoc); - } - int currentMinMax; - if (valuesDocID == contextDoc) { + if (minMaxValues.advanceExact(contextDoc)) { currentMinMax = (int) minMaxValues.longValue(); } else { currentMinMax = 0; @@ -1822,13 +1768,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { return; } - int valuesDocID = minMaxVals.docID(); - if (valuesDocID < contextDoc) { - valuesDocID = minMaxVals.advance(contextDoc); - } - long currentVal; - if (valuesDocID == contextDoc) { + if (minMaxVals.advanceExact(contextDoc)) { currentVal = minMaxVals.longValue(); } else { currentVal = 0; @@ -2229,13 +2170,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { return; } - int valuesDocID = minMaxVals.docID(); - if (valuesDocID < contextDoc) { - valuesDocID = minMaxVals.advance(contextDoc); - } - int currentVal; - if (valuesDocID == contextDoc) { + if (minMaxVals.advanceExact(contextDoc)) { currentVal = (int) minMaxVals.longValue(); } else { currentVal = 0; @@ -2341,13 +2277,8 @@ public class CollapsingQParserPlugin extends QParserPlugin { return; } - int valuesDocID = minMaxVals.docID(); - if (valuesDocID < contextDoc) { - valuesDocID = minMaxVals.advance(contextDoc); - } - int minMaxVal; - if (valuesDocID == contextDoc) { + if (minMaxVals.advanceExact(contextDoc)) { minMaxVal = (int) minMaxVals.longValue(); } else { minMaxVal = 0; diff --git a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java index 1cf4443c912..b2e56a81f52 100644 --- a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java +++ 
b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java @@ -25,7 +25,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.invoke.MethodHandles; -import java.util.HashSet; +import java.util.concurrent.ConcurrentHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -60,7 +60,7 @@ public class FastLRUCache extends SolrCacheBase implements SolrCache private long maxRamBytes; private MetricsMap cacheMap; - private Set metricNames = new HashSet<>(); + private Set metricNames = ConcurrentHashMap.newKeySet(); private MetricRegistry registry; @Override diff --git a/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java index 053f50f4055..c9e14344632 100644 --- a/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/IGainTermsQParserPlugin.java @@ -125,12 +125,8 @@ public class IGainTermsQParserPlugin extends QParserPlugin { public void collect(int doc) throws IOException { super.collect(doc); ++count; - int valuesDocID = leafOutcomeValue.docID(); - if (valuesDocID < doc) { - valuesDocID = leafOutcomeValue.advance(doc); - } int value; - if (valuesDocID == doc) { + if (leafOutcomeValue.advanceExact(doc)) { value = (int) leafOutcomeValue.longValue(); } else { value = 0; diff --git a/solr/core/src/java/org/apache/solr/search/LFUCache.java b/solr/core/src/java/org/apache/solr/search/LFUCache.java index f502b03600b..7d41ae343ff 100644 --- a/solr/core/src/java/org/apache/solr/search/LFUCache.java +++ b/solr/core/src/java/org/apache/solr/search/LFUCache.java @@ -17,7 +17,7 @@ package org.apache.solr.search; import java.lang.invoke.MethodHandles; -import java.util.HashSet; +import java.util.concurrent.ConcurrentHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -65,7 +65,7 @@ public class LFUCache implements SolrCache { private int showItems = 0; private Boolean timeDecay = true; private MetricsMap cacheMap; - private Set metricNames = new HashSet<>(); + private Set metricNames = ConcurrentHashMap.newKeySet(); private MetricRegistry registry; @Override diff --git a/solr/core/src/java/org/apache/solr/search/LRUCache.java b/solr/core/src/java/org/apache/solr/search/LRUCache.java index cbd3979155d..af05dadd7d4 100644 --- a/solr/core/src/java/org/apache/solr/search/LRUCache.java +++ b/solr/core/src/java/org/apache/solr/search/LRUCache.java @@ -19,7 +19,7 @@ package org.apache.solr.search; import java.lang.invoke.MethodHandles; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; +import java.util.concurrent.ConcurrentHashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; @@ -87,7 +87,7 @@ public class LRUCache extends SolrCacheBase implements SolrCache, Acco private Map map; private String description="LRU Cache"; private MetricsMap cacheMap; - private Set metricNames = new HashSet<>(); + private Set metricNames = ConcurrentHashMap.newKeySet(); private MetricRegistry registry; private long maxRamBytes = Long.MAX_VALUE; diff --git a/solr/core/src/java/org/apache/solr/search/PointMerger.java b/solr/core/src/java/org/apache/solr/search/PointMerger.java index 22ae05c7b36..59a6fb38cbc 100644 --- a/solr/core/src/java/org/apache/solr/search/PointMerger.java +++ b/solr/core/src/java/org/apache/solr/search/PointMerger.java @@ -79,6 +79,8 @@ public class PointMerger { case DOUBLE: seg = new DoubleSeg(pv, capacity); 
break; + case DATE: + break; } int count = seg.setNextValue(); if (count >= 0) { diff --git a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java index 842f746c56a..5d3bb46d4f2 100644 --- a/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/TextLogisticRegressionQParserPlugin.java @@ -150,12 +150,8 @@ public class TextLogisticRegressionQParserPlugin extends QParserPlugin { } public void collect(int doc) throws IOException{ - int valuesDocID = leafOutcomeValue.docID(); - if (valuesDocID < doc) { - valuesDocID = leafOutcomeValue.advance(doc); - } int outcome; - if (valuesDocID == doc) { + if (leafOutcomeValue.advanceExact(doc)) { outcome = (int) leafOutcomeValue.longValue(); } else { outcome = 0; diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java index d85b3af2dc8..8353cdd077b 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java +++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java @@ -387,10 +387,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor { @Override public void collect(int segDoc) throws IOException { - if (segDoc > values.docID()) { - values.advance(segDoc); - } - if (segDoc == values.docID()) { + if (values.advanceExact(segDoc)) { long l = values.nextValue(); // This document must have at least one value collectValFirstPhase(segDoc, l); for (int i = 1; i < values.docValueCount(); i++) { @@ -418,10 +415,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor { @Override public void collect(int segDoc) throws IOException { - if (segDoc > values.docID()) { - values.advance(segDoc); - } - if (segDoc == values.docID()) { + if (values.advanceExact(segDoc)) { collectValFirstPhase(segDoc, values.longValue()); } } diff --git a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java index ac8bf0bdf3e..8d4dc4dee19 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java @@ -335,10 +335,7 @@ public class MinMaxAgg extends SimpleAggValueSource { @Override public void collect(int doc, int slotNum) throws IOException { - if (doc > subDv.docID()) { - subDv.advance(doc); - } - if (doc == subDv.docID()) { + if (subDv.advanceExact(doc)) { int segOrd = subDv.ordValue(); int ord = toGlobal==null ? 
segOrd : (int)toGlobal.get(segOrd); if ((ord - slotOrd[slotNum]) * minmax < 0 || slotOrd[slotNum]==MISSING) { diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java index 02d457fe412..af419a4d96a 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java +++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java @@ -71,10 +71,7 @@ class UniqueMultiDvSlotAcc extends UniqueSlotAcc { @Override public void collect(int doc, int slotNum) throws IOException { - if (doc > subDv.docID()) { - subDv.advance(doc); - } - if (doc == subDv.docID()) { + if (subDv.advanceExact(doc)) { int segOrd = (int) subDv.nextOrd(); assert segOrd >= 0; diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java index 714d1270be3..4d944d239ba 100644 --- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java +++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java @@ -388,6 +388,11 @@ public class SolrDispatchFilter extends BaseSolrFilter { case FORWARD: request.getRequestDispatcher(call.getPath()).forward(request, response); break; + case ADMIN: + case PROCESS: + case REMOTEQUERY: + case RETURN: + break; } } finally { call.destroy(); diff --git a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java index 7158e316380..967db541414 100644 --- a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java +++ b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java @@ -309,6 +309,14 @@ public class UninvertingReader extends FilterLeafReader { case LEGACY_FLOAT: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_FLOAT_PARSER); case LEGACY_LONG: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_LONG_PARSER); case LEGACY_DOUBLE: return FieldCache.DEFAULT.getNumerics(in, field, FieldCache.LEGACY_DOUBLE_PARSER); + case BINARY: + case SORTED: + case SORTED_SET_BINARY: + case SORTED_SET_DOUBLE: + case SORTED_SET_FLOAT: + case SORTED_SET_INTEGER: + case SORTED_SET_LONG: + break; } } return null; @@ -359,6 +367,17 @@ public class UninvertingReader extends FilterLeafReader { return FieldCache.DEFAULT.getDocTermOrds(in, field, FieldCache.INT64_TERM_PREFIX); case SORTED_SET_BINARY: return FieldCache.DEFAULT.getDocTermOrds(in, field, null); + case BINARY: + case LEGACY_DOUBLE: + case LEGACY_FLOAT: + case LEGACY_INTEGER: + case LEGACY_LONG: + case DOUBLE_POINT: + case FLOAT_POINT: + case INTEGER_POINT: + case LONG_POINT: + case SORTED: + break; } } return null; diff --git a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java index 1851f3dc633..b200f892f6f 100644 --- a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java +++ b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java @@ -26,7 +26,6 @@ import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient; // jdoc import org.apache.solr.client.solrj.request.AbstractUpdateRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.common.SolrException; -import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkStateReader; import 
org.apache.solr.common.params.ModifiableSolrParams; @@ -279,7 +278,24 @@ public class SolrCmdDistributor implements Closeable { try (HttpSolrClient client = new HttpSolrClient.Builder(req.node.getUrl()).withHttpClient(clients.getHttpClient()).build()) { client.request(req.uReq); } catch (Exception e) { - throw new SolrException(ErrorCode.SERVER_ERROR, "Failed synchronous update on shard " + req.node + " update: " + req.uReq , e); + try { + // if false, then the node is probably not "live" anymore + // and we do not need to send a recovery message + Throwable rootCause = SolrException.getRootCause(e); + log.error("Setting up to try to start recovery on replica {}", req.node.getUrl(), rootCause); + req.cmd.getReq().getCore().getCoreContainer().getZkController().ensureReplicaInLeaderInitiatedRecovery( + req.cmd.getReq().getCore().getCoreContainer(), + req.node.getCollection(), + req.node.getShardId(), + req.node.getNodeProps(), + req.cmd.getReq().getCore().getCoreDescriptor(), + false /* forcePublishState */ + ); + } catch (Exception exc) { + Throwable setLirZnodeFailedCause = SolrException.getRootCause(exc); + log.error("Leader failed to set replica " + + req.node.getUrl() + " state to DOWN due to: " + setLirZnodeFailedCause, setLirZnodeFailedCause); + } } return; diff --git a/solr/core/src/test-files/solr/configsets/_default/conf/managed-schema b/solr/core/src/test-files/solr/configsets/_default/conf/managed-schema index abc72a0c9fd..5a2ebaf2ee6 100644 --- a/solr/core/src/test-files/solr/configsets/_default/conf/managed-schema +++ b/solr/core/src/test-files/solr/configsets/_default/conf/managed-schema @@ -289,6 +289,30 @@ + + + + + + + + + + + + + + + + + diff --git a/solr/core/src/test/SecureRandomAlgorithmTesterApp.java b/solr/core/src/test/SecureRandomAlgorithmTesterApp.java deleted file mode 100644 index 44f79e903c8..00000000000 --- a/solr/core/src/test/SecureRandomAlgorithmTesterApp.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; - -public class SecureRandomAlgorithmTesterApp { - public static void main(String[] args) throws NoSuchAlgorithmException { - String algorithm = args[0]; - String method = args[1]; - int amount = Integer.valueOf(args[2]); - SecureRandom secureRandom; - if(algorithm.equals("default")) - secureRandom = new SecureRandom(); - else - secureRandom = SecureRandom.getInstance(algorithm); - System.out.println("Algorithm:" + secureRandom.getAlgorithm()); - switch(method) { - case "seed": secureRandom.generateSeed(amount); break; - case "bytes": secureRandom.nextBytes(new byte[amount]); break; - case "long": secureRandom.nextLong(); break; - case "int": secureRandom.nextInt(); break; - default: throw new IllegalArgumentException("Not supported random function: " + method); - } - System.out.println("SecureRandom function invoked"); - } -} diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java index 5ffed507598..d9ea73c08da 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java @@ -83,6 +83,7 @@ public class TestCollectionAPI extends ReplicaPropertiesBase { clusterStatusNoCollection(); clusterStatusWithCollection(); clusterStatusWithCollectionAndShard(); + clusterStatusWithCollectionAndMultipleShards(); clusterStatusWithRouteKey(); clusterStatusAliasTest(); clusterStatusRolesTest(); @@ -122,6 +123,29 @@ public class TestCollectionAPI extends ReplicaPropertiesBase { } } + private void clusterStatusWithCollectionAndMultipleShards() throws IOException, SolrServerException { + try (CloudSolrClient client = createCloudClient(null)) { + final CollectionAdminRequest.ClusterStatus request = new CollectionAdminRequest.ClusterStatus(); + request.setCollectionName(COLLECTION_NAME); + request.setShardName(SHARD1 + "," + SHARD2); + + NamedList rsp = request.process(client).getResponse(); + NamedList cluster = (NamedList) rsp.get("cluster"); + assertNotNull("Cluster state should not be null", cluster); + NamedList collections = (NamedList) cluster.get("collections"); + assertNotNull("Collections should not be null in cluster state", collections); + assertNotNull(collections.get(COLLECTION_NAME)); + assertEquals(1, collections.size()); + Map collection = (Map) collections.get(COLLECTION_NAME); + Map shardStatus = (Map) collection.get("shards"); + assertEquals(2, shardStatus.size()); + Map firstSelectedShardStatus = (Map) shardStatus.get(SHARD1); + assertNotNull(firstSelectedShardStatus); + Map secondSelectedShardStatus = (Map) shardStatus.get(SHARD2); + assertNotNull(secondSelectedShardStatus); + } + } + private void listCollection() throws IOException, SolrServerException { try (CloudSolrClient client = createCloudClient(null)) { diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java index b99198d7e8c..01f852e6b5d 100644 --- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java @@ -207,6 +207,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 { } @Test + @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-11974") public void testStringCollapse() throws 
Exception { for (final String hint : new String[] {"", " hint="+CollapsingQParserPlugin.HINT_TOP_FC}) { testCollapseQueries("group_s", hint, false); diff --git a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java index 19373f01dfb..fa74fb44386 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java @@ -693,12 +693,8 @@ public class TestRankQueryPlugin extends QParserPlugin { public void setScorer(Scorer scorer) throws IOException {} public void collect(int doc) throws IOException { - int valuesDocID = values.docID(); - if (valuesDocID < doc) { - valuesDocID = values.advance(doc); - } long value; - if (valuesDocID == doc) { + if (values.advanceExact(doc)) { value = values.longValue(); } else { value = 0; diff --git a/solr/server/solr/configsets/_default/conf/managed-schema b/solr/server/solr/configsets/_default/conf/managed-schema index abc72a0c9fd..5a2ebaf2ee6 100644 --- a/solr/server/solr/configsets/_default/conf/managed-schema +++ b/solr/server/solr/configsets/_default/conf/managed-schema @@ -289,6 +289,30 @@ + + + + + + + + + + + + + + + + + diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema b/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema index 6d6b9fd3dee..5751806dd64 100644 --- a/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema +++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema @@ -134,7 +134,7 @@ - + @@ -154,11 +154,11 @@ "content_type": From the HTTP headers of incoming stream "resourcename": From SolrCell request param resource.name --> - - + + - + @@ -423,6 +423,28 @@ + + + + + + + + + + + + + + + @@ -49,9 +49,6 @@ - - -
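Note: FastLRUCache, LFUCache and LRUCache above all swap the plain HashSet that held metric names for ConcurrentHashMap.newKeySet(), presumably because the set can be written while metrics are being reported from other threads. A minimal sketch of that change, with a hypothetical field standing in for the caches' metricNames:

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

class MetricNamesSketch {
  // Before (not thread-safe): private Set<String> metricNames = new HashSet<>();
  // After: newKeySet() returns a Set view backed by a ConcurrentHashMap,
  // so concurrent adds and iteration need no external locking.
  private final Set<String> metricNames = ConcurrentHashMap.newKeySet();

  void recordMetricName(String name) {
    metricNames.add(name);
  }
}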