Merge remote-tracking branch 'origin/branch_6x' into branch_6x

commit d838d3281f
Author: Noble Paul
Date:   2016-10-24 21:42:42 +05:30
8 changed files with 41 additions and 17 deletions

@@ -63,6 +63,10 @@ Improvements
 * LUCENE-7496: Better toString for SweetSpotSimilarity (janhoy)
+* LUCENE-7520: Highlighter's WeightedSpanTermExtractor shouldn't attempt to expand a MultiTermQuery
+  when its field doesn't match the field the extraction is scoped to.
+  (Cao Manh Dat via David Smiley)
 Optimizations
 * LUCENE-7501: BKDReader should not store the split dimension explicitly in the
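To make the LUCENE-7520 entry above concrete, here is a minimal standalone sketch of the new behavior (not taken from the patch; the class name and the "title"/"body" fields are illustrative, and CannedTokenStream comes from the Lucene test framework): an extraction scoped to one field no longer expands a MultiTermQuery aimed at a different field, so no span terms come back for it.

import java.io.IOException;
import java.util.Map;

import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.highlight.WeightedSpanTerm;
import org.apache.lucene.search.highlight.WeightedSpanTermExtractor;
import org.apache.lucene.util.BytesRef;

public class FieldScopedExtractionSketch {
  public static void main(String[] args) throws IOException {
    // Range query on "title" -- a MultiTermQuery on a field we do not highlight.
    MultiTermQuery titleQuery =
        new TermRangeQuery("title", new BytesRef("a"), new BytesRef("z"), true, true);

    WeightedSpanTermExtractor extractor = new WeightedSpanTermExtractor();
    extractor.setExpandMultiTermQuery(true);

    // Extraction is scoped to "body": with this fix the "title" query is skipped
    // instead of being rewritten, so the returned map stays empty.
    Map<String, WeightedSpanTerm> terms = extractor.getWeightedSpanTerms(
        titleQuery, 1f, new CannedTokenStream(new Token("alpha", 0, 5)), "body");
    System.out.println(terms.isEmpty()); // prints "true"
  }
}

The test added to HighlighterTest later in this commit checks the same behavior more strictly, by overriding extract() and asserting the query reaches it unrewritten.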

@@ -217,13 +217,14 @@ public class WeightedSpanTermExtractor {
     } else if (isQueryUnsupported(query.getClass())) {
       // nothing
     } else {
+      if (query instanceof MultiTermQuery &&
+          (!expandMultiTermQuery || !fieldNameComparator(((MultiTermQuery)query).getField()))) {
+        return;
+      }
       Query origQuery = query;
       final IndexReader reader = getLeafContext().reader();
       Query rewritten;
       if (query instanceof MultiTermQuery) {
-        if (!expandMultiTermQuery) {
-          return;
-        }
         rewritten = MultiTermQuery.SCORING_BOOLEAN_REWRITE.rewrite(reader, (MultiTermQuery) query);
       } else {
         rewritten = origQuery.rewrite(reader);
@@ -513,11 +514,7 @@ public class WeightedSpanTermExtractor {
    */
   public Map<String,WeightedSpanTerm> getWeightedSpanTerms(Query query, float boost, TokenStream tokenStream,
       String fieldName) throws IOException {
-    if (fieldName != null) {
-      this.fieldName = fieldName;
-    } else {
-      this.fieldName = null;
-    }
+    this.fieldName = fieldName;
     Map<String,WeightedSpanTerm> terms = new PositionCheckingMap<>();
     this.tokenStream = tokenStream;

@@ -33,6 +33,7 @@ import java.util.StringTokenizer;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.analysis.CachingTokenFilter;
+import org.apache.lucene.analysis.CannedTokenStream;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockPayloadAnalyzer;
 import org.apache.lucene.analysis.MockTokenFilter;
@@ -1339,6 +1340,22 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter
   }
 
+  public void testNotRewriteMultiTermQuery() throws IOException {
+    // field "bar": (not the field we ultimately want to extract)
+    MultiTermQuery mtq = new TermRangeQuery("bar", new BytesRef("aa"), new BytesRef("zz"), true, true) ;
+    WeightedSpanTermExtractor extractor = new WeightedSpanTermExtractor() {
+      @Override
+      protected void extract(Query query, float boost, Map<String, WeightedSpanTerm> terms) throws IOException {
+        assertEquals(mtq, query);
+        super.extract(query, boost, terms);
+      }
+    };
+    extractor.setExpandMultiTermQuery(true);
+    extractor.setMaxDocCharsToAnalyze(51200);
+    extractor.getWeightedSpanTerms(
+        mtq, 3, new CannedTokenStream(new Token("aa",0,2), new Token("bb", 2,4)), "foo"); // field "foo"
+  }
+
   public void testGetBestSingleFragmentWithWeights() throws Exception {
     TestHighlightRunner helper = new TestHighlightRunner() {

@@ -195,6 +195,7 @@ Optimizations
 * SOLR-9506: cache IndexFingerprint for each segment (Pushkar Raste, yonik, noble)
+* SOLR-7506: Roll over GC logs by default via bin/solr scripts (shalin, janhoy)
 Other Changes
 ----------------------

@@ -1407,13 +1407,14 @@ if [ -z ${GC_LOG_OPTS+x} ]; then
 else
   GC_LOG_OPTS=($GC_LOG_OPTS)
 fi
-# if verbose gc logging enabled, setup the location of the log file
+# if verbose gc logging enabled, setup the location of the log file and rotation
 if [ "$GC_LOG_OPTS" != "" ]; then
   gc_log_flag="-Xloggc"
   if [ "$JAVA_VENDOR" == "IBM J9" ]; then
     gc_log_flag="-Xverbosegclog"
   fi
-  GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log")
+  GC_LOG_OPTS+=("$gc_log_flag:$SOLR_LOGS_DIR/solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M)
 fi
 
 # If ZK_HOST is defined, the assume SolrCloud mode
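For readers who do not read bash or batch, here is a rough Java rendering of the option list the two scripts now assemble (a sketch only: the logs directory is a stand-in for $SOLR_LOGS_DIR, and on IBM J9 the scripts use -Xverbosegclog instead of -Xloggc).

import java.util.Arrays;
import java.util.List;

public class GcLogOptsSketch {
  public static void main(String[] args) {
    String solrLogsDir = "/var/solr/logs"; // placeholder for $SOLR_LOGS_DIR
    List<String> gcLogOpts = Arrays.asList(
        "-Xloggc:" + solrLogsDir + "/solr_gc.log", // base GC log location
        "-XX:+UseGCLogFileRotation",               // rotate instead of growing one file forever
        "-XX:NumberOfGCLogFiles=9",                // keep at most 9 rotated files
        "-XX:GCLogFileSize=20M");                  // roll when a file reaches 20 MB
    // With rotation enabled, HotSpot writes solr_gc.log.0, solr_gc.log.1, ... and tags the
    // file currently being written with a ".current" suffix, which is why the archiving
    // code further down matches more than the plain "solr_gc_" prefix.
    System.out.println(String.join(" ", gcLogOpts));
  }
}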

@@ -1013,23 +1013,23 @@ IF NOT EXIST "%SOLR_SERVER_DIR%\tmp" (
 )
 IF "%JAVA_VENDOR%" == "IBM J9" (
-  set "GCLOG_OPT=-Xverbosegclog"
+  set GCLOG_OPT="-Xverbosegclog:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
 ) else (
-  set "GCLOG_OPT=-Xloggc"
+  set GCLOG_OPT="-Xloggc:!SOLR_LOGS_DIR!\solr_gc.log" -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=9 -XX:GCLogFileSize=20M
 )
 IF "%FG%"=="1" (
   REM run solr in the foreground
   title "Solr-%SOLR_PORT%"
   echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
-  "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
+  "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
   -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
   -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^
   -Djetty.host=%SOLR_JETTY_HOST% -Djetty.port=%SOLR_PORT% -Djetty.home="%SOLR_SERVER_DIR%" ^
   -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar "%SOLR_JETTY_CONFIG%"
 ) ELSE (
   START /B "Solr-%SOLR_PORT%" /D "%SOLR_SERVER_DIR%" ^
-  "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT%:"!SOLR_LOGS_DIR!/solr_gc.log" ^
+  "%JAVA%" %SERVEROPT% %SOLR_JAVA_MEM% %START_OPTS% %GCLOG_OPT% ^
   -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^
   -Dsolr.log.muteconsole ^
   -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" ^

@@ -3445,13 +3445,13 @@ public class SolrCLI {
       Files.createDirectories(archivePath);
     }
     List<Path> archived = Files.find(archivePath, 1, (f, a)
-        -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+        -> a.isRegularFile() && String.valueOf(f.getFileName()).matches("^solr_gc[_.].+"))
         .collect(Collectors.toList());
     for (Path p : archived) {
       Files.delete(p);
     }
     List<Path> files = Files.find(logsPath, 1, (f, a)
-        -> a.isRegularFile() && String.valueOf(f.getFileName()).startsWith("solr_gc_"))
+        -> a.isRegularFile() && String.valueOf(f.getFileName()).matches("^solr_gc[_.].+"))
         .collect(Collectors.toList());
     if (files.size() > 0) {
       out("Archiving " + files.size() + " old GC log files to " + archivePath);

@@ -55,6 +55,10 @@ public class UtilsToolTest extends SolrTestCaseJ4 {
       "solr_log_20160304",
       "solr-8983-console.log",
       "solr_gc_log_20160102",
+      "solr_gcnotremove",
+      "solr_gc.log",
+      "solr_gc.log.0",
+      "solr_gc.log.0.current",
       "solr_gc_log_2");
 
   @Before
@@ -136,7 +140,7 @@ public class UtilsToolTest extends SolrTestCaseJ4 {
     String[] args = {"utils", "-archive_gc_logs", "-l", dir.toString()};
     assertEquals(files.size(), fileCount());
     assertEquals(0, runTool(args));
-    assertEquals(files.size()-2, fileCount());
+    assertEquals(files.size()-5, fileCount());
     assertFalse(listFiles().contains("solr_gc_log_2"));
     assertTrue(Files.exists(dir.resolve("archived").resolve("solr_gc_log_2")));
     assertEquals(0, runTool(args));