mirror of https://github.com/apache/lucene.git

commit 23f3fd2d48
Merge remote-tracking branch 'origin/master' into gradle-master
@@ -105,6 +105,7 @@ def expand_jinja(text, vars=None):
        'rename_cmd': 'ren' if is_windows() else 'mv',
        'vote_close_72h': vote_close_72h_date().strftime("%Y-%m-%d %H:00 UTC"),
        'vote_close_72h_epoch': unix_time_millis(vote_close_72h_date()),
        'vote_close_72h_holidays': vote_close_72h_holidays(),
        'lucene_highlights_file': lucene_highlights_file,
        'solr_highlights_file': solr_highlights_file,
        'tlp_news_draft': tlp_news_draft,

@@ -1891,25 +1892,35 @@ def create_ical(todo):

today = datetime.utcnow().date()
weekends = {(today + timedelta(days=x)): 'Saturday' for x in range(10) if (today + timedelta(days=x)).weekday() == 5}
weekends.update({(today + timedelta(days=x)): 'Sunday' for x in range(10) if (today + timedelta(days=x)).weekday() == 6})
sundays = {(today + timedelta(days=x)): 'Sunday' for x in range(10) if (today + timedelta(days=x)).weekday() == 6}
y = datetime.utcnow().year
years = [y, y+1]
non_working = holidays.CA(years=years) + holidays.US(years=years) + holidays.England(years=years) \
  + holidays.DE(years=years) + holidays.NO(years=years) + holidays.SE(years=years) + holidays.RU(years=years)
  + holidays.DE(years=years) + holidays.NO(years=years) + holidays.IND(years=years) + holidays.RU(years=years)


def vote_close_72h_date():
  working_days = 0
  # Voting open at least 72 hours according to ASF policy
  return datetime.utcnow() + timedelta(hours=73)


def vote_close_72h_holidays():
  days = 0
  day_offset = -1
  # Require voting open for 3 working days, not counting todays date
  # Working day is defined as saturday, sunday or a public holiday observed by 3 or more [CA, US, EN, DE, NO, SE, RU]
  while working_days < 4:
  holidays = []
  # Warn RM about major holidays coming up that should perhaps extend the voting deadline
  # Warning will be given for Sunday or a public holiday observed by 3 or more [CA, US, EN, DE, NO, IND, RU]
  while days < 3:
    day_offset += 1
    d = today + timedelta(days=day_offset)
    if not (d in weekends or (d in non_working and len(non_working[d]) >= 3)):
      working_days += 1
  return datetime.utcnow() + timedelta(days=day_offset) + timedelta(hours=1)
    if not (d in sundays or (d in non_working and len(non_working[d]) >= 2)):
      days += 1
    else:
      if d in sundays:
        holidays.append("%s (Sunday)" % d)
      else:
        holidays.append("%s (%s)" % (d, non_working[d]))
  return holidays if len(holidays) > 0 else None


def website_javadoc_redirect(todo):
@@ -823,7 +823,7 @@ groups:
python3 -u dev-tools/scripts/smokeTestRelease.py \
https://dist.apache.org/repos/dist/dev/lucene/lucene-solr-{{ release_version }}-RC{{ rc_number }}-rev{{ build_rc.git_rev | default("<git_rev>", True) }}

The vote will be open for at least 3 working days, i.e. until {{ vote_close }}.
The vote will be open for at least 72 hours i.e. until {{ vote_close }}.

[ ] +1 approve
[ ] +0 no opinion

@@ -831,6 +831,16 @@ groups:

Here is my +1
----

{% if vote_close_72h_holidays %}
[IMPORTANT]
====
The voting period contains one or more holidays. Please consider extending the vote deadline.

{% for holiday in vote_close_72h_holidays %}* {{ holiday }}
{% endfor %}
====
{%- endif %}
vars:
  vote_close: '{{ vote_close_72h }}'
  vote_close_epoch: '{{ vote_close_72h_epoch }}'
@@ -58,6 +58,13 @@ Improvements

* LUCENE-8984: MoreLikeThis MLT is biased for uncommon fields (Andy Hind via Anshum Gupta)

* LUCENE-8596: Kuromoji user dictionary now accepts entries containing hash mark (#) that were
  previously treated as beginning a line-ending comment (Satoshi Kato and Masaru Hasegawa via
  Michael Sokolov)

* LUCENE-9109: Use StackWalker to implement TestSecurityManager's detection
  of JVM exit (Uwe Schindler)

Bug fixes

* LUCENE-8663: NRTCachingDirectory.slowFileExists may open a file while

@@ -81,7 +88,13 @@ New Features

Improvements
---------------------
(No changes)

* LUCENE-9102: Add maxQueryLength option to DirectSpellchecker. (Andy Webb via Bruno Roustant)

* LUCENE-9091: UnifiedHighlighter HTML escaping should only escape essentials (Nándor Mátravölgyi)

* LUCENE-9109: Backport some changes from master (except StackWalker) to improve
  TestSecurityManager (Uwe Schindler)

Optimizations
---------------------
@@ -65,7 +65,7 @@ public final class UserDictionary implements Dictionary {
// text, segmentation, readings, POS
while ((line = br.readLine()) != null) {
// Remove comments
line = line.replaceAll("#.*$", "");
line = line.replaceAll("^#.*$", "");

// Skip empty lines or comment lines
if (line.trim().length() == 0) {
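For context on the regex change above: anchoring the comment pattern at the start of the line means only whole-line comments are blanked, so a '#' inside a dictionary entry now survives. A minimal standalone sketch (editorial, not part of the commit; the class name and sample strings are illustrative):

    import java.util.List;

    // Editorial sketch of why LUCENE-8596 anchors the comment pattern at line start.
    public class CommentStripSketch {
      public static void main(String[] args) {
        List<String> lines = List.of(
            "# a full-line comment",
            "テスト#,テスト #,テスト #,カスタム名刺");   // entry like the ones in the test user dictionary
        for (String line : lines) {
          String oldWay = line.replaceAll("#.*$", "");   // old: strips from the first '#' to end of line
          String newWay = line.replaceAll("^#.*$", "");  // new: blanks the line only if it starts with '#'
          System.out.println("old=[" + oldWay + "] new=[" + newWay + "]");
        }
      }
    }

With the old pattern the dictionary entry is truncated to "テスト"; with the anchored pattern it is kept intact, while full-line comments are still removed.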
@@ -99,4 +99,16 @@ public class UserDictionaryTest extends LuceneTestCase {
assertTrue(e.getMessage().contains("does not match the surface form"));
}

@Test
public void testSharp() throws IOException {
String[] inputs = {"テスト#", "テスト#テスト"};
UserDictionary dictionary = TestJapaneseTokenizer.readDict();

for (String input: inputs) {
System.out.println(input);
int[][] result = dictionary.lookup(input.toCharArray(), 0, input.length());
assertEquals("カスタム名刺", dictionary.getPartOfSpeech(result[0][0]));
}
}

}
@@ -8,3 +8,7 @@
# Silly entry:
abcd,a b cd,foo1 foo2 foo3,bar
abcdefg,ab cd efg,foo1 foo2 foo4,bar

# sharp test
test#テスト,test # テスト,test # テスト,カスタム名刺
テスト#,テスト #,テスト #,カスタム名刺
@@ -129,15 +129,7 @@ public class DefaultPassageFormatter extends PassageFormatter {
dest.append("/");
break;
default:
if (ch >= 0x30 && ch <= 0x39 || ch >= 0x41 && ch <= 0x5A || ch >= 0x61 && ch <= 0x7A) {
dest.append(ch);
} else if (ch < 0xff) {
dest.append("&#");
dest.append((int) ch);
dest.append(";");
} else {
dest.append(ch);
}
dest.append(ch);
}
}
} else {
@@ -0,0 +1,51 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.lucene.search.uhighlight;

import org.apache.lucene.util.LuceneTestCase;

public class TestDefaultPassageFormatter extends LuceneTestCase {
public void testBasic() throws Exception {
String text = "Test customization & <div class=\"xy\">"escaping"</div> of this very formatter. Unrelated part. It's not very N/A!";
// fabricate passages with matches to format
Passage[] passages = new Passage[2];
passages[0] = new Passage();
passages[0].setStartOffset(0);
passages[0].setEndOffset(text.indexOf(".")+1);
passages[0].addMatch(text.indexOf("very"), text.indexOf("very")+4, null, 2);
passages[1] = new Passage();
passages[1].setStartOffset(text.indexOf(".", passages[0].getEndOffset()+1) + 2);
passages[1].setEndOffset(text.length());
passages[1].addMatch(
text.indexOf("very", passages[0].getEndOffset()),
text.indexOf("very", passages[0].getEndOffset())+4, null, 2);

// test default
DefaultPassageFormatter formatter = new DefaultPassageFormatter();
assertEquals(
"Test customization & <div class=\"xy\">"escaping"</div> of this <b>very</b> formatter." +
"... It's not <b>very</b> N/A!", formatter.format(passages, text));

// test customization and encoding
formatter = new DefaultPassageFormatter("<u>", "</u>", "\u2026 ", true);
assertEquals(
"Test customization & <div class="xy">&quot;escaping&quot;" +
"</div> of this <u>very</u> formatter.\u2026 It's not <u>very</u> N/A!",
formatter.format(passages, text));
}
}
@@ -957,7 +957,7 @@ public class TestUnifiedHighlighter extends LuceneTestCase {
assertEquals(1, topDocs.totalHits.value);
String snippets[] = highlighter.highlight("body", query, topDocs);
assertEquals(1, snippets.length);
assertEquals("Just a test <b>highlighting</b> from <i>postings</i>. ", snippets[0]);
assertEquals("Just a test <b>highlighting</b> from <i>postings</i>. ", snippets[0]);

ir.close();
}
@@ -866,7 +866,7 @@ public class TestUnifiedHighlighterTermIntervals extends LuceneTestCase {
assertEquals(1, topDocs.totalHits.value);
String snippets[] = highlighter.highlight("body", query, topDocs);
assertEquals(1, snippets.length);
assertEquals("Just a test <b>highlighting</b> from <i>postings</i>. ", snippets[0]);
assertEquals("Just a test <b>highlighting</b> from <i>postings</i>. ", snippets[0]);
ir.close();
}
@@ -75,6 +75,8 @@ public class DirectSpellChecker {
private float thresholdFrequency = 0f;
/** minimum length of a query word to return suggestions */
private int minQueryLength = 4;
/** maximum length of a query word to return suggestions */
private int maxQueryLength = Integer.MAX_VALUE;
/** value in [0..1] (or absolute number >= 1) representing the maximum
 * number of documents (of the total) a query term can appear in to
 * be corrected. */

@@ -195,9 +197,27 @@ public class DirectSpellChecker {
 * metric.
 */
public void setMinQueryLength(int minQueryLength) {
if (minQueryLength > this.maxQueryLength)
throw new IllegalArgumentException("minQueryLength must not be greater than maxQueryLength");
this.minQueryLength = minQueryLength;
}

/** Get the maximum length of a query term to return suggestions */
public int getMaxQueryLength() {
return maxQueryLength;
}

/**
 * Set the maximum length of a query term to return suggestions.
 * <p>
 * Long queries can be expensive to process and/or trigger exceptions.
 */
public void setMaxQueryLength(int maxQueryLength) {
if (maxQueryLength < this.minQueryLength)
throw new IllegalArgumentException("maxQueryLength must not be smaller than minQueryLength");
this.maxQueryLength = maxQueryLength;
}

/**
 * Get the maximum threshold of documents a query term can appear in order
 * to provide suggestions.

@@ -317,7 +337,9 @@ public class DirectSpellChecker {
SuggestMode suggestMode, float accuracy) throws IOException {
final CharsRefBuilder spare = new CharsRefBuilder();
String text = term.text();
if (minQueryLength > 0 && text.codePointCount(0, text.length()) < minQueryLength)

int textLength = text.codePointCount(0, text.length());
if (textLength < minQueryLength || textLength > maxQueryLength)
return new SuggestWord[0];

if (lowerCaseTerms) {
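A minimal configuration sketch (editorial, not part of the patch) showing the new maxQueryLength bound next to the existing minQueryLength; the values are illustrative. The test hunk below exercises the same setters against a real index.

    import org.apache.lucene.search.spell.DirectSpellChecker;

    // Editorial sketch: terms shorter than minQueryLength or longer than the new
    // maxQueryLength are simply not spellchecked (suggestSimilar returns an empty array).
    public class SpellcheckerConfigSketch {
      public static void main(String[] args) {
        DirectSpellChecker spellChecker = new DirectSpellChecker();
        spellChecker.setMinQueryLength(4);   // existing option
        spellChecker.setMaxQueryLength(64);  // option added by this change (LUCENE-9102)
        System.out.println("max query length: " + spellChecker.getMaxQueryLength());
      }
    }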
@@ -147,11 +147,19 @@ public class TestDirectSpellChecker extends LuceneTestCase {
"fobar"), 1, ir, SuggestMode.SUGGEST_MORE_POPULAR);
assertEquals(0, similar.length);

// confirm that a term shorter than minQueryLength is not spellchecked
spellChecker = new DirectSpellChecker(); // reset defaults
spellChecker.setMinQueryLength(5);
similar = spellChecker.suggestSimilar(new Term("text", "foba"), 1, ir,
SuggestMode.SUGGEST_MORE_POPULAR);
assertEquals(0, similar.length);

// confirm that a term longer than maxQueryLength is not spellchecked
spellChecker = new DirectSpellChecker(); // reset defaults
spellChecker.setMaxQueryLength(5);
similar = spellChecker.suggestSimilar(new Term("text", "foobrr"), 1, ir,
SuggestMode.SUGGEST_MORE_POPULAR);
assertEquals(0, similar.length);

spellChecker = new DirectSpellChecker(); // reset defaults
spellChecker.setMaxEdits(1);
@@ -16,8 +16,9 @@
 */
package org.apache.lucene.util;

import java.security.AccessController;
import java.security.PrivilegedAction;
import java.lang.StackWalker.StackFrame;
import java.util.Locale;
import java.util.function.Predicate;

/**
 * A {@link SecurityManager} that prevents tests calling {@link System#exit(int)}.

@@ -28,11 +29,14 @@ import java.security.PrivilegedAction;
 */
public final class TestSecurityManager extends SecurityManager {

static final String JUNIT4_TEST_RUNNER_PACKAGE = "com.carrotsearch.ant.tasks.junit4.";
static final String ECLIPSE_TEST_RUNNER_PACKAGE = "org.eclipse.jdt.internal.junit.runner.";
static final String IDEA_TEST_RUNNER_PACKAGE = "com.intellij.rt.execution.junit.";
static final String GRADLE_TEST_RUNNER_PACKAGE = "worker.org.gradle.process.internal.worker";
private static final String JUNIT4_TEST_RUNNER_PACKAGE = "com.carrotsearch.ant.tasks.junit4.";
private static final String ECLIPSE_TEST_RUNNER_PACKAGE = "org.eclipse.jdt.internal.junit.runner.";
private static final String IDEA_TEST_RUNNER_PACKAGE = "com.intellij.rt.execution.junit.";
private static final String GRADLE_TEST_RUNNER_PACKAGE = "worker.org.gradle.process.internal.worker.";

private static final String SYSTEM_CLASS_NAME = System.class.getName();
private static final String RUNTIME_CLASS_NAME = Runtime.class.getName();

/**
 * Creates a new TestSecurityManager. This ctor is called on JVM startup,
 * when {@code -Djava.security.manager=org.apache.lucene.util.TestSecurityManager}

@@ -41,7 +45,7 @@ public final class TestSecurityManager extends SecurityManager {
public TestSecurityManager() {
super();
}

/**
 * {@inheritDoc}
 * <p>This method inspects the stack trace and checks who is calling

@@ -50,46 +54,27 @@ public final class TestSecurityManager extends SecurityManager {
 */
@Override
public void checkExit(final int status) {
AccessController.doPrivileged(new PrivilegedAction<Void>() {
@Override
public Void run() {
final String systemClassName = System.class.getName(),
runtimeClassName = Runtime.class.getName();
String exitMethodHit = null;
for (final StackTraceElement se : Thread.currentThread().getStackTrace()) {
final String className = se.getClassName(), methodName = se.getMethodName();
if (
("exit".equals(methodName) || "halt".equals(methodName)) &&
(systemClassName.equals(className) || runtimeClassName.equals(className))
) {
exitMethodHit = className + '#' + methodName + '(' + status + ')';
continue;
}

if (exitMethodHit != null) {
if (className.startsWith(JUNIT4_TEST_RUNNER_PACKAGE) ||
className.startsWith(ECLIPSE_TEST_RUNNER_PACKAGE) ||
className.startsWith(IDEA_TEST_RUNNER_PACKAGE) ||
className.startsWith(GRADLE_TEST_RUNNER_PACKAGE)) {
// this exit point is allowed, we return normally from closure:
return /*void*/ null;
} else {
// anything else in stack trace is not allowed, break and throw SecurityException below:
break;
}
}
}

if (exitMethodHit == null) {
// should never happen, only if JVM hides stack trace - replace by generic:
exitMethodHit = "JVM exit method";
}
throw new SecurityException(exitMethodHit + " calls are not allowed because they terminate the test runner's JVM.");
}
});

if (StackWalker.getInstance().walk(s -> s
.dropWhile(Predicate.not(TestSecurityManager::isExitStackFrame)) // skip all internal stack frames
.dropWhile(TestSecurityManager::isExitStackFrame) // skip all exit()/halt() stack frames
.limit(1) // only look at one more frame (caller of exit)
.map(StackFrame::getClassName)
.noneMatch(c -> c.startsWith(JUNIT4_TEST_RUNNER_PACKAGE) ||
c.startsWith(ECLIPSE_TEST_RUNNER_PACKAGE) ||
c.startsWith(IDEA_TEST_RUNNER_PACKAGE) ||
c.startsWith(GRADLE_TEST_RUNNER_PACKAGE)))) {
throw new SecurityException(String.format(Locale.ENGLISH,
"System/Runtime.exit(%1$d) or halt(%1$d) calls are not allowed because they terminate the test runner's JVM.",
status));
}
// we passed the stack check, delegate to super, so default policy can still deny permission:
super.checkExit(status);
}

private static boolean isExitStackFrame(StackFrame f) {
final String methodName = f.getMethodName(), className = f.getClassName();
return ("exit".equals(methodName) || "halt".equals(methodName)) &&
(SYSTEM_CLASS_NAME.equals(className) || RUNTIME_CLASS_NAME.equals(className));
}

}
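The rewritten checkExit above relies on the Java 9+ StackWalker API. A standalone sketch (editorial; the class and method names are illustrative, not from the patch) of the same drop-frames-then-inspect-caller pattern:

    import java.util.function.Predicate;

    // Editorial sketch: drop frames until the frame of interest, skip that frame,
    // then look at its caller - the same pattern the new checkExit() uses.
    public class StackWalkerSketch {
      static boolean isTarget(StackWalker.StackFrame f) {
        return "target".equals(f.getMethodName());
      }

      static void target() {
        String caller = StackWalker.getInstance().walk(s -> s
            .dropWhile(Predicate.not(StackWalkerSketch::isTarget)) // skip frames below target()
            .dropWhile(StackWalkerSketch::isTarget)                // skip target() itself
            .limit(1)                                              // the caller frame
            .map(StackWalker.StackFrame::getClassName)
            .findFirst().orElse("<unknown>"));
        System.out.println("target() was called from " + caller);
      }

      public static void main(String[] args) {
        target(); // prints this class name as the caller
      }
    }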
@@ -118,6 +118,38 @@ Upgrade Notes

* SOLR-13983: Process execution is removed from SystemInfoHandler. A best-effort attempt to
  execute "uname -a" and "uptime" on non-Windows platforms is no longer made. (rmuir)

* SOLR-14095 introduces a change in the format used for the elements in the Overseer queues and maps (see the Jira
  issue for details on the reasons for the change). This queue is used internally by the Overseer to reliably handle
  operations, to communicate operation results between the Overseer and the coordinator node, and by the
  REQUESTSTATUS API for displaying information about async Collection operations.
  This change won’t require you to change any client-side code, and you should see no differences on the client side;
  however, it does require some care when upgrading an existing SolrCloud cluster:
  - If you are upgrading Solr with an atomic restart strategy:
    - If you don’t use async or REQUESTSTATUS operations, you should be able to restart and not see any issues.
    - If you do use Collection API operations:
      1. pause Collection API operations
      2. cleanup queues (https://lucene.apache.org/solr/guide/8_3/collections-api.html#examples-using-deletestatus)
         if you use async operations
      3. upgrade and restart the nodes
  - If you are upgrading Solr with a rolling restart strategy:
    - If you don’t use Collection API operations, you should be able to do a rolling restart and not see
      any issues.
    - If you do use Collection API operations, but you can pause their use during the restart, the easiest
      way is to:
      1. pause Collection API operations
      2. upgrade and restart all nodes
      3. cleanup queues (https://lucene.apache.org/solr/guide/8_3/collections-api.html#examples-using-deletestatus)
         if you use async operations
      4. Resume all normal operations
    - If you use Collection API operations and can’t pause them during the upgrade:
      1. Start 8.5 nodes with the system property: `-Dsolr.useUnsafeOverseerResponse=deserialization`. Ensure the
         Overseer node is upgraded last
      2. Once all nodes are in 8.5 and once you don’t need to read old status anymore, restart again removing the
         system property
  If you prefer to keep the old (but insecure) serialization strategy, you can start your nodes using the
  property: `-Dsolr.useUnsafeOverseerResponse=true`. Keep in mind that this will be removed in a future version of Solr.


New Features
---------------------

@@ -125,8 +157,27 @@ New Features

Improvements
---------------------
* SOLR-14120: Define JavaScript methods 'includes' and 'startsWith' to ensure AdminUI can be displayed when using
  Internet Explorer 11 (jafurrer).

* SOLR-14042: Fix varargs precommit warnings (Andraas Salamon via Jason Gerlowski)

* SOLR-14095: Replace Java serialization with Javabin in the Overseer queues (Tomás Fernández Löbbe)

* SOLR-14125: Make <expressible> plugins work with packages (noble)

* SOLR-14138: Jetty's NCSA-formatted RequestLog can be enabled via environment variable,
  SOLR_REQUESTLOG_ENABLED=true. This will respect SOLR_LOGS_DIR if it is set. (rmuir)

* SOLR-14136: Jetty's InetAccessHandler can be enabled via environment variables,
  SOLR_IP_WHITELIST and SOLR_IP_BLACKLIST. These variables can restrict access to
  Solr based on IP addresses/networks. (rmuir)

* SOLR-14131: Add maxQueryLength option to DirectSolrSpellchecker. (Andy Webb via Bruno Roustant)

* SOLR-13984: Java's SecurityManager sandbox can be enabled via environment variable,
  SOLR_SECURITY_MANAGER_ENABLED=true. (rmuir)

Optimizations
---------------------
(No changes)

@@ -136,6 +187,8 @@ Bug Fixes

* SOLR-14099: Fixed @LogLevel annotation in test-framework to correctly 'unset' Loggers after test (hossman)

* SOLR-14017: Ensure bin/solr's -q and -v options work with examples (Andy Vuong via Jason Gerlowski)

* SOLR-14106: Cleanup Jetty SslContextFactory usage (Ryan Rockenbaugh, Jan Hoydahl, Kevin Risden)

Other Changes
@@ -262,6 +262,11 @@ else
SOLR_JETTY_CONFIG+=("--module=http")
fi

# Requestlog options
if [ "$SOLR_REQUESTLOG_ENABLED" == "true" ]; then
SOLR_JETTY_CONFIG+=("--module=requestlog")
fi

# Authentication options
if [ -z "$SOLR_AUTH_TYPE" ] && [ -n "$SOLR_AUTHENTICATION_OPTS" ]; then
echo "WARNING: SOLR_AUTHENTICATION_OPTS environment variable configured without associated SOLR_AUTH_TYPE variable"

@@ -710,7 +715,7 @@ function jetty_port() {
function run_tool() {

"$JAVA" $SOLR_SSL_OPTS $AUTHC_OPTS $SOLR_ZK_CREDS_AND_ACLS -Dsolr.install.dir="$SOLR_TIP" \
-Dlog4j.configurationFile="file:$DEFAULT_SERVER_DIR/resources/log4j2-console.xml" \
-Dlog4j.configurationFile="$DEFAULT_SERVER_DIR/resources/log4j2-console.xml" \
-classpath "$DEFAULT_SERVER_DIR/solr-webapp/webapp/WEB-INF/lib/*:$DEFAULT_SERVER_DIR/lib/ext/*:$DEFAULT_SERVER_DIR/lib/*" \
org.apache.solr.util.SolrCLI "$@"

@@ -1737,10 +1742,12 @@ if [ $# -gt 0 ]; then
;;
-v)
SOLR_LOG_LEVEL=DEBUG
PASS_TO_RUN_EXAMPLE+=" -Dsolr.log.level=$SOLR_LOG_LEVEL"
shift
;;
-q)
SOLR_LOG_LEVEL=WARN
PASS_TO_RUN_EXAMPLE+=" -Dsolr.log.level=$SOLR_LOG_LEVEL"
shift
;;
-all)

@@ -1944,7 +1951,7 @@ fi

LOG4J_CONFIG=()
if [ -n "$LOG4J_PROPS" ]; then
LOG4J_CONFIG+=("-Dlog4j.configurationFile=file:$LOG4J_PROPS")
LOG4J_CONFIG+=("-Dlog4j.configurationFile=$LOG4J_PROPS")
fi

if [ "$SCRIPT_CMD" == "stop" ]; then

@@ -2043,6 +2050,10 @@ else
fi
fi

# IP-based access control
IP_ACL_OPTS=("-Dsolr.jetty.inetaccess.includes=${SOLR_IP_WHITELIST}" \
"-Dsolr.jetty.inetaccess.excludes=${SOLR_IP_BLACKLIST}")

# These are useful for attaching remote profilers like VisualVM/JConsole
if [ "$ENABLE_REMOTE_JMX_OPTS" == "true" ]; then

@@ -2069,6 +2080,16 @@ else
REMOTE_JMX_OPTS=()
fi

# Enable java security manager (limiting filesystem access and other things)
if [ "$SOLR_SECURITY_MANAGER_ENABLED" == "true" ]; then
SECURITY_MANAGER_OPTS=('-Djava.security.manager' \
"-Djava.security.policy=${SOLR_SERVER_DIR}/etc/security.policy" \
"-Djava.security.properties=${SOLR_SERVER_DIR}/etc/security.properties" \
'-Dsolr.internal.network.permission=*')
else
SECURITY_MANAGER_OPTS=()
fi

JAVA_MEM_OPTS=()
if [ -z "$SOLR_HEAP" ] && [ -n "$SOLR_JAVA_MEM" ]; then
JAVA_MEM_OPTS=($SOLR_JAVA_MEM)

@@ -2173,12 +2194,12 @@ function start_solr() {
exit 1
fi

SOLR_START_OPTS=('-server' "${JAVA_MEM_OPTS[@]}" "${GC_TUNE[@]}" "${GC_LOG_OPTS[@]}" \
SOLR_START_OPTS=('-server' "${JAVA_MEM_OPTS[@]}" "${GC_TUNE[@]}" "${GC_LOG_OPTS[@]}" "${IP_ACL_OPTS[@]}" \
"${REMOTE_JMX_OPTS[@]}" "${CLOUD_MODE_OPTS[@]}" $SOLR_LOG_LEVEL_OPT -Dsolr.log.dir="$SOLR_LOGS_DIR" \
"-Djetty.port=$SOLR_PORT" "-DSTOP.PORT=$stop_port" "-DSTOP.KEY=$STOP_KEY" \
"${SOLR_HOST_ARG[@]}" "-Duser.timezone=$SOLR_TIMEZONE" \
"-Djetty.home=$SOLR_SERVER_DIR" "-Dsolr.solr.home=$SOLR_HOME" "-Dsolr.data.home=$SOLR_DATA_HOME" "-Dsolr.install.dir=$SOLR_TIP" \
"-Dsolr.default.confdir=$DEFAULT_CONFDIR" "${LOG4J_CONFIG[@]}" "${SOLR_OPTS[@]}")
"-Dsolr.default.confdir=$DEFAULT_CONFDIR" "${LOG4J_CONFIG[@]}" "${SOLR_OPTS[@]}" "${SECURITY_MANAGER_OPTS[@]}")

if [ "$SOLR_MODE" == "solrcloud" ]; then
IN_CLOUD_MODE=" in SolrCloud mode"
@@ -125,6 +125,11 @@ IF "%SOLR_SSL_ENABLED%"=="true" (
set SOLR_SSL_OPTS=
)

REM Requestlog options
IF "%SOLR_REQUESTLOG_ENABLED%"=="true" (
set "SOLR_JETTY_CONFIG=!SOLR_JETTY_CONFIG! --module=requestlog"
)

REM Authentication options

IF NOT DEFINED SOLR_AUTH_TYPE (

@@ -718,11 +723,13 @@ goto parse_args

:set_debug
set SOLR_LOG_LEVEL=DEBUG
set "PASS_TO_RUN_EXAMPLE=!PASS_TO_RUN_EXAMPLE! -Dsolr.log.level=%SOLR_LOG_LEVEL%"
SHIFT
goto parse_args

:set_warn
set SOLR_LOG_LEVEL=WARN
set "PASS_TO_RUN_EXAMPLE=!PASS_TO_RUN_EXAMPLE! -Dsolr.log.level=%SOLR_LOG_LEVEL%"
SHIFT
goto parse_args

@@ -994,7 +1001,7 @@ set "EXAMPLE_DIR=%SOLR_TIP%\example"
set TMP_SOLR_HOME=!SOLR_HOME:%EXAMPLE_DIR%=!
IF NOT "%TMP_SOLR_HOME%"=="%SOLR_HOME%" (
set "SOLR_LOGS_DIR=%SOLR_HOME%\..\logs"
set "LOG4J_CONFIG=file:///%SOLR_SERVER_DIR%\resources\log4j2.xml"
set "LOG4J_CONFIG=%SOLR_SERVER_DIR%\resources\log4j2.xml"
)

set IS_RESTART=0

@@ -1149,6 +1156,10 @@ IF "%SOLR_MODE%"=="solrcloud" (
)
)

REM IP-based access control
set IP_ACL_OPTS=-Dsolr.jetty.inetaccess.includes="%SOLR_IP_WHITELIST%" ^
-Dsolr.jetty.inetaccess.excludes="%SOLR_IP_BLACKLIST%"

REM These are useful for attaching remove profilers like VisualVM/JConsole
IF "%ENABLE_REMOTE_JMX_OPTS%"=="true" (
IF "!RMI_PORT!"=="" set RMI_PORT=1%SOLR_PORT%

@@ -1164,6 +1175,14 @@ IF "%ENABLE_REMOTE_JMX_OPTS%"=="true" (
set REMOTE_JMX_OPTS=
)

REM Enable java security manager (limiting filesystem access and other things)
IF "%SOLR_SECURITY_MANAGER_ENABLED%"=="true" (
set SECURITY_MANAGER_OPTS=-Djava.security.manager ^
-Djava.security.policy="%SOLR_SERVER_DIR%\etc\security.policy" ^
-Djava.security.properties="%SOLR_SERVER_DIR%\etc\security.properties" ^
-Dsolr.internal.network.permission=*
)

IF NOT "%SOLR_HEAP%"=="" set SOLR_JAVA_MEM=-Xms%SOLR_HEAP% -Xmx%SOLR_HEAP%
IF "%SOLR_JAVA_MEM%"=="" set SOLR_JAVA_MEM=-Xms512m -Xmx512m
IF "%SOLR_JAVA_STACK_SIZE%"=="" set SOLR_JAVA_STACK_SIZE=-Xss256k

@@ -1251,10 +1270,12 @@ IF "%verbose%"=="1" (
set START_OPTS=-Duser.timezone=%SOLR_TIMEZONE%
set START_OPTS=%START_OPTS% !GC_TUNE! %GC_LOG_OPTS%
IF NOT "!CLOUD_MODE_OPTS!"=="" set "START_OPTS=%START_OPTS% !CLOUD_MODE_OPTS!"
IF NOT "!IP_ACL_OPTS!"=="" set "START_OPTS=%START_OPTS% !IP_ACL_OPTS!"
IF NOT "%REMOTE_JMX_OPTS%"=="" set "START_OPTS=%START_OPTS% %REMOTE_JMX_OPTS%"
IF NOT "%SOLR_ADDL_ARGS%"=="" set "START_OPTS=%START_OPTS% %SOLR_ADDL_ARGS%"
IF NOT "%SOLR_HOST_ARG%"=="" set "START_OPTS=%START_OPTS% %SOLR_HOST_ARG%"
IF NOT "%SOLR_OPTS%"=="" set "START_OPTS=%START_OPTS% %SOLR_OPTS%"
IF NOT "!SECURITY_MANAGER_OPTS!"=="" set "START_OPTS=%START_OPTS% !SECURITY_MANAGER_OPTS!"
IF "%SOLR_SSL_ENABLED%"=="true" (
set "SSL_PORT_PROP=-Dsolr.jetty.https.port=%SOLR_PORT%"
set "START_OPTS=%START_OPTS% %SOLR_SSL_OPTS% !SSL_PORT_PROP!"

@@ -1266,7 +1287,7 @@ set SOLR_DATA_HOME_QUOTED="%SOLR_DATA_HOME%"

set "START_OPTS=%START_OPTS% -Dsolr.log.dir=%SOLR_LOGS_DIR_QUOTED%"
IF NOT "%SOLR_DATA_HOME%"=="" set "START_OPTS=%START_OPTS% -Dsolr.data.home=%SOLR_DATA_HOME_QUOTED%"
IF NOT DEFINED LOG4J_CONFIG set "LOG4J_CONFIG=file:///%SOLR_SERVER_DIR%\resources\log4j2.xml"
IF NOT DEFINED LOG4J_CONFIG set "LOG4J_CONFIG=%SOLR_SERVER_DIR%\resources\log4j2.xml"

cd /d "%SOLR_SERVER_DIR%"
@@ -102,6 +102,9 @@ REM start rotation of logs. This is false by default as log4j2 handles this for
REM framework that cannot do startup rotation, you may want to enable this to let Solr rotate logs on startup.
REM set SOLR_LOG_PRESTART_ROTATION=false

REM Enables jetty request log for all requests
REM set SOLR_REQUESTLOG_ENABLED=false

REM Set the host interface to listen on. Jetty will listen on all interfaces (0.0.0.0) by default.
REM This must be an IPv4 ("a.b.c.d") or bracketed IPv6 ("[x::y]") address, not a hostname!
REM set SOLR_JETTY_HOST=0.0.0.0

@@ -109,6 +112,16 @@ REM set SOLR_JETTY_HOST=0.0.0.0
REM Sets the port Solr binds to, default is 8983
REM set SOLR_PORT=8983

REM Restrict access to solr by IP address.
REM Specify a comma-separated list of addresses or networks, for example:
REM   127.0.0.1, 192.168.0.0/24, [::1], [2000:123:4:5::]/64
REM set SOLR_IP_WHITELIST=

REM Block access to solr from specific IP addresses.
REM Specify a comma-separated list of addresses or networks, for example:
REM   127.0.0.1, 192.168.0.0/24, [::1], [2000:123:4:5::]/64
REM set SOLR_IP_BLACKLIST=

REM Enables HTTPS. It is implictly true if you set SOLR_SSL_KEY_STORE. Use this config
REM to enable https module with custom jetty configuration.
REM set SOLR_SSL_ENABLED=true

@@ -175,3 +188,9 @@ REM For a visual indication in the Admin UI of what type of environment this clu
REM a -Dsolr.environment property below. Valid values are prod, stage, test, dev, with an optional
REM label or color, e.g. -Dsolr.environment=test,label=Functional+test,color=brown
REM SOLR_OPTS="$SOLR_OPTS -Dsolr.environment=prod"

REM Runs solr in a java security manager sandbox. This can protect against some attacks.
REM Runtime properties are passed to the security policy file (server\etc\security.policy)
REM You can also tweak via standard JDK files such as ~\.java.policy, see https://s.apache.org/java8policy
REM This is experimental! It may not work at all with Hadoop/HDFS features.
REM set SOLR_SECURITY_MANAGER_ENABLED=false
@@ -123,9 +123,22 @@
# framework that cannot do startup rotation, you may want to enable this to let Solr rotate logs on startup.
#SOLR_LOG_PRESTART_ROTATION=false

# Enables jetty request log for all requests
#SOLR_REQUESTLOG_ENABLED=false

# Sets the port Solr binds to, default is 8983
#SOLR_PORT=8983

# Restrict access to solr by IP address.
# Specify a comma-separated list of addresses or networks, for example:
#   127.0.0.1, 192.168.0.0/24, [::1], [2000:123:4:5::]/64
#SOLR_IP_WHITELIST=

# Block access to solr from specific IP addresses.
# Specify a comma-separated list of addresses or networks, for example:
#   127.0.0.1, 192.168.0.0/24, [::1], [2000:123:4:5::]/64
#SOLR_IP_BLACKLIST=

# Enables HTTPS. It is implictly true if you set SOLR_SSL_KEY_STORE. Use this config
# to enable https module with custom jetty configuration.
#SOLR_SSL_ENABLED=true

@@ -203,3 +216,9 @@
# a -Dsolr.environment property below. Valid values are prod, stage, test, dev, with an optional
# label or color, e.g. -Dsolr.environment=test,label=Functional+test,color=brown
#SOLR_OPTS="$SOLR_OPTS -Dsolr.environment=prod"

# Runs solr in java security manager sandbox. This can protect against some attacks.
# Runtime properties are passed to the security policy file (server/etc/security.policy)
# You can also tweak via standard JDK files such as ~/.java.policy, see https://s.apache.org/java8policy
# This is experimental! It may not work at all with Hadoop/HDFS features.
#SOLR_SECURITY_MANAGER_ENABLED=false
@@ -38,7 +38,7 @@
<property name="maven.dist.dir" location="${package.dir}/maven"/>
<property name="lucene-libs" location="${dest}/lucene-libs" />
<property name="tests.userdir" location="src/test-files"/>
<property name="tests.policy" location="${common-solr.dir}/../lucene/tools/junit4/solr-tests.policy"/>
<property name="tests.policy" location="${common-solr.dir}/server/etc/security.policy"/>
<property name="server.dir" location="${common-solr.dir}/server" />
<property name="example" location="${common-solr.dir}/example" />
<property name="javadoc.dir" location="${dest}/docs"/>
@@ -16,18 +16,6 @@
 */
package org.apache.solr.cloud;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.invoke.MethodHandles;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.DocCollection;

@@ -44,6 +32,17 @@ import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.invoke.MethodHandles;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import static org.apache.solr.common.params.CommonParams.NAME;
import static org.apache.solr.common.params.ConfigSetParams.ConfigSetAction.CREATE;
import static org.apache.solr.common.util.Utils.toJSONString;

@@ -90,7 +89,7 @@ public class OverseerConfigSetMessageHandler implements OverseerMessageHandler {
}

@Override
public SolrResponse processMessage(ZkNodeProps message, String operation) {
public OverseerSolrResponse processMessage(ZkNodeProps message, String operation) {
NamedList results = new NamedList();
try {
if (!operation.startsWith(CONFIGSETS_ACTION_PREFIX)) {
@@ -16,7 +16,6 @@
 */
package org.apache.solr.cloud;

import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.common.cloud.ZkNodeProps;

/**

@@ -30,7 +29,7 @@ public interface OverseerMessageHandler {
 *
 * @return response
 */
SolrResponse processMessage(ZkNodeProps message, String operation);
OverseerSolrResponse processMessage(ZkNodeProps message, String operation);

/**
 * @return the name of the OverseerMessageHandler
@@ -17,11 +17,17 @@
package org.apache.solr.cloud;

import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;

import java.io.IOException;
import java.util.Objects;

public class OverseerSolrResponse extends SolrResponse {

NamedList responseList = null;
NamedList<Object> responseList = null;

private long elapsedTime;

@@ -48,5 +54,50 @@ public class OverseerSolrResponse extends SolrResponse {
public NamedList<Object> getResponse() {
return responseList;
}

/**
 * This method serializes the content of an {@code OverseerSolrResponse}. Note that:
 * <ul>
 * <li>The elapsed time is not serialized</li>
 * <li>"Unknown" elements for the Javabin format will be serialized as Strings. See {@link org.apache.solr.common.util.JavaBinCodec#writeVal}</li>
 * </ul>
 */
@SuppressWarnings("deprecation")
public static byte[] serialize(OverseerSolrResponse responseObject) {
Objects.requireNonNull(responseObject);
if (useUnsafeSerialization()) {
return SolrResponse.serializable(responseObject);
}
try {
return Utils.toJavabin(responseObject.getResponse()).readAllBytes();
} catch (IOException|RuntimeException e) {
throw new SolrException(ErrorCode.SERVER_ERROR, "Exception serializing response to Javabin", e);
}
}

static boolean useUnsafeSerialization() {
String useUnsafeOverseerResponse = System.getProperty("solr.useUnsafeOverseerResponse");
return useUnsafeOverseerResponse != null && ("true".equals(useUnsafeOverseerResponse));
}

static boolean useUnsafeDeserialization() {
String useUnsafeOverseerResponse = System.getProperty("solr.useUnsafeOverseerResponse");
return useUnsafeOverseerResponse != null && ("true".equals(useUnsafeOverseerResponse) || "deserialization".equals(useUnsafeOverseerResponse));
}

@SuppressWarnings("deprecation")
public static OverseerSolrResponse deserialize(byte[] responseBytes) {
Objects.requireNonNull(responseBytes);
try {
@SuppressWarnings("unchecked")
NamedList<Object> response = (NamedList<Object>) Utils.fromJavabin(responseBytes);
return new OverseerSolrResponse(response);
} catch (IOException|RuntimeException e) {
if (useUnsafeDeserialization()) {
return (OverseerSolrResponse) SolrResponse.deserialize(responseBytes);
}
throw new SolrException(ErrorCode.SERVER_ERROR, "Exception deserializing response from Javabin", e);
}
}

}
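A round-trip sketch (editorial, not part of the patch) of the new Javabin-based Overseer response serialization shown above; it assumes solr-core on the classpath and uses an arbitrary payload:

    import org.apache.solr.cloud.OverseerSolrResponse;
    import org.apache.solr.common.util.NamedList;

    // Editorial sketch: serialize an Overseer response with the new Javabin path
    // (SOLR-14095) and read it back.
    public class OverseerResponseRoundTrip {
      public static void main(String[] args) {
        NamedList<Object> results = new NamedList<>();
        results.add("success", "collection created");

        byte[] bytes = OverseerSolrResponse.serialize(new OverseerSolrResponse(results));
        OverseerSolrResponse copy = OverseerSolrResponse.deserialize(bytes);
        System.out.println(copy.getResponse().get("success"));
      }
    }

Per the methods above, setting -Dsolr.useUnsafeOverseerResponse=true falls back to the old Java serialization on both paths, while =deserialization keeps only the read path tolerant of old entries, matching the upgrade note earlier in CHANGES.txt.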
@@ -34,7 +34,6 @@ import java.util.function.Predicate;
import com.codahale.metrics.Timer;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.io.IOUtils;
import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.cloud.Overseer.LeaderStatus;
import org.apache.solr.cloud.OverseerTaskQueue.QueueEvent;
import org.apache.solr.common.AlreadyClosedException;

@@ -476,7 +475,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
protected class Runner implements Runnable {
ZkNodeProps message;
String operation;
SolrResponse response;
OverseerSolrResponse response;
QueueEvent head;
OverseerMessageHandler messageHandler;
private final OverseerMessageHandler.Lock lock;

@@ -511,14 +510,14 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
if (asyncId != null) {
if (response != null && (response.getResponse().get("failure") != null
|| response.getResponse().get("exception") != null)) {
failureMap.put(asyncId, SolrResponse.serializable(response));
failureMap.put(asyncId, OverseerSolrResponse.serialize(response));
log.debug("Updated failed map for task with zkid:[{}]", head.getId());
} else {
completedMap.put(asyncId, SolrResponse.serializable(response));
completedMap.put(asyncId, OverseerSolrResponse.serialize(response));
log.debug("Updated completed map for task with zkid:[{}]", head.getId());
}
} else {
head.setBytes(SolrResponse.serializable(response));
head.setBytes(OverseerSolrResponse.serialize(response));
log.debug("Completed task:[{}]", head.getId());
}
@@ -249,7 +249,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,

@Override
@SuppressWarnings("unchecked")
public SolrResponse processMessage(ZkNodeProps message, String operation) {
public OverseerSolrResponse processMessage(ZkNodeProps message, String operation) {
MDCLoggingContext.setCollection(message.getStr(COLLECTION));
MDCLoggingContext.setShard(message.getStr(SHARD_ID_PROP));
MDCLoggingContext.setReplica(message.getStr(REPLICA_PROP));

@@ -277,7 +277,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler,
}

results.add("Operation " + operation + " caused exception:", e);
SimpleOrderedMap nl = new SimpleOrderedMap();
SimpleOrderedMap<Object> nl = new SimpleOrderedMap<>();
nl.add("msg", e.getMessage());
nl.add("rspCode", e instanceof SolrException ? ((SolrException)e).code() : -1);
results.add("exception", nl);
@@ -19,6 +19,7 @@ package org.apache.solr.handler;

import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -27,7 +28,9 @@ import java.util.Map.Entry;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.graph.Traversal;
import org.apache.solr.client.solrj.io.stream.*;
import org.apache.solr.client.solrj.io.stream.ExceptionStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.DefaultStreamFactory;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;

@@ -38,6 +41,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;

@@ -90,11 +95,21 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
Object functionMappingsObj = initArgs.get("streamFunctions");
if(null != functionMappingsObj){
NamedList<?> functionMappings = (NamedList<?>)functionMappingsObj;
for(Entry<String,?> functionMapping : functionMappings){
Class<? extends Expressible> clazz = core.getResourceLoader().findClass((String)functionMapping.getValue(),
Expressible.class);
streamFactory.withFunctionName(functionMapping.getKey(), clazz);
for(Entry<String,?> functionMapping : functionMappings) {
String key = functionMapping.getKey();
PluginInfo pluginInfo = new PluginInfo(key, Collections.singletonMap("class", functionMapping.getValue()));

if (pluginInfo.pkgName == null) {
Class<? extends Expressible> clazz = core.getResourceLoader().findClass((String) functionMapping.getValue(),
Expressible.class);
streamFactory.withFunctionName(key, clazz);
} else {
StreamHandler.ExpressibleHolder holder = new StreamHandler.ExpressibleHolder(pluginInfo, core, SolrConfig.classVsSolrPluginInfo.get(Expressible.class));
streamFactory.withFunctionName(key, () -> holder.getClazz());
}

}

}
}
@@ -45,6 +45,7 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.SolrException;

@@ -588,9 +589,17 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
private boolean verifyClass(CommandOperation op, String clz, Class expected) {
if (clz == null) return true;
if (!"true".equals(String.valueOf(op.getStr("runtimeLib", null)))) {
PluginInfo info = new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap());
//this is not dynamically loaded so we can verify the class right away
try {
req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), expected, clz, "");
if(expected == Expressible.class) {
SolrResourceLoader resourceLoader = info.pkgName == null ?
req.getCore().getResourceLoader() :
req.getCore().getResourceLoader(info.pkgName);
resourceLoader.findClass(info.className, expected);
} else {
req.getCore().createInitInstance(info, expected, clz, "");
}
} catch (Exception e) {
log.error("Error checking plugin : ", e);
op.addError(e.getMessage());
@@ -33,7 +33,10 @@ import org.apache.solr.client.solrj.io.ModelCache;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.stream.*;
import org.apache.solr.client.solrj.io.stream.DaemonStream;
import org.apache.solr.client.solrj.io.stream.ExceptionStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;

@@ -44,6 +47,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.routing.RequestReplicaListTransformerGenerator;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CommonParams;

@@ -52,7 +56,10 @@ import org.apache.solr.common.params.SolrParams;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.pkg.PackageLoader;
import org.apache.solr.pkg.PackagePluginHolder;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.security.AuthorizationContext;

@@ -87,7 +94,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
private SolrDefaultStreamFactory streamFactory = new SolrDefaultStreamFactory();
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private String coreName;
private Map<String,DaemonStream> daemons = Collections.synchronizedMap(new HashMap());
private Map<String, DaemonStream> daemons = Collections.synchronizedMap(new HashMap());

@Override
public PermissionNameProvider.Name getPermissionName(AuthorizationContext request) {

@@ -118,8 +125,14 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
// This pulls all the overrides and additions from the config
List<PluginInfo> pluginInfos = core.getSolrConfig().getPluginInfos(Expressible.class.getName());
for (PluginInfo pluginInfo : pluginInfos) {
Class<? extends Expressible> clazz = core.getMemClassLoader().findClass(pluginInfo.className, Expressible.class);
streamFactory.withFunctionName(pluginInfo.name, clazz);
if (pluginInfo.pkgName != null) {
ExpressibleHolder holder = new ExpressibleHolder(pluginInfo, core, SolrConfig.classVsSolrPluginInfo.get(Expressible.class));
streamFactory.withFunctionName(pluginInfo.name,
() -> holder.getClazz());
} else {
Class<? extends Expressible> clazz = core.getMemClassLoader().findClass(pluginInfo.className, Expressible.class);
streamFactory.withFunctionName(pluginInfo.name, clazz);
}
}

core.addCloseHook(new CloseHook() {

@@ -135,6 +148,24 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
});
}

public static class ExpressibleHolder extends PackagePluginHolder {
private Class clazz;

public ExpressibleHolder(PluginInfo info, SolrCore core, SolrConfig.SolrPluginInfo pluginMeta) {
super(info, core, pluginMeta);
}

public Class getClazz() {
return clazz;
}

@Override
protected void initNewInstance(PackageLoader.Package.Version newest) {
clazz = newest.getLoader().findClass(pluginInfo.className, Expressible.class);
}

}

public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
SolrParams params = req.getParams();
params = adjustParams(params);

@@ -220,6 +251,10 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,

private void handleAdmin(SolrQueryRequest req, SolrQueryResponse rsp, SolrParams params) {
String action = params.get("action").toLowerCase(Locale.ROOT).trim();
if ("plugins".equals(action)) {
rsp.add("plugins", (MapWriter) ew -> streamFactory.getFunctionNames().forEach((s, classSupplier) -> ew.putNoEx(s, classSupplier.get().getName())));
return;
}

if ("list".equals(action)) {
Collection<DaemonStream> vals = daemons.values();

@@ -255,10 +290,10 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " killed on " + coreName));
break;

default:
rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " action '"
+ action + "' not recognized on " + coreName));
break;
default:
rsp.add("result-set", new DaemonResponseStream("Deamon:" + id + " action '"
+ action + "' not recognized on " + coreName));
break;
}
}

@@ -288,11 +323,14 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
return null;
}

public void close() {}
public void close() {
}

public void open() {}
public void open() {
}

public void setStreamContext(StreamContext context) {}
public void setStreamContext(StreamContext context) {
}

public List<TupleStream> children() {
return null;

@@ -335,11 +373,14 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
return null;
}

public void close() {}
public void close() {
}

public void open() {}
public void open() {
}

public void setStreamContext(StreamContext context) {}
public void setStreamContext(StreamContext context) {
}

public List<TupleStream> children() {
return null;

@@ -378,11 +419,14 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
return null;
}

public void close() {}
public void close() {
}

public void open() {}
public void open() {
}

public void setStreamContext(StreamContext context) {}
public void setStreamContext(StreamContext context) {
}

public List<TupleStream> children() {
return null;

@@ -462,9 +506,9 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
}
}

private Map<String,List<String>> getCollectionShards(SolrParams params) {
private Map<String, List<String>> getCollectionShards(SolrParams params) {

Map<String,List<String>> collectionShards = new HashMap();
Map<String, List<String>> collectionShards = new HashMap();
Iterator<String> paramsIt = params.getParameterNamesIterator();
while (paramsIt.hasNext()) {
String param = paramsIt.next();
@ -16,25 +16,6 @@
|
|||
*/
|
||||
package org.apache.solr.handler.admin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.lang.invoke.MethodHandles;
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
|
@ -102,6 +83,25 @@ import org.apache.zookeeper.KeeperException;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.lang.invoke.MethodHandles;
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.solr.client.solrj.cloud.autoscaling.Policy.POLICY;
|
||||
import static org.apache.solr.client.solrj.response.RequestStatusState.COMPLETED;
|
||||
import static org.apache.solr.client.solrj.response.RequestStatusState.FAILED;
|
||||
|
@ -149,10 +149,10 @@ import static org.apache.solr.common.params.CollectionParams.CollectionAction.*;
|
|||
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
|
||||
import static org.apache.solr.common.params.CommonAdminParams.IN_PLACE_MOVE;
|
||||
import static org.apache.solr.common.params.CommonAdminParams.NUM_SUB_SHARDS;
|
||||
import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX;
|
||||
import static org.apache.solr.common.params.CommonAdminParams.SPLIT_FUZZ;
|
||||
import static org.apache.solr.common.params.CommonAdminParams.SPLIT_METHOD;
|
||||
import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE;
|
||||
import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX;
|
||||
import static org.apache.solr.common.params.CommonParams.NAME;
|
||||
import static org.apache.solr.common.params.CommonParams.TIMING;
|
||||
import static org.apache.solr.common.params.CommonParams.VALUE_LONG;
|
||||
|
@ -368,7 +368,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
|
|||
.getOverseerCollectionQueue()
|
||||
.offer(Utils.toJSON(m), timeout);
|
||||
if (event.getBytes() != null) {
|
||||
return SolrResponse.deserialize(event.getBytes());
|
||||
return OverseerSolrResponse.deserialize(event.getBytes());
|
||||
} else {
|
||||
if (System.nanoTime() - time >= TimeUnit.NANOSECONDS.convert(timeout, TimeUnit.MILLISECONDS)) {
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, operation
|
||||
|
@ -874,11 +874,11 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
|
|||
final NamedList<Object> results = new NamedList<>();
|
||||
if (zkController.getOverseerCompletedMap().contains(requestId)) {
|
||||
final byte[] mapEntry = zkController.getOverseerCompletedMap().get(requestId);
|
||||
rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
|
||||
rsp.getValues().addAll(OverseerSolrResponse.deserialize(mapEntry).getResponse());
|
||||
addStatusToResponse(results, COMPLETED, "found [" + requestId + "] in completed tasks");
|
||||
} else if (zkController.getOverseerFailureMap().contains(requestId)) {
|
||||
final byte[] mapEntry = zkController.getOverseerFailureMap().get(requestId);
|
||||
rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
|
||||
rsp.getValues().addAll(OverseerSolrResponse.deserialize(mapEntry).getResponse());
|
||||
addStatusToResponse(results, FAILED, "found [" + requestId + "] in failed tasks");
|
||||
} else if (zkController.getOverseerRunningMap().contains(requestId)) {
|
||||
addStatusToResponse(results, RUNNING, "found [" + requestId + "] in running tasks");
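// Client-side view (illustrative note, not part of this patch): an async Collections API
// call can be polled from SolrJ with CollectionAdminRequest.requestStatus(requestId),
// whose response exposes the same COMPLETED / FAILED / RUNNING states handled above, e.g.
//   CollectionAdminRequest.requestStatus("1000").process(client).getRequestStatus()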
|
||||
|
|
|
@ -212,7 +212,7 @@ public class ConfigSetsHandler extends RequestHandlerBase implements PermissionN
|
|||
.getOverseerConfigSetQueue()
|
||||
.offer(Utils.toJSON(m), timeout);
|
||||
if (event.getBytes() != null) {
|
||||
SolrResponse response = SolrResponse.deserialize(event.getBytes());
|
||||
SolrResponse response = OverseerSolrResponse.deserialize(event.getBytes());
|
||||
rsp.getValues().addAll(response.getResponse());
|
||||
SimpleOrderedMap exp = (SimpleOrderedMap) response.getResponse().get("exception");
|
||||
if (exp != null) {
|
||||
|
|
|
@ -104,12 +104,17 @@ public class PackagePluginHolder<T> extends PluginBag.PluginHolder<T> {
|
|||
log.info("loading plugin: {} -> {} using package {}:{}",
|
||||
pluginInfo.type, pluginInfo.name, pkg.name(), newest.getVersion());
|
||||
|
||||
initNewInstance(newest);
|
||||
pkgVersion = newest;
|
||||
|
||||
}
|
||||
|
||||
protected void initNewInstance(PackageLoader.Package.Version newest) {
|
||||
Object instance = SolrCore.createInstance(pluginInfo.className,
|
||||
pluginMeta.clazz, pluginMeta.getCleanTag(), core, newest.getLoader());
|
||||
PluginBag.initInstance(instance, pluginInfo);
|
||||
T old = inst;
|
||||
inst = (T) instance;
|
||||
pkgVersion = newest;
|
||||
if (old instanceof AutoCloseable) {
|
||||
AutoCloseable closeable = (AutoCloseable) old;
|
||||
try {
|
||||
|
|
|
@ -54,6 +54,7 @@ import org.slf4j.LoggerFactory;
|
|||
* can be specified as "freq".
|
||||
* <li>thresholdTokenFrequency: sets {@link DirectSpellChecker#setThresholdFrequency(float)}.
|
||||
* <li>minQueryLength: sets {@link DirectSpellChecker#setMinQueryLength(int)}.
|
||||
* <li>maxQueryLength: sets {@link DirectSpellChecker#setMaxQueryLength(int)}.
|
||||
* <li>maxQueryFrequency: sets {@link DirectSpellChecker#setMaxQueryFrequency(float)}.
|
||||
* </ul>
|
||||
* @see DirectSpellChecker
|
||||
|
@ -86,6 +87,9 @@ public class DirectSolrSpellChecker extends SolrSpellChecker {
|
|||
public static final String MINQUERYLENGTH = "minQueryLength";
|
||||
public static final int DEFAULT_MINQUERYLENGTH = 4;
|
||||
|
||||
public static final String MAXQUERYLENGTH = "maxQueryLength";
|
||||
public static final int DEFAULT_MAXQUERYLENGTH = Integer.MAX_VALUE;
|
||||
|
||||
public static final String MAXQUERYFREQUENCY = "maxQueryFrequency";
|
||||
public static final float DEFAULT_MAXQUERYFREQUENCY = 0.01f;
|
||||
|
||||
|
@ -144,6 +148,11 @@ public class DirectSolrSpellChecker extends SolrSpellChecker {
|
|||
Integer queryLength = params.getInt(MINQUERYLENGTH);
|
||||
if (queryLength != null)
|
||||
minQueryLength = queryLength;
|
||||
|
||||
int maxQueryLength = DEFAULT_MAXQUERYLENGTH;
|
||||
Integer overriddenMaxQueryLength = params.getInt(MAXQUERYLENGTH);
|
||||
if (overriddenMaxQueryLength != null)
|
||||
maxQueryLength = overriddenMaxQueryLength;
|
||||
|
||||
float maxQueryFrequency = DEFAULT_MAXQUERYFREQUENCY;
|
||||
Float queryFreq = params.getFloat(MAXQUERYFREQUENCY);
|
||||
|
@ -158,6 +167,7 @@ public class DirectSolrSpellChecker extends SolrSpellChecker {
|
|||
checker.setThresholdFrequency(minThreshold);
|
||||
checker.setMaxInspections(maxInspections);
|
||||
checker.setMinQueryLength(minQueryLength);
|
||||
checker.setMaxQueryLength(maxQueryLength);
|
||||
checker.setMaxQueryFrequency(maxQueryFrequency);
|
||||
checker.setLowerCaseTerms(false);
|
||||
|
||||
|
|
Binary file not shown.
|
@ -65,6 +65,10 @@ openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem testurp_v2.jar.bin | open
|
|||
|
||||
P/ptFXRvQMd4oKPvadSpd+A9ffwY3gcex5GVFVRy3df0/OF8XT5my8rQz7FZva+2ORbWxdXS8NKwNrbPVHLGXw==
|
||||
|
||||
openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem expressible.jar.bin | openssl enc -base64 | openssl enc -base64 | tr -d \\n | sed
|
||||
|
||||
ZOT11arAiPmPZYOHzqodiNnxO9pRyRozWZEBX8XGjU1/HJptFnZK+DI7eXnUtbNaMcbXE2Ze8hh4M/eGyhY8BQ==
|
||||
|
||||
====================sha512====================
|
||||
|
||||
openssl dgst -sha512 runtimelibs.jar.bin
|
||||
|
@ -95,6 +99,11 @@ openssl dgst -sha512 testurp_v1.jar.bin
|
|||
openssl dgst -sha512 testurp_v2.jar.bin
|
||||
|
||||
5c4c0c454a032916e48a1c14a0fecbd6658658a66aedec5168b7222f2e3c0c63fbe09637238a9325ce2e95a2c8521834397a97701ead46c681aa20c9fccb6654
|
||||
|
||||
openssl dgst -sha512 expressible.jar.bin
|
||||
|
||||
3474a1414c8329c71ef5db2d3eb6e870363bdd7224a836aab561dccf5e8bcee4974ac799e72398c7e0b0c01972bab1c7454c8a4e791a8865bb676c0440627388
|
||||
|
||||
=============sha256============================
|
||||
|
||||
openssl dgst -sha256 runtimelibs.jar.bin
|
||||
|
|
|
@ -564,7 +564,7 @@ public class OverseerCollectionConfigSetProcessorTest extends SolrTestCaseJ4 {
|
|||
QueueEvent qe = new QueueEvent("id", Utils.toJSON(props), null){
|
||||
@Override
|
||||
public void setBytes(byte[] bytes) {
|
||||
lastProcessMessageResult = SolrResponse.deserialize( bytes);
|
||||
lastProcessMessageResult = OverseerSolrResponse.deserialize(bytes);
|
||||
}
|
||||
};
|
||||
queue.add(qe);
|
||||
|
|
|
@ -0,0 +1,79 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.cloud;
|
||||
|
||||
import org.apache.solr.SolrTestCaseJ4;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.util.NamedList;
|
||||
import org.apache.solr.common.util.SimpleOrderedMap;
|
||||
|
||||
public class OverseerSolrResponseTest extends SolrTestCaseJ4 {
|
||||
|
||||
public void testEmpty() {
|
||||
assertSerializeDeserialize(new NamedList<Object>());
|
||||
}
|
||||
|
||||
public void testWithSingleObject() {
|
||||
NamedList<Object> responseNl = new NamedList<>();
|
||||
responseNl.add("foo", "bar");
|
||||
assertSerializeDeserialize(responseNl);
|
||||
}
|
||||
|
||||
public void testWithMultipleObject() {
|
||||
NamedList<Object> responseNl = new NamedList<>();
|
||||
responseNl.add("foo", "bar");
|
||||
responseNl.add("foobar", "foo");
|
||||
assertSerializeDeserialize(responseNl);
|
||||
}
|
||||
|
||||
public void testRepeatedKeys() {
|
||||
NamedList<Object> responseNl = new NamedList<>();
|
||||
responseNl.add("foo", "bar");
|
||||
responseNl.add("foo", "zoo");
|
||||
assertSerializeDeserialize(responseNl);
|
||||
}
|
||||
|
||||
public void testNested() {
|
||||
NamedList<Object> responseNl = new NamedList<>();
|
||||
NamedList<Object> response2 = new NamedList<>();
|
||||
response2.add("foo", "bar");
|
||||
responseNl.add("foo", response2);
|
||||
assertSerializeDeserialize(responseNl);
|
||||
}
|
||||
|
||||
public void testException() {
|
||||
NamedList<Object> responseNl = new NamedList<>();
|
||||
SolrException e = new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Foo");
|
||||
SimpleOrderedMap<Object> exceptionNl = new SimpleOrderedMap<>();
|
||||
exceptionNl.add("msg", e.getMessage());
|
||||
exceptionNl.add("rspCode", e.code());
|
||||
responseNl.add("exception", exceptionNl);
|
||||
OverseerSolrResponse deserialized = OverseerSolrResponse.deserialize(OverseerSolrResponse.serialize(new OverseerSolrResponse(responseNl)));
|
||||
assertNotNull("Expecting an exception", deserialized.getException());
|
||||
assertEquals("Unexpected exception type in deserialized response", SolrException.class, deserialized.getException().getClass());
|
||||
assertEquals("Unexpected exception code in deserialized response", e.code(), ((SolrException)deserialized.getException()).code());
|
||||
assertEquals("Unexpected exception message in deserialized response", e.getMessage(), deserialized.getException().getMessage());
|
||||
}
|
||||
|
||||
private void assertSerializeDeserialize(NamedList<Object> content) {
|
||||
OverseerSolrResponse response = new OverseerSolrResponse(content);
|
||||
byte[] serialized = OverseerSolrResponse.serialize(response);
|
||||
OverseerSolrResponse deserialized = OverseerSolrResponse.deserialize(serialized);
|
||||
assertEquals("Deserialized response is different than original", response.getResponse(), deserialized.getResponse());
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.cloud;
|
||||
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
public class OverseerSolrResponseUnsafeSerializationTest extends OverseerSolrResponseTest {
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() {
|
||||
System.setProperty("solr.useUnsafeOverseerResponse", "true");
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() {
|
||||
System.clearProperty("solr.useUnsafeOverseerResponse");
|
||||
}
|
||||
|
||||
|
||||
public void testUnsafeSerializationToggles() {
|
||||
assertToggles("true", true, true);
|
||||
assertToggles("deserialization", false, true);
|
||||
assertToggles(null, false, false); // By default, don't use unsafe
|
||||
assertToggles("foo", false, false);
|
||||
assertToggles("false", false, false);
|
||||
assertToggles("serialization", false, false); // This is not an option
|
||||
}
|
||||
|
||||
private void assertToggles(String propertyValue, boolean serializationEnabled, boolean deserializationEnabled) {
|
||||
String previousValue = System.getProperty("solr.useUnsafeOverseerResponse");
|
||||
try {
|
||||
if (propertyValue == null) {
|
||||
System.clearProperty("solr.useUnsafeOverseerResponse");
|
||||
} else {
|
||||
System.setProperty("solr.useUnsafeOverseerResponse", propertyValue);
|
||||
}
|
||||
assertEquals("Unexpected serialization toggle for value: " + propertyValue, serializationEnabled, OverseerSolrResponse.useUnsafeSerialization());
|
||||
assertEquals("Unexpected serialization toggle for value: " + propertyValue, deserializationEnabled, OverseerSolrResponse.useUnsafeDeserialization());
|
||||
} finally {
|
||||
if (previousValue != null) {
|
||||
System.setProperty("solr.useUnsafeOverseerResponse", previousValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -16,12 +16,11 @@
|
|||
*/
|
||||
package org.apache.solr.cloud;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.solr.client.solrj.SolrResponse;
|
||||
import org.apache.solr.client.solrj.response.SolrResponseBase;
|
||||
import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler;
|
||||
import org.apache.solr.common.cloud.ZkStateReader;
|
||||
import org.apache.solr.common.params.CollectionAdminParams;
|
||||
|
@ -86,7 +85,7 @@ public class OverseerTaskQueueTest extends DistributedQueueTest {
|
|||
}
|
||||
}
|
||||
assertNotNull("Didn't find event with requestid " + requestId2, requestId2Event);
|
||||
requestId2Event.setBytes(SolrResponse.serializable(new SolrResponseBase()));
|
||||
requestId2Event.setBytes("foo bar".getBytes(StandardCharsets.UTF_8));
|
||||
tq.remove(requestId2Event);
|
||||
|
||||
// Make sure this call to check if requestId exists doesn't barf with Json parse exception
|
||||
|
|
|
@ -171,7 +171,7 @@ public class TestPostingsSolrHighlighter extends SolrTestCaseJ4 {
|
|||
assertU(commit());
|
||||
assertQ("html escaped",
|
||||
req("q", "text:document", "sort", "id asc", "hl", "true", "hl.encoder", "html"),
|
||||
"//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='<em>Document</em> one has a first <i>sentence</i>.'");
|
||||
"//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='<em>Document</em> one has a first <i>sentence</i>.'");
|
||||
}
|
||||
|
||||
public void testWildcard() {
|
||||
|
|
|
@ -274,7 +274,7 @@ public class TestUnifiedSolrHighlighter extends SolrTestCaseJ4 {
|
|||
assertU(commit());
|
||||
assertQ("html escaped",
|
||||
req("q", "text:document", "sort", "id asc", "hl", "true", "hl.encoder", "html"),
|
||||
"//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='<em>Document</em> one has a first <i>sentence</i>.'");
|
||||
"//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='<em>Document</em> one has a first <i>sentence</i>.'");
|
||||
}
|
||||
|
||||
public void testRangeQuery() {
|
||||
|
|
|
@ -92,6 +92,7 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
String FILE3 = "/mypkg/runtimelibs_v3.jar";
|
||||
String URP1 = "/mypkg/testurpv1.jar";
|
||||
String URP2 = "/mypkg/testurpv2.jar";
|
||||
String EXPR1 = "/mypkg/expressible.jar";
|
||||
String COLLECTION_NAME = "testPluginLoadingColl";
|
||||
byte[] derFile = readFile("cryptokeys/pub_key512.der");
|
||||
cluster.getZkClient().makePath("/keys/exe", true);
|
||||
|
@ -102,10 +103,13 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
postFileAndWait(cluster, "runtimecode/testurp_v1.jar.bin", URP1,
|
||||
"h6UmMzuPqu4hQFGLBMJh/6kDSEXpJlgLsQDXx0KuxXWkV5giilRP57K3towiJRh2J+rqihqIghNCi3YgzgUnWQ==");
|
||||
|
||||
postFileAndWait(cluster, "runtimecode/expressible.jar.bin", EXPR1,
|
||||
"ZOT11arAiPmPZYOHzqodiNnxO9pRyRozWZEBX8XGjU1/HJptFnZK+DI7eXnUtbNaMcbXE2Ze8hh4M/eGyhY8BQ==");
|
||||
|
||||
Package.AddVersion add = new Package.AddVersion();
|
||||
add.version = "1.0";
|
||||
add.pkg = "mypkg";
|
||||
add.files = Arrays.asList(new String[]{FILE1, URP1});
|
||||
add.files = Arrays.asList(new String[]{FILE1, URP1, EXPR1});
|
||||
V2Request req = new V2Request.Builder("/cluster/package")
|
||||
.forceV2(true)
|
||||
.withMethod(SolrRequest.METHOD.POST)
|
||||
|
@ -134,7 +138,8 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
"'create-requesthandler' : { 'name' : '/runtime', 'class': 'mypkg:org.apache.solr.core.RuntimeLibReqHandler' }," +
|
||||
"'create-searchcomponent' : { 'name' : 'get', 'class': 'mypkg:org.apache.solr.core.RuntimeLibSearchComponent' }," +
|
||||
"'create-queryResponseWriter' : { 'name' : 'json1', 'class': 'mypkg:org.apache.solr.core.RuntimeLibResponseWriter' }" +
|
||||
"'create-updateProcessor' : { 'name' : 'myurp', 'class': 'mypkg:org.apache.solr.update.TestVersionedURP' }" +
|
||||
"'create-updateProcessor' : { 'name' : 'myurp', 'class': 'mypkg:org.apache.solr.update.TestVersionedURP' }," +
|
||||
" create-expressible: {name: mincopy , class: 'mypkg:org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric'}" +
|
||||
"}";
|
||||
cluster.getSolrClient().request(new ConfigRequest(payload) {
|
||||
@Override
|
||||
|
@ -159,6 +164,20 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
COLLECTION_NAME, "updateProcessor", "myurp",
|
||||
"mypkg", "1.0" );
|
||||
|
||||
verifyCmponent(cluster.getSolrClient(),
|
||||
COLLECTION_NAME, "expressible", "mincopy",
|
||||
"mypkg", "1.0" );
|
||||
|
||||
TestDistribPackageStore.assertResponseValues(10,
|
||||
cluster.getSolrClient() ,
|
||||
new GenericSolrRequest(SolrRequest.METHOD.GET,
|
||||
"/stream", new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
|
||||
WT, JAVABIN,
|
||||
"action", "plugins"
|
||||
))), Utils.makeMap(
|
||||
":plugins:mincopy", "org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric"
|
||||
));
|
||||
|
||||
UpdateRequest ur = new UpdateRequest();
|
||||
ur.add(new SolrInputDocument("id", "1"));
|
||||
ur.setParam("processor", "myurp");
|
||||
|
@ -192,7 +211,7 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
"P/ptFXRvQMd4oKPvadSpd+A9ffwY3gcex5GVFVRy3df0/OF8XT5my8rQz7FZva+2ORbWxdXS8NKwNrbPVHLGXw==");
|
||||
//add the version using package API
|
||||
add.version = "1.1";
|
||||
add.files = Arrays.asList(new String[]{FILE2,URP2});
|
||||
add.files = Arrays.asList(new String[]{FILE2,URP2, EXPR1});
|
||||
req.process(cluster.getSolrClient());
|
||||
|
||||
verifyCmponent(cluster.getSolrClient(),
|
||||
|
@ -222,7 +241,7 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
"a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q==");
|
||||
|
||||
add.version = "2.1";
|
||||
add.files = Arrays.asList(new String[]{FILE3, URP2});
|
||||
add.files = Arrays.asList(new String[]{FILE3, URP2, EXPR1});
|
||||
req.process(cluster.getSolrClient());
|
||||
|
||||
//now let's verify that the classes are updated
|
||||
|
@ -304,7 +323,7 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
}.process(cluster.getSolrClient()) ;
|
||||
|
||||
add.version = "2.1";
|
||||
add.files = Arrays.asList(new String[]{FILE3, URP2});
|
||||
add.files = Arrays.asList(new String[]{FILE3, URP2, EXPR1});
|
||||
req.process(cluster.getSolrClient());
|
||||
|
||||
//the collections mypkg is set to use version 1.1
|
||||
|
@ -368,6 +387,21 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
}
|
||||
|
||||
}
|
||||
/* new V2Request.Builder("/c/"+COLLECTIONORALIAS+"/config").withMethod(SolrRequest.METHOD.POST)
|
||||
.withPayload("{add-expressible: {name: mincopy , class: org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric}}")
|
||||
.build().process(cluster.getSolrClient());
|
||||
|
||||
ModifiableSolrParams _params = new ModifiableSolrParams();
|
||||
QueryRequest query = new QueryRequest(new MapSolrParams("action","plugins", "collection", COLLECTIONORALIAS, "wt", "javabin"));
|
||||
query.setPath("/stream");
|
||||
NamedList<Object> rsp = cluster.getSolrClient().request(query);
|
||||
assertEquals("org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric", rsp._getStr("/plugins/mincopy", null));
|
||||
_params = new ModifiableSolrParams();
|
||||
query = new QueryRequest(new MapSolrParams("componentName","mincopy", "meta" ,"true", "collection", COLLECTIONORALIAS, "wt", "javabin"));
|
||||
query.setPath("/config/expressible");
|
||||
rsp = cluster.getSolrClient().request(query);
|
||||
|
||||
System.out.println();*/
|
||||
|
||||
private void executeReq(String uri, JettySolrRunner jetty, Utils.InputStreamConsumer parser, Map expected) throws Exception {
|
||||
try(HttpSolrClient client = (HttpSolrClient) jetty.newClient()){
|
||||
|
@ -390,7 +424,6 @@ public class TestPackages extends SolrCloudTestCase {
|
|||
"componentName", componentName,
|
||||
"meta", "true"));
|
||||
|
||||
String s = "queryResponseWriter";
|
||||
GenericSolrRequest req1 = new GenericSolrRequest(SolrRequest.METHOD.GET,
|
||||
"/config/" + componentType, params);
|
||||
TestDistribPackageStore.assertResponseValues(10,
|
||||
|
|
|
@ -62,20 +62,25 @@ public class DirectSolrSpellCheckerTest extends SolrTestCaseJ4 {
|
|||
checker.init(spellchecker, core);
|
||||
|
||||
h.getCore().withSearcher(searcher -> {
|
||||
|
||||
// check that 'fob' is corrected to 'foo'
|
||||
Collection<Token> tokens = queryConverter.convert("fob");
|
||||
SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader());
|
||||
SpellingResult result = checker.getSuggestions(spellOpts);
|
||||
assertTrue("result is null and it shouldn't be", result != null);
|
||||
assertNotNull("result shouldn't be null", result);
|
||||
Map<String, Integer> suggestions = result.get(tokens.iterator().next());
|
||||
assertFalse("suggestions shouldn't be empty", suggestions.isEmpty());
|
||||
Map.Entry<String, Integer> entry = suggestions.entrySet().iterator().next();
|
||||
assertTrue(entry.getKey() + " is not equal to " + "foo", entry.getKey().equals("foo") == true);
|
||||
assertEquals("foo", entry.getKey());
|
||||
assertFalse(entry.getValue() + " equals: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO);
|
||||
|
||||
// check that 'super' is *not* corrected
|
||||
spellOpts.tokens = queryConverter.convert("super");
|
||||
result = checker.getSuggestions(spellOpts);
|
||||
assertTrue("result is null and it shouldn't be", result != null);
|
||||
suggestions = result.get(tokens.iterator().next());
|
||||
assertTrue("suggestions is not null and it should be", suggestions == null);
|
||||
assertNotNull("result shouldn't be null", result);
|
||||
suggestions = result.get(spellOpts.tokens.iterator().next());
|
||||
assertNotNull("suggestions shouldn't be null", suggestions);
|
||||
assertTrue("suggestions should be empty", suggestions.isEmpty());
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
@ -88,6 +93,46 @@ public class DirectSolrSpellCheckerTest extends SolrTestCaseJ4 {
|
|||
"//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='fox']/arr[@name='suggestion']/lst/int[@name='freq']=2",
|
||||
"//lst[@name='spellcheck']/bool[@name='correctlySpelled']='true'"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMaxQueryLength() throws Exception {
|
||||
testMaxQueryLength(true);
|
||||
testMaxQueryLength(false);
|
||||
}
|
||||
|
||||
private void testMaxQueryLength(Boolean limitQueryLength) throws Exception {
|
||||
|
||||
DirectSolrSpellChecker checker = new DirectSolrSpellChecker();
|
||||
NamedList<Object> spellchecker = new NamedList<>();
|
||||
spellchecker.add("classname", DirectSolrSpellChecker.class.getName());
|
||||
spellchecker.add(SolrSpellChecker.FIELD, "teststop");
|
||||
spellchecker.add(DirectSolrSpellChecker.MINQUERYLENGTH, 2);
|
||||
|
||||
// demonstrate that "anothar" is not corrected when maxQueryLength is set to a small number
|
||||
if (limitQueryLength) spellchecker.add(DirectSolrSpellChecker.MAXQUERYLENGTH, 4);
|
||||
|
||||
SolrCore core = h.getCore();
|
||||
checker.init(spellchecker, core);
|
||||
|
||||
h.getCore().withSearcher(searcher -> {
|
||||
Collection<Token> tokens = queryConverter.convert("anothar");
|
||||
SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader());
|
||||
SpellingResult result = checker.getSuggestions(spellOpts);
|
||||
assertNotNull("result shouldn't be null", result);
|
||||
Map<String, Integer> suggestions = result.get(tokens.iterator().next());
|
||||
assertNotNull("suggestions shouldn't be null", suggestions);
|
||||
|
||||
if (limitQueryLength) {
|
||||
assertTrue("suggestions should be empty", suggestions.isEmpty());
|
||||
} else {
|
||||
assertFalse("suggestions shouldn't be empty", suggestions.isEmpty());
|
||||
Map.Entry<String, Integer> entry = suggestions.entrySet().iterator().next();
|
||||
assertEquals("another", entry.getKey());
|
||||
}
|
||||
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
<?xml version="1.0"?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
|
||||
|
||||
<!-- =========================================================== -->
|
||||
<!-- Configure Request Log -->
|
||||
<!-- =========================================================== -->
|
||||
|
||||
<Configure id="Server" class="org.eclipse.jetty.server.Server">
|
||||
<Set name="RequestLog">
|
||||
<New id="RequestLog" class="org.eclipse.jetty.server.CustomRequestLog">
|
||||
<!-- Writer -->
|
||||
<Arg>
|
||||
<New class="org.eclipse.jetty.server.AsyncRequestLogWriter">
|
||||
<Arg><Property name="solr.log.dir" default="logs"/>/yyyy_mm_dd.request.log</Arg>
|
||||
<Set name="filenameDateFormat">yyyy_MM_dd</Set>
|
||||
<Set name="retainDays">90</Set>
|
||||
<Set name="append">true</Set>
|
||||
<Set name="timeZone">UTC</Set>
|
||||
</New>
|
||||
</Arg>
|
||||
|
||||
<!-- Format String -->
|
||||
<Arg><Get class="org.eclipse.jetty.server.CustomRequestLog" name="NCSA_FORMAT"/></Arg>
|
||||
</New>
|
||||
</Set>
|
||||
</Configure>
|
|
@ -157,7 +157,25 @@
|
|||
<Set name="handlers">
|
||||
<Array type="org.eclipse.jetty.server.Handler">
|
||||
<Item>
|
||||
<New id="Contexts" class="org.eclipse.jetty.server.handler.ContextHandlerCollection"/>
|
||||
<New class="org.eclipse.jetty.server.handler.InetAccessHandler">
|
||||
<Call name="include">
|
||||
<Arg>
|
||||
<Call class="org.eclipse.jetty.util.StringUtil" name="csvSplit">
|
||||
<Arg><Property name="solr.jetty.inetaccess.includes" default=""/></Arg>
|
||||
</Call>
|
||||
</Arg>
|
||||
</Call>
|
||||
<Call name="exclude">
|
||||
<Arg>
|
||||
<Call class="org.eclipse.jetty.util.StringUtil" name="csvSplit">
|
||||
<Arg><Property name="solr.jetty.inetaccess.excludes" default=""/></Arg>
|
||||
</Call>
|
||||
</Arg>
|
||||
</Call>
|
||||
<Set name="handler">
|
||||
<New id="Contexts" class="org.eclipse.jetty.server.handler.ContextHandlerCollection"/>
|
||||
</Set>
|
||||
</New>
|
||||
</Item>
|
||||
<Item>
|
||||
<New id="InstrumentedHandler" class="com.codahale.metrics.jetty9.InstrumentedHandler">
|
||||
|
@ -167,9 +185,6 @@
|
|||
</Set>
|
||||
</New>
|
||||
</Item>
|
||||
<Item>
|
||||
<New id="RequestLog" class="org.eclipse.jetty.server.handler.RequestLogHandler"/>
|
||||
</Item>
|
||||
</Array>
|
||||
</Set>
|
||||
</New>
|
||||
|
@ -182,33 +197,6 @@
|
|||
<Set name="handler">
|
||||
<Ref id="RewriteHandler"/>
|
||||
</Set>
|
||||
|
||||
<!-- =========================================================== -->
|
||||
<!-- Configure Request Log -->
|
||||
<!-- =========================================================== -->
|
||||
<!--
|
||||
<Ref id="Handlers">
|
||||
<Call name="addHandler">
|
||||
<Arg>
|
||||
<New id="RequestLog" class="org.eclipse.jetty.server.handler.RequestLogHandler">
|
||||
<Set name="requestLog">
|
||||
<New id="RequestLogImpl" class="org.eclipse.jetty.server.NCSARequestLog">
|
||||
<Set name="filename">
|
||||
logs/request.yyyy_mm_dd.log
|
||||
</Set>
|
||||
<Set name="filenameDateFormat">yyyy_MM_dd</Set>
|
||||
<Set name="retainDays">90</Set>
|
||||
<Set name="append">true</Set>
|
||||
<Set name="extended">false</Set>
|
||||
<Set name="logCookies">false</Set>
|
||||
<Set name="LogTimeZone">UTC</Set>
|
||||
</New>
|
||||
</Set>
|
||||
</New>
|
||||
</Arg>
|
||||
</Call>
|
||||
</Ref>
|
||||
-->
|
||||
|
||||
<!-- =========================================================== -->
|
||||
<!-- extra options -->
|
||||
|
|
|
@ -15,8 +15,10 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Policy file for solr tests. Please keep minimal and avoid wildcards.
|
||||
// Policy file for solr. Please keep minimal and avoid wildcards.
|
||||
|
||||
// permissions needed for tests to pass, based on properties set by the build system
|
||||
// NOTE: if the property is not set, the permission entry is ignored.
|
||||
grant {
|
||||
// contain read access to only what we need:
|
||||
// 3rd party jar resources (where symlinks are not supported), test-files/ resources
|
||||
|
@ -163,3 +165,39 @@ grant {
|
|||
// used by solr to create sandboxes (e.g. script execution)
|
||||
permission java.security.SecurityPermission "createAccessControlContext";
|
||||
};
|
||||
|
||||
// additional permissions based on system properties set by /bin/solr
|
||||
// NOTE: if the property is not set, the permission entry is ignored.
|
||||
grant {
|
||||
permission java.io.FilePermission "${hadoop.security.credential.provider.path}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${hadoop.security.credential.provider.path}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.jetty.keystore}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.jetty.keystore}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.jetty.truststore}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.jetty.truststore}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.install.dir}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.install.dir}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${jetty.home}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${jetty.home}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.solr.home}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.solr.home}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.data.home}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.data.home}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.default.confdir}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.default.confdir}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${solr.log.dir}", "read,write,delete,readlink";
|
||||
permission java.io.FilePermission "${solr.log.dir}${/}-", "read,write,delete,readlink";
|
||||
|
||||
permission java.io.FilePermission "${log4j.configurationFile}", "read,write,delete,readlink";
|
||||
|
||||
// expanded to a wildcard if set, allows all networking everywhere
|
||||
permission java.net.SocketPermission "${solr.internal.network.permission}", "accept,listen,connect,resolve";
|
||||
};
|
|
@ -0,0 +1,24 @@
|
|||
#
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# command-line security properties file
|
||||
#
|
||||
# By default, when enabling security manager, DNS lookups are cached indefinitely,
|
||||
# as protection against DNS spoofing. We set this back to the default (non-security-manager)
|
||||
# value of 30 seconds, to prevent surprising behavior (e.g. nodes in cloud environments without
|
||||
# static IP addresses). Users concerned about DNS spoofing should instead follow best practices:
|
||||
# populating solr.shardsWhitelist, enabling TLS, etc.
|
||||
networkaddress.cache.ttl=30
|
|
@ -0,0 +1,9 @@
|
|||
#
|
||||
# Request Log module
|
||||
#
|
||||
|
||||
[depend]
|
||||
server
|
||||
|
||||
[xml]
|
||||
etc/jetty-requestlog.xml
|
|
@ -234,7 +234,7 @@
|
|||
<attribute key="ivy-opennlp-version" value="${ivyversions./org.apache.opennlp/opennlp-tools}" />
|
||||
<attribute key="ivy-tika-version" value="${ivyversions.org.apache.tika.version}" />
|
||||
<attribute key="ivy-velocity-tools-version" value="${ivyversions.org.apache.velocity.tools.version}" />
|
||||
<attribute key="ivy-zookeeper-version" value="${ivyversions./org.apache.zookeeper/zookeeper}" />
|
||||
<attribute key="ivy-zookeeper-version" value="${ivyversions.org.apache.zookeeper.version}" />
|
||||
</asciidoctor:convert>
|
||||
</sequential>
|
||||
</macrodef>
|
||||
|
|
|
@ -84,7 +84,7 @@ solr-attributes: &solr-attributes-ref
|
|||
ivy-opennlp-version: "${ivyversions./org.apache.opennlp/opennlp-tools}"
|
||||
ivy-tika-version: "${ivyversions.org.apache.tika.version}"
|
||||
ivy-velocity-tools-version: "${ivyversions.org.apache.velocity.tools.version}"
|
||||
ivy-zookeeper-version: "${ivyversions./org.apache.zookeeper/zookeeper}"
|
||||
ivy-zookeeper-version: "${ivyversions.org.apache.zookeeper.version}"
|
||||
|
||||
asciidoctor:
|
||||
safe: 0
|
||||
|
|
|
@ -231,7 +231,7 @@ This value is only checked at the time of:
|
|||
. Document collection
|
||||
. Doc Values reading
|
||||
|
||||
As this check is periodically performed, the actual time for which a request can be processed before it is aborted would be marginally greater than or equal to the value of `timeAllowed`. If the request consumes more time in other stages, custom components, etc., this parameter is not expected to abort the request. Regular search, JSON Facet and Analytics handler abandon requests in according to this parameter.
|
||||
As this check is periodically performed, the actual time for which a request can be processed before it is aborted would be marginally greater than or equal to the value of `timeAllowed`. If the request consumes more time in other stages, custom components, etc., this parameter is not expected to abort the request. Regular search, JSON Facet and the Analytics component abandon requests in accordance with this parameter.
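As a rough illustration of how a client opts in to this limit, here is a minimal SolrJ sketch; the collection name (`techproducts`), URL, and the 500 ms budget are placeholder assumptions, not values taken from this page:

[source,java]
----
import java.io.IOException;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class TimeAllowedExample {
  public static void main(String[] args) throws SolrServerException, IOException {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
      SolrQuery query = new SolrQuery("*:*");
      query.setTimeAllowed(500); // stop the checked stages after roughly 500 ms
      QueryResponse rsp = client.query(query);
      // if the limit was reached, the response header carries a partialResults flag
      System.out.println("partialResults: " + rsp.getHeader().get("partialResults"));
    }
  }
}
----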
|
||||
|
||||
== segmentTerminateEarly Parameter
|
||||
|
||||
|
|
|
@ -101,10 +101,10 @@ Use `false` for field types with query analyzers including filters that can matc
|
|||
|
||||
[[docvaluesformat]]
|
||||
`docValuesFormat`::
|
||||
Defines a custom `DocValuesFormat` to use for fields of this type. This requires that a schema-aware codec, such as the `SchemaCodecFactory` has been configured in `solrconfig.xml`.
|
||||
Defines a custom `DocValuesFormat` to use for fields of this type. This requires that a schema-aware codec, such as the `SchemaCodecFactory`, has been configured in `solrconfig.xml`.
|
||||
|
||||
`postingsFormat`::
|
||||
Defines a custom `PostingsFormat` to use for fields of this type. This requires that a schema-aware codec, such as the `SchemaCodecFactory` has been configured in `solrconfig.xml`.
|
||||
Defines a custom `PostingsFormat` to use for fields of this type. This requires that a schema-aware codec, such as the `SchemaCodecFactory`, has been configured in `solrconfig.xml`.
|
||||
|
||||
|
||||
[NOTE]
|
||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 60 KiB After Width: | Height: | Size: 150 KiB |
|
@ -742,7 +742,7 @@ include::{example-source-dir}JsonRequestApiTest.java[tag=solrj-json-nested-cat-f
|
|||
|
||||
And the response will look something like:
|
||||
|
||||
[source,java]
|
||||
[source,json]
|
||||
----
|
||||
"facets":{
|
||||
"count":32,
|
||||
|
@ -760,7 +760,7 @@ And the response will look something like:
|
|||
"top_manufacturer":{
|
||||
"buckets":[{
|
||||
"val":"boa",
|
||||
"count":1}]}},
|
||||
"count":1}]}}]}}
|
||||
----
|
||||
|
||||
|
||||
|
@ -805,8 +805,6 @@ include::{example-source-dir}JsonRequestApiTest.java[tag=solrj-json-nested-cat-f
|
|||
====
|
||||
--
|
||||
|
||||
|
||||
|
||||
In some situations the desired `sort` may be an aggregation function that is very costly to compute for every bucket. A `prelim_sort` option can be used to specify an approximation of the `sort`, for initially ranking the buckets to determine the top candidates (based on the `limit` and `overrequest`). Only after the top candidate buckets have been refined, will the actual `sort` be used.
|
||||
|
||||
[source,java]
|
||||
|
|
|
@ -22,8 +22,9 @@ Solr provides two approaches to backing up and restoring Solr cores or collectio
|
|||
|
||||
[NOTE]
|
||||
====
|
||||
Backups (and Snapshots) capture data that has been <<near-real-time-searching.adoc#commits-and-searching,_hard_ commited>>. Commiting changes using `softCommit=true` may result in changes that are visible in search results but not included in subsequent backups. Likewise, committing changes using `openSearcher=false` may result in changes committed to disk and included in subsequnt backups, even if they are not currently visible in search results.
|
||||
Backups (and Snapshots) capture data that has been <<near-real-time-searching.adoc#commits-and-searching,_hard_ committed>>. Committing changes using `softCommit=true` may result in changes that are visible in search results but not included in subsequent backups.
|
||||
|
||||
Likewise, committing changes using `openSearcher=false` may result in changes committed to disk and included in subsequent backups, even if they are not currently visible in search results.
|
||||
====
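A minimal SolrJ sketch of the pattern the note implies -- issue an explicit hard commit, then take the backup; the ZooKeeper address, collection name, backup name, and location below are placeholders, not values from this page:

[source,java]
----
import java.util.Collections;
import java.util.Optional;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

public class BackupAfterHardCommit {
  public static void main(String[] args) throws Exception {
    try (CloudSolrClient client = new CloudSolrClient.Builder(
        Collections.singletonList("localhost:2181"), Optional.empty()).build()) {
      // hard commit: waitFlush=true, waitSearcher=true, softCommit=false
      client.commit("techproducts", true, true, false);
      // then trigger the collection backup
      CollectionAdminRequest.backupCollection("techproducts", "techproducts-backup")
          .setLocation("/var/solr/backups")
          .process(client);
    }
  }
}
----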
|
||||
|
||||
== SolrCloud Backups
|
||||
|
|
|
@ -204,7 +204,8 @@ A list of queries that *must* appear in matching documents. However, unlike `mus
|
|||
|
||||
== Boost Query Parser
|
||||
|
||||
`BoostQParser` extends the `QParserPlugin` and creates a boosted query from the input value. The main value is any query to be "wrapped" and "boosted" -- only documents which match that query will match the final query produced by this parter. Parameter `b` is a <<function-queries.adoc#available-functions,function>> to be evaluted against each document that matches the original query, and the result of the function will be multiplied into into the final score for that document.
|
||||
`BoostQParser` extends the `QParserPlugin` and creates a boosted query from the input value. The main value is any query to be "wrapped" and "boosted" -- only documents which match that query will match the final query produced by this parser.
|
||||
Parameter `b` is a <<function-queries.adoc#available-functions,function>> to be evaluated against each document that matches the original query, and the result of the function will be multiplied into the final score for that document.
|
||||
|
||||
=== Boost Query Parser Examples
|
||||
|
||||
|
@ -233,9 +234,6 @@ q={!boost b=query($my_boost)}name:foo
|
|||
my_boost=category:electronics
|
||||
----
|
||||
|
||||
|
||||
|
||||
|
||||
[[other-collapsing]]
|
||||
== Collapsing Query Parser
|
||||
|
||||
|
|
|
@ -27,9 +27,13 @@ Included are parameters for defining if it should handle `/select` urls (for Sol
|
|||
`handleSelect` is for legacy back-compatibility; those new to Solr do not need to change anything about the way this is configured by default.
|
||||
====
|
||||
|
||||
The first configurable item is the `handleSelect` attribute on the `<requestDispatcher>` element itself. This attribute can be set to one of two values, either "true" or "false". It governs how Solr responds to requests such as `/select?qt=XXX`. The default value "false" will ignore requests to `/select` if a requestHandler is not explicitly registered with the name `/select`. A value of "true" will route query requests to the parser defined with the `qt` value if a requestHandler is not explicitly registered with the name `/select`.
|
||||
The first configurable item is the `handleSelect` attribute on the `<requestDispatcher>` element itself.
|
||||
This attribute can be set to one of two values, either "true" or "false".
|
||||
It governs how Solr responds to requests such as `/select?qt=XXX`.
|
||||
The default value "false" will ignore requests to `/select` if a request handler is not explicitly registered with the name `/select`.
|
||||
A value of "true" will route query requests to the parser defined with the `qt` value if a request handler is not explicitly registered with the name `/select`.
|
||||
|
||||
In recent versions of Solr, a `/select` requestHandler is defined by default, so a value of "false" will work fine. See the section <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#requesthandlers-and-searchcomponents-in-solrconfig,RequestHandlers and SearchComponents in SolrConfig>> for more information.
|
||||
In recent versions of Solr, a `/select` request handler is defined by default, so a value of "false" will work fine. See the section <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#requesthandlers-and-searchcomponents-in-solrconfig,RequestHandlers and SearchComponents in SolrConfig>> for more information.
|
||||
|
||||
[source,xml]
|
||||
----
|
||||
|
|
|
@ -58,7 +58,27 @@ Authorization makes sure that only users with the necessary roles/permissions ca
|
|||
|
||||
Audit logging will record an audit trail of incoming requests to your cluster, such as users being denied access to admin APIs. Learn more about audit logging and how to implement an audit logger plugin in the <<audit-logging.adoc#audit-logging,Audit Logging>> chapter.
|
||||
|
||||
== Request Logging
|
||||
|
||||
Solr can optionally log every incoming HTTP(s) request in the standard https://en.wikipedia.org/wiki/Common_Log_Format[`NCSA format`]. You can enable request logging by setting `SOLR_REQUESTLOG_ENABLED=true` via environment variable or in `solr.in.sh`/`solr.in.cmd`.
|
||||
|
||||
== Enable IP Access Control
|
||||
|
||||
Restrict network access to specific hosts by setting `SOLR_IP_WHITELIST`/`SOLR_IP_BLACKLIST` via environment variables or in `solr.in.sh`/`solr.in.cmd`.
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
# Allow IPv4/IPv6 localhost, the 192.168.0.x IPv4 network, and 2000:123:4:5:: IPv6 network.
|
||||
SOLR_IP_WHITELIST="127.0.0.1, [::1], 192.168.0.0/24, [2000:123:4:5::]/64"
|
||||
# Explicitly deny access to two problematic hosts.
|
||||
SOLR_IP_BLACKLIST="192.168.0.3, 192.168.0.4"
|
||||
----
|
||||
|
||||
== Securing ZooKeeper Traffic
|
||||
|
||||
ZooKeeper is a central and important part of a SolrCloud cluster and understanding how to secure
|
||||
its content is covered in the <<zookeeper-access-control.adoc#zookeeper-access-control,ZooKeeper Access Control>> page.
|
||||
|
||||
== Enable Security Manager
|
||||
|
||||
Solr can run in a Java Security Manager sandbox by setting `SOLR_SECURITY_MANAGER_ENABLED=true` via environment variable or in `solr.in.sh`/`solr.in.cmd`. This feature is incompatible with Hadoop.
|
||||
|
|
|
@ -881,28 +881,47 @@ Examples of this command:
|
|||
`bin/solr zk mkroot /solr/production`
|
||||
|
||||
|
||||
== Exporting Data to a File
|
||||
== Exporting Documents to a File
|
||||
|
||||
The `export` command will allow you to export documents from a collection in either JSON or Javabin format.
|
||||
All documents can be exported, or only those that match a query.
|
||||
|
||||
`bin/solr export [options]`
|
||||
|
||||
`bin/solr export -help`
|
||||
|
||||
The `bin/solr export` command takes the following parameters:
|
||||
|
||||
`format`::
|
||||
The file format of the export, `jsonl` (default) or `javabin`. Choosing `javabin` exports to a file with extension `.javabin` which is the native Solr format. This is compact and faster to import.
|
||||
|
||||
`out`::
|
||||
The file name of the export.
|
||||
|
||||
`query`::
|
||||
A custom query. The default is `\*:*` which will export all documents.
|
||||
|
||||
`fields`::
|
||||
A comma separated list of fields to be exported.
|
||||
|
||||
`limit`::
|
||||
The number of documents to export. The default is `100`. The value `-1` will export all documents.
|
||||
|
||||
*Example*
|
||||
|
||||
Export all documents from a collection `gettingstarted` to a file called `gettingstarted.json`
|
||||
Export all documents from a collection `gettingstarted`:
|
||||
|
||||
`bin/solr export -url http://localhost:8983/solr/gettingstarted limit -1`
|
||||
[source,bash]
|
||||
bin/solr export -url http://localhost:8983/solr/gettingstarted limit -1
|
||||
|
||||
*Arguments*
|
||||
=== Importing Documents to a Collection
|
||||
|
||||
* `format` : `jsonl` (default) or `javabin`. `format=javabin` exports to a file with extension `.javabin` which is the native Solr format. This is compact & faster to import.
|
||||
* `out` : export file name
|
||||
* `query` : a custom query, default is `*:*`.
|
||||
* `fields`: a comma separated list of fields to be exported.
|
||||
* `limit` : number of documents, default is 100, send `-1` to import all the documents.
|
||||
Once you have exported documents in a file, you can use the <<updatehandlers-in-solrconfig.adoc#updatehandlers-in-solrconfig,/update request handler>> to import them to a new Solr collection.
|
||||
|
||||
=== Importing the Data to a Collection
|
||||
|
||||
*Example: importing the `jsonl` files*
|
||||
*Example: import `jsonl` files*
|
||||
|
||||
`curl -X POST -d @gettingstarted.json http://localhost:8983/solr/gettingstarted/update/json/docs?commit=true`
|
||||
|
||||
*Example: importing the `javabin` files*
|
||||
*Example: import `javabin` files*
|
||||
|
||||
`curl -X POST --header "Content-Type: application/javabin" --data-binary @gettingstarted.javabin http://localhost:8983/solr/gettingstarted/update?commit=true`
|
||||
|
|
|
@ -69,6 +69,7 @@ The `DirectSolrSpellChecker` uses terms from the Solr index without building a p
|
|||
<int name="minPrefix">1</int>
|
||||
<int name="maxInspections">5</int>
|
||||
<int name="minQueryLength">4</int>
|
||||
<int name="maxQueryLength">40</int>
|
||||
<float name="maxQueryFrequency">0.01</float>
|
||||
<float name="thresholdTokenFrequency">.01</float>
|
||||
</lst>
|
||||
|
@ -81,7 +82,7 @@ Many of the parameters relate to how this spell checker should query the index f
|
|||
|
||||
Because this spell checker is querying the main index, you may want to limit how often it queries the index to be sure to avoid any performance conflicts with user queries. The `accuracy` setting defines the threshold for a valid suggestion, while `maxEdits` defines the number of changes to the term to allow. Since most spelling mistakes are only 1 letter off, setting this to 1 will reduce the number of possible suggestions (the default, however, is 2); the value can only be 1 or 2. `minPrefix` defines the minimum number of characters the terms should share. Setting this to 1 means that the spelling suggestions will all start with the same letter, for example.
|
||||
|
||||
The `maxInspections` parameter defines the maximum number of possible matches to review before returning results; the default is 5. `minQueryLength` defines how many characters must be in the query before suggestions are provided; the default is 4.
|
||||
The `maxInspections` parameter defines the maximum number of possible matches to review before returning results; the default is 5. `minQueryLength` defines how many characters must be in the query before suggestions are provided; the default is 4. `maxQueryLength` enables the spell checker to skip over very long query terms, which can avoid expensive operations or exceptions. There is no limit to term length by default.
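These options map onto setters of Lucene's `DirectSpellChecker`, which `DirectSolrSpellChecker` configures at init time; the following is only a rough programmatic sketch of equivalent settings, not code taken from this patch:

[source,java]
----
import org.apache.lucene.search.spell.DirectSpellChecker;

public class SpellcheckerSettingsSketch {
  static DirectSpellChecker build() {
    DirectSpellChecker checker = new DirectSpellChecker();
    checker.setMaxEdits(1);              // allow a single edit per suggestion
    checker.setMaxInspections(5);        // candidates to review before returning results
    checker.setMinQueryLength(4);        // ignore very short query terms
    checker.setMaxQueryLength(40);       // skip very long query terms (the new option)
    checker.setMaxQueryFrequency(0.01f); // terms above this frequency are assumed correct
    return checker;
  }
}
----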
|
||||
|
||||
The spell checker first analyzes incoming query words by looking them up in the index. Only query words that are absent from the index, or too rare (below `maxQueryFrequency`), are considered misspelled and used for finding suggestions. Words more frequent than `maxQueryFrequency` bypass the spell checker unchanged. After suggestions for every misspelled word are found, they are filtered for sufficient frequency, with `thresholdTokenFrequency` as the boundary value. These parameters (`maxQueryFrequency` and `thresholdTokenFrequency`) can be a percentage (such as .01, or 1%) or an absolute value (such as 4). For example, with `maxQueryFrequency` set to `0.01` and an index of 1,000 documents, a query term found in more than 10 documents is treated as correctly spelled and passed through unchanged.
|
||||
|
||||
|
|
|
@ -148,9 +148,9 @@ The only difference between the above examples, is that using the `bq` parameter
|
|||
====
|
||||
Generally speaking, using `bq` (or `bf`, below) is considered a poor way to "boost" documents by a secondary query because it has an "Additive" effect on the final score. The overall impact a particular `bq` parameter will have on a given document can vary a lot depending on the _absolute_ values of the scores from the original query as well as the `bq` query, which in turn depends on the complexity of the original query, and various scoring factors (TF, IDF, average field length, etc.)
|
||||
|
||||
"Multiplicative Boosting" is generally considered to be a more predictable method of influcing document score, because it acts as a "scaling factor" -- increasing (or decreasing) the scores of each document by a _relative_ amount.
|
||||
"Multiplicative Boosting" is generally considered to be a more predictable method of influencing document score, because it acts as a "scaling factor" -- increasing (or decreasing) the scores of each document by a _relative_ amount.
|
||||
|
||||
The <<other-parsers.adoc#boost-query-parser,`{!boost}` QParser>> provides a convinient wrapper for implementing multiplicitive boosting, and the <<the-extended-dismax-query-parser.adoc#extended-dismax-parameters,`{!edismax}` QParser>> offers a `boost` query parameter shortcut for using it.
|
||||
The <<other-parsers.adoc#boost-query-parser,`{!boost}` QParser>> provides a convenient wrapper for implementing multiplicative boosting, and the <<the-extended-dismax-query-parser.adoc#extended-dismax-parameters,`{!edismax}` QParser>> offers a `boost` query parameter shortcut for using it.
|
||||
====
|
||||
|
||||
|
||||
|
@ -179,7 +179,6 @@ bq={!func}div(sales_rank,ms(NOW,release_date))
|
|||
bq={!lucene}( {!func v='div(1,sum(1,price))'} )^1.5
|
||||
----
|
||||
|
||||
|
||||
== Examples of Queries Submitted to the DisMax Query Parser
|
||||
|
||||
All of the sample URLs in this section assume you are running Solr's "techproducts" example:
|
||||
|
|
|
@ -54,7 +54,7 @@ Note that relaxing `mm` may cause undesired side effects, such as hurting the pr
|
|||
`boost`::
|
||||
A multivalued list of strings parsed as <<function-queries.adoc#available-functions,functions>> whose results will be multiplied into the score from the main query for all matching documents. This parameter is shorthand for wrapping the query produced by eDisMax using the <<other-parsers.adoc#boost-query-parser,`BoostQParserPlugin`>>.
|
||||
|
||||
These two examples are equivilent:
|
||||
These two examples are equivalent:
|
||||
[source,text]
|
||||
----
|
||||
q={!edismax qf=name}ipod
|
||||
|
@ -66,7 +66,6 @@ q={!boost b=div(1,sum(1,price)) v=$qq}
|
|||
qq={!edismax qf=name}ipod
|
||||
----
|
||||
|
||||
|
||||
`lowercaseOperators`::
|
||||
A Boolean parameter indicating if lowercase "and" and "or" should be treated the same as operators "AND" and "OR".
|
||||
Defaults to `false`.
|
||||
|
|
|
@ -16,6 +16,12 @@
|
|||
*/
|
||||
package org.apache.solr.client.solrj;
|
||||
|
||||
import org.apache.solr.common.MapWriter;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.SolrException.ErrorCode;
|
||||
import org.apache.solr.common.util.NamedList;
|
||||
import org.apache.solr.common.util.SuppressForbidden;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
|
@ -23,12 +29,6 @@ import java.io.ObjectInputStream;
|
|||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.apache.solr.common.MapWriter;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.SolrException.ErrorCode;
|
||||
import org.apache.solr.common.util.NamedList;
|
||||
import org.apache.solr.common.util.SuppressForbidden;
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -62,6 +62,7 @@ public abstract class SolrResponse implements Serializable, MapWriter {
|
|||
}
|
||||
|
||||
@SuppressForbidden(reason = "XXX: security hole")
|
||||
@Deprecated
|
||||
public static byte[] serializable(SolrResponse response) {
|
||||
try {
|
||||
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
|
||||
|
@ -74,6 +75,7 @@ public abstract class SolrResponse implements Serializable, MapWriter {
|
|||
}
|
||||
|
||||
@SuppressForbidden(reason = "XXX: security hole")
|
||||
@Deprecated
|
||||
public static SolrResponse deserialize(byte[] bytes) {
|
||||
try {
|
||||
ByteArrayInputStream byteStream = new ByteArrayInputStream(bytes);
|
||||
|
|
|
@@ -21,11 +21,13 @@ import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import org.apache.solr.client.solrj.io.comp.ComparatorOrder;

@@ -44,7 +46,7 @@ import org.apache.solr.client.solrj.io.stream.metrics.Metric;
public class StreamFactory implements Serializable {

  private transient HashMap<String,String> collectionZkHosts;
  private transient HashMap<String,Class<? extends Expressible>> functionNames;
  private transient HashMap<String,Supplier<Class<? extends Expressible>>> functionNames;
  private transient String defaultZkHost;
  private transient String defaultCollection;

@@ -79,14 +81,20 @@ public class StreamFactory implements Serializable {
    return null;
  }

  public Map<String,Class<? extends Expressible>> getFunctionNames(){
    return functionNames;
  public Map<String, Supplier<Class<? extends Expressible>>> getFunctionNames() {
    return Collections.unmodifiableMap(functionNames);
  }

  public StreamFactory withFunctionName(String functionName, Class<? extends Expressible> clazz){
    this.functionNames.put(functionName, () -> clazz);
    return this;
  }

  public StreamFactory withFunctionName(String functionName, Supplier< Class<? extends Expressible>> clazz){
    this.functionNames.put(functionName, clazz);
    return this;
  }

  public StreamExpressionParameter getOperand(StreamExpression expression, int parameterIndex){
    if(null == expression.getParameters() || parameterIndex >= expression.getParameters().size()){
      return null;
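As a minimal sketch of the Supplier-based registration introduced above (hypothetical class and function names; it assumes `CloudSolrStream`, an existing `Expressible`, for the eager form):

[source,java]
----
import java.util.function.Supplier;

import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class LazyRegistrationSketch {
  // Registers one function eagerly and one through a Supplier, so the second
  // class is only resolved when the factory first needs it.
  public static StreamFactory buildFactory(Supplier<Class<? extends Expressible>> lazyStreamClass) {
    return new StreamFactory()
        .withFunctionName("search", CloudSolrStream.class)   // eager form, wrapped in a Supplier internally
        .withFunctionName("lazyStream", lazyStreamClass);    // lazy form, resolved on first use
  }
}
----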
@@ -173,14 +181,15 @@ public class StreamFactory implements Serializable {
    List<StreamExpression> allStreamExpressions = getExpressionOperands(expression);

    parameterLoop:
    for(StreamExpression streamExpression : allStreamExpressions){
      if(functionNames.containsKey(streamExpression.getFunctionName())){
        for(Class clazz : clazzes){
          if(!clazz.isAssignableFrom(functionNames.get(streamExpression.getFunctionName()))){
    for(StreamExpression streamExpression : allStreamExpressions) {
      Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(streamExpression.getFunctionName());
      if (classSupplier != null) {
        for (Class clazz : clazzes) {
          if (!clazz.isAssignableFrom(classSupplier.get())) {
            continue parameterLoop;
          }
        }

        matchingStreamExpressions.add(streamExpression);
      }
    }
@@ -189,9 +198,10 @@ public class StreamFactory implements Serializable {
  }

  public boolean doesRepresentTypes(StreamExpression expression, Class ... clazzes){
    if(functionNames.containsKey(expression.getFunctionName())){
    Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(expression.getFunctionName());
    if(classSupplier != null){
      for(Class clazz : clazzes){
        if(!clazz.isAssignableFrom(functionNames.get(expression.getFunctionName()))){
        if(!clazz.isAssignableFrom(classSupplier.get())){
          return false;
        }
      }

@@ -203,7 +213,7 @@ public class StreamFactory implements Serializable {

  public int getIntOperand(StreamExpression expression, String paramName, Integer defaultValue) throws IOException{
    StreamExpressionNamedParameter param = getNamedOperand(expression, paramName);

    if(null == param || null == param.getParameter() || !(param.getParameter() instanceof StreamExpressionValue)){
      if(null != defaultValue){
        return defaultValue;
@@ -241,10 +251,12 @@ public class StreamFactory implements Serializable {
  }

  public TupleStream constructStream(StreamExpression expression) throws IOException{
    String function = expression.getFunctionName();
    if(functionNames.containsKey(function)){
      Class<? extends Expressible> clazz = functionNames.get(function);
    Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(function);

    if(classSupplier != null){
      Class<? extends Expressible> clazz = classSupplier.get();
      if(Expressible.class.isAssignableFrom(clazz) && TupleStream.class.isAssignableFrom(clazz)){
        return (TupleStream)createInstance(functionNames.get(function), new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this});
        return (TupleStream)createInstance(clazz, new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this});
      }
    }

@@ -256,10 +268,11 @@ public class StreamFactory implements Serializable {
  }

  public Metric constructMetric(StreamExpression expression) throws IOException{
    String function = expression.getFunctionName();
    if(functionNames.containsKey(function)){
      Class<? extends Expressible> clazz = functionNames.get(function);
    Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(function);
    if(classSupplier != null){
      Class<? extends Expressible> clazz = classSupplier.get();
      if(Expressible.class.isAssignableFrom(clazz) && Metric.class.isAssignableFrom(clazz)){
        return (Metric)createInstance(functionNames.get(function), new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this});
        return (Metric)createInstance(clazz, new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this});
      }
    }
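A hypothetical end-to-end sketch of the lookup path shown in this hunk: the function name is resolved through the supplier map and the class is instantiated via its `(StreamExpression, StreamFactory)` constructor. The collection name and ZooKeeper address are made up, and it assumes the existing `constructStream(String)` and `withCollectionZkHost` helpers:

[source,java]
----
import java.io.IOException;

import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class ConstructStreamSketch {
  // Parses an expression such as: search(techproducts, q="*:*", fl="id", sort="id asc")
  public static TupleStream parse(String expr) throws IOException {
    StreamFactory factory = new StreamFactory()
        .withCollectionZkHost("techproducts", "localhost:9983")   // hypothetical ZK address
        .withFunctionName("search", CloudSolrStream.class);
    return factory.constructStream(expr);   // resolves "search" through the supplier map
  }
}
----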
@@ -356,16 +369,18 @@ public class StreamFactory implements Serializable {
  public Metric constructOperation(String expressionClause) throws IOException {
    return constructMetric(StreamExpressionParser.parse(expressionClause));
  }

  public StreamOperation constructOperation(StreamExpression expression) throws IOException{
  public StreamOperation constructOperation(StreamExpression expression) throws IOException {
    String function = expression.getFunctionName();
    if(functionNames.containsKey(function)){
      Class<? extends Expressible> clazz = functionNames.get(function);
      if(Expressible.class.isAssignableFrom(clazz) && StreamOperation.class.isAssignableFrom(clazz)){
        return (StreamOperation)createInstance(functionNames.get(function), new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this});
    Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(function);
    if (classSupplier != null) {
      Class<? extends Expressible> clazz = classSupplier.get();
      if (Expressible.class.isAssignableFrom(clazz) && StreamOperation.class.isAssignableFrom(clazz)) {
        return (StreamOperation) createInstance(clazz, new Class[]{StreamExpression.class, StreamFactory.class}, new Object[]{expression, this});
      }
    }

    throw new IOException(String.format(Locale.ROOT,"Invalid operation expression %s - function '%s' is unknown (not mapped to a valid StreamOperation)", expression, expression.getFunctionName()));

    throw new IOException(String.format(Locale.ROOT, "Invalid operation expression %s - function '%s' is unknown (not mapped to a valid StreamOperation)", expression, expression.getFunctionName()));
  }

  public org.apache.solr.client.solrj.io.eval.StreamEvaluator constructEvaluator(String expressionClause) throws IOException {
@@ -373,33 +388,24 @@ public class StreamFactory implements Serializable {
  }

  public org.apache.solr.client.solrj.io.eval.StreamEvaluator constructEvaluator(StreamExpression expression) throws IOException{
    String function = expression.getFunctionName();
    if(functionNames.containsKey(function)){
      Class<? extends Expressible> clazz = functionNames.get(function);
    Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(function);

    if(classSupplier != null){
      Class<? extends Expressible> clazz = classSupplier.get();
      if(Expressible.class.isAssignableFrom(clazz) && StreamEvaluator.class.isAssignableFrom(clazz)){
        return (org.apache.solr.client.solrj.io.eval.StreamEvaluator)createInstance(functionNames.get(function), new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this});
        return (org.apache.solr.client.solrj.io.eval.StreamEvaluator)createInstance(clazz, new Class[]{ StreamExpression.class, StreamFactory.class }, new Object[]{ expression, this});
      }
    }

    throw new IOException(String.format(Locale.ROOT,"Invalid evaluator expression %s - function '%s' is unknown (not mapped to a valid StreamEvaluator)", expression, expression.getFunctionName()));
  }

  public boolean isStream(StreamExpression expression) throws IOException{
  public boolean isStream(StreamExpression expression) throws IOException {
    String function = expression.getFunctionName();
    if(functionNames.containsKey(function)){
      Class<? extends Expressible> clazz = functionNames.get(function);
      if(Expressible.class.isAssignableFrom(clazz) && TupleStream.class.isAssignableFrom(clazz)){
        return true;
      }
    }

    return false;
  }

  public boolean isEvaluator(StreamExpression expression) throws IOException{
    String function = expression.getFunctionName();
    if(functionNames.containsKey(function)){
      Class<? extends Expressible> clazz = functionNames.get(function);
      if(Expressible.class.isAssignableFrom(clazz) && StreamEvaluator.class.isAssignableFrom(clazz)){
    Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(function);
    if (classSupplier != null) {
      Class<? extends Expressible> clazz = classSupplier.get();
      if (Expressible.class.isAssignableFrom(clazz) && TupleStream.class.isAssignableFrom(clazz)) {
        return true;
      }
    }
@@ -407,35 +413,48 @@ public class StreamFactory implements Serializable {
    return false;
  }

  public <T> T createInstance(Class<T> clazz, Class<?>[] paramTypes, Object[] params) throws IOException{
  public boolean isEvaluator(StreamExpression expression) throws IOException {
    String function = expression.getFunctionName();
    Supplier<Class<? extends Expressible>> classSupplier = functionNames.get(function);
    if (classSupplier != null) {
      Class<? extends Expressible> clazz = classSupplier.get();
      if (Expressible.class.isAssignableFrom(clazz) && StreamEvaluator.class.isAssignableFrom(clazz)) {
        return true;
      }
    }

    return false;
  }

  public <T> T createInstance(Class<T> clazz, Class<?>[] paramTypes, Object[] params) throws IOException {
    Constructor<T> ctor;
    try {
      ctor = clazz.getConstructor(paramTypes);
      return ctor.newInstance(params);

    } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
      if(null != e.getMessage()){
        throw new IOException(String.format(Locale.ROOT,"Unable to construct instance of %s caused by %s", clazz.getName(), e.getMessage()),e);
      }
      else{
        throw new IOException(String.format(Locale.ROOT,"Unable to construct instance of %s", clazz.getName()),e);
      if (null != e.getMessage()) {
        throw new IOException(String.format(Locale.ROOT, "Unable to construct instance of %s caused by %s", clazz.getName(), e.getMessage()), e);
      } else {
        throw new IOException(String.format(Locale.ROOT, "Unable to construct instance of %s", clazz.getName()), e);
      }
    }
  }

  public String getFunctionName(Class<? extends Expressible> clazz) throws IOException{
    for(Entry<String,Class<? extends Expressible>> entry : functionNames.entrySet()){
      if(entry.getValue() == clazz){
  public String getFunctionName(Class<? extends Expressible> clazz) throws IOException {
    for (Entry<String, Supplier<Class<? extends Expressible>>> entry : functionNames.entrySet()) {
      if (entry.getValue().get() == clazz) {
        return entry.getKey();
      }
    }

    throw new IOException(String.format(Locale.ROOT, "Unable to find function name for class '%s'", clazz.getName()));
  }

  public Object constructPrimitiveObject(String original){
    String lower = original.trim().toLowerCase(Locale.ROOT);

    if("null".equals(lower)){ return null; }
    if("true".equals(lower) || "false".equals(lower)){ return Boolean.parseBoolean(lower); }
    try{ return Long.valueOf(original); } catch(Exception ignored){};
@@ -16,6 +16,34 @@
 */
package org.apache.solr.common.util;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.util.EntityUtils;
import org.apache.solr.client.solrj.cloud.DistribStateManager;
import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
import org.apache.solr.common.IteratorWriter;
import org.apache.solr.common.LinkedHashMapWriter;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.MapWriterMap;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SpecProvider;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkOperation;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CommonParams;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.server.ByteBufferInputStream;
import org.noggit.CharArr;
import org.noggit.JSONParser;
import org.noggit.JSONWriter;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@@ -54,34 +82,6 @@ import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.util.EntityUtils;
import org.apache.solr.client.solrj.cloud.DistribStateManager;
import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
import org.apache.solr.common.IteratorWriter;
import org.apache.solr.common.LinkedHashMapWriter;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.MapWriterMap;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SpecProvider;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkOperation;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CommonParams;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.server.ByteBufferInputStream;
import org.noggit.CharArr;
import org.noggit.JSONParser;
import org.noggit.JSONWriter;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Collections.singletonList;
import static java.util.Collections.unmodifiableList;
@@ -175,6 +175,12 @@ public class Utils {
      return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(), 0, baos.size()));
    }
  }

  public static Object fromJavabin(byte[] bytes) throws IOException {
    try (JavaBinCodec jbc = new JavaBinCodec()) {
      return jbc.unmarshal(bytes);
    }
  }

  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
    return getDeepCopy(c, maxDepth, mutable, false);
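A minimal, hypothetical round trip for the new `fromJavabin` helper; it assumes the existing `Utils.toJavabin(Object)` (whose tail is visible above) returns an `InputStream`, and it uses the Java 9+ `readAllBytes`:

[source,java]
----
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.Map;

import org.apache.solr.common.util.Utils;

public class JavabinRoundTripSketch {
  public static void main(String[] args) throws IOException {
    Map<String, Object> doc = Collections.singletonMap("id", "1");

    // Serialize with the existing toJavabin helper, then decode the bytes
    // again with the new fromJavabin method.
    try (InputStream in = Utils.toJavabin(doc)) {
      byte[] bytes = in.readAllBytes();
      Object roundTripped = Utils.fromJavabin(bytes);
      System.out.println(roundTripped);   // prints the decoded map
    }
  }
}
----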
@@ -295,7 +301,6 @@
    }
  }

  public static final Function<JSONParser, ObjectBuilder> STANDARDOBJBUILDER = jsonParser -> {
    try {
      return new ObjectBuilder(jsonParser);
@@ -20,6 +20,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;

@@ -94,7 +95,7 @@ public class TestLang extends SolrTestCase {
    }
    StreamFactory factory = new StreamFactory();
    Lang.register(factory);
    Map<String,Class<? extends Expressible>> registeredFunctions = factory.getFunctionNames();
    Map<String, Supplier<Class<? extends Expressible>>> registeredFunctions = factory.getFunctionNames();

    //Check that each function that is expected is registered.
    for(String func : functions) {
@@ -15,6 +15,36 @@
 limitations under the License.
*/

/* SOLR-14120: Providing a manual definition for the methods 'includes' and 'startsWith' to support Internet Explorer 11. */
if (!String.prototype.includes) {
  String.prototype.includes = function(search, start) { 'use strict';
    if (search instanceof RegExp) {
      throw TypeError('first argument must not be a RegExp');
    }
    if (start === undefined) { start = 0; }
    return this.indexOf(search, start) !== -1;
  };
}
if (!Array.prototype.includes) {
  Object.defineProperty(Array.prototype, "includes", {
    enumerable: false,
    value: function(obj) {
      var newArr = this.filter(function(el) {
        return el == obj;
      });
      return newArr.length > 0;
    }
  });
}
if (!String.prototype.startsWith) {
  Object.defineProperty(String.prototype, 'startsWith', {
    value: function(search, rawPos) {
      var pos = rawPos > 0 ? rawPos|0 : 0;
      return this.substring(pos, pos + search.length) === search;
    }
  });
}

var solrAdminApp = angular.module("solrAdminApp", [
  "ngResource",
  "ngRoute",