mirror of https://github.com/apache/lucene.git
Merge remote-tracking branch 'origin/master' into gradle-master
commit bc539fc0fd
@@ -62,7 +62,7 @@ com.sun.jersey.version = 1.19
 /commons-logging/commons-logging = 1.1.3
 /de.l3s.boilerpipe/boilerpipe = 1.1.0

-io.dropwizard.metrics.version = 4.0.5
+io.dropwizard.metrics.version = 4.1.2
 /io.dropwizard.metrics/metrics-core = ${io.dropwizard.metrics.version}
 /io.dropwizard.metrics/metrics-graphite = ${io.dropwizard.metrics.version}
 /io.dropwizard.metrics/metrics-jetty9 = ${io.dropwizard.metrics.version}
@@ -260,7 +260,7 @@ org.codehaus.janino.version = 3.0.9
 /org.codehaus.woodstox/stax2-api = 3.1.4
 /org.codehaus.woodstox/woodstox-core-asl = 4.4.1

-org.eclipse.jetty.version = 9.4.19.v20190610
+org.eclipse.jetty.version = 9.4.24.v20191120
 /org.eclipse.jetty.http2/http2-client = ${org.eclipse.jetty.version}
 /org.eclipse.jetty.http2/http2-common = ${org.eclipse.jetty.version}
 /org.eclipse.jetty.http2/http2-hpack = ${org.eclipse.jetty.version}
@@ -1 +0,0 @@
-4acddfa41f45790e43fe4be257c3c4bcf6b846ff
@@ -0,0 +1 @@
+69125cf74b07f1b9d60b5c94da47cb04c098f654
@@ -1 +0,0 @@
-b59ff8ecb0cf5d6234958f2404eabf0b72464e14
@@ -0,0 +1 @@
+d3f0b0fb016ef8d35ffb199d928ffbcbfa121c86
@@ -1 +0,0 @@
-7eb9a6be62d84e1691e5fdc99223e632485619a8
@@ -0,0 +1 @@
+dcb6d4d505ef74898e3a64a38c40195c01e97119
@@ -1 +0,0 @@
-55786f6e6649bd49425a7da1ac72cd85b8dd4bef
@@ -0,0 +1 @@
+7885cc3d5d7701a444acada7ab97f89846514875
@@ -1 +0,0 @@
-89e25610b3199fdf34a831c1b306f7e765928959
@@ -0,0 +1 @@
+ca1803fde51b795c0a8346ca8bc6277d9d04d01d
@@ -1 +0,0 @@
-2fd3cd40279280e8c56241f753d2c52d8d446d19
@@ -0,0 +1 @@
+3095acb088f4ff9e3fd9aedf98db73e3c18ea849
@@ -33,9 +33,6 @@ grant {
   permission java.io.FilePermission "${junit4.tempDir}${/}*", "read,write,delete";
   permission java.io.FilePermission "${clover.db.dir}${/}-", "read,write,delete";
   permission java.io.FilePermission "${tests.linedocsfile}", "read";
-  // hadoop
-  permission java.io.FilePermission "${user.home}${/}hadoop-metrics2.properties", "read";
-  permission java.io.FilePermission "${user.home}${/}hadoop-metrics2-namenode.properties", "read";
   // DirectoryFactoryTest messes with these (wtf?)
   permission java.io.FilePermission "/tmp/inst1/conf/solrcore.properties", "read";
   permission java.io.FilePermission "/path/to/myinst/conf/solrcore.properties", "read";
@@ -26,7 +26,7 @@ Apache Tika 1.19.1
 Carrot2 3.16.2
 Velocity 2.0 and Velocity Tools 3.0
 Apache ZooKeeper 3.5.5
-Jetty 9.4.19.v20190610
+Jetty 9.4.24.v20191120

 Upgrade Notes
 ----------------------
@@ -101,11 +101,12 @@ Apache Tika 1.19.1
 Carrot2 3.16.0
 Velocity 2.0 and Velocity Tools 3.0
 Apache ZooKeeper 3.5.5
-Jetty 9.4.19.v20190610
+Jetty 9.4.24.v20191120

 Upgrade Notes
 ---------------------
-(No changes)
+
+* SOLR-14026: Upgrade Jetty to 9.4.24.v20191120 and dropwizard to 4.1.2 (Erick Erickson)

 New Features
 ---------------------
@@ -176,9 +177,16 @@ Upgrade Notes

 * SOLR-14071: Untrusted configsets (ones that are uploaded via unsecured configset API) cannot use <lib> directive.
   Consider enabling authentication/authorization so that the uploaded configsets are trusted.
+  Note: If you already have a collection using untrusted configset that uses <lib> directive, it will not load after
+  upgrading to 8.4. You can re-upload your configset using "bin/solr zk -upconfig .." or place your libraries in the
+  classpath and restart Solr.

 * SOLR-14065: VelocityResponseWriter has been deprecated and may be removed in a future version.

+* SOLR-14072: The "Blob Store" API and "runtimeLib" plugin mechanism that uses it is now considered deprecated. The
+  replacement to it is the "Package Management" system, which includes a "File Store". These are experimental currently
+  but will grow/stabilize/mature.

 New Features
 ---------------------
 * SOLR-13821: A Package store to store and load package artifacts (noble, Ishan Chattopadhyaya)
@@ -310,10 +318,14 @@ Bug Fixes
 * SOLR-13945: Fix: SPLITSHARD can cause data loss on a failure to commit after the sub-shards are active and a rollback
   is done to make parent shard active again (Ishan Chattopadhyaya, ab)

-* SOLR-14071: Untrusted configsets cannot use <lib> directive (Ishan Chattopadhyaya)
+* SOLR-14071: Untrusted configsets cannot use <lib> directive due to security reasons (Ishan Chattopadhyaya)

 * SOLR-14013: FIX: javabin performance regressions (noble, yonik, Houston Putman)

+* SOLR-14079: SPLITSHARD splitByPrefix doesn't work in async mode. This also
+  affects splits triggered by the autoscale framework, which use async mode.
+  (Megan Carey, Andy Vuong, Bilal Waheed, Ilan Ginzburg, yonik)
+
 Other Changes
 ---------------------

@@ -351,7 +363,7 @@ Apache Tika 1.19.1
 Carrot2 3.16.0
 Velocity 2.0 and Velocity Tools 3.0
 Apache ZooKeeper 3.5.5
-Jetty 9.4.19.v20190610
+Jetty 9.4.24.v20191120

 Upgrade Notes
 ----------------------
@@ -38,7 +38,6 @@
     <dependency org="commons-codec" name="commons-codec" rev="${/commons-codec/commons-codec}" conf="compile"/>
     <dependency org="commons-io" name="commons-io" rev="${/commons-io/commons-io}" conf="compile"/>
     <dependency org="org.apache.commons" name="commons-exec" rev="${/org.apache.commons/commons-exec}" conf="compile"/>
-    <dependency org="org.apache.commons" name="commons-compress" rev="${/org.apache.commons/commons-compress}" conf="compile"/>
     <dependency org="commons-fileupload" name="commons-fileupload" rev="${/commons-fileupload/commons-fileupload}" conf="compile"/>
     <dependency org="commons-cli" name="commons-cli" rev="${/commons-cli/commons-cli}" conf="compile"/>
     <dependency org="org.apache.commons" name="commons-text" rev="${/org.apache.commons/commons-text}" conf="compile"/>
@@ -110,6 +109,7 @@
     <dependency org="com.sun.jersey" name="jersey-servlet" rev="${/com.sun.jersey/jersey-servlet}" conf="test.DfsMiniCluster"/>
     <dependency org="commons-logging" name="commons-logging" rev="${/commons-logging/commons-logging}" conf="test.DfsMiniCluster"/>
     <dependency org="io.netty" name="netty-all" rev="${/io.netty/netty-all}" conf="test.DfsMiniCluster"/>
+    <dependency org="org.apache.commons" name="commons-compress" rev="${/org.apache.commons/commons-compress}" conf="test.DfsMiniCluster"/>
     <dependency org="org.apache.commons" name="commons-text" rev="${/org.apache.commons/commons-text}" conf="test.DfsMiniCluster"/>

     <!-- Hadoop MiniKdc Dependencies-->
@@ -223,12 +223,11 @@ public class SplitShardCmd implements OverseerCollectionMessageHandler.Cmd {
      // params.set(NUM_SUB_SHARDS, Integer.toString(numSubShards));

      {
-        final ShardRequestTracker shardRequestTracker = ocmh.asyncRequestTracker(asyncId);
+        final ShardRequestTracker shardRequestTracker = ocmh.syncRequestTracker();
        shardRequestTracker.sendShardRequest(parentShardLeader.getNodeName(), params, shardHandler);
        SimpleOrderedMap<Object> getRangesResults = new SimpleOrderedMap<>();
        String msgOnError = "SPLITSHARD failed to invoke SPLIT.getRanges core admin command";
        shardRequestTracker.processResponses(getRangesResults, shardHandler, true, msgOnError);
-        handleFailureOnAsyncRequest(results, msgOnError);

        // Extract the recommended splits from the shard response (if it exists)
        // example response: getRangesResults={success={127.0.0.1:62086_solr={responseHeader={status=0,QTime=1},ranges=10-20,3a-3f}}}
@@ -47,6 +47,7 @@ import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.BlobRepository;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.pkg.PackageAPI;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.security.PermissionNameProvider;
@@ -64,7 +65,7 @@ import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM;

 public class PackageStoreAPI {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  public static final String PACKAGESTORE_DIRECTORY = "filestore";
+  public static final String PACKAGESTORE_DIRECTORY = ".filestore";


   private final CoreContainer coreContainer;
@@ -135,6 +136,9 @@ public class PackageStoreAPI {

   @Command
   public void upload(SolrQueryRequest req, SolrQueryResponse rsp) {
+    if(!coreContainer.getPackageLoader().getPackageAPI().isEnabled()) {
+      throw new RuntimeException(PackageAPI.ERR_MSG);
+    }
     try {
       coreContainer.getZkController().getZkClient().create(TMP_ZK_NODE, "true".getBytes(UTF_8),
           CreateMode.EPHEMERAL, true);
@@ -67,7 +67,7 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
     final Map<String, String> mdcContext;

     Slf4jReporterWrapper(String logger, Map<String, String> mdcContext, Slf4jReporter delegate, TimeUnit rateUnit, TimeUnit durationUnit) {
-      super(null, logger, null, rateUnit, durationUnit);
+      super(metricManager.registry(registryName), logger, null, rateUnit, durationUnit);
       this.delegate = delegate;
       this.mdcContext = mdcContext;
     }
@@ -37,6 +37,7 @@ import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.ScheduledReporter;
 import com.codahale.metrics.Timer;
 import org.apache.http.client.HttpClient;
@@ -301,12 +302,17 @@ public class SolrReporter extends ScheduledReporter {
     }
   }

+  // Recent dropwizard (found with version 4.1.2) requires that you _must_ call the superclass with a non-null registry.
+  // We delegate to registries anyway, so having a dummy registry is harmless.
+  private static final MetricRegistry dummyRegistry = new MetricRegistry();
+
   public SolrReporter(HttpClient httpClient, Supplier<String> urlProvider, SolrMetricManager metricManager,
                       List<Report> metrics, String handler,
                       String reporterId, TimeUnit rateUnit, TimeUnit durationUnit,
                       SolrParams params, boolean skipHistograms, boolean skipAggregateValues,
                       boolean cloudClient, boolean compact) {
-    super(null, "solr-reporter", MetricFilter.ALL, rateUnit, durationUnit, null, true);
+    super(dummyRegistry, "solr-reporter", MetricFilter.ALL, rateUnit, durationUnit, null, true);

     this.metricManager = metricManager;
     this.urlProvider = urlProvider;
     this.reporterId = reporterId;
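Aside (illustration, not part of the patch): the dummy-registry trick above works around dropwizard-metrics 4.x, whose ScheduledReporter constructor no longer tolerates a null MetricRegistry, per the comment in the patch. A minimal sketch of the same pattern, with hypothetical names:

    import java.util.SortedMap;
    import java.util.concurrent.TimeUnit;
    import com.codahale.metrics.*;

    // A reporter that forwards metrics to registries it tracks itself, so the
    // registry handed to the superclass is never read; an empty placeholder
    // satisfies dropwizard's non-null requirement.
    class DelegatingReporter extends ScheduledReporter {
      private static final MetricRegistry PLACEHOLDER = new MetricRegistry();

      DelegatingReporter() {
        super(PLACEHOLDER, "delegating-reporter", MetricFilter.ALL, TimeUnit.SECONDS, TimeUnit.MILLISECONDS);
      }

      @Override
      public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
                         SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters,
                         SortedMap<String, Timer> timers) {
        // A real implementation would pull from the registries it actually tracks.
      }
    }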
@@ -26,6 +26,7 @@ import java.util.HashMap;
 import java.util.Map;

 import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.solr.common.SolrException;
@@ -82,8 +83,8 @@ public class DefaultPackageRepository extends PackageRepository {
   public Path download(String artifactName) throws SolrException, IOException {
     Path tmpDirectory = Files.createTempDirectory("solr-packages");
     tmpDirectory.toFile().deleteOnExit();
-    URL url = new URL(new URL(repositoryURL), artifactName);
-    String fileName = url.getPath().substring(url.getPath().lastIndexOf('/') + 1);
+    URL url = new URL(new URL(repositoryURL.endsWith("/")? repositoryURL: repositoryURL+"/"), artifactName);
+    String fileName = FilenameUtils.getName(url.getPath());
     Path destination = tmpDirectory.resolve(fileName);

     switch (url.getProtocol()) {
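Aside (illustration, not part of the patch): the trailing-slash fix above matters because java.net.URL resolves a relative spec against the directory of the base URL, so a base without a trailing slash silently drops its last path segment. A small standalone demo, using a hypothetical repository host:

    import java.net.MalformedURLException;
    import java.net.URL;

    public class UrlResolutionDemo {
      public static void main(String[] args) throws MalformedURLException {
        // Without the trailing slash, "repo" is treated as a file and replaced:
        System.out.println(new URL(new URL("http://example.com/repo"), "pkg.jar"));
        // prints http://example.com/pkg.jar
        System.out.println(new URL(new URL("http://example.com/repo/"), "pkg.jar"));
        // prints http://example.com/repo/pkg.jar
      }
    }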
@@ -26,15 +26,19 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Scanner;
+import java.util.Set;
+import java.util.stream.Collectors;

 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.common.NavigableObject;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.packagemanager.SolrPackage.Command;
 import org.apache.solr.packagemanager.SolrPackage.Manifest;
|
@ -84,9 +88,9 @@ public class PackageManager implements Closeable {
|
||||||
try {
|
try {
|
||||||
Map packagesZnodeMap = null;
|
Map packagesZnodeMap = null;
|
||||||
|
|
||||||
if (zkClient.exists("/packages.json", true) == true) {
|
if (zkClient.exists(ZkStateReader.SOLR_PKGS_PATH, true) == true) {
|
||||||
packagesZnodeMap = (Map)getMapper().readValue(
|
packagesZnodeMap = (Map)getMapper().readValue(
|
||||||
new String(zkClient.getData("/packages.json", null, null, true), "UTF-8"), Map.class).get("packages");
|
new String(zkClient.getData(ZkStateReader.SOLR_PKGS_PATH, null, null, true), "UTF-8"), Map.class).get("packages");
|
||||||
for (Object packageName: packagesZnodeMap.keySet()) {
|
for (Object packageName: packagesZnodeMap.keySet()) {
|
||||||
List pkg = (List)packagesZnodeMap.get(packageName);
|
List pkg = (List)packagesZnodeMap.get(packageName);
|
||||||
for (Map pkgVersion: (List<Map>)pkg) {
|
for (Map pkgVersion: (List<Map>)pkg) {
|
||||||
|
@@ -112,34 +116,51 @@ public class PackageManager implements Closeable {
    Map<String, String> packages = null;
    try {
      NavigableObject result = (NavigableObject) Utils.executeGET(solrClient.getHttpClient(),
-          solrBaseUrl+"/api/collections/"+collection+"/config/params/PKG_VERSIONS?omitHeader=true&wt=javabin", Utils.JAVABINCONSUMER);
+          solrBaseUrl + PackageUtils.getCollectionParamsPath(collection) + "/PKG_VERSIONS?omitHeader=true&wt=javabin", Utils.JAVABINCONSUMER);
      packages = (Map<String, String>) result._get("/response/params/PKG_VERSIONS", Collections.emptyMap());
    } catch (PathNotFoundException ex) {
      // Don't worry if PKG_VERSION wasn't found. It just means this collection was never touched by the package manager.
    }
    if (packages == null) return Collections.emptyMap();
-    Map<String, SolrPackageInstance> ret = new HashMap<String, SolrPackageInstance>();
+    Map<String, SolrPackageInstance> ret = new HashMap<>();
    for (String packageName: packages.keySet()) {
-      if (Strings.isNullOrEmpty(packageName) == false) { // There can be an empty key, storing the version here
+      if (Strings.isNullOrEmpty(packageName) == false && // There can be an empty key, storing the version here
+          packages.get(packageName) != null) { // null means the package was undeployed from this package before
        ret.put(packageName, getPackageInstance(packageName, packages.get(packageName)));
      }
    }
    return ret;
  }

+  private void ensureCollectionsExist(List<String> collections) {
+    try {
+      List<String> existingCollections = zkClient.getChildren("/collections", null, true);
+      Set<String> nonExistent = new HashSet<>(collections);
+      nonExistent.removeAll(existingCollections);
+      if (nonExistent.isEmpty() == false) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection(s) doesn't exist: " + nonExistent.toString());
+      }
+    } catch (KeeperException | InterruptedException e) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to fetch list of collections from ZK.");
+    }
+  }
+
  private boolean deployPackage(SolrPackageInstance packageInstance, boolean pegToLatest, boolean isUpdate, boolean noprompt,
      List<String> collections, String overrides[]) {
+    List<String> previouslyDeployed = new ArrayList<>(); // collections where package is already deployed in
+
    for (String collection: collections) {
      SolrPackageInstance deployedPackage = getPackagesDeployed(collection).get(packageInstance.name);
      if (packageInstance.equals(deployedPackage)) {
        if (!pegToLatest) {
          PackageUtils.printRed("Package " + packageInstance + " already deployed on "+collection);
+          previouslyDeployed.add(collection);
          continue;
        }
      } else {
        if (deployedPackage != null && !isUpdate) {
          PackageUtils.printRed("Package " + deployedPackage + " already deployed on "+collection+". To update to "+packageInstance+", pass --update parameter.");
+          previouslyDeployed.add(collection);
          continue;
        }
      }
@@ -148,9 +169,9 @@ public class PackageManager implements Closeable {

      // Get package params
      try {
-        boolean packageParamsExist = ((Map)PackageUtils.getJson(solrClient.getHttpClient(), solrBaseUrl + "/api/collections/abc/config/params/packages", Map.class)
+        boolean packageParamsExist = ((Map)PackageUtils.getJson(solrClient.getHttpClient(), solrBaseUrl + PackageUtils.getCollectionParamsPath(collection) + "/packages", Map.class)
            .getOrDefault("response", Collections.emptyMap())).containsKey("params");
-        SolrCLI.postJsonToSolr(solrClient, "/api/collections/" + collection + "/config/params",
+        SolrCLI.postJsonToSolr(solrClient, PackageUtils.getCollectionParamsPath(collection),
            getMapper().writeValueAsString(Collections.singletonMap(packageParamsExist? "update": "set",
                Collections.singletonMap("packages", Collections.singletonMap(packageInstance.name, collectionParameterOverrides)))));
      } catch (Exception e) {
@@ -159,7 +180,7 @@ public class PackageManager implements Closeable {

      // Set the package version in the collection's parameters
      try {
-        SolrCLI.postJsonToSolr(solrClient, "/api/collections/" + collection + "/config/params",
+        SolrCLI.postJsonToSolr(solrClient, PackageUtils.getCollectionParamsPath(collection),
            "{set:{PKG_VERSIONS:{" + packageInstance.name+": '" + (pegToLatest? PackagePluginHolder.LATEST: packageInstance.version)+"'}}}");
      } catch (Exception ex) {
        throw new SolrException(ErrorCode.SERVER_ERROR, ex);
@@ -168,7 +189,7 @@ public class PackageManager implements Closeable {
      // If updating, refresh the package version for this to take effect
      if (isUpdate || pegToLatest) {
        try {
-          SolrCLI.postJsonToSolr(solrClient, "/api/cluster/package", "{\"refresh\": \"" + packageInstance.name + "\"}");
+          SolrCLI.postJsonToSolr(solrClient, PackageUtils.PACKAGE_PATH, "{\"refresh\": \"" + packageInstance.name + "\"}");
        } catch (Exception ex) {
          throw new SolrException(ErrorCode.SERVER_ERROR, ex);
        }
@@ -213,19 +234,27 @@ public class PackageManager implements Closeable {

      // Set the package version in the collection's parameters
      try {
-        SolrCLI.postJsonToSolr(solrClient, "/api/collections/" + collection + "/config/params",
+        SolrCLI.postJsonToSolr(solrClient, PackageUtils.getCollectionParamsPath(collection),
            "{update:{PKG_VERSIONS:{'" + packageInstance.name + "' : '" + (pegToLatest? PackagePluginHolder.LATEST: packageInstance.version) + "'}}}");
      } catch (Exception ex) {
        throw new SolrException(ErrorCode.SERVER_ERROR, ex);
      }
    }

-    // Verify that package was successfully deployed
-    boolean success = verify(packageInstance, collections);
-    if (success) {
-      PackageUtils.printGreen("Deployed and verified package: " + packageInstance.name + ", version: " + packageInstance.version);
-    }
-    return success;
+    List<String> deployedCollections = collections.stream().filter(c -> !previouslyDeployed.contains(c)).collect(Collectors.toList());
+
+    boolean success = true;
+    if (deployedCollections.isEmpty() == false) {
+      // Verify that package was successfully deployed
+      success = verify(packageInstance, deployedCollections);
+      if (success) {
+        PackageUtils.printGreen("Deployed on " + deployedCollections + " and verified package: " + packageInstance.name + ", version: " + packageInstance.version);
+      }
+    }
+    if (previouslyDeployed.isEmpty() == false) {
+      PackageUtils.printRed("Already Deployed on " + previouslyDeployed + ", package: " + packageInstance.name + ", version: " + packageInstance.version);
+    }
+    return previouslyDeployed.isEmpty() && success;
  }

  private Map<String,String> getCollectionParameterOverrides(SolrPackageInstance packageInstance, boolean isUpdate,
@@ -243,7 +272,7 @@ public class PackageManager implements Closeable {
  Map<String, String> getPackageParams(String packageName, String collection) {
    try {
      return (Map<String, String>)((Map)((Map)((Map)
-          PackageUtils.getJson(solrClient.getHttpClient(), solrBaseUrl + "/api/collections/" + collection + "/config/params/packages", Map.class)
+          PackageUtils.getJson(solrClient.getHttpClient(), solrBaseUrl + PackageUtils.getCollectionParamsPath(collection) + "/packages", Map.class)
          .get("response"))
          .get("params"))
          .get("packages")).get(packageName);
@@ -260,7 +289,6 @@ public class PackageManager implements Closeable {
  public boolean verify(SolrPackageInstance pkg, List<String> collections) {
    boolean success = true;
    for (Plugin plugin: pkg.plugins) {
-      PackageUtils.printGreen(plugin.verifyCommand);
      for (String collection: collections) {
        Map<String, String> collectionParameterOverrides = getPackageParams(pkg.name, collection);
        Command cmd = plugin.verifyCommand;
@@ -322,6 +350,8 @@ public class PackageManager implements Closeable {
   */
  public void deploy(String packageName, String version, String[] collections, String[] parameters,
      boolean isUpdate, boolean noprompt) throws SolrException {
+    ensureCollectionsExist(Arrays.asList(collections));
+
    boolean pegToLatest = PackageUtils.LATEST.equals(version); // User wants to peg this package's version to the latest installed (for auto-update, i.e. no explicit deploy step)
    SolrPackageInstance packageInstance = getPackageInstance(packageName, version);
    if (packageInstance == null) {
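Aside (hypothetical usage, not part of the patch): with the ensureCollectionsExist() check added above, a deploy against a misspelled collection now fails fast instead of silently doing nothing. Assuming only a collection named "films" exists:

    // Throws SolrException (BAD_REQUEST): Collection(s) doesn't exist: [filmz]
    packageManager.deploy("mypkg", "1.0.0", new String[] {"filmz"},
        new String[0], false /* isUpdate */, true /* noprompt */);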
@@ -342,11 +372,17 @@ public class PackageManager implements Closeable {
  }

  /**
-   * Undeploys a packge from given collections.
+   * Undeploys a package from given collections.
   */
  public void undeploy(String packageName, String[] collections) throws SolrException {
+    ensureCollectionsExist(Arrays.asList(collections));
+
    for (String collection: collections) {
      SolrPackageInstance deployedPackage = getPackagesDeployed(collection).get(packageName);
+      if (deployedPackage == null) {
+        PackageUtils.printRed("Package "+packageName+" not deployed on collection "+collection);
+        continue;
+      }
      Map<String, String> collectionParameterOverrides = getPackageParams(packageName, collection);

      // Run the uninstall command for all plugins
@@ -374,13 +410,14 @@ public class PackageManager implements Closeable {

      // Set the package version in the collection's parameters
      try {
-        SolrCLI.postJsonToSolr(solrClient, "/api/collections/" + collection + "/config/params", "{set: {PKG_VERSIONS: {"+packageName+": null}}}");
-        SolrCLI.postJsonToSolr(solrClient, "/api/cluster/package", "{\"refresh\": \"" + packageName + "\"}");
+        SolrCLI.postJsonToSolr(solrClient, PackageUtils.getCollectionParamsPath(collection),
+            "{set: {PKG_VERSIONS: {"+packageName+": null}}}"); // Is it better to "unset"? If so, build support in params API for "unset"
+        SolrCLI.postJsonToSolr(solrClient, PackageUtils.PACKAGE_PATH, "{\"refresh\": \"" + packageName + "\"}");
      } catch (Exception ex) {
        throw new SolrException(ErrorCode.SERVER_ERROR, ex);
      }

-      // TODO: Also better to remove the package parameters
+      // TODO: Also better to remove the package parameters PKG_VERSION etc.
    }
  }

@@ -391,14 +428,14 @@ public class PackageManager implements Closeable {
  public Map<String, String> getDeployedCollections(String packageName) {
    List<String> allCollections;
    try {
-      allCollections = zkClient.getChildren("/collections", null, true);
+      allCollections = zkClient.getChildren(ZkStateReader.COLLECTIONS_ZKNODE, null, true);
    } catch (KeeperException | InterruptedException e) {
      throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, e);
    }
    Map<String, String> deployed = new HashMap<String, String>();
    for (String collection: allCollections) {
      // Check package version installed
-      String paramsJson = PackageUtils.getJsonStringFromUrl(solrClient.getHttpClient(), solrBaseUrl + "/api/collections/" + collection + "/config/params/PKG_VERSIONS?omitHeader=true");
+      String paramsJson = PackageUtils.getJsonStringFromUrl(solrClient.getHttpClient(), solrBaseUrl + PackageUtils.getCollectionParamsPath(collection) + "/PKG_VERSIONS?omitHeader=true");
      String version = null;
      try {
        version = JsonPath.parse(paramsJson, PackageUtils.jsonPathConfiguration())
@@ -58,6 +58,9 @@ public class PackageUtils {
   * Represents a version which denotes the latest version available at the moment.
   */
  public static String LATEST = "latest";

+  public static String PACKAGE_PATH = "/api/cluster/package";
+  public static String REPOSITORIES_ZK_PATH = "/repositories.json";
+
  public static Configuration jsonPathConfiguration() {
    MappingProvider provider = new JacksonMappingProvider();
@@ -235,4 +238,8 @@ public class PackageUtils {
    }
    return collections;
  }
+
+  public static String getCollectionParamsPath(String collection) {
+    return "/api/collections/" + collection + "/config/params";
+  }
 }
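The helper added above centralizes the v2 params path that the earlier hunks previously inlined at each call site (one of which was accidentally hardcoded to a collection literally named "abc"). Hypothetical usage:

    String path = PackageUtils.getCollectionParamsPath("techproducts");
    // path is "/api/collections/techproducts/config/params"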
@@ -122,23 +122,23 @@ public class RepositoryManager {

    List<PackageRepository> repos = getMapper().readValue(existingRepositoriesJson, List.class);
    repos.add(new DefaultPackageRepository(name, uri));
-    if (packageManager.zkClient.exists("/repositories.json", true) == false) {
-      packageManager.zkClient.create("/repositories.json", getMapper().writeValueAsString(repos).getBytes("UTF-8"), CreateMode.PERSISTENT, true);
+    if (packageManager.zkClient.exists(PackageUtils.REPOSITORIES_ZK_PATH, true) == false) {
+      packageManager.zkClient.create(PackageUtils.REPOSITORIES_ZK_PATH, getMapper().writeValueAsString(repos).getBytes("UTF-8"), CreateMode.PERSISTENT, true);
    } else {
-      packageManager.zkClient.setData("/repositories.json", getMapper().writeValueAsString(repos).getBytes("UTF-8"), true);
+      packageManager.zkClient.setData(PackageUtils.REPOSITORIES_ZK_PATH, getMapper().writeValueAsString(repos).getBytes("UTF-8"), true);
    }

    if (packageManager.zkClient.exists("/keys", true)==false) packageManager.zkClient.create("/keys", new byte[0], CreateMode.PERSISTENT, true);
    if (packageManager.zkClient.exists("/keys/exe", true)==false) packageManager.zkClient.create("/keys/exe", new byte[0], CreateMode.PERSISTENT, true);
-    if (packageManager.zkClient.exists("/keys/exe/"+name+".der", true)==false) {
-      packageManager.zkClient.create("/keys/exe/"+name+".der", new byte[0], CreateMode.PERSISTENT, true);
+    if (packageManager.zkClient.exists("/keys/exe/" + name + ".der", true)==false) {
+      packageManager.zkClient.create("/keys/exe/" + name + ".der", new byte[0], CreateMode.PERSISTENT, true);
    }
-    packageManager.zkClient.setData("/keys/exe/"+name+".der", IOUtils.toByteArray(new URL(uri+"/publickey.der").openStream()), true);
+    packageManager.zkClient.setData("/keys/exe/" + name + ".der", IOUtils.toByteArray(new URL(uri + "/publickey.der").openStream()), true);
  }

  private String getRepositoriesJson(SolrZkClient zkClient) throws UnsupportedEncodingException, KeeperException, InterruptedException {
-    if (zkClient.exists("/repositories.json", true)) {
-      return new String(zkClient.getData("/repositories.json", null, null, true), "UTF-8");
+    if (zkClient.exists(PackageUtils.REPOSITORIES_ZK_PATH, true)) {
+      return new String(zkClient.getData(PackageUtils.REPOSITORIES_ZK_PATH, null, null, true), "UTF-8");
    }
    return "[]";
  }
@@ -195,7 +195,7 @@ public class RepositoryManager {
    add.manifest = "/package/" + packageName + "/" + version + "/manifest.json";
    add.manifestSHA512 = manifestSHA512;

-    V2Request req = new V2Request.Builder("/api/cluster/package")
+    V2Request req = new V2Request.Builder(PackageUtils.PACKAGE_PATH)
        .forceV2(true)
        .withMethod(SolrRequest.METHOD.POST)
        .withPayload(Collections.singletonMap("add", add))
@@ -308,10 +308,12 @@ public class RepositoryManager {
      installPackage(packageName, version);
    }

-    SolrPackageInstance updatedPackage = packageManager.getPackageInstance(packageName, PackageUtils.LATEST);
-    boolean res = packageManager.verify(updatedPackage, peggedToLatest);
-    PackageUtils.printGreen("Verifying version " + updatedPackage.version +
-        " on " + peggedToLatest + ", result: " + res);
-    if (!res) throw new SolrException(ErrorCode.BAD_REQUEST, "Failed verification after deployment");
+    if (peggedToLatest.isEmpty() == false) {
+      SolrPackageInstance updatedPackage = packageManager.getPackageInstance(packageName, PackageUtils.LATEST);
+      boolean res = packageManager.verify(updatedPackage, peggedToLatest);
+      PackageUtils.printGreen("Verifying version " + updatedPackage.version +
+          " on " + peggedToLatest + ", result: " + res);
+      if (!res) throw new SolrException(ErrorCode.BAD_REQUEST, "Failed verification after deployment");
+    }
  }
  }
@@ -64,6 +64,8 @@ public class PackageAPI {
  public final boolean enablePackages = Boolean.parseBoolean(System.getProperty("enable.packages", "false"));
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

+  public static final String ERR_MSG = "Package loading is not enabled , Start your nodes with -Denable.packages=true";
+
  final CoreContainer coreContainer;
  private final ObjectMapper mapper = SolrJacksonAnnotationInspector.createObjectMapper();
  private final PackageLoader packageLoader;
@@ -341,9 +343,13 @@ public class PackageAPI {

  }

+  public boolean isEnabled() {
+    return enablePackages;
+  }
+
  private boolean checkEnabled(CommandOperation payload) {
    if (!enablePackages) {
-      payload.addError("Package loading is not enabled , Start your nodes with -Denable.packages=true");
+      payload.addError(ERR_MSG);
      return false;
    }
    return true;
@@ -118,9 +118,13 @@ public class PackageTool extends SolrCLI.ToolBase {
        } else {
          String packageName = cli.getArgs()[1];
          Map<String, String> deployedCollections = packageManager.getDeployedCollections(packageName);
-          PackageUtils.printGreen("Collections on which package " + packageName + " was deployed:");
-          for (String collection: deployedCollections.keySet()) {
-            PackageUtils.printGreen("\t" + collection + "("+packageName+":"+deployedCollections.get(collection)+")");
+          if (deployedCollections.isEmpty() == false) {
+            PackageUtils.printGreen("Collections on which package " + packageName + " was deployed:");
+            for (String collection: deployedCollections.keySet()) {
+              PackageUtils.printGreen("\t" + collection + "("+packageName+":"+deployedCollections.get(collection)+")");
+            }
+          } else {
+            PackageUtils.printGreen("Package "+packageName+" not deployed on any collection.");
          }
        }
        break;
@@ -130,7 +134,7 @@ public class PackageTool extends SolrCLI.ToolBase {
        String packageName = parsedVersion.first();
        String version = parsedVersion.second();
        repositoryManager.install(packageName, version);
-        PackageUtils.printGreen(repositoryManager.toString() + " installed.");
+        PackageUtils.printGreen(packageName + " installed.");
        break;
      }
      case "deploy":
@@ -140,7 +144,7 @@ public class PackageTool extends SolrCLI.ToolBase {
        String version = parsedVersion.second();
        boolean noprompt = cli.hasOption('y');
        boolean isUpdate = cli.hasOption("update") || cli.hasOption('u');
-        packageManager.deploy(packageName, version, PackageUtils.validateCollections(cli.getOptionValues("collections")), cli.getOptionValues("param"), isUpdate, noprompt);
+        packageManager.deploy(packageName, version, PackageUtils.validateCollections(cli.getOptionValue("collections").split(",")), cli.getOptionValues("param"), isUpdate, noprompt);
        break;
      }
      case "undeploy":
@@ -150,7 +154,7 @@ public class PackageTool extends SolrCLI.ToolBase {
          throw new SolrException(ErrorCode.BAD_REQUEST, "Only package name expected, without a version. Actual: " + cli.getArgList().get(1));
        }
        String packageName = parsedVersion.first();
-        packageManager.undeploy(packageName, cli.getOptionValues("collections"));
+        packageManager.undeploy(packageName, cli.getOptionValue("collections").split(","));
        break;
      }
      case "help":
@@ -228,7 +232,7 @@ public class PackageTool extends SolrCLI.ToolBase {

        OptionBuilder
            .withArgName("COLLECTIONS")
-            .hasArgs()
+            .hasArg()
            .isRequired(false)
            .withDescription("List of collections. Run './solr package help' for more details.")
            .create("collections"),
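Aside (illustration, not part of the patch): with Commons CLI, hasArgs() lets the option consume every following token up to the next option, which can swallow the subcommand's own arguments; hasArg() takes exactly one token. That is why the callers changed earlier in this patch now read a single comma-separated value:

    // "-collections foo,bar" arrives as one value that the caller splits:
    String[] collections = cli.getOptionValue("collections").split(",");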
@@ -20,11 +20,9 @@ package org.apache.solr.cloud;
 import java.lang.invoke.MethodHandles;
 import java.util.Arrays;

-import org.apache.logging.log4j.Level;
-import org.apache.logging.log4j.core.config.Configurator;
-import org.apache.lucene.util.SuppressForbidden;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.core.TestSolrConfigHandler;
+import org.apache.solr.util.LogLevel;
 import org.apache.solr.util.PackageTool;
 import org.apache.solr.util.SolrCLI;
 import org.eclipse.jetty.server.Handler;
@@ -39,6 +37,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+@LogLevel("org.apache=INFO")
 public class PackageManagerCLITest extends SolrCloudTestCase {

   // Note for those who want to modify the jar files used in the packages used in this test:
@@ -70,13 +69,9 @@ public class PackageManagerCLITest extends SolrCloudTestCase {
   }

   @Test
-  @SuppressForbidden(reason = "Need to turn off logging, and SLF4J doesn't seem to provide for a way.")
   public void testPackageManager() throws Exception {
     PackageTool tool = new PackageTool();

-    // Enable the logger for this test. Need to do this since the tool disables logger.
-    Configurator.setRootLevel(Level.INFO);
-
     String solrUrl = cluster.getJettySolrRunner(0).getBaseUrl().toString();

     run(tool, new String[] {"-solrUrl", solrUrl, "list-installed"});
@@ -0,0 +1,293 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud.api.collections;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.cloud.CloudTestUtils;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+@Ignore("SOLR-13884")
+public class ConcurrentCreateCollectionTest extends SolrCloudTestCase {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  static int NODES = 2;
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    configureCluster(NODES)
+        // .addConfig("conf", configset("cloud-minimal"))
+        .addConfig("conf", configset("_default"))
+        .configure();
+  }
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+  }
+
+  @After
+  @Override
+  public void tearDown() throws Exception {
+    super.tearDown();
+    cluster.deleteAllCollections();
+  }
+
+
+  private CollectionAdminRequest.Create createCollectionRequest(String cname, int numShards, int numReplicas) throws Exception {
+    CollectionAdminRequest.Create creq = CollectionAdminRequest
+        // .createCollection(cname, "conf", NODES - 1, NODES - 1)
+        .createCollection(cname, "conf", numShards, numReplicas)
+        .setMaxShardsPerNode(100);
+    creq.setWaitForFinalState(true);
+    creq.setAutoAddReplicas(true);
+    return creq;
+  }
+
+  public void testConcurrentCreatePlacement() throws Exception {
+    final int nThreads = 2;
+    final int createsPerThread = 1;
+    final int nShards = 1;
+    final int repFactor = 2;
+    final boolean useClusterPolicy = false;
+    final boolean useCollectionPolicy = true;
+    final boolean startUnbalanced = true; // can help make a smaller test that can still reproduce an issue.
+    final int unbalancedSize = 1; // the number of replicas to create first
+    final boolean stopNode = false; // only applicable when startUnbalanced==true... stops a node during first collection creation, then restarts
+
+    final CloudSolrClient client = cluster.getSolrClient();
+
+
+    if (startUnbalanced) {
+      /*** This produces a failure (multiple replicas of single shard on same node) when run with NODES=4 and
+       final int nThreads = 2;
+       final int createsPerThread = 1;
+       final int nShards = 2;
+       final int repFactor = 2;
+       final boolean useClusterPolicy = false;
+       final boolean useCollectionPolicy = true;
+       final boolean startUnbalanced = true;
+       // NOTE: useClusterPolicy=true seems to fix it! So does putting both creates in a single thread!
+       // NOTE: even creating a single replica to start with causes failure later on.
+
+       Also reproduced with smaller cluster: NODES=2 and
+       final int nThreads = 2;
+       final int createsPerThread = 1;
+       final int nShards = 1;
+       final int repFactor = 2;
+       final boolean useClusterPolicy = false;
+       final boolean useCollectionPolicy = true;
+       final boolean startUnbalanced = true;
+
+       Also, with NODES=3:
+       final int nThreads = 2;
+       final int createsPerThread = 1;
+       final int nShards = 1;
+       final int repFactor = 2;
+       final boolean useClusterPolicy = false;
+       final boolean useCollectionPolicy = true;
+       final boolean startUnbalanced = false;
+
+       // Also succeeded in replicating a bug where all 5 replicas were on a single node: CORES=5, nThreads=5, repFactor=5,
+       // unbalancedSize = 16 (4 replicas on each of the up nodes), stopNode=true
+       ***/
+
+
+      JettySolrRunner downJetty = cluster.getJettySolrRunners().get(0);
+      if (stopNode) {
+        cluster.stopJettySolrRunner(downJetty);
+      }
+
+      String cname = "STARTCOLLECTION";
+      CollectionAdminRequest.Create creq = CollectionAdminRequest
+          // .createCollection(cname, "conf", NODES - 1, NODES - 1)
+          .createCollection(cname, "conf", unbalancedSize, 1)
+          .setMaxShardsPerNode(100);
+      creq.setWaitForFinalState(true);
+      // creq.setAutoAddReplicas(true);
+      if (useCollectionPolicy) { creq.setPolicy("policy1"); }
+      creq.process(client);
+
+      if (stopNode) {
+        // this will start it with a new port.... does it matter?
+        cluster.startJettySolrRunner(downJetty);
+      }
+    }
+
+
+
+    if (useClusterPolicy) {
+      String setClusterPolicyCommand = "{" +
+          " 'set-cluster-policy': [" +
+          // " {'cores':'<100', 'node':'#ANY'}," +
+          " {'replica':'<2', 'shard': '#EACH', 'node': '#ANY'}," +
+          // " {'replica':'<2', 'node': '#ANY'}," +
+          " ]" +
+          "}";
+
+      SolrRequest req = CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.POST, setClusterPolicyCommand);
+      client.request(req);
+    }
+
+    if (useCollectionPolicy) {
+      // NOTE: the mere act of setting this named policy prevents LegacyAssignStrategy from being used, even if the policy is
+      // not used during collection creation.
+      String commands = "{set-policy : {" +
+          " policy1 : [{replica:'<2' , node:'#ANY'}]" +
+          ",policy2 : [{replica:'<2' , shard:'#EACH', node:'#ANY'}]" +
+          "}}";
+      client.request(CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.POST, commands));
|
||||||
|
|
||||||
|
/*** take defaults for cluster preferences
|
||||||
|
String cmd = "{" +
|
||||||
|
" 'set-cluster-preferences': [" +
|
||||||
|
// " {'cores':'<100', 'node':'#ANY'}," +
|
||||||
|
" {minimize:cores}" +
|
||||||
|
" ]" +
|
||||||
|
"}";
|
||||||
|
|
||||||
|
SolrRequest req = CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.POST, cmd);
|
||||||
|
client.request(req);
|
||||||
|
***/
|
||||||
|
}
|
||||||
|
|
||||||
|
/***
|
||||||
|
SolrRequest req = CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.GET, null);
|
||||||
|
SolrResponse response = req.process(client);
|
||||||
|
log.info("######### AUTOSCALE " + response);
|
||||||
|
***/
|
||||||
|
|
||||||
|
|
||||||
|
byte[] data = client.getZkStateReader().getZkClient().getData("/autoscaling.json", null, null, true);
|
||||||
|
log.info("AUTOSCALE DATA: " + new String(data, "UTF-8"));
|
||||||
|
|
||||||
|
final AtomicInteger collectionNum = new AtomicInteger();
|
||||||
|
Thread[] indexThreads = new Thread[nThreads];
|
||||||
|
|
||||||
|
for (int i=0; i<nThreads; i++) {
|
||||||
|
indexThreads[i] = new Thread(() -> {
|
||||||
|
try {
|
||||||
|
for (int j=0; j<createsPerThread; j++) {
|
||||||
|
int num = collectionNum.incrementAndGet();
|
||||||
|
// Thread.sleep(num*1000);
|
||||||
|
String collectionName = "collection" + num;
|
||||||
|
CollectionAdminRequest.Create createReq = CollectionAdminRequest
|
||||||
|
.createCollection(collectionName, "conf", nShards, repFactor)
|
||||||
|
// .setMaxShardsPerNode(1) // should be default
|
||||||
|
;
|
||||||
|
createReq.setWaitForFinalState(false);
|
||||||
|
if (useCollectionPolicy) {
|
||||||
|
createReq.setPolicy("policy1");
|
||||||
|
}
|
||||||
|
createReq.setAutoAddReplicas(true);
|
||||||
|
|
||||||
|
createReq.process(client);
|
||||||
|
// cluster.waitForActiveCollection(collectionName, 1, repFactor);
|
||||||
|
// Thread.sleep(10000);
|
||||||
|
}
|
||||||
|
} catch (Exception e) {
|
||||||
|
fail(e.getMessage());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (Thread thread : indexThreads) {
|
||||||
|
thread.start();
|
||||||
|
}
|
||||||
|
|
||||||
|
for (Thread thread : indexThreads) {
|
||||||
|
thread.join();
|
||||||
|
}
|
||||||
|
|
||||||
|
int expectedTotalReplicas = unbalancedSize + nThreads * createsPerThread * nShards * repFactor;
|
||||||
|
int expectedPerNode = expectedTotalReplicas / NODES;
|
||||||
|
boolean expectBalanced = (expectedPerNode * NODES == expectedTotalReplicas);
|
||||||
|
|
||||||
|
Map<String,List<Replica>> replicaMap = new HashMap<>();
|
||||||
|
ClusterState cstate = client.getZkStateReader().getClusterState();
|
||||||
|
for (DocCollection collection : cstate.getCollectionsMap().values()) {
|
||||||
|
for (Replica replica : collection.getReplicas()) {
|
||||||
|
String url = replica.getBaseUrl();
|
||||||
|
List<Replica> replicas = replicaMap.get(url);
|
||||||
|
if (replicas == null) {
|
||||||
|
replicas = new ArrayList<>();
|
||||||
|
replicaMap.put(url, replicas);
|
||||||
|
}
|
||||||
|
replicas.add(replica);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if nodes are balanced
|
||||||
|
boolean failed = false;
|
||||||
|
for (List<Replica> replicas : replicaMap.values()) {
|
||||||
|
if (replicas.size() != expectedPerNode ) {
|
||||||
|
if (expectBalanced) {
|
||||||
|
failed = true;
|
||||||
|
}
|
||||||
|
log.error("UNBALANCED CLUSTER: expected replicas per node " + expectedPerNode + " but got " + replicas.size());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if there were multiple replicas of the same shard placed on the same node
|
||||||
|
for (DocCollection collection : cstate.getCollectionsMap().values()) {
|
||||||
|
for (Slice slice : collection.getSlices()) {
|
||||||
|
Map<String, Replica> nodeToReplica = new HashMap<>();
|
||||||
|
for (Replica replica : slice.getReplicas()) {
|
||||||
|
Replica prev = nodeToReplica.put(replica.getBaseUrl(), replica);
|
||||||
|
if (prev != null) {
|
||||||
|
failed = true;
|
||||||
|
// NOTE: with a replication factor > 2, this will print multiple times per bad slice.
|
||||||
|
log.error("MULTIPLE REPLICAS OF SINGLE SHARD ON SAME NODE: r1=" + prev + " r2=" + replica);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (failed) {
|
||||||
|
log.error("Cluster state " + cstate.getCollectionsMap());
|
||||||
|
}
|
||||||
|
|
||||||
|
    assertEquals(NODES, replicaMap.size()); // make sure something was created on every node

    assertFalse(failed);
  }

}

@@ -148,6 +148,10 @@ public class SplitByPrefixTest extends SolrCloudTestCase {
 
   @Test
   public void doTest() throws IOException, SolrServerException {
+    // SPLITSHARD is recommended to be run in async mode, so we default to that.
+    // Also, autoscale triggers use async with splits as well.
+    boolean doAsync = true;
+
     CollectionAdminRequest
         .createCollection(COLLECTION_NAME, "conf", 1, 1)
         .setMaxShardsPerNode(100)
@@ -165,6 +169,9 @@ public class SplitByPrefixTest extends SolrCloudTestCase {
         .setNumSubShards(2)
         .setSplitByPrefix(true)
         .setShardName("shard1");
+    if (doAsync) {
+      splitShard.setAsyncId("SPLIT1");
+    }
     splitShard.process(client);
     waitForState("Timed out waiting for sub shards to be active.",
         COLLECTION_NAME, activeClusterShape(2, 3)); // expectedReplicas==3 because original replica still exists (just inactive)
@@ -187,6 +194,9 @@ public class SplitByPrefixTest extends SolrCloudTestCase {
     splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME)
         .setSplitByPrefix(true)
         .setShardName("shard1_1"); // should start out with the range of 0-7fffffff
+    if (doAsync) {
+      splitShard.setAsyncId("SPLIT2");
+    }
     splitShard.process(client);
     waitForState("Timed out waiting for sub shards to be active.",
         COLLECTION_NAME, activeClusterShape(3, 5));
@@ -216,6 +226,9 @@ public class SplitByPrefixTest extends SolrCloudTestCase {
     splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME)
         .setSplitByPrefix(true)
         .setShardName(slice1.getName());
+    if (doAsync) {
+      splitShard.setAsyncId("SPLIT3");
+    }
     splitShard.process(client);
     waitForState("Timed out waiting for sub shards to be active.",
         COLLECTION_NAME, activeClusterShape(4, 7));
@@ -236,6 +249,9 @@ public class SplitByPrefixTest extends SolrCloudTestCase {
     splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME)
         .setSplitByPrefix(true)
         .setShardName(slice1.getName());
+    if (doAsync) {
+      splitShard.setAsyncId("SPLIT4");
+    }
     splitShard.process(client);
     waitForState("Timed out waiting for sub shards to be active.",
         COLLECTION_NAME, activeClusterShape(5, 9));
@@ -252,6 +268,9 @@ public class SplitByPrefixTest extends SolrCloudTestCase {
     splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME)
         .setSplitByPrefix(true)
         .setShardName(slice1.getName());
+    if (doAsync) {
+      splitShard.setAsyncId("SPLIT5");
+    }
     splitShard.process(client);
     waitForState("Timed out waiting for sub shards to be active.",
         COLLECTION_NAME, activeClusterShape(6, 11));
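The async pattern exercised above generalizes beyond SolrJ: the status of an async Collections API request can be polled over plain HTTP. A minimal sketch, assuming a node at `localhost:8983` and reusing the illustrative async ID `SPLIT1` from the test (host, port, and ID are assumptions, not part of this commit):

[source,bash]
----
# Poll the status of an async Collections API request by its ID.
curl 'http://localhost:8983/solr/admin/collections?action=REQUESTSTATUS&requestid=SPLIT1'

# Clean up the stored response once the request has completed.
curl 'http://localhost:8983/solr/admin/collections?action=DELETESTATUS&requestid=SPLIT1'
----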

@@ -47,6 +47,9 @@ import org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker;
 import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.util.DiskChecker;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.LuceneTestCase;
@@ -159,6 +162,8 @@ public class HdfsTestUtil {
 
     if (!HA_TESTING_ENABLED) haTesting = false;
 
+    DefaultMetricsSystem.setInstance(new FakeMetricsSystem());
+
     Configuration conf = getBasicConfiguration(new Configuration());
     conf.set("hdfs.minidfs.basedir", dir + File.separator + "hdfsBaseDir");
     conf.set("dfs.namenode.name.dir", dir + File.separator + "nameNodeNameDir");
@@ -374,4 +379,15 @@ public class HdfsTestUtil {
       super(pool);
     }
   }
+
+  /**
+   * Ensures that we don't try to initialize metrics and read files outside
+   * the source tree.
+   */
+  public static class FakeMetricsSystem extends MetricsSystemImpl {
+    @Override
+    public synchronized MetricsSystem init(String prefix) {
+      return this;
+    }
+  }
 }

@@ -17,9 +17,6 @@
 
 package org.apache.solr.filestore;
 
-import static org.apache.solr.common.util.Utils.JAVABINCONSUMER;
-import static org.apache.solr.core.TestDynamicLoading.getFileContent;
-
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -30,6 +27,7 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.function.Predicate;
 
+import com.google.common.collect.ImmutableSet;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
@@ -50,12 +48,14 @@ import org.apache.solr.util.LogLevel;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.server.ByteBufferInputStream;
 
-import com.google.common.collect.ImmutableSet;
+import static org.apache.solr.common.util.Utils.JAVABINCONSUMER;
+import static org.apache.solr.core.TestDynamicLoading.getFileContent;
 
 @LogLevel("org.apache.solr.filestore.PackageStoreAPI=DEBUG;org.apache.solr.filestore.DistribPackageStore=DEBUG")
 public class TestDistribPackageStore extends SolrCloudTestCase {
 
   public void testPackageStoreManagement() throws Exception {
+    System.setProperty("enable.packages", "true");
     MiniSolrCloudCluster cluster =
         configureCluster(4)
             .withJettyConfig(jetty -> jetty.enableV2(true))

@@ -32,6 +32,7 @@ import org.apache.lucene.util.Constants;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreStatus;
@@ -190,12 +191,12 @@ public class CoreAdminHandlerTest extends SolrTestCaseJ4 {
             CoreAdminParams.CoreAdminAction.STATUS.toString(),
             CoreAdminParams.CORE, "bogus_dir_core"),
         resp);
-    Map<String,Exception> failures =
+    @SuppressWarnings("unchecked")
+    Map<String,Exception> failures =
         (Map<String,Exception>) resp.getValues().get("initFailures");
     assertNotNull("core failures is null", failures);
 
-    NamedList status = (NamedList)resp.getValues().get("status");
+    NamedList<Object> status =
+        (NamedList<Object>)resp.getValues().get("status");
     assertNotNull("core status is null", status);
 
     assertEquals("wrong number of core failures", 1, failures.size());
@@ -338,7 +339,7 @@ public class CoreAdminHandlerTest extends SolrTestCaseJ4 {
       req.process(client);
     }
 
-    HttpSolrClient.RemoteSolrException rse = expectThrows(HttpSolrClient.RemoteSolrException.class, () -> {
+    BaseHttpSolrClient.RemoteSolrException rse = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> {
       try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl() + "/corex", DEFAULT_CONNECTION_TIMEOUT,
           DEFAULT_CONNECTION_TIMEOUT * 1000)) {
         client.query(new SolrQuery("id:*"));
@@ -346,7 +347,7 @@ public class CoreAdminHandlerTest extends SolrTestCaseJ4 {
       runner.stop();
     }
     });
-    assertTrue(rse.getMessage(), rse.getMessage().contains("Problem accessing /solr/corex/select"));
+    assertEquals("Should have received a 404 error", 404, rse.code());
   }
 
   @Test

@@ -135,8 +135,7 @@ public class JWTAuthPluginIntegrationTest extends SolrCloudAuthTestCase {
   @Test
   public void infoRequestValidateXSolrAuthHeaders() throws IOException {
     Map<String, String> headers = getHeaders(baseUrl + "/admin/info/system", null);
-    assertEquals("401", headers.get("code"));
-    assertEquals("HTTP/1.1 401 Require authentication", headers.get(null));
+    assertEquals("Should have received 401 code", "401", headers.get("code"));
     assertEquals("Bearer realm=\"my-solr-jwt\"", headers.get("WWW-Authenticate"));
     String authData = new String(Base64.base64ToByteArray(headers.get("X-Solr-AuthData")), UTF_8);
     assertEquals("{\n" +
@@ -1 +0,0 @@
-da335ee2e7d1439dcc7e11e89941edfad91e9e10

@@ -0,0 +1 @@
+2f043b2b3b9d27c17f2a067521dfb69b41fea1b8

@@ -1 +0,0 @@
-3b8c59c68d52a3d0de0d53f5b3588be3a5c05fb8

@@ -0,0 +1 @@
+81ac98f3be6a902e39e3f48496c9790dd02d4950

@@ -1 +0,0 @@
-3aaf2c8c9c781f10d4d9da6120c5195b2fcb2ad9

@@ -0,0 +1 @@
+916f481032995159d062ffc44f566891872d8a07

@@ -1 +0,0 @@
-95f58cd0cfa0c4553fc3901138cc6a03ece23b94

@@ -0,0 +1 @@
+cde2b06c3134600309061a84759d1ef9087a7348

@@ -1 +0,0 @@
-672891a1abbeef85192d137192e347872a6fc9c3

@@ -0,0 +1 @@
+41365c22bc6046af6e1e10f1be0b4dbfe49902be

@@ -1 +0,0 @@
-8c9283b8a04056a0fced23fc474e62aa39764c6b

@@ -0,0 +1 @@
+4aa2da175202a62d62850ade7fb26a64fd451bc2

@@ -1 +0,0 @@
-37eff0bd068adca090e14a0fbd9de258a871f9d9

@@ -0,0 +1 @@
+d3e23487151f5393bdcef5449407c5ce29718cdc

@@ -1 +0,0 @@
-7e7f62c2c03b74e59211eeeba0ddc067ad422ff7

@@ -0,0 +1 @@
+7a999fbfb9905465c1494052b612b4a4bbb349b7

@@ -1 +0,0 @@
-5dd67dacaf1eed80ab95493da840dab35c22ce9c

@@ -0,0 +1 @@
+aaacd77f8073e98f8400d042e70538623b3924ed

@@ -1 +0,0 @@
-4386c1f243042e0f78f2e4c3c6cd239967410d6e

@@ -0,0 +1 @@
+09dd286abe644513305864453030e9c1631b5535

@@ -1 +0,0 @@
-4acddfa41f45790e43fe4be257c3c4bcf6b846ff

@@ -0,0 +1 @@
+69125cf74b07f1b9d60b5c94da47cb04c098f654

@@ -1 +0,0 @@
-8b350466ff1fcb7030a7abc152eed458e086fac2

@@ -0,0 +1 @@
+12d71fe6d671c635f1ae6fd3c31bc6578a293c4b

@@ -1 +0,0 @@
-b59ff8ecb0cf5d6234958f2404eabf0b72464e14

@@ -0,0 +1 @@
+d3f0b0fb016ef8d35ffb199d928ffbcbfa121c86

@@ -1 +0,0 @@
-7eb9a6be62d84e1691e5fdc99223e632485619a8

@@ -0,0 +1 @@
+dcb6d4d505ef74898e3a64a38c40195c01e97119

@@ -1 +0,0 @@
-8dc81acdc4d3085c0b5f3c80b9a78cc9cb48bc4e

@@ -0,0 +1 @@
+22be18a055850a6cf3b0efd56c789c3929c87e98

@@ -1 +0,0 @@
-9b830886bd6098c613ed08d99574bbf300519506

@@ -0,0 +1 @@
+7990b0f4e8cafe99b47148df9aa3276af336d4d5

@@ -1 +0,0 @@
-bfe96e1e78719bdd446e063c3f45c132010237ce

@@ -0,0 +1 @@
+9fa640d36c088cf55843900043d28aef830ade4d

@@ -1 +0,0 @@
-55786f6e6649bd49425a7da1ac72cd85b8dd4bef

@@ -0,0 +1 @@
+7885cc3d5d7701a444acada7ab97f89846514875

@@ -1 +0,0 @@
-89e25610b3199fdf34a831c1b306f7e765928959

@@ -0,0 +1 @@
+ca1803fde51b795c0a8346ca8bc6277d9d04d01d

@@ -1 +0,0 @@
-b290c176abe2cd9274b9f794bf74497c4759359d

@@ -0,0 +1 @@
+b7bb7913d7583ee8d877f1c20feeb0905f342ad5

@@ -1 +0,0 @@
-2fd3cd40279280e8c56241f753d2c52d8d446d19

@@ -0,0 +1 @@
+3095acb088f4ff9e3fd9aedf98db73e3c18ea849

@@ -1 +0,0 @@
-945fc0c0fa69504c194e32c5330afa1df0be9574

@@ -0,0 +1 @@
+968d70676fa16b3d62487987624dd4e9ce5db123

@@ -1 +0,0 @@
-d25e67fbe0809cae777065b75b10ecfb5c1bd749

@@ -0,0 +1 @@
+a5da43f9b72d2208b8f4d22ba0a16d176b328b91

@@ -1 +0,0 @@
-b81ef162970cdb9f4512ee2da09715a856ff4c4c

@@ -0,0 +1 @@
+bba231bbf3024c19e75622ec168821cbbd4261a4

@@ -1 +0,0 @@
-76e8758356373d5aed5abacbda429b38f6e8fa98

@@ -0,0 +1 @@
+c98a2821eeb9193001c131a6d742a8f4e67e3b10

@@ -1 +0,0 @@
-87f3b49a7377e56f62046875d394ed0028b37690

@@ -0,0 +1 @@
+96b3aefcd0544de7e0e1f72990968b48c3d04dd1

@@ -1 +0,0 @@
-d7be4ddd7ba674ee8be1d23d883fb3ca68ee1d54

@@ -0,0 +1 @@
+73a242395eadde5bc4cd16a43230531dccb3d0eb

@@ -1 +0,0 @@
-09f6f1e6c1db440d9ad4c3114f17be40f66bb399

@@ -0,0 +1 @@
+01c23f7c463f8f4e83209d83b0549cea5d51ec1c

@@ -1 +1 @@
-6ab950be264e74803f12ba43ee5db434a74e6c0c
+ddb54190e858875fb681a6b9dd630a3609eaa513

@@ -18,6 +18,12 @@
 
 In SolrCloud mode, custom plugins need to be shared across all nodes of the cluster.
 
+.Deprecated
+[IMPORTANT]
+====
+The functionality here is a subset of the <<package-manager.adoc#package-manager,Package Management>> system. It will no longer be supported in Solr 9.
+====
+
 When running Solr in SolrCloud mode and you want to use custom code (such as custom analyzers, tokenizers, query parsers, and other plugins), it can be cumbersome to add jars to the classpath on all nodes in your cluster. Using the <<blob-store-api.adoc#blob-store-api,Blob Store API>> and special commands with the <<config-api.adoc#config-api,Config API>>, you can upload jars to a special system-level collection and dynamically load plugins from them at runtime without needing to restart any nodes.
 
 .This Feature is Disabled By Default
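As a sketch of the Blob Store workflow described above (the jar name, blob name, and host are illustrative assumptions, not part of this commit), uploading a plugin jar into the special `.system` collection looks roughly like this:

[source,bash]
----
# Upload a custom plugin jar as a blob into the .system collection
# (assumes the blob store has been enabled and .system exists).
curl -X POST -H 'Content-Type: application/octet-stream' \
  --data-binary @myplugin.jar \
  'http://localhost:8983/solr/.system/blob/myplugin'
----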

@@ -33,12 +33,9 @@ Since the Analytics framework is a _search component_, it must be declared as su
 For distributed analytics requests over cloud collections, the component uses the `AnalyticsHandler` strictly for inter-shard communication.
 The Analytics Handler should not be used by users to submit analytics requests.
 
-To configure Solr to use the Analytics Component, the first step is to add a `<lib/>` directive so Solr loads the Analytic Component classes (for more about the `<lib/>` directive, see <<resource-and-plugin-loading.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>). In the section of `solrconfig.xml` where the default `<lib/>` directives are, add a line:
+To use the Analytics Component, the first step is to install this contrib module's plugins into Solr -- see the <<solr-plugins.adoc#installing-plugins,Solr Plugins>> section on how to do this.
 
-[source,xml]
-<lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-analytics-\d.*\.jar" />
-
-Next you need to enable the request handler and search component. Add the following lines to `solrconfig.xml`, near the defintions for other request handlers:
+Next you need to register the request handler and search component. Add the following lines to `solrconfig.xml`, near the definitions for other request handlers:
 
 [source,xml]
 .solrconfig.xml

@@ -16,17 +16,24 @@
 // specific language governing permissions and limitations
 // under the License.
 
-On a multicore Solr instance, you may find that you want to share configuration between a number of different cores. You can achieve this using named configsets, which are essentially shared configuration directories stored under a configurable configset base directory.
+Configsets are a set of configuration files used in a Solr installation: `solrconfig.xml`, the schema, and then <<resource-loading.adoc#resource-loading,resources>> like language files, `synonyms.txt`, DIH-related configuration, and others that are referenced from the config or schema.
 
-Configsets are made up of the configuration files used in a Solr installation: inclduding `solrconfig.xml`, the schema, language-files, `synonyms.txt`, DIH-related configuration, and others as needed for your implementation.
+Such configuration, _configsets_, can be named and then referenced by collections or cores, possibly with the intent to share them to avoid duplication.
 
 Solr ships with two example configsets located in `server/solr/configsets`, which can be used as a base for your own. These example configsets are named `_default` and `sample_techproducts_configs`.
 
 == Configsets in Standalone Mode
 
-If you are using Solr in standalone mode, configsets are created on the filesystem.
+If you are using Solr in standalone mode, configsets are managed on the filesystem.
 
-To create a configset, add a new directory under the configset base directory. The configset will be identified by the name of this directory. Then into this copy the configuration directory you want to share. The structure should look something like this:
+Each Solr core can have its very own configSet located beneath it in a `<instance_dir>/conf/` dir.
+Here, it is not named or shared and the word _configset_ isn't found.
+In Solr's early years, this was _the only way_ it was configured.
+
+To create a named configset, add a new directory under the configset base directory.
+The configset will be identified by the name of this directory.
+Then add a `conf/` directory containing the configuration you want to share.
+The structure should look something like this:
 
 [source,bash]
 ----

@@ -76,4 +83,16 @@ curl -v -X POST -H 'Content-type: application/json' -d '{
 
 == Configsets in SolrCloud Mode
 
-In SolrCloud mode, you can use the <<configsets-api.adoc#configsets-api,Configsets API>> to manage your configsets.
+In SolrCloud, it's critical to understand that configsets are fundamentally stored in ZooKeeper _and not_ the file system.
+Solr's `_default` configset is uploaded to ZooKeeper on initialization.
+This and some demonstration ones remain on the file system but Solr does not use them whatsoever in this mode.
+
+When you create a collection in SolrCloud, you can specify a named configset -- possibly shared.
+If you don't, then the `_default` will be copied and given a unique name for use by this collection.
+
+A configset can be uploaded to ZooKeeper either via the <<configsets-api.adoc#configsets-api,Configsets API>> or more directly via <<solr-control-script-reference.adoc#upload-a-configuration-set,`bin/solr zk upconfig`>>.
+The Configsets API has some other operations as well, and likewise, so does the CLI.
+
+To upload a file to a configset already stored on ZooKeeper, you can use <<solr-control-script-reference.adoc#copy-between-local-files-and-zookeeper-znodes,`bin/solr zk cp`>>.
+
+CAUTION: By default, ZooKeeper's file size limit is 1MB. If your files are larger than this, you'll need to either <<setting-up-an-external-zookeeper-ensemble.adoc#increasing-the-file-size-limit,increase the ZooKeeper file size limit>> or store them instead <<libs.adoc#lib-directives-in-solrconfig,on the filesystem>>.
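A minimal sketch of the two upload paths mentioned above, assuming ZooKeeper at `localhost:2181` (the configset name and file paths are illustrative assumptions, not part of this commit):

[source,bash]
----
# Upload a local configuration directory to ZooKeeper as the named configset "myconfig".
bin/solr zk upconfig -n myconfig -d /path/to/myconfig/conf -z localhost:2181

# Copy a single extra file into a configset already stored in ZooKeeper.
bin/solr zk cp file:/path/to/stopwords_extra.txt zk:/configs/myconfig/stopwords_extra.txt -z localhost:2181
----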

@@ -1,5 +1,15 @@
 = Configuring solrconfig.xml
-:page-children: datadir-and-directoryfactory-in-solrconfig, resource-and-plugin-loading, schema-factory-definition-in-solrconfig, indexconfig-in-solrconfig, requesthandlers-and-searchcomponents-in-solrconfig, initparams-in-solrconfig, updatehandlers-in-solrconfig, query-settings-in-solrconfig, requestdispatcher-in-solrconfig, update-request-processors, codec-factory
+:page-children: datadir-and-directoryfactory-in-solrconfig, \
+    schema-factory-definition-in-solrconfig, \
+    indexconfig-in-solrconfig, \
+    requesthandlers-and-searchcomponents-in-solrconfig, \
+    initparams-in-solrconfig, \
+    updatehandlers-in-solrconfig, \
+    query-settings-in-solrconfig, \
+    requestdispatcher-in-solrconfig, \
+    update-request-processors, \
+    codec-factory
 
 // Licensed to the Apache Software Foundation (ASF) under one
 // or more contributor license agreements.  See the NOTICE file
 // distributed with this work for additional information

@@ -38,7 +48,6 @@ The `solrconfig.xml` file is located in the `conf/` directory for each collectio
 We've covered the options in the following sections:
 
 * <<datadir-and-directoryfactory-in-solrconfig.adoc#datadir-and-directoryfactory-in-solrconfig,DataDir and DirectoryFactory in SolrConfig>>
-* <<resource-and-plugin-loading.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>
 * <<schema-factory-definition-in-solrconfig.adoc#schema-factory-definition-in-solrconfig,Schema Factory Definition in SolrConfig>>
 * <<indexconfig-in-solrconfig.adoc#indexconfig-in-solrconfig,IndexConfig in SolrConfig>>
 * <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#requesthandlers-and-searchcomponents-in-solrconfig,RequestHandlers and SearchComponents in SolrConfig>>

@@ -49,6 +58,9 @@ We've covered the options in the following sections:
 * <<update-request-processors.adoc#update-request-processors,Update Request Processors>>
 * <<codec-factory.adoc#codec-factory,Codec Factory>>
 
+Some SolrConfig aspects are covered in other sections.
+See <<libs.adoc#lib-directives-in-solrconfig,lib directives in SolrConfig>>, which can be used for both Plugins and Resources.
+
 == Substituting Properties in Solr Config Files
 
 Solr supports variable substitution of property values in configuration files, which allows runtime specification of various configuration options in `solrconfig.xml`. The syntax is `${propertyname[:option default value]}`. This allows defining a default that can be overridden when Solr is launched. If a default value is not specified, then the property _must_ be specified at runtime or the configuration file will generate an error when parsed.
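To make the substitution mechanics concrete, here is a small sketch. It assumes the `${solr.lock.type:native}` placeholder that the shipped example `solrconfig.xml` defines; the chosen value is illustrative, not part of this commit:

[source,bash]
----
# A ${propertyname:default} placeholder in solrconfig.xml picks up a system
# property supplied at startup; without the -D flag, the default applies.
bin/solr start -Dsolr.lock.type=none
----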

@@ -80,7 +80,7 @@ Here is an example of a minimal OpenNLP `langid` configuration in `solrconfig.xm
 ==== OpenNLP-specific Parameters
 
 `langid.model`::
-An OpenNLP language detection model. The OpenNLP project provides a pre-trained 103 language model on the http://opennlp.apache.org/models.html[OpenNLP site's model dowload page]. Model training instructions are provided on the http://opennlp.apache.org/docs/{ivy-opennlp-version}/manual/opennlp.html#tools.langdetect[OpenNLP website]. This parameter is required. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for information on where to put the model.
+An OpenNLP language detection model. The OpenNLP project provides a pre-trained 103 language model on the http://opennlp.apache.org/models.html[OpenNLP site's model download page]. Model training instructions are provided on the http://opennlp.apache.org/docs/{ivy-opennlp-version}/manual/opennlp.html#tools.langdetect[OpenNLP website]. This parameter is required. See <<resource-loading.adoc#resource-loading,Resource Loading>> for information on where to put the model.
 
 ==== OpenNLP Language Codes

@@ -732,7 +732,7 @@ Note that for this filter to work properly, the upstream tokenizer must not remo
 
 This filter is a custom Unicode normalization form that applies the foldings specified in http://www.unicode.org/reports/tr30/tr30-4.html[Unicode TR #30: Character Foldings] in addition to the `NFKC_Casefold` normalization form as described in <<ICU Normalizer 2 Filter>>. This filter is a better substitute for the combined behavior of the <<ASCII Folding Filter>>, <<Lower Case Filter>>, and <<ICU Normalizer 2 Filter>>.
 
-To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
+To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 *Factory class:* `solr.ICUFoldingFilterFactory`

@@ -840,7 +840,7 @@ This filter factory normalizes text according to one of five Unicode Normalizati
 
 For detailed information about these normalization forms, see http://unicode.org/reports/tr15/[Unicode Normalization Forms].
 
-To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
+To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 == ICU Transform Filter

@@ -882,7 +882,7 @@ This filter applies http://userguide.icu-project.org/transforms/general[ICU Tran
 
 For detailed information about ICU Transforms, see http://userguide.icu-project.org/transforms/general.
 
-To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
+To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 == Keep Word Filter
|
||||||
|
|
||||||
*Arguments:*
|
*Arguments:*
|
||||||
|
|
||||||
`synonyms`:: (required) The path of a file that contains a list of synonyms, one per line. In the (default) `solr` format - see the `format` argument below for alternatives - blank lines and lines that begin with "`#`" are ignored. This may be a comma-separated list of paths. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
|
`synonyms`:: (required) The path of a file that contains a list of synonyms, one per line. In the (default) `solr` format - see the `format` argument below for alternatives - blank lines and lines that begin with "`#`" are ignored. This may be a comma-separated list of paths. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information.
|
||||||
+
|
+
|
||||||
There are two ways to specify synonym mappings:
|
There are two ways to specify synonym mappings:
|
||||||
+
|
+
|
||||||
|
|
|
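For reference, a minimal `synonyms.txt` in the default `solr` format mentioned above might look like this (the entries are illustrative assumptions, not part of this commit):

[source,bash]
----
# Create a sample synonyms.txt: equivalent terms grouped on one line,
# and an explicit one-way mapping using "=>".
cat > synonyms.txt <<'EOF'
couch,sofa,divan
i-pod, i pod => ipod
EOF
----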

@@ -1,5 +1,24 @@
 = Apache Solr Reference Guide
-:page-children: about-this-guide, getting-started, deployment-and-operations, using-the-solr-administration-user-interface, documents-fields-and-schema-design, understanding-analyzers-tokenizers-and-filters, indexing-and-basic-data-operations, searching, streaming-expressions, solrcloud, legacy-scaling-and-distribution, the-well-configured-solr-instance, monitoring-solr, securing-solr, client-apis, further-assistance, solr-glossary, errata, how-to-contribute
+:page-children: about-this-guide, \
+    getting-started, \
+    deployment-and-operations, \
+    using-the-solr-administration-user-interface, \
+    documents-fields-and-schema-design, \
+    understanding-analyzers-tokenizers-and-filters, \
+    indexing-and-basic-data-operations, \
+    searching, \
+    streaming-expressions, \
+    solrcloud, \
+    legacy-scaling-and-distribution, \
+    solr-plugins, \
+    the-well-configured-solr-instance, \
+    monitoring-solr, \
+    securing-solr, \
+    client-apis, \
+    further-assistance, \
+    solr-glossary, \
+    errata, \
+    how-to-contribute
 :page-notitle:
 :page-toc: false
 :page-layout: home

@@ -166,7 +166,7 @@ Compound words are most commonly found in Germanic languages.
 
 *Arguments:*
 
-`dictionary`:: (required) The path of a file that contains a list of simple words, one per line. Blank lines and lines that begin with "#" are ignored. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
+`dictionary`:: (required) The path of a file that contains a list of simple words, one per line. Blank lines and lines that begin with "#" are ignored. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information.
 
 `minWordSize`:: (integer, default 5) Any token shorter than this is not decompounded.

@@ -220,7 +220,7 @@ Unicode Collation in Solr is fast, because all the work is done at index time.
 
 Rather than specifying an analyzer within `<fieldtype ... class="solr.TextField">`, the `solr.CollationField` and `solr.ICUCollationField` field type classes provide this functionality. `solr.ICUCollationField`, which is backed by http://site.icu-project.org[the ICU4J library], provides more flexible configuration, has more locales, is significantly faster, and requires less memory and less index space, since its keys are smaller than those produced by the JDK implementation that backs `solr.CollationField`.
 
-To use `solr.ICUCollationField`, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
+To use `solr.ICUCollationField`, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 `solr.ICUCollationField` and `solr.CollationField` fields can be created in two ways:

@@ -487,7 +487,7 @@ The `lucene/analysis/opennlp` module provides OpenNLP integration via several an
 
 NOTE: The <<OpenNLP Tokenizer>> must be used with all other OpenNLP analysis components, for two reasons: first, the OpenNLP Tokenizer detects and marks the sentence boundaries required by all the OpenNLP filters; and second, since the pre-trained OpenNLP models used by these filters were trained using the corresponding language-specific sentence-detection/tokenization models, the same tokenization, using the same models, must be used at runtime for optimal performance.
 
-To use the OpenNLP components, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
+To use the OpenNLP components, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 === OpenNLP Tokenizer

@@ -497,9 +497,9 @@ The OpenNLP Tokenizer takes two language-specific binary model files as paramete
 
 *Arguments:*
 
-`sentenceModel`:: (required) The path of a language-specific OpenNLP sentence detection model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
+`sentenceModel`:: (required) The path of a language-specific OpenNLP sentence detection model file. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information.
 
-`tokenizerModel`:: (required) The path of a language-specific OpenNLP tokenization model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
+`tokenizerModel`:: (required) The path of a language-specific OpenNLP tokenization model file. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information.
 
 *Example:*

@@ -541,7 +541,7 @@ NOTE: Lucene currently does not index token types, so if you want to keep this i
 
 *Arguments:*
 
-`posTaggerModel`:: (required) The path of a language-specific OpenNLP POS tagger model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
+`posTaggerModel`:: (required) The path of a language-specific OpenNLP POS tagger model file. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information.
 
 *Examples:*

@@ -636,7 +636,7 @@ NOTE: Lucene currently does not index token types, so if you want to keep this i
 
 *Arguments:*
 
-`chunkerModel`:: (required) The path of a language-specific OpenNLP phrase chunker model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
+`chunkerModel`:: (required) The path of a language-specific OpenNLP phrase chunker model file. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information.
 
 *Examples*:

@@ -700,9 +700,9 @@ This filter replaces the text of each token with its lemma. Both a dictionary-ba
 
 Either `dictionary` or `lemmatizerModel` must be provided, and both may be provided - see the examples below:
 
-`dictionary`:: (optional) The path of a lemmatization dictionary file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information. The dictionary file must be encoded as UTF-8, with one entry per line, in the form `word[tab]lemma[tab]part-of-speech`, e.g., `wrote[tab]write[tab]VBD`.
+`dictionary`:: (optional) The path of a lemmatization dictionary file. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information. The dictionary file must be encoded as UTF-8, with one entry per line, in the form `word[tab]lemma[tab]part-of-speech`, e.g., `wrote[tab]write[tab]VBD`.
 
-`lemmatizerModel`:: (optional) The path of a language-specific OpenNLP lemmatizer model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
+`lemmatizerModel`:: (optional) The path of a language-specific OpenNLP lemmatizer model file. See <<resource-loading.adoc#resource-loading,Resource Loading>> for more information.
 
 *Examples:*

@@ -1033,7 +1033,7 @@ Solr can stem Catalan using the Snowball Porter Stemmer with an argument of `lan
 
 === Traditional Chinese
 
-The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is suitable for Traditional Chinese text. It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
+The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is suitable for Traditional Chinese text. It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
 
 <<tokenizers.adoc#standard-tokenizer,Standard Tokenizer>> can also be used to tokenize Traditional Chinese text. Following the Word Break rules from the Unicode Text Segmentation algorithm, it produces one token per Chinese character. When combined with <<CJK Bigram Filter>>, overlapping bigrams of Chinese characters are formed.
@ -1105,9 +1105,9 @@ See the example under <<Traditional Chinese>>.
|
||||||
|
|
||||||
=== Simplified Chinese

For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the <<HMM Chinese Tokenizer>>. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the <<HMM Chinese Tokenizer>>. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.

The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is also suitable for Simplified Chinese text. It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is also suitable for Simplified Chinese text. It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
Also useful for Chinese analysis:
@ -1162,7 +1162,7 @@ Also useful for Chinese analysis:
=== HMM Chinese Tokenizer

For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the `solr.HMMChineseTokenizerFactory` in the `analysis-extras` contrib module. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the `solr.HMMChineseTokenizerFactory` in the `analysis-extras` contrib module. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
*Factory class:* `solr.HMMChineseTokenizerFactory`
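As a minimal sketch (the field type name is hypothetical), the factory is used like any other tokenizer factory:

[source,xml]
----
<fieldType name="text_zh_smart" class="solr.TextField">
  <analyzer>
    <!-- dictionary-driven HMM segmentation of Simplified Chinese -->
    <tokenizer class="solr.HMMChineseTokenizerFactory"/>
  </analyzer>
</fieldType>
----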
@ -1958,7 +1958,7 @@ Example:
[[hebrew-lao-myanmar-khmer]]
=== Hebrew, Lao, Myanmar, Khmer
Lucene provides support, in addition to UAX#29 word break rules, for Hebrew's use of the double and single quote characters, and for segmenting Lao, Myanmar, and Khmer into syllables with the `solr.ICUTokenizerFactory` in the `analysis-extras` contrib module. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
Lucene provides support, in addition to UAX#29 word break rules, for Hebrew's use of the double and single quote characters, and for segmenting Lao, Myanmar, and Khmer into syllables with the `solr.ICUTokenizerFactory` in the `analysis-extras` contrib module. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
See <<tokenizers.adoc#icu-tokenizer,the ICUTokenizer>> for more information.
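A minimal sketch of such an analyzer (the field type name is hypothetical; the tokenizer's default configuration is assumed):

[source,xml]
----
<fieldType name="text_icu" class="solr.TextField">
  <analyzer>
    <!-- UAX#29 word breaks plus the script-specific handling noted above -->
    <tokenizer class="solr.ICUTokenizerFactory"/>
  </analyzer>
</fieldType>
----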
@ -2165,7 +2165,7 @@ Solr includes support for normalizing Persian, and Lucene includes an example st
=== Polish
Solr provides support for Polish stemming with the `solr.StempelPolishStemFilterFactory`, and `solr.MorfologikFilterFactory` for lemmatization, in the `contrib/analysis-extras` module. The `solr.StempelPolishStemFilterFactory` component includes an algorithmic stemmer with tables for Polish. To use either of these filters, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
Solr provides support for Polish stemming with the `solr.StempelPolishStemFilterFactory`, and `solr.MorfologikFilterFactory` for lemmatization, in the `contrib/analysis-extras` module. The `solr.StempelPolishStemFilterFactory` component includes an algorithmic stemmer with tables for Polish. To use either of these filters, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
*Factory class:* `solr.StempelPolishStemFilterFactory` and `solr.MorfologikFilterFactory`
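As an illustrative sketch (the field type name is hypothetical), the Stempel stemmer slots into an analyzer chain like any other token filter:

[source,xml]
----
<fieldType name="text_pl" class="solr.TextField">
  <analyzer>
    <tokenizer class="solr.StandardTokenizerFactory"/>
    <filter class="solr.LowerCaseFilterFactory"/>
    <!-- algorithmic stemming backed by tables for Polish -->
    <filter class="solr.StempelPolishStemFilterFactory"/>
  </analyzer>
</fieldType>
----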
@ -2682,7 +2682,7 @@ Solr includes support for stemming Turkish with the `solr.SnowballPorterFilterFa
=== Ukrainian
Solr provides support for Ukrainian lemmatization with the `solr.MorfologikFilterFactory`, in the `contrib/analysis-extras` module. To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
Solr provides support for Ukrainian lemmatization with the `solr.MorfologikFilterFactory`, in the `contrib/analysis-extras` module. To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<solr-plugins.adoc#installing-plugins,Solr Plugins>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
Lucene also includes an example Ukrainian stopword list, in the `lucene-analyzers-morfologik` jar.
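A sketch of how the lemmatizer might be wired up (the field type name is hypothetical, and the `dictionary` value assumes the Ukrainian dictionary resource shipped with the `lucene-analyzers-morfologik` jar):

[source,xml]
----
<fieldType name="text_uk" class="solr.TextField">
  <analyzer>
    <tokenizer class="solr.StandardTokenizerFactory"/>
    <filter class="solr.LowerCaseFilterFactory"/>
    <!-- dictionary-based lemmatization; dictionary path is an assumption -->
    <filter class="solr.MorfologikFilterFactory"
            dictionary="org/apache/lucene/analysis/uk/ukrainian.dict"/>
  </analyzer>
</fieldType>
----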
@ -533,7 +533,7 @@ Assuming that you want to use a large model placed at `/path/to/models/myMod
}
----
First, add the directory to Solr's resource paths with a <<resource-and-plugin-loading.adoc#lib-directives-in-solrconfig,`<lib/>` directive>>:
First, add the directory to Solr's resource paths with a <<libs.adoc#lib-directives-in-solrconfig,`<lib/>` directive>>:
[source,xml]
----
@ -0,0 +1,78 @@
= Lib Directories and Directives

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

Here we describe two simple and effective methods to make the `.jar` files for Solr plugins visible to Solr.

Such files are sometimes called "libraries" or "libs" for short.
Essentially, you can either put them in certain special places or explicitly tell Solr about them in your config.

If there are overlaps or inter-dependencies between libraries, pay attention to the order: you can think of it as a stack that is searched top-down. At the top are the `<lib/>` directives in reverse order, then the Solr core's lib directory, then the Solr home lib directory, then Solr itself.
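For instance (a minimal sketch; the directory names and the duplicated class are hypothetical), if two directives both match jars providing the same class, the later directive wins because it sits higher on the stack:

[source,xml]
----
<!-- Assume both directories contain a jar defining com.example.MyFilterFactory.
     Lib directives are searched in reverse order, ahead of the core and
     Solr home lib directories, so the copy matched by the second
     directive below is the one that gets loaded. -->
<lib dir="./plugins-v1" regex=".*\.jar" />
<lib dir="./plugins-v2" regex=".*\.jar" />
----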

== Lib Directories

There are several special places where you can put Solr plugin `.jar` files:

* `<solr_home>/lib/`: The `.jar` files placed here are available to all Solr cores running on the node, and to node-level plugins referenced in `solr.xml` -- so basically everything.
This directory is not present by default, so create it.
See <<taking-solr-to-production.adoc#solr-home-directory,Solr home directory>>.

* `<core_instance>/lib/`: In standalone Solr, you may want to add plugins just for a specific Solr core.
Create this directory adjacent to the `conf/` directory; it's not present by default.

* `<solr_install>/server/solr-webapp/webapp/WEB-INF/lib/`: The `.jar` files for Solr itself and its dependencies live here.
Certain plugins or add-ons to plugins require placement here, and will say so in their documentation.

Solr incorporates Jetty to provide HTTP server functionality.
Jetty has some directories that contain `.jar` files for itself and its own plugins/modules or JVM-level plugins (e.g., loggers).
Solr plugins won't work in these locations.

== Lib Directives in SolrConfig

_Both_ plugin and <<resource-loading.adoc#resource-loading,resource>> file paths are configurable via `<lib/>` directives in `solrconfig.xml`.
When a directive matches a directory, resources can be resolved from it.
When a directive matches a `.jar` file, Solr plugins and their dependencies are resolved from it.
Resources can be placed in a `.jar` too, but that's unusual.
It's an error to refer to any other type of file.

A `<lib/>` directive must have one (not both) of these two attributes:

* `path`: used to refer to a single directory (for resources) or file (for a plugin `.jar`)

* `dir`: used to refer to _all_ direct descendants of the specified directory. Optionally supply a `regex` attribute to filter these to those matching the regular expression (see the sketch below).

All directories are resolved relative to the Solr core's `instanceDir`.
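To make the two attributes concrete (a minimal sketch; the file and directory names are hypothetical):

[source,xml]
----
<!-- path: one specific plugin jar (or one directory, for resources) -->
<lib path="../solr-extra/my-plugin.jar" />
<!-- dir: all direct descendants of the directory, optionally
     narrowed by a regex on the file names -->
<lib dir="../solr-extra/lib" regex=".*\.jar" />
----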
These examples show how to load contrib modules into Solr:
[source,xml]
----
<lib dir="${solr.install.dir:../../../..}/contrib/extraction/lib" regex=".*\.jar" />
<lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-cell-\d.*\.jar" />

<lib dir="${solr.install.dir:../../../..}/contrib/clustering/lib/" regex=".*\.jar" />
<lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-clustering-\d.*\.jar" />

<lib dir="${solr.install.dir:../../../..}/contrib/langid/lib/" regex=".*\.jar" />
<lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-langid-\d.*\.jar" />

<lib dir="${solr.install.dir:../../../..}/contrib/velocity/lib" regex=".*\.jar" />
<lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-velocity-\d.*\.jar" />
<lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-ltr-\d.*\.jar" />
----