commit 42682543eb
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr

@@ -153,6 +153,12 @@ Bug Fixes
* SOLR-11608: Correctly parse the new core-name in the V2 core rename API.
  (Jason Gerlowski via Anshum Gupta)

* SOLR-11256: The queue size for ConcurrentUpdateSolrClient should default to 10 instead of throwing an
  IllegalArgumentException. (Jason Gerlowski, Anshum Gupta)

* SOLR-11616: Snapshot the segments more robustly such that segments created during a backup do not fail the
  operation (Varun Thacker)

Optimizations
----------------------
* SOLR-11285: Refactor autoscaling framework to avoid direct references to Zookeeper and Solr

@@ -82,7 +82,6 @@ import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.core.CloseHook;
import static org.apache.solr.core.Config.assertWarnOrFail;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
@@ -91,10 +90,9 @@ import org.apache.solr.core.SolrDeletionPolicy;
import org.apache.solr.core.SolrEventListener;
import org.apache.solr.core.backup.repository.BackupRepository;
import org.apache.solr.core.backup.repository.LocalFileSystemRepository;
import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager;
import org.apache.solr.handler.IndexFetcher.IndexFetchResult;
import org.apache.solr.metrics.MetricsMap;
import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.handler.IndexFetcher.IndexFetchResult;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.SolrIndexSearcher;
@@ -111,6 +109,7 @@ import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

import static org.apache.solr.common.params.CommonParams.NAME;
import static org.apache.solr.core.Config.assertWarnOrFail;

/**
 * <p> A Handler which provides a REST API for replication and serves replication requests from Slaves. </p>
@@ -530,41 +529,18 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
    return status;
  }

  private void doSnapShoot(SolrParams params, SolrQueryResponse rsp,
      SolrQueryRequest req) {
  private void doSnapShoot(SolrParams params, SolrQueryResponse rsp, SolrQueryRequest req) {
    try {
      int numberToKeep = params.getInt(NUMBER_BACKUPS_TO_KEEP_REQUEST_PARAM, 0);
      if (numberToKeep > 0 && numberBackupsToKeep > 0) {
        throw new SolrException(ErrorCode.BAD_REQUEST, "Cannot use "
            + NUMBER_BACKUPS_TO_KEEP_REQUEST_PARAM + " if "
            + NUMBER_BACKUPS_TO_KEEP_INIT_PARAM
            + " was specified in the configuration.");
        throw new SolrException(ErrorCode.BAD_REQUEST, "Cannot use " + NUMBER_BACKUPS_TO_KEEP_REQUEST_PARAM +
            " if " + NUMBER_BACKUPS_TO_KEEP_INIT_PARAM + " was specified in the configuration.");
      }
      numberToKeep = Math.max(numberToKeep, numberBackupsToKeep);
      if (numberToKeep < 1) {
        numberToKeep = Integer.MAX_VALUE;
      }

      IndexCommit indexCommit = null;
      String commitName = params.get(CoreAdminParams.COMMIT_NAME);
      if (commitName != null) {
        SolrSnapshotMetaDataManager snapshotMgr = core.getSnapshotMetaDataManager();
        Optional<IndexCommit> commit = snapshotMgr.getIndexCommitByName(commitName);
        if (commit.isPresent()) {
          indexCommit = commit.get();
        } else {
          throw new SolrException(ErrorCode.BAD_REQUEST, "Unable to find an index commit with name " + commitName +
              " for core " + core.getName());
        }
      } else {
        IndexDeletionPolicyWrapper delPolicy = core.getDeletionPolicy();
        indexCommit = delPolicy.getLatestCommit();

        if (indexCommit == null) {
          indexCommit = req.getSearcher().getIndexReader().getIndexCommit();
        }
      }

      String location = params.get(CoreAdminParams.BACKUP_LOCATION);
      String repoName = params.get(CoreAdminParams.BACKUP_REPOSITORY);
      CoreContainer cc = core.getCoreContainer();
@@ -586,12 +562,12 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw

      // small race here before the commit point is saved
      URI locationUri = repo.createURI(location);
      String commitName = params.get(CoreAdminParams.COMMIT_NAME);
      SnapShooter snapShooter = new SnapShooter(repo, core, locationUri, params.get(NAME), commitName);
      snapShooter.validateCreateSnapshot();
      snapShooter.createSnapAsync(indexCommit, numberToKeep, (nl) -> snapShootDetails = nl);

      snapShooter.createSnapAsync(numberToKeep, (nl) -> snapShootDetails = nl);
    } catch (Exception e) {
      LOG.warn("Exception during creating a snapshot", e);
      LOG.error("Exception during creating a snapshot", e);
      rsp.add("exception", e);
    }
  }
@@ -1441,7 +1417,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
      }
      SnapShooter snapShooter = new SnapShooter(core, null, null);
      snapShooter.validateCreateSnapshot();
      snapShooter.createSnapAsync(currentCommitPoint, numberToKeep, (nl) -> snapShootDetails = nl);
      snapShooter.createSnapAsync(numberToKeep, (nl) -> snapShootDetails = nl);
    } catch (Exception e) {
      LOG.error("Exception while snapshooting", e);
    }

@@ -43,8 +43,6 @@ import org.apache.solr.core.backup.repository.BackupRepository;
import org.apache.solr.core.backup.repository.BackupRepository.PathType;
import org.apache.solr.core.backup.repository.LocalFileSystemRepository;
import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -151,35 +149,57 @@ public class SnapShooter {
  }

  public NamedList createSnapshot() throws Exception {
    RefCounted<SolrIndexSearcher> searcher = solrCore.getSearcher();
    try {
      if (commitName != null) {
        SolrSnapshotMetaDataManager snapshotMgr = solrCore.getSnapshotMetaDataManager();
        Optional<IndexCommit> commit = snapshotMgr.getIndexCommitByName(commitName);
        if (commit.isPresent()) {
          return createSnapshot(commit.get());
        }
        throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to find an index commit with name " + commitName +
            " for core " + solrCore.getName());
      } else {
        //TODO should we try solrCore.getDeletionPolicy().getLatestCommit() first?
        IndexDeletionPolicyWrapper deletionPolicy = solrCore.getDeletionPolicy();
        IndexCommit indexCommit = searcher.get().getIndexReader().getIndexCommit();
        deletionPolicy.saveCommitPoint(indexCommit.getGeneration());
        try {
          return createSnapshot(indexCommit);
        } finally {
          deletionPolicy.releaseCommitPoint(indexCommit.getGeneration());
        }
    IndexCommit indexCommit;
    if (commitName != null) {
      indexCommit = getIndexCommitFromName();
      return createSnapshot(indexCommit);
    } else {
      indexCommit = getIndexCommit();
      IndexDeletionPolicyWrapper deletionPolicy = solrCore.getDeletionPolicy();
      deletionPolicy.saveCommitPoint(indexCommit.getGeneration());
      try {
        return createSnapshot(indexCommit);
      } finally {
        deletionPolicy.releaseCommitPoint(indexCommit.getGeneration());
      }
    } finally {
      searcher.decref();
    }
  }

  public void createSnapAsync(final IndexCommit indexCommit, final int numberToKeep, Consumer<NamedList> result) {
    solrCore.getDeletionPolicy().saveCommitPoint(indexCommit.getGeneration());
  private IndexCommit getIndexCommit() throws IOException {
    IndexDeletionPolicyWrapper delPolicy = solrCore.getDeletionPolicy();
    IndexCommit indexCommit = delPolicy.getLatestCommit();

    if (indexCommit == null) {
      indexCommit = solrCore.getSearcher().get().getIndexReader().getIndexCommit();
    }
    return indexCommit;
  }

  private IndexCommit getIndexCommitFromName() throws IOException {
    assert commitName != null;
    IndexCommit indexCommit;
    SolrSnapshotMetaDataManager snapshotMgr = solrCore.getSnapshotMetaDataManager();
    Optional<IndexCommit> commit = snapshotMgr.getIndexCommitByName(commitName);
    if (commit.isPresent()) {
      indexCommit = commit.get();
    } else {
      throw new SolrException(ErrorCode.BAD_REQUEST, "Unable to find an index commit with name " + commitName +
          " for core " + solrCore.getName());
    }
    return indexCommit;
  }

  public void createSnapAsync(final int numberToKeep, Consumer<NamedList> result) throws IOException {
    IndexCommit indexCommit;
    if (commitName != null) {
      indexCommit = getIndexCommitFromName();
    } else {
      indexCommit = getIndexCommit();
    }
    createSnapAsync(indexCommit, numberToKeep, result);
  }

  private void createSnapAsync(final IndexCommit indexCommit, final int numberToKeep, Consumer<NamedList> result) {
    //TODO should use Solr's ExecutorUtil
    new Thread(() -> {
      try {
@@ -187,7 +207,7 @@ public class SnapShooter {
      } catch (Exception e) {
        LOG.error("Exception while creating snapshot", e);
        NamedList snapShootDetails = new NamedList<>();
        snapShootDetails.add("snapShootException", e.getMessage());
        snapShootDetails.add("exception", e.getMessage());
        result.accept(snapShootDetails);
      } finally {
        solrCore.getDeletionPolicy().releaseCommitPoint(indexCommit.getGeneration());
@@ -205,6 +225,7 @@ public class SnapShooter {

  // note: remember to reserve the indexCommit first so it won't get deleted concurrently
  protected NamedList createSnapshot(final IndexCommit indexCommit) throws Exception {
    assert indexCommit != null;
    LOG.info("Creating backup snapshot " + (snapshotName == null ? "<not named>" : snapshotName) + " at " + baseSnapDirPath);
    boolean success = false;
    try {

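A minimal caller sketch (illustrative only, not code from this commit), using only constructors and method signatures that appear in the hunks above, and assuming `core`, `location`, `snapshotName`, and `numberToKeep` are already in scope:

  SnapShooter snapShooter = new SnapShooter(core, location, snapshotName);
  snapShooter.validateCreateSnapshot();
  // No IndexCommit is passed in any more: SnapShooter resolves it internally
  // (the named snapshot when commitName is set, otherwise the latest or current commit)
  // and reserves/releases the commit point around the asynchronous snapshot.
  snapShooter.createSnapAsync(numberToKeep, (details) -> { /* e.g., attach the NamedList to a response */ });
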
@@ -20,7 +20,6 @@ package org.apache.solr.handler.admin;
import java.net.URI;
import java.util.Optional;

import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.SolrParams;
@@ -34,11 +33,6 @@ import static org.apache.solr.common.params.CommonParams.NAME;
class BackupCoreOp implements CoreAdminHandler.CoreAdminOp {
  @Override
  public void execute(CoreAdminHandler.CallInfo it) throws Exception {
    ZkController zkController = it.handler.coreContainer.getZkController();
    if (zkController == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Internal SolrCloud API");
    }

    final SolrParams params = it.req.getParams();
    String cname = params.get(CoreAdminParams.CORE);
    if (cname == null) {

@@ -0,0 +1,61 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.handler;

import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestCoreBackup extends SolrTestCaseJ4 {

  @BeforeClass
  public static void beforeClass() throws Exception {
    initCore("solrconfig.xml", "schema.xml");
  }

  @Test
  public void testBackupWithDocsNotSearchable() throws Exception {
    //See SOLR-11616 to see when this issue can be triggered

    assertU(adoc("id", "1"));
    assertU(commit());

    assertU(adoc("id", "2"));

    assertU(commit("openSearcher", "false"));
    assertQ(req("q", "*:*"), "//result[@numFound='1']");

    //call backup
    String location = createTempDir().toFile().getAbsolutePath();
    String snapshotName = TestUtil.randomSimpleString(random(), 1, 5);

    final CoreContainer cores = h.getCoreContainer();
    final CoreAdminHandler admin = new CoreAdminHandler(cores);
    SolrQueryResponse resp = new SolrQueryResponse();
    admin.handleRequestBody
        (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(),
            "core", DEFAULT_TEST_COLLECTION_NAME, "name", snapshotName, "location", location)
            , resp);
    assertNull("Backup should have succeeded", resp.getException());

  }
}

@@ -1,5 +1,4 @@
= About This Guide
:page-toc: false
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
@@ -41,32 +40,36 @@ There are several items in this URL you might need to change locally. First, if

`\http://www.example.com/solr/mycollection/select?q=brown+cow`

== Paths
== Directory Paths

Path information is given relative to `solr.home`, which is the location under the main Solr installation where Solr's collections and their `conf` and `data` directories are stored.

When running the various examples mentioned through out this tutorial (i.e., `bin/solr -e techproducts`) the `solr.home` will be a sub-directory of `example/` created for you automatically.
In many cases, this is is in the `server/solr` directory of your installation. However, there can be exceptions, particularly if your installation has customized this.

In several cases of this Guide, our examples are built from the "techproducts" example (i.e., you have started Solr with the command `bin/solr -e techproducts`). In this case, `solr.home` will be a sub-directory of the `example/` directory created for you automatically.

See also the section <<solr-configuration-files.adoc#solr-home,Solr Home>> for further details on what is contained in this directory.

== API Examples

Solr has two styles of APIs that currently co-exist. The first has grown somewhat organically as Solr has developed over time, but the second, referred to as the "V2 API", redesigns many of the original APIs with a modernized and self-documenting API interface.

In many cases, but not all, the parameters and outputs of API calls are the same between the two styles. In all cases the paths and endpoints used are different.

Throughout this Guide, we have added examples of both styles with sections labeled "V1 API" and "V2 API". As of the 7.2 version of this Guide, these examples are not yet complete - more coverage will be added as future versions of the Guide are released.

The section <<v2-api.adoc#v2-api,V2 API>> provides more information about how to work with the new API structure, including how to disable it if you choose to do so.

== Special Inline Notes

Special notes are included throughout these pages. There are several types of notes:

=== Information Blocks
NOTE: Information blocks provide additional information that's useful for you to know.

NOTE: These provide additional information that's useful for you to know.
IMPORTANT: Important blocks provide information that we want to make sure you are aware of.

=== Important
TIP: Tip blocks provide helpful tips.

IMPORTANT: These provide information that is critical for you to know.
CAUTION: Caution blocks provide details on scenarios or configurations you should be careful with.

=== Tip

TIP: These provide helpful tips.

=== Caution

CAUTION: These provide details on scenarios or configurations you should be careful with.

=== Warning

WARNING: These are meant to warn you from a possibly dangerous change or action.
WARNING: Warning blocks are used to warn you from a possibly dangerous change or action.

@@ -23,7 +23,7 @@ When running Solr in SolrCloud mode and you want to use custom code (such as cus
.This Feature is Disabled By Default
[IMPORTANT]
====
In addition to requiring that Solr by running in <<solrcloud.adoc#solrcloud,SolrCloud>> mode, this feature is also disabled by default unless all Solr nodes are run with the `-Denable.runtime.lib=true` option on startup.
In addition to requiring that Solr is running in <<solrcloud.adoc#solrcloud,SolrCloud>> mode, this feature is also disabled by default unless all Solr nodes are run with the `-Denable.runtime.lib=true` option on startup.

Before enabling this feature, users should carefully consider the issues discussed in the <<Securing Runtime Libraries>> section below.
====
@@ -42,6 +42,12 @@ The following commands are used to manage runtime libs:
* `update-runtimelib`
* `delete-runtimelib`

[.dynamic-tabs]
--
[example.tab-pane#v1manage-libs]
====
[.tab-label]*V1 API*

[source,bash]
----
curl http://localhost:8983/solr/techproducts/config -H 'Content-type:application/json' -d '{
@@ -50,6 +56,22 @@ curl http://localhost:8983/solr/techproducts/config -H 'Content-type:application
  "delete-runtimelib": "jarblobname"
}'
----
====

[example.tab-pane#v2manage-libs]
====
[.tab-label]*V2 API*

[source,bash]
----
curl http://localhost:8983/api/collections/techproducts/config -H 'Content-type:application/json' -d '{
  "add-runtimelib": { "name":"jarblobname", "version":2 },
  "update-runtimelib": { "name":"jarblobname", "version":3 },
  "delete-runtimelib": "jarblobname"
}'
----
====
--

The name to use is the name of the blob that you specified when you uploaded your jar to the blob store. You should also include the version of the jar found in the blob store that you want to use. These details are added to `configoverlay.json`.

@@ -59,6 +81,12 @@ Every pluggable component can have an optional extra attribute called `runtimeLi

This example shows creating a ValueSourceParser using a jar that has been loaded to the Blob store.

[.dynamic-tabs]
--
[example.tab-pane#v1add-jar]
====
[.tab-label]*V1 API*

[source,bash]
----
curl http://localhost:8983/solr/techproducts/config -H 'Content-type:application/json' -d '{
@@ -69,6 +97,24 @@ curl http://localhost:8983/solr/techproducts/config -H 'Content-type:application
    "nvlFloatValue": 0.0 }
}'
----
====

[example.tab-pane#v2add-jar]
====
[.tab-label]*V2 API*

[source,bash]
----
curl http://localhost:8983/api/collections/techproducts/config -H 'Content-type:application/json' -d '{
  "create-valuesourceparser": {
    "name": "nvl",
    "runtimeLib": true,
    "class": "solr.org.apache.solr.search.function.NvlValueSourceParser",
    "nvlFloatValue": 0.0 }
}'
----
====
--

== Securing Runtime Libraries

@@ -149,6 +195,12 @@ The blob name that you give the jar file in this step will be used as the name i

Finally, add the jar to the classpath using the Config API as detailed above. In this step, you will need to provide the signature of the jar that you got in Step 4.

[.dynamic-tabs]
--
[example.tab-pane#v1add-jar2]
====
[.tab-label]*V1 API*

[source,bash]
----
curl http://localhost:8983/solr/techproducts/config -H 'Content-type:application/json' -d '{
@@ -159,3 +211,21 @@ curl http://localhost:8983/solr/techproducts/config -H 'Content-type:application
PYgUB1nsr9pk4EFyD9KfJ8TqeH/ijQ9waa/vjqyiKEI9U550EtSzruLVZ32wJ7smvV0fj2YYhrUaaPzOn9g0=" }
}'
----
====

[example.tab-pane#v2add-jar2]
====
[.tab-label]*V2 API*

[source,bash]
----
curl http://localhost:8983/api/collections/techproducts/config -H 'Content-type:application/json' -d '{
  "add-runtimelib": {
    "name":"blobname",
    "version":2,
    "sig":"mW1Gwtz2QazjfVdrLFHfbGwcr8xzFYgUOLu68LHqWRDvLG0uLcy1McQ+AzVmeZFBf1yLPDEHBWJb5KXr8bdbHN/
PYgUB1nsr9pk4EFyD9KfJ8TqeH/ijQ9waa/vjqyiKEI9U550EtSzruLVZ32wJ7smvV0fj2YYhrUaaPzOn9g0=" }
}'
----
====
--

@@ -79,7 +79,8 @@ An Authentication API allows modifying user IDs and passwords. The API provides

=== API Entry Point

`admin/authentication`
* v1: `\http://localhost:8983/solr/admin/authentication`
* v2: `\http://localhost:8983/api/cluster/security/authentication`

This endpoint is not collection-specific, so users are created for the entire Solr cluster. If users need to be restricted to a specific collection, that can be done with the authorization rules.

@@ -87,34 +88,81 @@ This endpoint is not collection-specific, so users are created for the entire So

The `set-user` command allows you to add users and change their passwords. For example, the following defines two users and their passwords:

[.dynamic-tabs]
--
[example.tab-pane#v1set-user]
====
[.tab-label]*V1 API*

[source,bash]
----
curl --user solr:SolrRocks http://localhost:8983/solr/admin/authentication -H 'Content-type:application/json' -d '{
  "set-user": {"tom" : "TomIsCool" ,
               "harry":"HarrysSecret"}}'
curl --user solr:SolrRocks http://localhost:8983/solr/admin/authentication -H 'Content-type:application/json' -d '{"set-user": {"tom":"TomIsCool", "harry":"HarrysSecret"}}'
----
====

[example.tab-pane#v2set-user]
====
[.tab-label]*V2 API*
[source,bash]
----
curl --user solr:SolrRocks http://localhost:8983/api/cluster/security/authentication -H 'Content-type:application/json' -d '{"set-user": {"tom":"TomIsCool", "harry":"HarrysSecret"}}'
----
====
--

=== Delete a User

The `delete-user` command allows you to remove a user. The user password does not need to be sent to remove a user. In the following example, we've asked that user IDs 'tom' and 'harry' be removed from the system.

[.dynamic-tabs]
--
[example.tab-pane#v1delete-user]
====
[.tab-label]*V1 API*
[source,bash]
----
curl --user solr:SolrRocks http://localhost:8983/solr/admin/authentication -H 'Content-type:application/json' -d '{
  "delete-user": ["tom","harry"]}'
curl --user solr:SolrRocks http://localhost:8983/solr/admin/authentication -H 'Content-type:application/json' -d '{"delete-user": ["tom", "harry"]}'
----
====

[example.tab-pane#v2delete-user]
====
[.tab-label]*V2 API*
[source,bash]
----
curl --user solr:SolrRocks http://localhost:8983/api/cluster/security/authentication -H 'Content-type:application/json' -d '{"delete-user": ["tom", "harry"]}'
----
====
--

=== Set a Property

Set arbitrary properties for authentication plugin. The only supported property is `'blockUnknown'`
Set properties for the authentication plugin. The only currently supported property for the Basic Authentication plugin is `blockUnknown`.

[.dynamic-tabs]
--
[example.tab-pane#v1set-property]
====
[.tab-label]*V1 API*

[source,bash]
----
curl --user solr:SolrRocks http://localhost:8983/solr/admin/authentication -H 'Content-type:application/json' -d '{
  "set-property": {"blockUnknown":false}}'
curl --user solr:SolrRocks http://localhost:8983/solr/admin/authentication -H 'Content-type:application/json' -d '{"set-property": {"blockUnknown":false}}'
----
====

=== Using BasicAuth with SolrJ
[example.tab-pane#v2set-property]
====
[.tab-label]*V2 API*

[source,bash]
----
curl --user solr:SolrRocks http://localhost:8983/api/cluster/security/authentication -H 'Content-type:application/json' -d '{"set-property": {"blockUnknown":false}}'
----
====
--

=== Using Basic Auth with SolrJ

In SolrJ, the basic authentication credentials need to be set for each request as in this example:

@@ -134,7 +182,7 @@ req.setBasicAuthCredentials(userName, password);
QueryResponse rsp = req.process(solrClient);
----

=== Using Command Line scripts with BasicAuth
=== Using the Solr Control Script with Basic Auth

Add the following line to the `solr.in.sh` or `solr.in.cmd` file. This example tells the `bin/solr` command line to use "basic" as the type of authentication, and to pass credentials with the user-name "solr" and password "SolrRocks":

@@ -18,7 +18,7 @@

The Blob Store REST API provides REST methods to store, retrieve or list files in a Lucene index.

It can be used to upload a jar file which contains standard solr components such as RequestHandlers, SearchComponents, or other custom code you have written for Solr. Schema components _do not_ yet support the Blob Store.
It can be used to upload a jar file which contains standard Solr components such as RequestHandlers, SearchComponents, or other custom code you have written for Solr. Schema components _do not_ yet support the Blob Store.

When using the blob store, note that the API does not delete or overwrite a previous object if a new one is uploaded with the same name. It always adds a new version of the blob to the index. Deletes can be performed with standard REST delete commands.

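As an illustrative sketch (not an excerpt from this page), such a delete is just a normal update request against the `.system` collection; the `blobName` field and the blob name `test1` below are assumptions used for the example:

[source,bash]
----
curl -H 'Content-type:application/json' -d '{"delete": {"query": "blobName:test1"}}' "http://localhost:8983/solr/.system/update?commit=true"
----
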
@@ -36,10 +36,28 @@ If you do not use the `-shards` or `-replicationFactor` options, then defaults o

You can create the `.system` collection with the <<collections-api.adoc#collections-api,Collections API>>, as in this example:

[.dynamic-tabs]
--
[example.tab-pane#v1create]
====
[.tab-label]*V1 API*

[source,bash]
----
curl "http://localhost:8983/solr/admin/collections?action=CREATE&name=.system&replicationFactor=2"
----
====

[example.tab-pane#v2create]
====
[.tab-label]*V2 API*

[source,bash]
----
curl -X POST -H 'Content-type: application/json' -d '{"create":{"name":".system", "replicationFactor": 2}}' http://localhost:8983/api/collections
----
====
--

IMPORTANT: The `bin/solr` script cannot be used to create the `.system` collection.

@@ -35,6 +35,23 @@ To create a configset, simply add a new directory under the configset base direc

The default base directory is `$SOLR_HOME/configsets`, and it can be configured in `solr.xml`.

To create a new core using a configset, pass `configSet` as one of the core properties. For example, if you do this via the core admin API:
To create a new core using a configset, pass `configSet` as one of the core properties. For example, if you do this via the CoreAdmin API:

`\http://localhost:8983/admin/cores?action=CREATE&name=mycore&instanceDir=path/to/instance&configSet=configset2`
[.dynamic-tabs]
--

[example.tab-pane#v1api]
====
[.tab-label]*V1 API*

[source,text]
curl "http://localhost:8983/admin/cores?action=CREATE&name=mycore&instanceDir=path/to/instance&configSet=configset2"
====

[example.tab-pane#v2api]
====
[.tab-label]*V2 API*
[source,text]
curl -v -X POST -H 'Content-type: application/json' -d '{"create":[{"name":"mycore", "instanceDir":"path/to/instance", "configSet":"configSet2"}]}' http://localhost:8983/api/cores
====
--

@@ -31,33 +31,47 @@ Currently request handlers can get content streams in a variety of ways:

By default, curl sends a `contentType="application/x-www-form-urlencoded"` header. If you need to test a SolrContentHeader content stream, you will need to set the content type with curl's `-H` flag.
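
For example (an illustrative sketch, not an excerpt from this page; the collection name is a placeholder), the header can be overridden like this when posting a JSON body:

[source,bash]
----
curl -X POST -H 'Content-type:application/json' -d '[{"id":"mydoc"}]' "http://localhost:8983/solr/techproducts/update"
----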

== RemoteStreaming
== Remote Streaming

Remote streaming lets you send the contents of a URL as a stream to a given SolrRequestHandler. You could use remote streaming to send a remote or local file to an update plugin.
Remote streaming lets you send the contents of a URL as a stream to a given Solr RequestHandler. You could use remote streaming to send a remote or local file to an update plugin.

Remote streaming is disabled by default. Enabling it is not recommended in a production situation without additional security between you and untrusted remote clients.

In `solrconfig.xml`, you can enable it by changing the following `enableRemoteStreaming` parameter to `true`:

[source,xml]
----
    *** WARNING ***
    Before enabling remote streaming, you should make sure your
    system has authentication enabled.

    <requestParsers enableRemoteStreaming="false"...>
    <requestParsers enableRemoteStreaming="false" />
----

When `enableRemoteStreaming` is not specified in `solrconfig.xml`, the default behavior is to _not_ allow remote streaming (i.e., `enableRemoteStreaming="false"`).

Remote streaming can also be enabled through the <<config-api.adoc#config-api,Config API>> as follows:

[source]
[.dynamic-tabs]
--
[example.tab-pane#v1setconfigprop]
====
[.tab-label]*V1 API*
[source,bash]
----
curl -d '
{
  "set-property" : {"requestDispatcher.requestParsers.enableRemoteStreaming":true}
}
' http://localhost:8983/solr/techproducts/config -H 'Content-type:application/json'
curl -H 'Content-type:application/json' -d '{"set-property": {"requestDispatcher.requestParsers.enableRemoteStreaming":true}}' 'http://localhost:8983/solr/techproducts/config'
----
====

[example.tab-pane#v2setconfigprop]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type: application/json' -d '{"set-property": {"requestDispatcher.requestParsers.enableRemoteStreaming":true}}' 'http://localhost:8983/api/collections/techproducts/config'
----
====
--

[IMPORTANT]
====
@@ -66,4 +80,4 @@ If `enableRemoteStreaming="true"` is used, be aware that this allows _anyone_ to

== Debugging Requests

The implicit "dump" RequestHandler (see <<implicit-requesthandlers.adoc#implicit-requesthandlers,Implicit RequestHandlers>>) simply outputs the contents of the SolrQueryRequest using the specified writer type `wt`. This is a useful tool to help understand what streams are available to the RequestHandlers.
The implicit "dump" RequestHandler (see <<implicit-requesthandlers.adoc#implicit-requesthandlers,Implicit RequestHandlers>>) simply outputs the contents of the Solr QueryRequest using the specified writer type `wt`. This is a useful tool to help understand what streams are available to the RequestHandlers.
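
As an illustrative sketch (not an excerpt from this page), posting a stream to the implicit dump handler echoes it back; the collection name and body below are placeholders:

[source,bash]
----
curl -H 'Content-type:text/plain' -d 'hello content stream' "http://localhost:8983/solr/techproducts/debug/dump?wt=json"
----
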
@@ -19,7 +19,7 @@
}

ul.nav.nav-pills li {
    margin-left: 1px;
    margin-left: 5px;
}

.pageSummary {
@@ -233,7 +233,7 @@ background-color: #347DBE;
}

.nav > li > a {
    line-height: 20px;
    line-height: 30px;
    padding: 4px 10px;
}

@@ -411,7 +411,7 @@ i.border {
}

a[data-toggle] {
    color: #248EC2;
    color: #305CB3;
}

.summary {

@@ -823,7 +823,7 @@ Managed Synonym Filter has been deprecated in favor of Managed Synonym Graph Fil

*Factory class:* `solr.ManagedSynonymFilterFactory`

For arguments and examples, see the <<Managed Synonym Graph Filter>> below.
For arguments and examples, see the <<Synonym Graph Filter>> below.

== Managed Synonym Graph Filter

@@ -852,7 +852,7 @@ With this configuration the set of mappings is named "english" and can be manage
</analyzer>
----

See <<Managed Synonym Filter>> for example input/output.
See <<Synonym Graph Filter>> below for example input/output.

== N-Gram Filter

@@ -85,9 +85,28 @@ Next, the script prompts you for the number of shards to distribute the collecti

Next, the script will prompt you for the number of replicas to create for each shard. <<shards-and-indexing-data-in-solrcloud.adoc#shards-and-indexing-data-in-solrcloud,Replication>> is covered in more detail later in the guide, so if you're unsure, then use the default of 2 so that you can see how replication is handled in SolrCloud.

Lastly, the script will prompt you for the name of a configuration directory for your collection. You can choose *_default*, or *sample_techproducts_configs*. The configuration directories are pulled from `server/solr/configsets/` so you can review them beforehand if you wish. The *_default* configuration is useful when you're still designing a schema for your documents and need some flexiblity as you experiment with Solr, since it has schemaless functionality. However, after creating your collection, the schemaless functionality can be disabled in order to lock down the schema (so that documents indexed after doing so will not alter the schema) or to configure the schema by yourself. This can be done as follows (assuming your collection name is `mycollection`):
Lastly, the script will prompt you for the name of a configuration directory for your collection. You can choose *_default*, or *sample_techproducts_configs*. The configuration directories are pulled from `server/solr/configsets/` so you can review them beforehand if you wish. The *_default* configuration is useful when you're still designing a schema for your documents and need some flexibility as you experiment with Solr, since it has schemaless functionality. However, after creating your collection, the schemaless functionality can be disabled in order to lock down the schema (so that documents indexed after doing so will not alter the schema) or to configure the schema by yourself. This can be done as follows (assuming your collection name is `mycollection`):

`curl http://host:8983/solr/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'`
[.dynamic-tabs]
--
[example.tab-pane#v1autocreatefalse]
====
[.tab-label]*V1 API*
[source,bash]
----
curl http://host:8983/solr/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'
----
====

[example.tab-pane#v2autocreatefalse]
====
[.tab-label]*V2 API SolrCloud*
[source,bash]
----
curl http://host:8983/api/collections/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'
----
====
--

At this point, you should have a new collection created in your local SolrCloud cluster. To verify this, you can run the status command:

@@ -88,8 +88,26 @@ TIP: Because copy field rules can slow indexing and increase index size, it's re

Automatic field creation can be disabled with the `update.autoCreateFields` property. To do this, you can use the Config API with a command such as:

[.dynamic-tabs]
--
[example.tab-pane#v1setprop]
====
[.tab-label]*V1 API*
[source,bash]
----
curl http://host:8983/solr/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'
----
====

[example.tab-pane#v2setprop]
====
[.tab-label]*V2 API*
[source,bash]
----
curl http://host:8983/api/collections/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'
----
====
--

=== Changes to Default Behaviors
* JSON is now the default response format. If you rely on XML responses, you must now define `wt=xml` in your request. In addition, line indentation is enabled by default (`indent=on`).

@@ -25,13 +25,13 @@ The search executed by a Ping is configured with the <<request-parameters-api.ad

The Ping option doesn't open a page, but the status of the request can be seen on the core overview page shown when clicking on a collection name. The length of time the request has taken is displayed next to the Ping option, in milliseconds.

== API Examples
== Ping API Examples

While the UI screen makes it easy to see the ping response time, the underlying ping command can be more useful when executed by remote monitoring tools:

*Input*

[source,text]
[source,bash]
----
http://localhost:8983/solr/<core-name>/admin/ping
----
@@ -40,9 +40,9 @@ This command will ping the core name for a response.

*Input*

[source,text]
[source,bash]
----
http://localhost:8983/solr/<collection-name>/admin/ping?distrib=true&wt=xml
http://localhost:8983/solr/<collection-name>/admin/ping?distrib=true&wt=xml
----

This command will ping all replicas of the given collection name for a response

@@ -40,7 +40,7 @@ Real Time Get requests can be performed using the `/get` handler which exists im
</requestHandler>
----

For example, if you started Solr using the `bin/solr -e techproducts` example command, you could then index a new document (with out committing it) like so:
For example, if you started Solr using the `bin/solr -e techproducts` example command, you could then index a new document without committing it, like so:

[source,bash]
----
@@ -48,34 +48,80 @@ curl 'http://localhost:8983/solr/techproducts/update/json?commitWithin=10000000'
  -H 'Content-type:application/json' -d '[{"id":"mydoc","name":"realtime-get test!"}]'
----

If you do a normal search, this document should not be found yet:
If you search for this document, it should not be found yet:

[source,text]
[source,bash]
----
http://localhost:8983/solr/techproducts/query?q=id:mydoc
...
"response":
{"numFound":0,"start":0,"docs":[]}
----

However if you use the Real Time Get handler exposed at `/get`, you can still retrieve that document:
[source,json]
----
{"response":
  {"numFound":0,"start":0,"docs":[]}
}
----

[source,text]
However if you use the Real Time Get handler exposed at `/get`, you can retrieve the document:

[.dynamic-tabs]
--
[.tab-pane#v1get]
====
[.tab-label]*V1 API*
[source,bash]
----
http://localhost:8983/solr/techproducts/get?id=mydoc
...
{"doc":{"id":"mydoc","name":"realtime-get test!", "_version_":1487137811571146752}}
----

[source,json]
----
{"doc": {
  "id": "mydoc",
  "name": "realtime-get test!",
  "_version_": 1487137811571146752
  }
}
----
====

[.tab-pane#v2get]
====
[.tab-label]*V2 API*
[source,bash]
----
http://localhost:8983/api/collections/techproducts/get?id=mydoc
----

[source,json]
----
{"doc": {
  "id": "mydoc",
  "name": "realtime-get test!",
  "_version_": 1487137811571146752
  }
}
----
====
--

You can also specify multiple documents at once via the `ids` parameter and a comma separated list of ids, or by using multiple `id` parameters. If you specify multiple ids, or use the `ids` parameter, the response will mimic a normal query response to make it easier for existing clients to parse.

For example:

[source,text]
[.dynamic-tabs]
--
[.tab-pane#v1getids]
====
[.tab-label]*V1 API*
[source,bash]
----
http://localhost:8983/solr/techproducts/get?ids=mydoc,IW-02
http://localhost:8983/solr/techproducts/get?id=mydoc&id=IW-02
...
----

[source,json]
----
{"response":
  {"numFound":2,"start":0,"docs":
    [ { "id":"mydoc",
@@ -83,19 +129,55 @@ http://localhost:8983/solr/techproducts/get?id=mydoc&id=IW-02
        "_version_":1487137811571146752},
      {
        "id":"IW-02",
        "name":"iPod & iPod Mini USB 2.0 Cable",
        ...
        "name":"iPod & iPod Mini USB 2.0 Cable"
      }
    ]
  }
}
----
====

Real Time Get requests can also be combined with filter queries, specified with an <<common-query-parameters.adoc#fq-filter-query-parameter,`fq` parameter>>, just like search requests:
[.tab-pane#v2getids]
====
[.tab-label]*V2 API*
[source,bash]
----
http://localhost:8983/api/collections/techproducts/get?ids=mydoc,IW-02
http://localhost:8983/api/collections/techproducts/get?id=mydoc&id=IW-02
----

[source,text]
[source,json]
----
{"response":
  {"numFound":2,"start":0,"docs":
    [ { "id":"mydoc",
        "name":"realtime-get test!",
        "_version_":1487137811571146752},
      {
        "id":"IW-02",
        "name":"iPod & iPod Mini USB 2.0 Cable"
      }
    ]
  }
}
----
====
--

Real Time Get requests can also be combined with filter queries, specified with an <<common-query-parameters.adoc#fq-filter-query-parameter,`fq` parameter>>:

[.dynamic-tabs]
--
[.tab-pane#v1getfq]
====
[.tab-label]*V1 API*
[source,bash]
----
http://localhost:8983/solr/techproducts/get?id=mydoc&id=IW-02&fq=name:realtime-get
...
----

[source,json]
----
{"response":
  {"numFound":1,"start":0,"docs":
    [ { "id":"mydoc",
@@ -105,10 +187,33 @@ http://localhost:8983/solr/techproducts/get?id=mydoc&id=IW-02&fq=name:realtime-g
  }
}
----

[IMPORTANT]
====
Do *NOT* disable the realtime get handler at `/get` if you are using SolrCloud otherwise any leader election will cause a full sync in *ALL* replicas for the shard in question.

[.tab-pane#v2getfq]
====
[.tab-label]*V2 API*
[source,bash]
----
http://localhost:8983/api/collections/techproducts/get?id=mydoc&id=IW-02&fq=name:realtime-get
----

[source,json]
----
{"response":
  {"numFound":1,"start":0,"docs":
    [ { "id":"mydoc",
        "name":"realtime-get test!",
        "_version_":1487137811571146752}
    ]
  }
}
----
====
--

[WARNING]
====
Do *NOT* disable the realtime get handler at `/get` if you are using SolrCloud. Doing so will cause any leader election to trigger a full sync in *ALL* replicas for the shard in question.

Similarly, a replica recovery will also always fetch the complete index from the leader because a partial sync will not be possible in the absence of this handler.
====

@@ -70,13 +70,35 @@ This attribute can be used to indicate that the original `HttpServletRequest` ob

The below command is an example of how to enable RemoteStreaming and BodyStreaming through the <<config-api.adoc#creating-and-updating-common-properties,Config API>>:

[.dynamic-tabs]
--
[example.tab-pane#v1enablestreaming]
====
[.tab-label]*V1 API*
[source,bash]
----
curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d'{
  "set-property" : {"requestDispatcher.requestParsers.enableRemoteStreaming":true},
  "set-property" : {"requestDispatcher.requestParsers.enableStreamBody":true}
}'
curl -H 'Content-type:application/json' -d '{"set-property": {"requestDispatcher.requestParsers.enableRemoteStreaming": true}, "set-property": {"requestDispatcher.requestParsers.enableStreamBody": true}}' http://localhost:8983/solr/gettingstarted/config
----
====

[example.tab-pane#v2enablestreaming]
====
[.tab-label]*V2 API Standalone Solr*
[source,bash]
----
curl -H 'Content-type:application/json' -d '{"set-property": {"requestDispatcher.requestParsers.enableRemoteStreaming": true}, "set-property":{"requestDispatcher.requestParsers.enableStreamBody": true}}' http://localhost:8983/api/cores/gettingstarted/config
----
====

[example.tab-pane#v2enablestreamingcloud]
====
[.tab-label]*V2 API SolrCloud*
[source,bash]
----
curl -H 'Content-type:application/json' -d '{"set-property": {"requestDispatcher.requestParsers.enableRemoteStreaming": true}, "set-property":{"requestDispatcher.requestParsers.enableStreamBody": true}}' http://localhost:8983/api/collections/gettingstarted/config
----
====
--

== httpCaching Element

@@ -195,16 +195,48 @@ The delay (in ms) after which a replica marked as down would be unmarked. The de

When doing offline maintenance on the cluster and for various other use cases where an admin would like to temporarily disable auto addition of replicas, the following APIs will disable and re-enable autoAddReplicas for *all collections in the cluster*:

Disable auto addition of replicas cluster wide by setting the cluster property `autoAddReplicas` to `false`:
Disable automatic addition of replicas cluster-wide by setting the cluster property `autoAddReplicas` to `false`, as in these examples:

[source,text]
[.dynamic-tabs]
--
[example.tab-pane#v1disableautoadd]
====
[.tab-label]*V1 API*
[source,bash]
----
http://localhost:8983/solr/admin/collections?action=CLUSTERPROP&name=autoAddReplicas&val=false
----
====

Re-enable auto addition of replicas (for those collections created with autoAddReplica=true) by unsetting the `autoAddReplicas` cluster property (when no `val` param is provided, the cluster property is unset):
[example.tab-pane#v2disableautoadd]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type: application/json' -d '{"set-property": {"name":"autoAddReplicas", "val":false}}' http://localhost:8983/api/cluster
----
====
--

[source,text]
Re-enable automatic addition of replicas (for those collections created with `autoAddReplica=true`) by unsetting the `autoAddReplicas` cluster property. When no `val` param is provided, the cluster property is unset:

[.dynamic-tabs]
--
[example.tab-pane#v1enableautoadd]
====
[.tab-label]*V1 API*
[source,bash]
----
http://localhost:8983/solr/admin/collections?action=CLUSTERPROP&name=autoAddReplicas
----
====

[example.tab-pane#v2enableautoadd]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type: application/json' -d '{"set-property": {"name":"autoAddReplicas"}}' http://localhost:8983/api/cluster
----
====
--

@@ -43,12 +43,18 @@ If you modify your schema, you will likely need to re-index all documents. If yo
Modifying your schema will never modify any documents that are already indexed. You must re-index documents in order to apply schema changes to them. Queries and updates made after the change may encounter errors that were not present before the change. Completely deleting the index and rebuilding it is usually the only option to fix such errors.
====

////
// Commented out section because this makes less sense with 2 API endpoints available.
The base address for the API is `\http://<host>:<port>/solr/<collection_name>`. If, for example, you run Solr's "```cloud```" example (via the `bin/solr` command shown below), which creates a "```gettingstarted```" collection, then the base URL for that collection (as in all the sample URLs in this section) would be: `\http://localhost:8983/solr/gettingstarted`.

[source,bash]
----
bin/solr -e cloud -noprompt
----
////

////
// TODO this needs to probably go away with v2 APIs?

== Schema API Entry Points

@@ -61,11 +67,10 @@ bin/solr -e cloud -noprompt
* `/schema/version`: <<Show the Schema Version,retrieve>> the schema version
* `/schema/uniquekey`: <<List UniqueKey,retrieve>> the defined uniqueKey
* `/schema/similarity`: <<Show Global Similarity,retrieve>> the global similarity definition
////

== Modify the Schema

`POST /_collection_/schema`

To add, remove or replace fields, dynamic field rules, copy field rules, or new field types, you can send a POST request to the `/collection/schema/` endpoint with a sequence of commands to perform the requested actions. The following commands are supported:

* `add-field`: add a new field with parameters you provide.
@@ -94,6 +99,11 @@ All of the properties available when defining a field with manual `schema.xml` e

For example, to define a new stored field named "sell-by", of type "pdate", you would POST the following request:

[.dynamic-tabs]
--
[example.tab-pane#v1add]
====
[.tab-label]*V1 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
@@ -103,6 +113,22 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
    "stored":true }
}' http://localhost:8983/solr/gettingstarted/schema
----
====

[example.tab-pane#v2add]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "add-field":{
     "name":"sell-by",
     "type":"pdate",
     "stored":true }
}' http://localhost:8983/api/cores/gettingstarted/schema
----
====
--

=== Delete a Field

@@ -110,12 +136,30 @@ The `delete-field` command removes a field definition from your schema. If the f

For example, to delete a field named "sell-by", you would POST the following request:

[.dynamic-tabs]
--
[example.tab-pane#v1delete]
====
[.tab-label]*V1 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "delete-field" : { "name":"sell-by" }
}' http://localhost:8983/solr/gettingstarted/schema
----
====

[example.tab-pane#v2delete]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "delete-field" : { "name":"sell-by" }
}' http://localhost:8983/api/cores/gettingstarted/schema
----
====
--

=== Replace a Field

@@ -125,6 +169,11 @@ All of the properties available when defining a field with manual `schema.xml` e

For example, to replace the definition of an existing field "sell-by", to make it be of type "date" and to not be stored, you would POST the following request:

[.dynamic-tabs]
--
[example.tab-pane#v1replace]
====
[.tab-label]*V1 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
@@ -134,6 +183,22 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
    "stored":false }
}' http://localhost:8983/solr/gettingstarted/schema
----
====

[example.tab-pane#v2replace]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "replace-field":{
     "name":"sell-by",
     "type":"date",
     "stored":false }
}' http://localhost:8983/api/cores/gettingstarted/schema
----
====
--

=== Add a Dynamic Field Rule

@@ -143,6 +208,11 @@ All of the properties available when editing `schema.xml` can be passed with the

For example, to create a new dynamic field rule where all incoming fields ending with "_s" would be stored and have field type "string", you can POST a request like this:

[.dynamic-tabs]
--
[example.tab-pane#v1add-dynamic]
====
[.tab-label]*V1 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
@@ -152,6 +222,22 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
    "stored":true }
}' http://localhost:8983/solr/gettingstarted/schema
----
====

[example.tab-pane#v2add-dynamic]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "add-dynamic-field":{
     "name":"*_s",
     "type":"string",
     "stored":true }
}' http://localhost:8983/api/cores/gettingstarted/schema
----
====
--

=== Delete a Dynamic Field Rule

@@ -159,12 +245,30 @@ The `delete-dynamic-field` command deletes a dynamic field rule from your schema

For example, to delete a dynamic field rule matching "*_s", you can POST a request like this:

[.dynamic-tabs]
--
[example.tab-pane#v1delete-dynamic]
====
[.tab-label]*V1 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "delete-dynamic-field":{ "name":"*_s" }
}' http://localhost:8983/solr/gettingstarted/schema
----
====

[example.tab-pane#v2delete-dynamic]
====
[.tab-label]*V2 API*
[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "delete-dynamic-field":{ "name":"*_s" }
}' http://localhost:8983/api/cores/gettingstarted/schema
----
====
--

=== Replace a Dynamic Field Rule

@ -174,6 +278,11 @@ All of the properties available when editing `schema.xml` can be passed with the
|
|||
|
||||
For example, to replace the definition of the "*_s" dynamic field rule with one where the field type is "text_general" and it's not stored, you can POST a request like this:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1replace-dynamic]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
|
@ -183,6 +292,22 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
|
|||
"stored":false }
|
||||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2replace-dynamic]
|
||||
====
|
||||
[.tab-label]*V2 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"replace-dynamic-field":{
|
||||
"name":"*_s",
|
||||
"type":"text_general",
|
||||
"stored":false }
|
||||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
=== Add a New Field Type
|
||||
|
||||
|
@ -192,6 +317,11 @@ All of the field type properties available when editing `schema.xml` by hand are
|
|||
|
||||
For example, to create a new field type named "myNewTxtField", you can POST a request as follows:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1-single-analyzer]
|
||||
====
|
||||
[.tab-label]*V1 API with Single Analysis*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
|
@ -212,7 +342,14 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
|
|||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
|
||||
Note in this example that we have only defined a single analyzer section that will apply to index analysis and query analysis. If we wanted to define separate analysis, we would replace the `analyzer` section in the above example with separate sections for `indexAnalyzer` and `queryAnalyzer`. As in this example:
|
||||
Note in this example that we have only defined a single analyzer section that will apply to index analysis and query analysis.
|
||||
====
|
||||
|
||||
[example.tab-pane#v1-two-analyzers]
|
||||
====
|
||||
[.tab-label]*V1 API with Two Analyzers*
|
||||
|
||||
If we wanted to define separate analysis, we would replace the `analyzer` section in the above example with separate sections for `indexAnalyzer` and `queryAnalyzer`. As in this example:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
|
@ -229,6 +366,30 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
|
|||
"class":"solr.KeywordTokenizerFactory" }}}
|
||||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2-two-analyzers]
|
||||
====
|
||||
[.tab-label]*V2 API with Two Analyzers*
|
||||
|
||||
To define two analyzers with the V2 API, we just use a different endpoint:
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"add-field-type":{
|
||||
"name":"myNewTextField",
|
||||
"class":"solr.TextField",
|
||||
"indexAnalyzer":{
|
||||
"tokenizer":{
|
||||
"class":"solr.PathHierarchyTokenizerFactory",
|
||||
"delimiter":"/" }},
|
||||
"queryAnalyzer":{
|
||||
"tokenizer":{
|
||||
"class":"solr.KeywordTokenizerFactory" }}}
|
||||
}' http://localhost:8983/api/cores/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
=== Delete a Field Type
|
||||
|
||||
|
@ -236,12 +397,30 @@ The `delete-field-type` command removes a field type from your schema. If the fi
|
|||
|
||||
For example, to delete the field type named "myNewTxtField", you can make a POST request as follows:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1delete-type]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"delete-field-type":{ "name":"myNewTxtField" }
|
||||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2delete-type]
|
||||
====
|
||||
[.tab-label]*V2 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"delete-field-type":{ "name":"myNewTxtField" }
|
||||
}' http://localhost:8983/api/cores/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
=== Replace a Field Type
|
||||
|
||||
|
@ -251,6 +430,11 @@ All of the field type properties available when editing `schema.xml` by hand are
|
|||
|
||||
For example, to replace the definition of a field type named "myNewTxtField", you can make a POST request as follows:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1replace-type]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
|
@ -263,6 +447,25 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
|
|||
"class":"solr.StandardTokenizerFactory" }}}
|
||||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2replace-type]
|
||||
====
|
||||
[.tab-label]*V2 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"replace-field-type":{
|
||||
"name":"myNewTxtField",
|
||||
"class":"solr.TextField",
|
||||
"positionIncrementGap":"100",
|
||||
"analyzer":{
|
||||
"tokenizer":{
|
||||
"class":"solr.StandardTokenizerFactory" }}}
|
||||
}' http://localhost:8983/api/cores/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
=== Add a New Copy Field Rule
|
||||
|
||||
|
@ -279,9 +482,13 @@ A field or an array of fields to which the source field will be copied. This par
|
|||
`maxChars`::
|
||||
The upper limit for the number of characters to be copied. The section <<copying-fields.adoc#copying-fields,Copying Fields>> has more details.
|
||||
|
||||
|
||||
For example, to define a rule to copy the field "shelf" to the "location" and "catchall" fields, you would POST the following request:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1add-copyfield]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
|
@ -290,6 +497,21 @@ curl -X POST -H 'Content-type:application/json' --data-binary '{
|
|||
"dest":[ "location", "catchall" ]}
|
||||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2add-copyfield]
|
||||
====
|
||||
[.tab-label]*V2 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"add-copy-field":{
|
||||
"source":"shelf",
|
||||
"dest":[ "location", "catchall" ]}
|
||||
}' http://localhost:8983/api/cores/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
--
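The `maxChars` option described above can be added to the same command when you want to cap how much text is copied. The following is only a sketch reusing the example field names; the limit shown is arbitrary:

[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "add-copy-field":{
    "source":"shelf",
    "dest":"catchall",
    "maxChars":256 }
}' http://localhost:8983/solr/gettingstarted/schema
----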
|
||||
|
||||
=== Delete a Copy Field Rule
|
||||
|
||||
|
@ -299,12 +521,30 @@ The `source` and `dest` attributes are required by this command.
|
|||
|
||||
For example, to delete a rule to copy the field "shelf" to the "location" field, you would POST the following request:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1delete-copyfield]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"delete-copy-field":{ "source":"shelf", "dest":"location" }
|
||||
}' http://localhost:8983/solr/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2delete-copyfield]
|
||||
====
|
||||
[.tab-label]*V2 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl -X POST -H 'Content-type:application/json' --data-binary '{
|
||||
"delete-copy-field":{ "source":"shelf", "dest":"location" }
|
||||
}' http://localhost:8983/api/cores/gettingstarted/schema
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
=== Multiple Commands in a Single POST
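As a sketch of what this looks like (the command payloads are illustrative, reusing field names from the examples above), several commands can be combined in one request body and are executed in the order they appear:

[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "add-field":{
    "name":"shelf",
    "type":"string",
    "stored":true },
  "add-copy-field":{
    "source":"shelf",
    "dest":"catchall" }
}' http://localhost:8983/solr/gettingstarted/schema
----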
|
||||
|
||||
|
|
|
@ -16,37 +16,46 @@
|
|||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
If you have JSON documents that you would like to index without transforming them into Solr's structure, you can add them to Solr by including some parameters with the update request. These parameters provide information on how to split a single JSON file into multiple Solr documents and how to map fields to Solr's schema. One or more valid JSON documents can be sent to the `/update/json/docs` path with the configuration params.
|
||||
If you have JSON documents that you would like to index without transforming them into Solr's structure, you can add them to Solr by including some parameters with the update request.
|
||||
|
||||
These parameters provide information on how to split a single JSON file into multiple Solr documents and how to map fields to Solr's schema. One or more valid JSON documents can be sent to the `/update/json/docs` path with the configuration params.
|
||||
|
||||
== Mapping Parameters
|
||||
|
||||
These parameters allow you to define how a JSON file should be read for multiple Solr documents.
|
||||
|
||||
split::
|
||||
Defines the path at which to split the input JSON into multiple Solr documents and is required if you have multiple documents in a single JSON file. If the entire JSON makes a single solr document, the path must be “`/`”. It is possible to pass multiple `split` paths by separating them with a pipe `(|)`, for example: `split=/|/foo|/foo/bar`. If one path is a child of another, they automatically become a child document.
|
||||
`split`::
|
||||
Defines the path at which to split the input JSON into multiple Solr documents and is required if you have multiple documents in a single JSON file. If the entire JSON makes a single Solr document, the path must be “`/`”.
|
||||
+
|
||||
It is possible to pass multiple `split` paths by separating them with a pipe `(|)`, for example: `split=/|/foo|/foo/bar`. If one path is a child of another, they automatically become a child document.
|
||||
|
||||
f::
|
||||
A multivalued mapping parameter. The format of the parameter is `target-field-name:json-path`. The `json-path` is required. The `target-field-name` is the Solr document field name, and is optional. If not specified, it is automatically derived from the input JSON. The default target field name is the fully qualified name of the field.
|
||||
`f`::
|
||||
Provides multivalued mapping to map document field names to Solr field names. The format of the parameter is `target-field-name:json-path`, as in `f=first:/first`. The `json-path` is required. The `target-field-name` is the Solr document field name, and is optional. If not specified, it is automatically derived from the input JSON. The default target field name is the fully qualified name of the field.
|
||||
+
|
||||
Wildcards can be used here, see <<Using Wildcards for Field Names>> below for more information.
|
||||
|
||||
mapUniqueKeyOnly::
|
||||
`mapUniqueKeyOnly`::
|
||||
(boolean) This parameter is particularly convenient when the fields in the input JSON are not available in the schema and <<schemaless-mode.adoc#schemaless-mode,schemaless mode>> is not enabled. It indexes all the fields into the default search field (set with the `df` parameter, below), and only the `uniqueKey` field is mapped to the corresponding field in the schema. If the input JSON does not have a value for the `uniqueKey` field, a UUID is generated and used as its value.
|
||||
|
||||
df::
|
||||
`df`::
|
||||
If the `mapUniqueKeyOnly` flag is used, the update handler needs a field in which to index the data. This is the same field that other handlers use as a default search field.
|
||||
|
||||
srcField::
|
||||
`srcField`::
|
||||
This is the name of the field in which the JSON source document will be stored. It can only be used if `split=/` (i.e., you want your JSON input file to be indexed as a single Solr document). Note that atomic updates will cause the field to be out of sync with the document.
|
||||
|
||||
echo::
|
||||
`echo`::
|
||||
This is for debugging purposes only. Set it to `true` if you want the documents to be returned as a response; nothing will be indexed.
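For instance, a quick way to preview how Solr would split and map a document, without indexing anything, is to add `echo=true` to the request. This is only a sketch; the collection name and document body are illustrative:

[source,bash]
----
curl 'http://localhost:8983/solr/techproducts/update/json/docs'\
'?split=/exams'\
'&f=/first'\
'&echo=true'\
-H 'Content-type:application/json' -d '
{
  "first": "John",
  "exams": [
    { "subject": "Maths", "test": "term1", "marks": 90 }
  ]
}'
----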
|
||||
|
||||
For example, if we have a JSON file that includes two documents, we could define an update request like this:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1transform]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs'\
|
||||
'?split=/exams'\
|
||||
'&f=first:/first'\
|
||||
'&f=last:/last'\
|
||||
|
@ -71,39 +80,72 @@ curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
|||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
You can store and reuse the params by using <<request-parameters-api.adoc#request-parameters-api,Request Parameters>>.
|
||||
|
||||
[example.tab-pane#v2transform]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl http://localhost:8983/solr/my_collection/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"my_params": {
|
||||
"split": "/exams",
|
||||
"f": ["first:/first","last:/last","grade:/grade","subject:/exams/subject","test:/exams/test"]
|
||||
}}}'
|
||||
----
|
||||
|
||||
and use it as follows:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs?useParams=my_params' -H 'Content-type:application/json' -d '{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json/docs'\
|
||||
'?split=/exams'\
|
||||
'&f=first:/first'\
|
||||
'&f=last:/last'\
|
||||
'&f=grade:/grade'\
|
||||
'&f=subject:/exams/subject'\
|
||||
'&f=test:/exams/test'\
|
||||
'&f=marks:/exams/marks'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2transformcloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json/docs'\
|
||||
'?split=/exams'\
|
||||
'&f=first:/first'\
|
||||
'&f=last:/last'\
|
||||
'&f=grade:/grade'\
|
||||
'&f=subject:/exams/subject'\
|
||||
'&f=test:/exams/test'\
|
||||
'&f=marks:/exams/marks'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
With this request, we have defined that "exams" contains multiple documents. In addition, we have mapped several fields from the input document to Solr fields.
|
||||
|
||||
|
@ -129,11 +171,16 @@ When the update request is complete, the following two documents will be added t
|
|||
}
|
||||
----
|
||||
|
||||
In the prior example, all of the fields we wanted to use in Solr had the same names as they did in the input JSON. When that is the case, we can simplify the request as follows:
|
||||
In the prior example, all of the fields we wanted to use in Solr had the same names as they did in the input JSON. When that is the case, we can simplify the request by only specifying the `json-path` portion of the `f` parameter, as in this example:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1simpler]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs'\
|
||||
'?split=/exams'\
|
||||
'&f=/first'\
|
||||
'&f=/last'\
|
||||
|
@ -158,14 +205,208 @@ curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
|||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2simpler]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json/docs'\
|
||||
'?split=/exams'\
|
||||
'&f=/first'\
|
||||
'&f=/last'\
|
||||
'&f=/grade'\
|
||||
'&f=/exams/subject'\
|
||||
'&f=/exams/test'\
|
||||
'&f=/exams/marks'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2simplercloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json/docs'\
|
||||
'?split=/exams'\
|
||||
'&f=/first'\
|
||||
'&f=/last'\
|
||||
'&f=/grade'\
|
||||
'&f=/exams/subject'\
|
||||
'&f=/exams/test'\
|
||||
'&f=/exams/marks'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
In this example, we simply named the field paths (such as `/exams/test`). Solr will automatically attempt to add the content of the field from the JSON input to the index in a field with the same name.
|
||||
|
||||
[TIP]
|
||||
====
|
||||
Documents WILL get rejected if the fields do not exist in the schema before indexing. So, if you are NOT using schemaless mode, pre-create those fields. If you are working in <<schemaless-mode.adoc#schemaless-mode,Schemaless Mode>>, fields that don't exist will be created on the fly with Solr's best guess for the field type.
|
||||
Documents will be rejected during indexing if the fields do not exist in the schema before indexing. So, if you are NOT using schemaless mode, you must pre-create all fields. If you are working in <<schemaless-mode.adoc#schemaless-mode,Schemaless Mode>>, however, fields that don't exist will be created on the fly with Solr's best guess for the field type.
|
||||
====
|
||||
|
||||
=== Reusing Parameters in Multiple Requests
|
||||
|
||||
You can store and re-use parameters with Solr's <<request-parameters-api.adoc#request-parameters-api,Request Parameters API>>.
|
||||
|
||||
Say we wanted to define parameters to split documents at the `exams` field, and map several other fields. We could make an API request such as:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1splitparams]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl http://localhost:8983/solr/techproducts/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"my_params": {
|
||||
"split": "/exams",
|
||||
"f": ["first:/first","last:/last","grade:/grade","subject:/exams/subject","test:/exams/test"]
|
||||
}}}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2splitparams]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl http://localhost:8983/api/cores/techproducts/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"my_params": {
|
||||
"split": "/exams",
|
||||
"f": ["first:/first","last:/last","grade:/grade","subject:/exams/subject","test:/exams/test"]
|
||||
}}}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2splitparamscloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl http://localhost:8983/api/collections/techproducts/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"my_params": {
|
||||
"split": "/exams",
|
||||
"f": ["first:/first","last:/last","grade:/grade","subject:/exams/subject","test:/exams/test"]
|
||||
}}}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
When we send the documents, we'd use the `useParams` parameter with the name of the parameter set we defined:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1useparams]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs?useParams=my_params' -H 'Content-type:application/json' -d '{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [{
|
||||
"subject": "Maths",
|
||||
"test": "term1",
|
||||
"marks": 90
|
||||
},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test": "term1",
|
||||
"marks": 86
|
||||
}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2useparams]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json?useParams=my_params' -H 'Content-type:application/json' -d '{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [{
|
||||
"subject": "Maths",
|
||||
"test": "term1",
|
||||
"marks": 90
|
||||
},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test": "term1",
|
||||
"marks": 86
|
||||
}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2useparamscloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json?useParams=my_params' -H 'Content-type:application/json' -d '{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [{
|
||||
"subject": "Maths",
|
||||
"test": "term1",
|
||||
"marks": 90
|
||||
},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test": "term1",
|
||||
"marks": 86
|
||||
}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
== Using Wildcards for Field Names
|
||||
|
||||
Instead of specifying all the field names explicitly, it is possible to specify wildcards to map fields automatically.
|
||||
|
@ -182,9 +423,14 @@ A single asterisk `\*` maps only to direct children, and a double asterisk `**`
|
|||
|
||||
With wildcards we can further simplify our previous example as follows:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1wildcards]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs'\
|
||||
'?split=/exams'\
|
||||
'&f=/**'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
|
@ -204,14 +450,75 @@ curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
|||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2wildcards]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json'\
|
||||
'?split=/exams'\
|
||||
'&f=/**'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2wildcardscloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json'\
|
||||
'?split=/exams'\
|
||||
'&f=/**'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
Because we want the fields to be indexed with the field names as they are found in the JSON input, the double wildcard in `f=/**` will map all fields and their descendants to the same fields in Solr.
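A single asterisk behaves the same way but stops at direct children. As a sketch using the same example document, `f=/exams/*` would map only the immediate children of each `exams` entry (`subject`, `test`, and `marks`), leaving the top-level fields unmapped:

[source,bash]
----
curl 'http://localhost:8983/solr/techproducts/update/json/docs'\
'?split=/exams'\
'&f=/exams/*'\
-H 'Content-type:application/json' -d '
{
  "first": "John",
  "last": "Doe",
  "grade": 8,
  "exams": [
    { "subject": "Maths", "test": "term1", "marks": 90 },
    { "subject": "Biology", "test": "term1", "marks": 86 }
  ]
}'
----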
|
||||
|
||||
It is also possible to send all the values to a single field and run a full-text search on that field. This is a good option for indexing and querying arbitrary JSON documents without worrying about fields and the schema.
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1wildcardtxt]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs'\
|
||||
'?split=/'\
|
||||
'&f=txt:/**'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
|
@ -231,14 +538,75 @@ curl 'http://localhost:8983/solr/my_collection/update/json/docs'\
|
|||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2wildcardtxt]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json'\
|
||||
'?split=/'\
|
||||
'&f=txt:/**'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2wildcardtxtcloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json'\
|
||||
'?split=/'\
|
||||
'&f=txt:/**'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
In the above example, we've said all of the fields should be added to a field in Solr named 'txt'. Because this adds multiple values to a single field, whatever field you choose should be multi-valued.
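If such a catch-all field does not already exist, it could be created up front with the Schema API; the following is a minimal sketch, and the field name and type are assumptions rather than anything this guide prescribes:

[source,bash]
----
curl -X POST -H 'Content-type:application/json' --data-binary '{
  "add-field":{
    "name":"txt",
    "type":"text_general",
    "multiValued":true,
    "stored":true }
}' http://localhost:8983/solr/techproducts/schema
----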
|
||||
|
||||
The default behavior is to use the fully qualified name (FQN) of the node. So, if we don't define any field mappings, like this:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1wildcardfqn]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs?split=/exams'\
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs?split=/exams'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
|
@ -256,6 +624,58 @@ curl 'http://localhost:8983/solr/my_collection/update/json/docs?split=/exams'\
|
|||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2wildcardfqn]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json?split=/exams'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2wildcardfqncloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json?split=/exams'\
|
||||
-H 'Content-type:application/json' -d '
|
||||
{
|
||||
"first": "John",
|
||||
"last": "Doe",
|
||||
"grade": 8,
|
||||
"exams": [
|
||||
{
|
||||
"subject": "Maths",
|
||||
"test" : "term1",
|
||||
"marks" : 90},
|
||||
{
|
||||
"subject": "Biology",
|
||||
"test" : "term1",
|
||||
"marks" : 86}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
The indexed documents would be added to the index with fields that look like this:
|
||||
|
||||
|
@ -283,29 +703,92 @@ This functionality supports documents in the http://jsonlines.org/[JSON Lines] f
|
|||
|
||||
For example:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1multidocs]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs' -H 'Content-type:application/json' -d '
|
||||
{ "first":"Steve", "last":"Jobs", "grade":1, "subject": "Social Science", "test" : "term1", "marks" : 90}
|
||||
{ "first":"Steve", "last":"Woz", "grade":1, "subject": "Political Science", "test" : "term1", "marks" : 86}'
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs' -H 'Content-type:application/json' -d '
|
||||
{ "first":"Steve", "last":"Jobs", "grade":1, "subject":"Social Science", "test":"term1", "marks":90}
|
||||
{ "first":"Steve", "last":"Woz", "grade":1, "subject":"Political Science", "test":"term1", "marks":86}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2multidocs]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json' -H 'Content-type:application/json' -d '
|
||||
{ "first":"Steve", "last":"Jobs", "grade":1, "subject":"Social Science", "test":"term1", "marks":90}
|
||||
{ "first":"Steve", "last":"Woz", "grade":1, "subject":"Political Science", "test":"term1", "marks":86}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2multidocscloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json' -H 'Content-type:application/json' -d '
|
||||
{ "first":"Steve", "last":"Jobs", "grade":1, "subject":"Social Science", "test":"term1", "marks":90}
|
||||
{ "first":"Steve", "last":"Woz", "grade":1, "subject":"Political Science", "test":"term1", "marks":86}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
Or even an array of documents, as in this example:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1array]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs' -H 'Content-type:application/json' -d '[
|
||||
{ "first":"Steve", "last":"Jobs", "grade":1, "subject": "Computer Science", "test" : "term1", "marks" : 90},
|
||||
{ "first":"Steve", "last":"Woz", "grade":1, "subject": "Calculus", "test" : "term1", "marks" : 86}]'
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs' -H 'Content-type:application/json' -d '[
|
||||
{"first":"Steve", "last":"Jobs", "grade":1, "subject":"Computer Science", "test":"term1", "marks":90},
|
||||
{"first":"Steve", "last":"Woz", "grade":1, "subject":"Calculus", "test":"term1", "marks":86}]'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2array]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json' -H 'Content-type:application/json' -d '[
|
||||
{"first":"Steve", "last":"Jobs", "grade":1, "subject":"Computer Science", "test":"term1", "marks":90},
|
||||
{"first":"Steve", "last":"Woz", "grade":1, "subject":"Calculus", "test":"term1", "marks":86}]'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2arraycloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json' -H 'Content-type:application/json' -d '[
|
||||
{"first":"Steve", "last":"Jobs", "grade":1, "subject":"Computer Science", "test":"term1", "marks":90},
|
||||
{"first":"Steve", "last":"Woz", "grade":1, "subject":"Calculus", "test":"term1", "marks":86}]'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
== Indexing Nested Documents
|
||||
|
||||
The following is an example of indexing nested documents:
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1nested]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/solr/my_collection/update/json/docs?split=/|/orgs'\
|
||||
curl 'http://localhost:8983/solr/techproducts/update/json/docs?split=/|/orgs'\
|
||||
-H 'Content-type:application/json' -d '{
|
||||
"name": "Joe Smith",
|
||||
"phone": 876876687,
|
||||
|
@ -323,6 +806,58 @@ curl 'http://localhost:8983/solr/my_collection/update/json/docs?split=/|/orgs'\
|
|||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2nested]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/cores/techproducts/update/json?split=/|/orgs'\
|
||||
-H 'Content-type:application/json' -d '{
|
||||
"name": "Joe Smith",
|
||||
"phone": 876876687,
|
||||
"orgs": [
|
||||
{
|
||||
"name": "Microsoft",
|
||||
"city": "Seattle",
|
||||
"zip": 98052
|
||||
},
|
||||
{
|
||||
"name": "Apple",
|
||||
"city": "Cupertino",
|
||||
"zip": 95014
|
||||
}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2nestedcloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl 'http://localhost:8983/api/collections/techproducts/update/json?split=/|/orgs'\
|
||||
-H 'Content-type:application/json' -d '{
|
||||
"name": "Joe Smith",
|
||||
"phone": 876876687,
|
||||
"orgs": [
|
||||
{
|
||||
"name": "Microsoft",
|
||||
"city": "Seattle",
|
||||
"zip": 98052
|
||||
},
|
||||
{
|
||||
"name": "Apple",
|
||||
"city": "Cupertino",
|
||||
"zip": 95014
|
||||
}
|
||||
]
|
||||
}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
With this example, the documents indexed would be as follows:
|
||||
|
||||
|
@ -350,13 +885,13 @@ With this example, the documents indexed would be, as follows:
|
|||
|
||||
== Setting JSON Defaults
|
||||
|
||||
It is possible to send any json to the `/update/json/docs` endpoint and the default configuration of the component is as follows:
|
||||
It is possible to send any JSON to the `/update/json/docs` endpoint and the default configuration of the component is as follows:
|
||||
|
||||
[source,xml]
|
||||
----
|
||||
<initParams path="/update/json/docs">
|
||||
<lst name="defaults">
|
||||
<!-- this ensures that the entire json doc will be stored verbatim into one field -->
|
||||
<!-- this ensures that the entire JSON doc will be stored verbatim into one field -->
|
||||
<str name="srcField">_src_</str>
|
||||
<!-- This means the uniqueKeyField will be extracted from the fields and
|
||||
all fields go into the 'df' field. In this config df is already configured to be 'text'
|
||||
|
@ -368,19 +903,56 @@ It is possible to send any json to the `/update/json/docs` endpoint and the defa
|
|||
</initParams>
|
||||
----
|
||||
|
||||
So, if no params are passed, the entire json file would get indexed to the `\_src_` field and all the values in the input JSON would go to a field named `text`. If there is a value for the uniqueKey it is stored and if no value could be obtained from the input JSON, a UUID is created and used as the uniqueKey field value.
|
||||
So, if no params are passed, the entire JSON file would get indexed to the `\_src_` field and all the values in the input JSON would go to a field named `text`. If there is a value for the uniqueKey, it is stored; if no value could be obtained from the input JSON, a UUID is created and used as the uniqueKey field value.
|
||||
|
||||
Alternately, use the Request Parameters feature to set these params
|
||||
Alternately, use the Request Parameters feature to set these parameters, as shown earlier in the section <<Reusing Parameters in Multiple Requests>>.
|
||||
|
||||
[.dynamic-tabs]
|
||||
--
|
||||
[example.tab-pane#v1jsondefault]
|
||||
====
|
||||
[.tab-label]*V1 API*
|
||||
[source,bash]
|
||||
----
|
||||
curl http://localhost:8983/solr/my_collection/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"full_txt": {
|
||||
"srcField": "_src_",
|
||||
"mapUniqueKeyOnly" : true,
|
||||
"df": "text"
|
||||
}}}'
|
||||
curl http://localhost:8983/solr/techproducts/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"full_txt": {
|
||||
"srcField": "_src_",
|
||||
"mapUniqueKeyOnly" : true,
|
||||
"df": "text"
|
||||
}}}'
|
||||
----
|
||||
====
|
||||
|
||||
Send the parameter `useParams=full_txt` with each request.
|
||||
[example.tab-pane#v2jsondefault]
|
||||
====
|
||||
[.tab-label]*V2 API Standalone Solr*
|
||||
[source,bash]
|
||||
----
|
||||
curl http://localhost:8983/api/cores/techproducts/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"full_txt": {
|
||||
"srcField": "_src_",
|
||||
"mapUniqueKeyOnly" : true,
|
||||
"df": "text"
|
||||
}}}'
|
||||
----
|
||||
====
|
||||
|
||||
[example.tab-pane#v2jsondefaultcloud]
|
||||
====
|
||||
[.tab-label]*V2 API SolrCloud*
|
||||
[source,bash]
|
||||
----
|
||||
curl http://localhost:8983/api/collections/techproducts/config/params -H 'Content-type:application/json' -d '{
|
||||
"set": {
|
||||
"full_txt": {
|
||||
"srcField": "_src_",
|
||||
"mapUniqueKeyOnly" : true,
|
||||
"df": "text"
|
||||
}}}'
|
||||
----
|
||||
====
|
||||
--
|
||||
|
||||
To use these parameters, send the parameter `useParams=full_txt` with each request.
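For instance, an indexing request that picks up the stored `full_txt` parameter set might look like the following sketch (the document body is illustrative):

[source,bash]
----
curl 'http://localhost:8983/solr/techproducts/update/json/docs?useParams=full_txt' -H 'Content-type:application/json' -d '{
  "id": "1",
  "first": "John",
  "last": "Doe"
}'
----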
|
||||
|
|
|
@ -772,7 +772,7 @@ public class ConcurrentUpdateSolrClient extends SolrClient {
|
|||
*/
|
||||
public static class Builder extends SolrClientBuilder<Builder> {
|
||||
protected String baseSolrUrl;
|
||||
protected int queueSize;
|
||||
protected int queueSize = 10;
|
||||
protected int threadCount;
|
||||
protected ExecutorService executorService;
|
||||
protected boolean streamDeletes;
|
||||
|
@ -803,7 +803,7 @@ public class ConcurrentUpdateSolrClient extends SolrClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* The number of documents to batch together before sending to Solr.
|
||||
* The number of documents to batch together before sending to Solr. If not set, this defaults to 10.
|
||||
*/
|
||||
public Builder withQueueSize(int queueSize) {
|
||||
if (queueSize <= 0) {
|
||||
|
|
|
@ -30,4 +30,12 @@ public class ConcurrentUpdateSolrClientBuilderTest extends LuceneTestCase {
|
|||
public void testRejectsMissingBaseSolrUrl() {
|
||||
new Builder(null).build();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMissingQueueSize() {
|
||||
try (ConcurrentUpdateSolrClient client = new Builder("someurl").build()){
|
||||
// Do nothing as we just need to test that the only mandatory parameter for building the client
|
||||
// is the baseSolrUrl
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|