parent e7c393a518
commit fb29c2dccf
@@ -69,6 +69,13 @@ class SimulateExecutionService {
             final AtomicInteger counter = new AtomicInteger();
             final List<SimulateDocumentResult> responses =
                 new CopyOnWriteArrayList<>(new SimulateDocumentBaseResult[request.getDocuments().size()]);
+
+            if (request.getDocuments().isEmpty()) {
+                l.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(),
+                    request.isVerbose(), responses));
+                return;
+            }
+
             int iter = 0;
             for (IngestDocument ingestDocument : request.getDocuments()) {
                 final int index = iter;
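The hunk above short-circuits the fan-out when the request carries no documents: the listener is completed immediately with the (empty) responses list instead of waiting on a counter that would never be incremented. A minimal, self-contained sketch of that pattern, with invented names (FanOutSketch, runAll) rather than the Elasticsearch code:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;

public class FanOutSketch {
    // Run every task on its own thread, collect one result slot per task, and complete the
    // callback when the last task finishes. With zero tasks the counter would never move,
    // so the callback has to be invoked up front, which is the point of the early return above.
    static void runAll(List<Runnable> tasks, Consumer<List<String>> onDone) {
        final AtomicInteger counter = new AtomicInteger();
        final List<String> results = new CopyOnWriteArrayList<>(new String[tasks.size()]);
        if (tasks.isEmpty()) {
            onDone.accept(results);   // short-circuit: nothing will ever bump the counter
            return;
        }
        for (int i = 0; i < tasks.size(); i++) {
            final int index = i;
            new Thread(() -> {
                tasks.get(index).run();
                results.set(index, "done-" + index);
                if (counter.incrementAndGet() == tasks.size()) {
                    onDone.accept(results);   // last task to finish completes the batch
                }
            }).start();
        }
    }

    public static void main(String[] args) {
        runAll(List.of(), r -> System.out.println("empty batch -> " + r));
        runAll(List.of(() -> {}, () -> {}), r -> System.out.println("two tasks -> " + r));
    }
}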
@@ -173,8 +173,15 @@ public class SimulatePipelineRequest extends ActionRequest implements ToXContent
     private static List<IngestDocument> parseDocs(Map<String, Object> config) {
         List<Map<String, Object>> docs =
             ConfigurationUtils.readList(null, null, config, Fields.DOCS);
+        if (docs.isEmpty()) {
+            throw new IllegalArgumentException("must specify at least one document in [docs]");
+        }
         List<IngestDocument> ingestDocumentList = new ArrayList<>();
-        for (Map<String, Object> dataMap : docs) {
+        for (Object object : docs) {
+            if ((object instanceof Map) == false) {
+                throw new IllegalArgumentException("malformed [docs] section, should include an inner object");
+            }
+            Map<String, Object> dataMap = (Map<String, Object>) object;
             Map<String, Object> document = ConfigurationUtils.readMap(null, null,
                 dataMap, Fields.SOURCE);
             String index = ConfigurationUtils.readStringOrIntProperty(null, null,
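This hunk adds two guards to parseDocs before any per-document parsing: an empty [docs] array and list entries that are not JSON objects are both rejected with IllegalArgumentException. A standalone sketch of the same checks (DocsValidationSketch and validateDocs are illustrative names, not the real method):

import java.util.List;
import java.util.Map;

public class DocsValidationSketch {
    // Same two guards as the hunk: reject an empty [docs] list, and reject entries that are
    // not maps before casting and reading per-document fields.
    static void validateDocs(List<?> docs) {
        if (docs.isEmpty()) {
            throw new IllegalArgumentException("must specify at least one document in [docs]");
        }
        for (Object object : docs) {
            if ((object instanceof Map) == false) {
                throw new IllegalArgumentException("malformed [docs] section, should include an inner object");
            }
            @SuppressWarnings("unchecked")
            Map<String, Object> dataMap = (Map<String, Object>) object;   // safe after the instanceof check
            // the real code goes on to read _source, _index, etc. from dataMap
            System.out.println("valid doc with keys: " + dataMap.keySet());
        }
    }

    public static void main(String[] args) {
        validateDocs(List.of(Map.of("_source", Map.of("foo", "bar"))));   // passes
        try {
            validateDocs(List.of());
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
        try {
            validateDocs(List.of("not an object"));
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}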
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.action.ingest;
 
+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.ingest.CompoundProcessor;
 import org.elasticsearch.ingest.IngestDocument;
@@ -46,6 +47,7 @@ import static org.elasticsearch.ingest.IngestDocument.MetaData.ROUTING;
 import static org.elasticsearch.ingest.IngestDocument.MetaData.TYPE;
 import static org.elasticsearch.ingest.IngestDocument.MetaData.VERSION;
 import static org.elasticsearch.ingest.IngestDocument.MetaData.VERSION_TYPE;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.nullValue;
 import static org.mockito.Mockito.mock;
@@ -257,4 +259,33 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase {
             () -> SimulatePipelineRequest.parseWithPipelineId(pipelineId, requestContent, false, ingestService));
         assertThat(e.getMessage(), equalTo("pipeline [" + pipelineId + "] does not exist"));
     }
+
+    public void testNotValidDocs() {
+        Map<String, Object> requestContent = new HashMap<>();
+        List<Map<String, Object>> docs = new ArrayList<>();
+        Map<String, Object> pipelineConfig = new HashMap<>();
+        List<Map<String, Object>> processors = new ArrayList<>();
+        pipelineConfig.put("processors", processors);
+        requestContent.put(Fields.DOCS, docs);
+        requestContent.put(Fields.PIPELINE, pipelineConfig);
+        Exception e1 = expectThrows(IllegalArgumentException.class,
+            () -> SimulatePipelineRequest.parse(requestContent, false, ingestService));
+        assertThat(e1.getMessage(), equalTo("must specify at least one document in [docs]"));
+
+        List<String> stringList = new ArrayList<>();
+        stringList.add("test");
+        pipelineConfig.put("processors", processors);
+        requestContent.put(Fields.DOCS, stringList);
+        requestContent.put(Fields.PIPELINE, pipelineConfig);
+        Exception e2 = expectThrows(IllegalArgumentException.class,
+            () -> SimulatePipelineRequest.parse(requestContent, false, ingestService));
+        assertThat(e2.getMessage(), equalTo("malformed [docs] section, should include an inner object"));
+
+        docs.add(new HashMap<>());
+        requestContent.put(Fields.DOCS, docs);
+        requestContent.put(Fields.PIPELINE, pipelineConfig);
+        Exception e3 = expectThrows(ElasticsearchParseException.class,
+            () -> SimulatePipelineRequest.parse(requestContent, false, ingestService));
+        assertThat(e3.getMessage(), containsString("required property is missing"));
+    }
 }
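The new testNotValidDocs covers three shapes of invalid input: an empty [docs] array, a [docs] array whose entry is a plain string, and a [docs] entry that is a map but lacks the required _source object. The first two hit the new IllegalArgumentException paths; the third passes the new checks and, as the assertion suggests, fails deeper in ConfigurationUtils with a "required property is missing" ElasticsearchParseException. A rough, hypothetical sketch of that last behaviour (readRequiredMap is invented here and is not ConfigurationUtils' real signature or exception type):

import java.util.HashMap;
import java.util.Map;

public class RequiredPropertySketch {
    // Stand-in for a "read a required map property" helper; the real ConfigurationUtils.readMap
    // throws ElasticsearchParseException, emulated here with IllegalStateException.
    @SuppressWarnings("unchecked")
    static Map<String, Object> readRequiredMap(Map<String, Object> config, String name) {
        Object value = config.remove(name);
        if (value == null) {
            throw new IllegalStateException("[" + name + "] required property is missing");
        }
        return (Map<String, Object>) value;
    }

    public static void main(String[] args) {
        Map<String, Object> emptyDoc = new HashMap<>();   // mirrors docs.add(new HashMap<>()) in the test
        try {
            readRequiredMap(emptyDoc, "_source");
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage());   // contains "required property is missing"
        }
    }
}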