remove equals/hashCode from Pipeline and adapt tests

Only MutateProcessor implemented equals/hashCode, hence it was the only processor we could use in the tests that relied on them. Better not to rely on equals/hashCode at all: drop them and mock processor/pipeline in the tests that need them. That also allows making the MutateProcessor constructor package-private, like the other processors.
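For context, a minimal sketch of the test pattern this change moves to (assuming Mockito and ESTestCase on the test classpath, as in the tests touched below; the class name is made up for illustration): build a Pipeline around a mocked Processor and assert on its getters instead of comparing whole instances via equals/hashCode.

    import org.elasticsearch.ingest.Pipeline;
    import org.elasticsearch.ingest.processor.Processor;
    import org.elasticsearch.test.ESTestCase;

    import java.util.Collections;
    import java.util.List;

    import static org.hamcrest.Matchers.equalTo;
    import static org.hamcrest.Matchers.sameInstance;
    import static org.mockito.Mockito.mock;

    // Hypothetical test class, for illustration only.
    public class PipelineMockingSketchTests extends ESTestCase {

        public void testPipelineExposesMockedProcessor() {
            // A mocked Processor stands in for MutateProcessor, so the test no longer
            // needs any concrete processor to implement equals/hashCode.
            Processor processor = mock(Processor.class);
            List<Processor> processors = Collections.singletonList(processor);
            Pipeline pipeline = new Pipeline("id", "description", processors);

            // Assert on the individual getters instead of comparing whole Pipeline instances.
            assertThat(pipeline.getId(), equalTo("id"));
            assertThat(pipeline.getDescription(), equalTo("description"));
            assertThat(pipeline.getProcessors().size(), equalTo(1));
            assertThat(pipeline.getProcessors().get(0), sameInstance(processor));
        }
    }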
This commit is contained in:
javanna 2015-11-13 19:35:09 +01:00 committed by Luca Cavanna
parent d093600729
commit 97f4f27b14
7 changed files with 141 additions and 171 deletions

View File

@@ -71,21 +71,6 @@ public final class Pipeline {
return processors;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Pipeline pipeline = (Pipeline) o;
return Objects.equals(id, pipeline.id) &&
Objects.equals(description, pipeline.description) &&
Objects.equals(processors, pipeline.processors);
}
@Override
public int hashCode() {
return Objects.hash(id, description, processors);
}
public final static class Factory {
public Pipeline create(String id, Map<String, Object> config, Map<String, Processor.Factory> processorRegistry) throws IOException {

View File

@@ -45,7 +45,7 @@ public final class MutateProcessor implements Processor {
private final List<String> uppercase;
private final List<String> lowercase;
public MutateProcessor(Map<String, Object> update, Map<String, String> rename, Map<String, String> convert,
MutateProcessor(Map<String, Object> update, Map<String, String> rename, Map<String, String> convert,
Map<String, String> split, List<GsubExpression> gsub, Map<String, String> join,
List<String> remove, List<String> trim, List<String> uppercase, List<String> lowercase) {
this.update = update;

View File

@@ -51,21 +51,6 @@ public class ParsedSimulateRequest {
return verbose;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ParsedSimulateRequest that = (ParsedSimulateRequest) o;
return Objects.equals(verbose, that.verbose) &&
Objects.equals(documents, that.documents) &&
Objects.equals(pipeline, that.pipeline);
}
@Override
public int hashCode() {
return Objects.hash(documents, pipeline, verbose);
}
public static class Parser {
private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory();
public static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline";

View File

@@ -26,7 +26,8 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
public class DataTests extends ESTestCase {
@@ -86,28 +87,54 @@ public class DataTests extends ESTestCase {
assertThat(data.getProperty("fizz.new"), equalTo("bar"));
}
public void testEquals() {
Data otherData = new Data(data);
assertThat(otherData, equalTo(data));
}
public void testEqualsAndHashcode() throws Exception {
String index = randomAsciiOfLengthBetween(1, 10);
String type = randomAsciiOfLengthBetween(1, 10);
String id = randomAsciiOfLengthBetween(1, 10);
String fieldName = randomAsciiOfLengthBetween(1, 10);
String fieldValue = randomAsciiOfLengthBetween(1, 10);
Data data = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue));
public void testNotEqualsDiffIndex() {
Data otherData = new Data(data.getIndex() + "foo", data.getType(), data.getId(), data.getDocument());
assertThat(otherData, not(equalTo(data)));
}
boolean changed = false;
String otherIndex;
if (randomBoolean()) {
otherIndex = randomAsciiOfLengthBetween(1, 10);
changed = true;
} else {
otherIndex = index;
}
String otherType;
if (randomBoolean()) {
otherType = randomAsciiOfLengthBetween(1, 10);
changed = true;
} else {
otherType = type;
}
String otherId;
if (randomBoolean()) {
otherId = randomAsciiOfLengthBetween(1, 10);
changed = true;
} else {
otherId = id;
}
Map<String, Object> document;
if (randomBoolean()) {
document = Collections.singletonMap(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
changed = true;
} else {
document = Collections.singletonMap(fieldName, fieldValue);
}
public void testNotEqualsDiffType() {
Data otherData = new Data(data.getIndex(), data.getType() + "foo", data.getId(), data.getDocument());
assertThat(otherData, not(equalTo(data)));
}
public void testNotEqualsDiffId() {
Data otherData = new Data(data.getIndex(), data.getType(), data.getId() + "foo", data.getDocument());
assertThat(otherData, not(equalTo(data)));
}
public void testNotEqualsDiffDocument() {
Data otherData = new Data(data.getIndex(), data.getType(), data.getId(), Collections.emptyMap());
assertThat(otherData, not(equalTo(data)));
Data otherData = new Data(otherIndex, otherType, otherId, document);
if (changed) {
assertThat(data, not(equalTo(otherData)));
assertThat(otherData, not(equalTo(data)));
} else {
assertThat(data, equalTo(otherData));
assertThat(otherData, equalTo(data));
Data thirdData = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue));
assertThat(thirdData, equalTo(data));
assertThat(data, equalTo(thirdData));
}
}
}

View File

@@ -1,67 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest;
import org.elasticsearch.ingest.processor.Processor;
import org.elasticsearch.ingest.processor.mutate.MutateProcessor;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import java.util.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.mock;
public class PipelineTests extends ESTestCase {
private Processor updateProcessor;
private Processor lowercaseProcessor;
private Pipeline pipeline;
@Before
public void setup() {
Map<String, Object> update = Collections.singletonMap("foo", 123);
List<String> lowercase = Collections.singletonList("foo");
updateProcessor = new MutateProcessor(update, null, null, null, null, null, null, null, null, null);
lowercaseProcessor = new MutateProcessor(null, null, null, null, null, null, null, null, null, lowercase);
pipeline = new Pipeline("id", "description", Arrays.asList(updateProcessor, lowercaseProcessor));
}
public void testEquals() throws Exception {
Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription(), pipeline.getProcessors());
assertThat(pipeline, equalTo(other));
}
public void testNotEqualsDiffId() throws Exception {
Pipeline other = new Pipeline(pipeline.getId() + "foo", pipeline.getDescription(), pipeline.getProcessors());
assertThat(pipeline, not(equalTo(other)));
}
public void testNotEqualsDiffDescription() throws Exception {
Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription() + "foo", pipeline.getProcessors());
assertThat(pipeline, not(equalTo(other)));
}
public void testNotEqualsDiffProcessors() throws Exception {
Pipeline other = new Pipeline(pipeline.getId(), pipeline.getDescription() + "foo", Collections.singletonList(updateProcessor));
assertThat(pipeline, not(equalTo(other)));
}
}

View File

@@ -39,8 +39,7 @@ public class TransportDataTests extends ESTestCase {
String id = randomAsciiOfLengthBetween(1, 10);
String fieldName = randomAsciiOfLengthBetween(1, 10);
String fieldValue = randomAsciiOfLengthBetween(1, 10);
Data data = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue));
TransportData transportData = new TransportData(data);
TransportData transportData = new TransportData(new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)));
boolean changed = false;
String otherIndex;
@@ -72,22 +71,14 @@
document = Collections.singletonMap(fieldName, fieldValue);
}
Data otherData = new Data(otherIndex, otherType, otherId, document);
TransportData otherTransportData = new TransportData(otherData);
TransportData otherTransportData = new TransportData(new Data(otherIndex, otherType, otherId, document));
if (changed) {
assertThat(data, not(equalTo(otherData)));
assertThat(otherData, not(equalTo(data)));
assertThat(transportData, not(equalTo(otherTransportData)));
assertThat(otherTransportData, not(equalTo(transportData)));
} else {
assertThat(data, equalTo(otherData));
assertThat(otherData, equalTo(data));
assertThat(transportData, equalTo(otherTransportData));
assertThat(otherTransportData, equalTo(transportData));
Data thirdData = new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue));
TransportData thirdTransportData = new TransportData(thirdData);
assertThat(thirdData, equalTo(data));
assertThat(data, equalTo(thirdData));
TransportData thirdTransportData = new TransportData(new Data(index, type, id, Collections.singletonMap(fieldName, fieldValue)));
assertThat(thirdTransportData, equalTo(transportData));
assertThat(transportData, equalTo(thirdTransportData));
}

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.plugin.ingest.transport.simulate;
import org.elasticsearch.ingest.Data;
import org.elasticsearch.ingest.Pipeline;
import org.elasticsearch.ingest.processor.Processor;
import org.elasticsearch.ingest.processor.mutate.MutateProcessor;
import org.elasticsearch.plugin.ingest.PipelineStore;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@@ -30,70 +29,120 @@ import org.junit.Before;
import java.io.IOException;
import java.util.*;
import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.elasticsearch.plugin.ingest.transport.simulate.SimulatePipelineRequest.Fields;
public class ParsedSimulateRequestParserTests extends ESTestCase {
private PipelineStore store;
private ParsedSimulateRequest.Parser parser;
private Pipeline pipeline;
private Data data;
@Before
public void init() throws IOException {
parser = new ParsedSimulateRequest.Parser();
List<String> uppercase = Collections.singletonList("foo");
Processor processor = new MutateProcessor(null, null, null, null, null, null, null, null, uppercase, null);
pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Arrays.asList(processor));
data = new Data("_index", "_type", "_id", Collections.singletonMap("foo", "bar"));
Pipeline pipeline = new Pipeline(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, null, Collections.singletonList(mock(Processor.class)));
Map<String, Processor.Factory> processorRegistry = new HashMap<>();
processorRegistry.put("mutate", new MutateProcessor.Factory());
processorRegistry.put("mock_processor", mock(Processor.Factory.class));
store = mock(PipelineStore.class);
when(store.get("_id")).thenReturn(pipeline);
when(store.get(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID)).thenReturn(pipeline);
when(store.getProcessorFactoryRegistry()).thenReturn(processorRegistry);
}
public void testParseUsingPipelineStore() throws Exception {
ParsedSimulateRequest expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false);
int numDocs = randomIntBetween(1, 10);
Map<String, Object> raw = new HashMap<>();
Map<String, Object> requestContent = new HashMap<>();
List<Map<String, Object>> docs = new ArrayList<>();
Map<String, Object> doc = new HashMap<>();
doc.put(Fields.INDEX, "_index");
doc.put(Fields.TYPE, "_type");
doc.put(Fields.ID, "_id");
doc.put(Fields.SOURCE, data.getDocument());
docs.add(doc);
raw.put(Fields.DOCS, docs);
List<Map<String, Object>> expectedDocs = new ArrayList<>();
requestContent.put(Fields.DOCS, docs);
for (int i = 0; i < numDocs; i++) {
Map<String, Object> doc = new HashMap<>();
String index = randomAsciiOfLengthBetween(1, 10);
String type = randomAsciiOfLengthBetween(1, 10);
String id = randomAsciiOfLengthBetween(1, 10);
doc.put(Fields.INDEX, index);
doc.put(Fields.TYPE, type);
doc.put(Fields.ID, id);
String fieldName = randomAsciiOfLengthBetween(1, 10);
String fieldValue = randomAsciiOfLengthBetween(1, 10);
doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
docs.add(doc);
Map<String, Object> expectedDoc = new HashMap<>();
expectedDoc.put(Fields.INDEX, index);
expectedDoc.put(Fields.TYPE, type);
expectedDoc.put(Fields.ID, id);
expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
expectedDocs.add(expectedDoc);
}
ParsedSimulateRequest actualRequest = parser.parseWithPipelineId("_id", raw, false, store);
assertThat(actualRequest, equalTo(expectedRequest));
ParsedSimulateRequest actualRequest = new ParsedSimulateRequest.Parser().parseWithPipelineId(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID, requestContent, false, store);
assertThat(actualRequest.isVerbose(), equalTo(false));
assertThat(actualRequest.getDocuments().size(), equalTo(numDocs));
Iterator<Map<String, Object>> expectedDocsIterator = expectedDocs.iterator();
for (Data data : actualRequest.getDocuments()) {
Map<String, Object> expectedDocument = expectedDocsIterator.next();
assertThat(data.getDocument(), equalTo(expectedDocument.get(Fields.SOURCE)));
assertThat(data.getIndex(), equalTo(expectedDocument.get(Fields.INDEX)));
assertThat(data.getType(), equalTo(expectedDocument.get(Fields.TYPE)));
assertThat(data.getId(), equalTo(expectedDocument.get(Fields.ID)));
}
assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID));
assertThat(actualRequest.getPipeline().getDescription(), nullValue());
assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(1));
}
public void testParseWithProvidedPipeline() throws Exception {
ParsedSimulateRequest expectedRequest = new ParsedSimulateRequest(pipeline, Collections.singletonList(data), false);
int numDocs = randomIntBetween(1, 10);
Map<String, Object> raw = new HashMap<>();
Map<String, Object> requestContent = new HashMap<>();
List<Map<String, Object>> docs = new ArrayList<>();
Map<String, Object> doc = new HashMap<>();
doc.put(Fields.INDEX, "_index");
doc.put(Fields.TYPE, "_type");
doc.put(Fields.ID, "_id");
doc.put(Fields.SOURCE, data.getDocument());
docs.add(doc);
List<Map<String, Object>> expectedDocs = new ArrayList<>();
requestContent.put(Fields.DOCS, docs);
for (int i = 0; i < numDocs; i++) {
Map<String, Object> doc = new HashMap<>();
String index = randomAsciiOfLengthBetween(1, 10);
String type = randomAsciiOfLengthBetween(1, 10);
String id = randomAsciiOfLengthBetween(1, 10);
doc.put(Fields.INDEX, index);
doc.put(Fields.TYPE, type);
doc.put(Fields.ID, id);
String fieldName = randomAsciiOfLengthBetween(1, 10);
String fieldValue = randomAsciiOfLengthBetween(1, 10);
doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
docs.add(doc);
Map<String, Object> expectedDoc = new HashMap<>();
expectedDoc.put(Fields.INDEX, index);
expectedDoc.put(Fields.TYPE, type);
expectedDoc.put(Fields.ID, id);
expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
expectedDocs.add(expectedDoc);
}
Map<String, Object> processorConfig = new HashMap<>();
processorConfig.put("uppercase", Arrays.asList("foo"));
Map<String, Object> pipelineConfig = new HashMap<>();
pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("mutate", processorConfig)));
List<Map<String, Object>> processors = new ArrayList<>();
int numProcessors = randomIntBetween(1, 10);
for (int i = 0; i < numProcessors; i++) {
processors.add(Collections.singletonMap("mock_processor", Collections.emptyMap()));
}
pipelineConfig.put("processors", processors);
requestContent.put(Fields.PIPELINE, pipelineConfig);
raw.put(Fields.DOCS, docs);
raw.put(Fields.PIPELINE, pipelineConfig);
ParsedSimulateRequest actualRequest = new ParsedSimulateRequest.Parser().parse(requestContent, false, store);
assertThat(actualRequest.isVerbose(), equalTo(false));
assertThat(actualRequest.getDocuments().size(), equalTo(numDocs));
Iterator<Map<String, Object>> expectedDocsIterator = expectedDocs.iterator();
for (Data data : actualRequest.getDocuments()) {
Map<String, Object> expectedDocument = expectedDocsIterator.next();
assertThat(data.getDocument(), equalTo(expectedDocument.get(Fields.SOURCE)));
assertThat(data.getIndex(), equalTo(expectedDocument.get(Fields.INDEX)));
assertThat(data.getType(), equalTo(expectedDocument.get(Fields.TYPE)));
assertThat(data.getId(), equalTo(expectedDocument.get(Fields.ID)));
}
ParsedSimulateRequest actualRequest = parser.parse(raw, false, store);
assertThat(actualRequest, equalTo(expectedRequest));
assertThat(actualRequest.getPipeline().getId(), equalTo(ParsedSimulateRequest.Parser.SIMULATED_PIPELINE_ID));
assertThat(actualRequest.getPipeline().getDescription(), nullValue());
assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(numProcessors));
}
}