Add tests for valid and invalid datasource names (#9614)

* Add tests for valid and invalid datasource names

* code review

* clean up dependencies
This commit is contained in:
Suneet Saldanha 2020-04-06 16:02:50 -07:00 committed by GitHub
parent fc2897da1d
commit 7bf1ebb0b8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 63 additions and 11 deletions

View File

@@ -430,6 +430,12 @@
<artifactId>equalsverifier</artifactId> <artifactId>equalsverifier</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.3</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>

View File

@@ -21,9 +21,12 @@ package org.apache.druid.segment.indexing;
import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.exc.ValueInstantiationException;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.druid.common.config.NullHandlingTest;
import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JSONParseSpec; import org.apache.druid.data.input.impl.JSONParseSpec;
@@ -54,10 +57,14 @@ import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List;
import java.util.Map; import java.util.Map;
public class DataSchemaTest public class DataSchemaTest extends NullHandlingTest
{ {
private static final String VALID_DATASOURCE_CHARS_NAME = "alpha123..*~!@#&%^&*()-+ Россия\\ 한국 中国!";
@Rule @Rule
public ExpectedException expectedException = ExpectedException.none(); public ExpectedException expectedException = ExpectedException.none();
@@ -79,7 +86,7 @@ public class DataSchemaTest
); );
DataSchema schema = new DataSchema( DataSchema schema = new DataSchema(
"test", VALID_DATASOURCE_CHARS_NAME,
parser, parser,
new AggregatorFactory[]{ new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric1", "col1"),
@@ -116,7 +123,7 @@ public class DataSchemaTest
); );
DataSchema schema = new DataSchema( DataSchema schema = new DataSchema(
"test", VALID_DATASOURCE_CHARS_NAME,
parser, parser,
new AggregatorFactory[]{ new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric1", "col1"),
@@ -153,7 +160,7 @@ public class DataSchemaTest
); );
DataSchema schema = new DataSchema( DataSchema schema = new DataSchema(
"test", VALID_DATASOURCE_CHARS_NAME,
parserMap, parserMap,
new AggregatorFactory[]{ new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric1", "col1"),
@@ -211,7 +218,7 @@ public class DataSchemaTest
); );
DataSchema schema = new DataSchema( DataSchema schema = new DataSchema(
"test", VALID_DATASOURCE_CHARS_NAME,
parser, parser,
new AggregatorFactory[]{ new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric1", "col1"),
@@ -244,7 +251,7 @@ public class DataSchemaTest
); );
DataSchema schema = new DataSchema( DataSchema schema = new DataSchema(
"test", VALID_DATASOURCE_CHARS_NAME,
parser, parser,
new AggregatorFactory[]{ new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric1", "col1"),
@@ -262,7 +269,7 @@ public class DataSchemaTest
public void testSerdeWithInvalidParserMap() throws Exception public void testSerdeWithInvalidParserMap() throws Exception
{ {
String jsonStr = "{" String jsonStr = "{"
+ "\"dataSource\":\"test\"," + "\"dataSource\":\"" + StringEscapeUtils.escapeJson(VALID_DATASOURCE_CHARS_NAME) + "\","
+ "\"parser\":{\"type\":\"invalid\"}," + "\"parser\":{\"type\":\"invalid\"},"
+ "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}]," + "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}],"
+ "\"granularitySpec\":{" + "\"granularitySpec\":{"
@@ -365,7 +372,7 @@ public class DataSchemaTest
public void testSerde() throws Exception public void testSerde() throws Exception
{ {
String jsonStr = "{" String jsonStr = "{"
+ "\"dataSource\":\"test\"," + "\"dataSource\":\"" + StringEscapeUtils.escapeJson(VALID_DATASOURCE_CHARS_NAME) + "\","
+ "\"parser\":{" + "\"parser\":{"
+ "\"type\":\"string\"," + "\"type\":\"string\","
+ "\"parseSpec\":{" + "\"parseSpec\":{"
@@ -389,7 +396,7 @@ public class DataSchemaTest
DataSchema.class DataSchema.class
); );
Assert.assertEquals(actual.getDataSource(), "test"); Assert.assertEquals(actual.getDataSource(), VALID_DATASOURCE_CHARS_NAME);
Assert.assertEquals( Assert.assertEquals(
actual.getParser().getParseSpec(), actual.getParser().getParseSpec(),
new JSONParseSpec( new JSONParseSpec(
@@ -414,6 +421,45 @@ public class DataSchemaTest
); );
} }
@Test
public void testSerializeWithInvalidDataSourceName() throws Exception
{
// Verifies that a DataSchema with an invalid datasource name fails serde:
// empty name, path traversal ("../"), and names with a leading or embedded
// tab/whitespace. The "\t" literals below are real tab characters;
// StringEscapeUtils.escapeJson() re-encodes them so they survive embedding
// in the hand-built JSON string.
List<String> datasources = ImmutableList.of("", "../invalid", "\tname", "name\t invalid");
for (String datasource : datasources) {
String jsonStr = "{"
+ "\"dataSource\":\"" + StringEscapeUtils.escapeJson(datasource) + "\","
+ "\"parser\":{"
+ "\"type\":\"string\","
+ "\"parseSpec\":{"
+ "\"format\":\"json\","
+ "\"timestampSpec\":{\"column\":\"xXx\", \"format\": \"auto\", \"missingValue\": null},"
+ "\"dimensionsSpec\":{\"dimensions\":[], \"dimensionExclusions\":[]},"
+ "\"flattenSpec\":{\"useFieldDiscovery\":true, \"fields\":[]},"
+ "\"featureSpec\":{}},"
+ "\"encoding\":\"UTF-8\""
+ "},"
+ "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}],"
+ "\"granularitySpec\":{"
+ "\"type\":\"arbitrary\","
+ "\"queryGranularity\":{\"type\":\"duration\",\"duration\":86400000,\"origin\":\"1970-01-01T00:00:00.000Z\"},"
+ "\"intervals\":[\"2014-01-01T00:00:00.000Z/2015-01-01T00:00:00.000Z\"]}}";
try {
// Full round trip: deserialize, re-serialize, deserialize again. The
// invalid name is expected to be rejected during DataSchema construction,
// which Jackson surfaces as a ValueInstantiationException.
jsonMapper.readValue(
jsonMapper.writeValueAsString(
jsonMapper.readValue(jsonStr, DataSchema.class)
),
DataSchema.class
);
}
catch (ValueInstantiationException e) {
// The validation failure is expected to be an IllegalArgumentException
// wrapped by Jackson; move on to the next invalid name.
Assert.assertEquals(IllegalArgumentException.class, e.getCause().getClass());
continue;
}
Assert.fail("Serialization of datasource " + datasource + " should have failed.");
}
}
@Test @Test
public void testSerdeWithUpdatedDataSchemaAddedField() throws IOException public void testSerdeWithUpdatedDataSchemaAddedField() throws IOException
{ {
@@ -430,7 +476,7 @@ public class DataSchemaTest
); );
DataSchema originalSchema = new DataSchema( DataSchema originalSchema = new DataSchema(
"test", VALID_DATASOURCE_CHARS_NAME,
parser, parser,
new AggregatorFactory[]{ new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric1", "col1"),
@@ -469,7 +515,7 @@ public class DataSchemaTest
); );
TestModifiedDataSchema originalSchema = new TestModifiedDataSchema( TestModifiedDataSchema originalSchema = new TestModifiedDataSchema(
"test", VALID_DATASOURCE_CHARS_NAME,
null, null,
null, null,
new AggregatorFactory[]{ new AggregatorFactory[]{