mirror of https://github.com/apache/druid.git
[Integration-tests] Allow specifying additional properties while connecting to kafka (#5185)
* [Integration-tests] Allow specifying additional properties while connecting to Kafka

Use case: integration tests that run against a secure Kafka cluster need to set additional client properties. This PR adds a way to pass additional properties to the Kafka consumer/producer used in the Kafka integration tests. It also adds a flag to skip creating/deleting the Kafka topic from the tests, since not all secure Kafka clusters allow every user to manage topics.

* fix check style
parent 0f5c7d1aec
commit 6582b16a81
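To make the new knobs concrete, here is a minimal self-contained sketch (not part of this commit; all keys and values are hypothetical) of the filtering contract the PR introduces: configuration entries whose keys start with the "kafka.test.property." prefix are stripped of the prefix and forwarded verbatim to the Kafka consumer/producer, while "manageKafkaTopic" is consumed by the test harness itself to decide whether to create/delete the topic.

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class FilteredPropertiesSketch
{
  // Same prefix the tests use; entries carrying it are forwarded to Kafka clients.
  private static final String TEST_PROPERTY_PREFIX = "kafka.test.property.";

  public static void main(String[] args)
  {
    // Stand-in for IntegrationTestingConfig.getProperties(); values are hypothetical.
    Map<String, String> configProps = new HashMap<>();
    configProps.put("kafka.test.property.security.protocol", "SASL_PLAINTEXT"); // forwarded as "security.protocol"
    configProps.put("manageKafkaTopic", "false");                               // read by the harness, not sent to Kafka
    configProps.put("unrelated.key", "x");                                      // ignored by the filter

    // Mirrors the addFilteredProperties(...) logic added in the tests below.
    Properties kafkaProps = new Properties();
    for (Map.Entry<String, String> entry : configProps.entrySet()) {
      if (entry.getKey().startsWith(TEST_PROPERTY_PREFIX)) {
        kafkaProps.put(entry.getKey().substring(TEST_PROPERTY_PREFIX.length()), entry.getValue());
      }
    }
    System.out.println(kafkaProps); // prints {security.protocol=SASL_PLAINTEXT}
  }
}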
@@ -179,6 +179,18 @@ public class ConfigFileConfigProvider implements IntegrationTestingConfigProvider
       {
         return password;
       }
+
+      @Override
+      public Map<String, String> getProperties()
+      {
+        return props;
+      }
+
+      @Override
+      public boolean manageKafkaTopic()
+      {
+        return Boolean.valueOf(props.getOrDefault("manageKafkaTopic", "true"));
+      }
     };
   }
 }
@@ -23,6 +23,8 @@ package io.druid.testing;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 import javax.validation.constraints.NotNull;
+import java.util.HashMap;
+import java.util.Map;
 
 public class DockerConfigProvider implements IntegrationTestingConfigProvider
 {
@@ -109,6 +111,18 @@ public class DockerConfigProvider implements IntegrationTestingConfigProvider
       {
         return null;
       }
+
+      @Override
+      public Map<String, String> getProperties()
+      {
+        return new HashMap<>();
+      }
+
+      @Override
+      public boolean manageKafkaTopic()
+      {
+        return true;
+      }
     };
   }
 }
@@ -19,6 +19,8 @@
 
 package io.druid.testing;
 
+import java.util.Map;
+
 /**
  */
 public interface IntegrationTestingConfig
@@ -44,4 +46,8 @@ public interface IntegrationTestingConfig
   String getUsername();
 
   String getPassword();
+
+  Map<String, String> getProperties();
+
+  boolean manageKafkaTopic();
 }
@@ -48,6 +48,7 @@ import org.testng.annotations.Test;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 
@@ -64,6 +65,7 @@ public class ITKafkaIndexingServiceTest extends AbstractIndexerTest
   private static final String TOPIC_NAME = "kafka_indexing_service_topic";
   private static final int NUM_EVENTS_TO_SEND = 60;
   private static final long WAIT_TIME_MILLIS = 2 * 60 * 1000L;
+  public static final String testPropertyPrefix = "kafka.test.property.";
 
   // We'll fill in the current time and numbers for added, deleted and changed
   // before sending the event.
@@ -117,10 +119,19 @@ public class ITKafkaIndexingServiceTest extends AbstractIndexerTest
           ZKStringSerializer$.MODULE$
       );
       zkUtils = new ZkUtils(zkClient, new ZkConnection(zkHosts, sessionTimeoutMs), false);
-      int numPartitions = 4;
-      int replicationFactor = 1;
-      Properties topicConfig = new Properties();
-      AdminUtils.createTopic(zkUtils, TOPIC_NAME, numPartitions, replicationFactor, topicConfig, RackAwareMode.Disabled$.MODULE$);
+      if (config.manageKafkaTopic()) {
+        int numPartitions = 4;
+        int replicationFactor = 1;
+        Properties topicConfig = new Properties();
+        AdminUtils.createTopic(
+            zkUtils,
+            TOPIC_NAME,
+            numPartitions,
+            replicationFactor,
+            topicConfig,
+            RackAwareMode.Disabled$.MODULE$
+        );
+      }
     }
     catch (Exception e) {
       throw new ISE(e, "could not create kafka topic");
@@ -129,10 +140,14 @@ public class ITKafkaIndexingServiceTest extends AbstractIndexerTest
     String spec;
     try {
       LOG.info("supervisorSpec name: [%s]", INDEXER_FILE);
+      Properties consumerProperties = new Properties();
+      consumerProperties.put("bootstrap.servers", config.getKafkaHost());
+      addFilteredProperties(consumerProperties);
+
       spec = getTaskAsString(INDEXER_FILE)
           .replaceAll("%%DATASOURCE%%", DATASOURCE)
           .replaceAll("%%TOPIC%%", TOPIC_NAME)
-          .replaceAll("%%KAFKA_BROKER%%", config.getKafkaHost());
+          .replaceAll("%%CONSUMER_PROPERTIES%%", jsonMapper.writeValueAsString(consumerProperties));
       LOG.info("supervisorSpec: [%s]\n", spec);
     }
     catch (Exception e) {
@@ -146,6 +161,7 @@ public class ITKafkaIndexingServiceTest extends AbstractIndexerTest
 
     // set up kafka producer
     Properties properties = new Properties();
+    addFilteredProperties(properties);
     properties.put("bootstrap.servers", config.getKafkaHost());
     LOG.info("Kafka bootstrap.servers: [%s]", config.getKafkaHost());
     properties.put("acks", "all");
@@ -285,13 +301,23 @@ public class ITKafkaIndexingServiceTest extends AbstractIndexerTest
   public void afterClass() throws Exception
   {
     LOG.info("teardown");
+    if (config.manageKafkaTopic()) {
       // delete kafka topic
       AdminUtils.deleteTopic(zkUtils, TOPIC_NAME);
+    }
 
     // remove segments
     if (segmentsExist) {
       unloadAndKillData(DATASOURCE);
     }
   }
 
+  public void addFilteredProperties(Properties properties)
+  {
+    for (Map.Entry<String, String> entry : config.getProperties().entrySet()) {
+      if (entry.getKey().startsWith(testPropertyPrefix)) {
+        properties.put(entry.getKey().substring(testPropertyPrefix.length()), entry.getValue());
+      }
+    }
+  }
 }
@@ -48,6 +48,7 @@ import org.testng.annotations.Test;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 
@@ -64,6 +65,8 @@ public class ITKafkaTest extends AbstractIndexerTest
   private static final String DATASOURCE = "kafka_test";
   private static final String TOPIC_NAME = "kafkaTopic";
   private static final int MINUTES_TO_SEND = 2;
+  public static final String testPropertyPrefix = "kafka.test.property.";
+
 
   // We'll fill in the current time and numbers for added, deleted and changed
   // before sending the event.
@@ -117,10 +120,20 @@ public class ITKafkaTest extends AbstractIndexerTest
           ZKStringSerializer$.MODULE$
       );
       zkUtils = new ZkUtils(zkClient, new ZkConnection(zkHosts, sessionTimeoutMs), false);
-      int numPartitions = 1;
-      int replicationFactor = 1;
-      Properties topicConfig = new Properties();
-      AdminUtils.createTopic(zkUtils, TOPIC_NAME, numPartitions, replicationFactor, topicConfig, RackAwareMode.Disabled$.MODULE$);
+      if (config.manageKafkaTopic()) {
+        int numPartitions = 1;
+        int replicationFactor = 1;
+        Properties topicConfig = new Properties();
+        // addFilteredProperties(topicConfig);
+        AdminUtils.createTopic(
+            zkUtils,
+            TOPIC_NAME,
+            numPartitions,
+            replicationFactor,
+            topicConfig,
+            RackAwareMode.Disabled$.MODULE$
+        );
+      }
     }
     catch (Exception e) {
       throw new ISE(e, "could not create kafka topic");
@@ -128,6 +141,7 @@ public class ITKafkaTest extends AbstractIndexerTest
 
     // set up kafka producer
     Properties properties = new Properties();
+    addFilteredProperties(properties);
     properties.put("bootstrap.servers", config.getKafkaHost());
     LOG.info("Kafka bootstrap.servers: [%s]", config.getKafkaHost());
     properties.put("acks", "all");
@@ -179,12 +193,25 @@ public class ITKafkaTest extends AbstractIndexerTest
     // replace temp strings in indexer file
     try {
      LOG.info("indexerFile name: [%s]", INDEXER_FILE);
+
+      Properties consumerProperties = new Properties();
+      consumerProperties.put("zookeeper.connect", config.getZookeeperHosts());
+      consumerProperties.put("zookeeper.connection.timeout.ms", "15000");
+      consumerProperties.put("zookeeper.sync.time.ms", "5000");
+      consumerProperties.put("group.id", Long.toString(System.currentTimeMillis()));
+      consumerProperties.put("zookeeper.sync.time.ms", "5000");
+      consumerProperties.put("fetch.message.max.bytes", "1048586");
+      consumerProperties.put("auto.offset.reset", "smallest");
+      consumerProperties.put("auto.commit.enable", "false");
+
+      addFilteredProperties(consumerProperties);
+
       indexerSpec = getTaskAsString(INDEXER_FILE)
           .replaceAll("%%DATASOURCE%%", DATASOURCE)
           .replaceAll("%%TOPIC%%", TOPIC_NAME)
-          .replaceAll("%%ZOOKEEPER_SERVER%%", config.getZookeeperHosts())
-          .replaceAll("%%GROUP_ID%%", Long.toString(System.currentTimeMillis()))
-          .replaceAll("%%COUNT%%", Integer.toString(num_events));
+          .replaceAll("%%COUNT%%", Integer.toString(num_events))
+          .replaceAll("%%CONSUMER_PROPERTIES%%", jsonMapper.writeValueAsString(consumerProperties));
+
       LOG.info("indexerFile: [%s]\n", indexerSpec);
     }
     catch (Exception e) {
@@ -239,11 +266,11 @@ public class ITKafkaTest extends AbstractIndexerTest
 
     String queryStr = query_response_template
         .replaceAll("%%DATASOURCE%%", DATASOURCE)
         // time boundary
         .replace("%%TIMEBOUNDARY_RESPONSE_TIMESTAMP%%", TIMESTAMP_FMT.print(dtFirst))
         .replace("%%TIMEBOUNDARY_RESPONSE_MAXTIME%%", TIMESTAMP_FMT.print(dtLast))
         .replace("%%TIMEBOUNDARY_RESPONSE_MINTIME%%", TIMESTAMP_FMT.print(dtFirst))
         // time series
         .replace("%%TIMESERIES_QUERY_START%%", INTERVAL_FMT.print(dtFirst))
         .replace("%%TIMESERIES_QUERY_END%%", INTERVAL_FMT.print(dtFirst.plusMinutes(MINUTES_TO_SEND + 2)))
         .replace("%%TIMESERIES_RESPONSE_TIMESTAMP%%", TIMESTAMP_FMT.print(dtFirst))
@@ -263,14 +290,24 @@ public class ITKafkaTest extends AbstractIndexerTest
   public void afterClass() throws Exception
   {
     LOG.info("teardown");
+    if (config.manageKafkaTopic()) {
       // delete kafka topic
       AdminUtils.deleteTopic(zkUtils, TOPIC_NAME);
+    }
 
     // remove segments
     if (segmentsExist) {
       unloadAndKillData(DATASOURCE);
     }
   }
 
+  public void addFilteredProperties(Properties properties)
+  {
+    for (Map.Entry<String, String> entry : config.getProperties().entrySet()) {
+      if (entry.getKey().startsWith(testPropertyPrefix)) {
+        properties.put(entry.getKey().substring(testPropertyPrefix.length()), entry.getValue());
+      }
+    }
+  }
 }
 
@@ -52,16 +52,7 @@
       "count": "%%COUNT%%",
       "delegate": {
         "type": "kafka-0.8",
-        "consumerProps": {
-          "zookeeper.connect": "%%ZOOKEEPER_SERVER%%",
-          "zookeeper.connection.timeout.ms" : "15000",
-          "zookeeper.session.timeout.ms" : "15000",
-          "zookeeper.sync.time.ms" : "5000",
-          "group.id": "%%GROUP_ID%%",
-          "fetch.message.max.bytes" : "1048586",
-          "auto.offset.reset": "smallest",
-          "auto.commit.enable": "false"
-        },
+        "consumerProps": %%CONSUMER_PROPERTIES%%,
         "feed": "%%TOPIC%%"
       }
     }
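For reference, after substitution the %%CONSUMER_PROPERTIES%% placeholder above carries the JSON serialization (via jsonMapper.writeValueAsString) of the consumer Properties assembled in ITKafkaTest, so the firehose line might render as follows (the ZooKeeper host and group id below are hypothetical):

        "consumerProps": {"zookeeper.connect": "zk-host:2181", "zookeeper.connection.timeout.ms": "15000", "zookeeper.sync.time.ms": "5000", "group.id": "1516043420000", "fetch.message.max.bytes": "1048586", "auto.offset.reset": "smallest", "auto.commit.enable": "false"},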
@@ -52,9 +52,7 @@
       },
       "ioConfig": {
         "topic": "%%TOPIC%%",
-        "consumerProperties": {
-          "bootstrap.servers": "%%KAFKA_BROKER%%"
-        },
+        "consumerProperties": %%CONSUMER_PROPERTIES%%,
         "taskCount": 2,
         "replicas": 1,
         "taskDuration": "PT2M",
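Likewise, in the supervisor spec the placeholder expands to the properties built in ITKafkaIndexingServiceTest: bootstrap.servers taken from config.getKafkaHost() plus any filtered kafka.test.property.* entries. With a hypothetical broker address and one hypothetical filtered security entry it might render as:

        "consumerProperties": {"bootstrap.servers": "kafka-broker:9092", "security.protocol": "SASL_PLAINTEXT"},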