NIFI-4623: This closes #2281. Removed obsolete instability warning in documentation of newer (>= 0_10) Kafka processors

Signed-off-by: joewitt <joewitt@apache.org>
Janosch Woschitz 2017-11-21 10:51:23 +01:00 committed by joewitt
parent ce1b227e85
commit e8b2387cb2
6 changed files with 4 additions and 20 deletions
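For context, all six hunks below edit the same documentation annotations from the NiFi API (org.apache.nifi.annotation.documentation). The sketch that follows is hypothetical (the class name ExampleConsumeKafka and its empty onTrigger body are illustrative, not part of this commit); it shows how @CapabilityDescription reads on a processor once the obsolete warning sentences are dropped:

import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;

// Hypothetical illustration only; the real processors (ConsumeKafka_0_10 and friends)
// carry additional annotations such as @WritesAttributes, @InputRequirement and @DynamicProperty.
@Tags({"Kafka", "Get", "Ingest", "Ingress", "Topic", "PubSub", "Consume", "0.10.x"})
@CapabilityDescription("Consumes messages from Apache Kafka specifically built against the Kafka 0.10.x Consumer API. "
        + "The complementary NiFi processor for sending messages is PublishKafka_0_10.")
public class ExampleConsumeKafka extends AbstractProcessor {

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // Consumer logic omitted; only the documentation annotations above are relevant to this commit.
    }
}

NiFi renders the @CapabilityDescription text into each processor's generated usage documentation, which is why trimming the warning in these annotations is what updates the docs users see.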


@@ -54,9 +54,7 @@ import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.HEX_EN
 import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.UTF8_ENCODING;
 @CapabilityDescription("Consumes messages from Apache Kafka specifically built against the Kafka 0.10.x Consumer API. "
-    + " Please note there are cases where the publisher can get into an indefinite stuck state. We are closely monitoring"
-    + " how this evolves in the Kafka community and will take advantage of those fixes as soon as we can. In the meantime"
-    + " it is possible to enter states where the only resolution will be to restart the JVM NiFi runs on. The complementary NiFi processor for sending messages is PublishKafka_0_10.")
+    + "The complementary NiFi processor for sending messages is PublishKafka_0_10.")
 @Tags({"Kafka", "Get", "Ingest", "Ingress", "Topic", "PubSub", "Consume", "0.10.x"})
 @WritesAttributes({
 @WritesAttribute(attribute = KafkaProcessorUtils.KAFKA_COUNT, description = "The number of messages written if more than one"),


@@ -64,10 +64,7 @@ import org.apache.nifi.serialization.record.RecordSet;
 @Tags({"Apache", "Kafka", "Record", "csv", "json", "avro", "logs", "Put", "Send", "Message", "PubSub", "0.10.x"})
 @CapabilityDescription("Sends the contents of a FlowFile as individual records to Apache Kafka using the Kafka 0.10.x Producer API. "
     + "The contents of the FlowFile are expected to be record-oriented data that can be read by the configured Record Reader. "
-    + " Please note there are cases where the publisher can get into an indefinite stuck state. We are closely monitoring"
-    + " how this evolves in the Kafka community and will take advantage of those fixes as soon as we can. In the meantime"
-    + " it is possible to enter states where the only resolution will be to restart the JVM NiFi runs on. The complementary NiFi "
-    + "processor for fetching messages is ConsumeKafka_0_10_Record.")
+    + "The complementary NiFi processor for fetching messages is ConsumeKafka_0_10_Record.")
 @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
 @DynamicProperty(name = "The name of a Kafka configuration property.", value = "The value of a given Kafka configuration property.",
     description = "These properties will be added on the Kafka configuration after loading any provided configuration properties."


@@ -60,9 +60,7 @@ import org.apache.nifi.processor.util.StandardValidators;
 @CapabilityDescription("Sends the contents of a FlowFile as a message to Apache Kafka using the Kafka 0.10.x Producer API."
     + "The messages to send may be individual FlowFiles or may be delimited, using a "
     + "user-specified delimiter, such as a new-line. "
-    + " Please note there are cases where the publisher can get into an indefinite stuck state. We are closely monitoring"
-    + " how this evolves in the Kafka community and will take advantage of those fixes as soon as we can. In the meantime"
-    + " it is possible to enter states where the only resolution will be to restart the JVM NiFi runs on. The complementary NiFi processor for fetching messages is ConsumeKafka_0_10.")
+    + "The complementary NiFi processor for fetching messages is ConsumeKafka_0_10.")
 @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
 @DynamicProperty(name = "The name of a Kafka configuration property.", value = "The value of a given Kafka configuration property.",
     description = "These properties will be added on the Kafka configuration after loading any provided configuration properties."


@@ -55,9 +55,6 @@ import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.HEX_EN
 import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.UTF8_ENCODING;
 @CapabilityDescription("Consumes messages from Apache Kafka specifically built against the Kafka 0.11.x Consumer API. "
-    + " Please note there are cases where the publisher can get into an indefinite stuck state. We are closely monitoring"
-    + " how this evolves in the Kafka community and will take advantage of those fixes as soon as we can. In the meantime"
-    + " it is possible to enter states where the only resolution will be to restart the JVM NiFi runs on. "
     + "The complementary NiFi processor for sending messages is PublishKafka_0_11.")
 @Tags({"Kafka", "Get", "Ingest", "Ingress", "Topic", "PubSub", "Consume", "0.11.x"})
 @WritesAttributes({


@@ -66,10 +66,7 @@ import org.apache.nifi.serialization.record.RecordSet;
 @Tags({"Apache", "Kafka", "Record", "csv", "json", "avro", "logs", "Put", "Send", "Message", "PubSub", "0.11.x"})
 @CapabilityDescription("Sends the contents of a FlowFile as individual records to Apache Kafka using the Kafka 0.11.x Producer API. "
     + "The contents of the FlowFile are expected to be record-oriented data that can be read by the configured Record Reader. "
-    + " Please note there are cases where the publisher can get into an indefinite stuck state. We are closely monitoring"
-    + " how this evolves in the Kafka community and will take advantage of those fixes as soon as we can. In the meantime"
-    + " it is possible to enter states where the only resolution will be to restart the JVM NiFi runs on. The complementary NiFi "
-    + "processor for fetching messages is ConsumeKafka_0_11_Record.")
+    + "The complementary NiFi processor for fetching messages is ConsumeKafka_0_11_Record.")
 @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
 @DynamicProperty(name = "The name of a Kafka configuration property.", value = "The value of a given Kafka configuration property.",
     description = "These properties will be added on the Kafka configuration after loading any provided configuration properties."


@@ -62,9 +62,6 @@ import org.apache.nifi.processor.util.StandardValidators;
 @CapabilityDescription("Sends the contents of a FlowFile as a message to Apache Kafka using the Kafka 0.11.x Producer API."
     + "The messages to send may be individual FlowFiles or may be delimited, using a "
     + "user-specified delimiter, such as a new-line. "
-    + " Please note there are cases where the publisher can get into an indefinite stuck state. We are closely monitoring"
-    + " how this evolves in the Kafka community and will take advantage of those fixes as soon as we can. In the meantime"
-    + " it is possible to enter states where the only resolution will be to restart the JVM NiFi runs on. "
     + "The complementary NiFi processor for fetching messages is ConsumeKafka_0_11.")
 @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
 @DynamicProperty(name = "The name of a Kafka configuration property.", value = "The value of a given Kafka configuration property.",