NIFI-2269 Fixing minor issues with style check, licensing, etc.

jpercivall 2016-08-22 21:34:30 -04:00
parent 376d3c4ef4
commit 6e82ec738c
9 changed files with 27 additions and 166 deletions

View File

@@ -59,10 +59,6 @@ The following binary components are provided under the Apache Software License v
Tatu Saloranta (http://wiki.fasterxml.com/TatuSaloranta)
(ASLv2) Google Guava
The following NOTICE information applies:
Copyright 2011 Guava International Limited
(ASLv2) Dropwizard Metrics
The following NOTICE information applies:
Copyright (c) 2010-2013 Coda Hale, Yammer.com

View File

@@ -83,7 +83,6 @@ language governing permissions and limitations under the License. -->
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-framework-api</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
@@ -135,7 +134,6 @@ language governing permissions and limitations under the License. -->
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-datadog-nar</artifactId>
<version>1.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
@@ -374,147 +372,6 @@ language governing permissions and limitations under the License. -->
<type>nar</type>
</dependency>
</dependencies>
<properties>
<!--Wrapper Properties -->
<nifi.jvm.heap.mb>512</nifi.jvm.heap.mb>
<nifi.jvm.permgen.mb>128</nifi.jvm.permgen.mb>
<nifi.run.as />
<!-- nifi.properties: core properties -->
<nifi.version>${project.version}</nifi.version>
<nifi.flowcontroller.autoResumeState>true</nifi.flowcontroller.autoResumeState>
<nifi.flowcontroller.graceful.shutdown.period>10 sec</nifi.flowcontroller.graceful.shutdown.period>
<nifi.flowservice.writedelay.interval>500 ms</nifi.flowservice.writedelay.interval>
<nifi.administrative.yield.duration>30 sec</nifi.administrative.yield.duration>
<nifi.bored.yield.duration>10 millis</nifi.bored.yield.duration>
<nifi.flow.configuration.file>./conf/flow.xml.gz</nifi.flow.configuration.file>
<nifi.flow.configuration.archive.enabled>true</nifi.flow.configuration.archive.enabled>
<nifi.flow.configuration.archive.dir>./conf/archive/</nifi.flow.configuration.archive.dir>
<nifi.flow.configuration.archive.max.time>30 days</nifi.flow.configuration.archive.max.time>
<nifi.flow.configuration.archive.max.storage>500 MB</nifi.flow.configuration.archive.max.storage>
<nifi.login.identity.provider.configuration.file>./conf/login-identity-providers.xml</nifi.login.identity.provider.configuration.file>
<nifi.authorizer.configuration.file>./conf/authorizers.xml</nifi.authorizer.configuration.file>
<nifi.templates.directory>./conf/templates</nifi.templates.directory>
<nifi.database.directory>./database_repository</nifi.database.directory>
<nifi.state.management.configuration.file>./conf/state-management.xml</nifi.state.management.configuration.file>
<nifi.state.management.embedded.zookeeper.start>false</nifi.state.management.embedded.zookeeper.start>
<nifi.state.management.embedded.zookeeper.properties>./conf/zookeeper.properties</nifi.state.management.embedded.zookeeper.properties>
<nifi.state.management.provider.local>local-provider</nifi.state.management.provider.local>
<nifi.state.management.provider.cluster>zk-provider</nifi.state.management.provider.cluster>
<nifi.flowfile.repository.implementation>org.apache.nifi.controller.repository.WriteAheadFlowFileRepository</nifi.flowfile.repository.implementation>
<nifi.flowfile.repository.directory>./flowfile_repository</nifi.flowfile.repository.directory>
<nifi.flowfile.repository.partitions>256</nifi.flowfile.repository.partitions>
<nifi.flowfile.repository.checkpoint.interval>2 mins</nifi.flowfile.repository.checkpoint.interval>
<nifi.flowfile.repository.always.sync>false</nifi.flowfile.repository.always.sync>
<nifi.swap.manager.implementation>org.apache.nifi.controller.FileSystemSwapManager</nifi.swap.manager.implementation>
<nifi.queue.swap.threshold>20000</nifi.queue.swap.threshold>
<nifi.swap.in.period>5 sec</nifi.swap.in.period>
<nifi.swap.in.threads>1</nifi.swap.in.threads>
<nifi.swap.out.period>5 sec</nifi.swap.out.period>
<nifi.swap.out.threads>4</nifi.swap.out.threads>
<nifi.content.repository.implementation>org.apache.nifi.controller.repository.FileSystemRepository</nifi.content.repository.implementation>
<nifi.content.claim.max.appendable.size>10 MB</nifi.content.claim.max.appendable.size>
<nifi.content.claim.max.flow.files>100</nifi.content.claim.max.flow.files>
<nifi.content.repository.directory.default>./content_repository</nifi.content.repository.directory.default>
<nifi.content.repository.archive.max.retention.period>12 hours</nifi.content.repository.archive.max.retention.period>
<nifi.content.repository.archive.max.usage.percentage>50%</nifi.content.repository.archive.max.usage.percentage>
<nifi.content.repository.archive.enabled>true</nifi.content.repository.archive.enabled>
<nifi.content.repository.always.sync>false</nifi.content.repository.always.sync>
<nifi.content.viewer.url>/nifi-content-viewer/</nifi.content.viewer.url>
<nifi.restore.directory />
<nifi.ui.banner.text />
<nifi.ui.autorefresh.interval>30 sec</nifi.ui.autorefresh.interval>
<nifi.nar.library.directory>./lib</nifi.nar.library.directory>
<nifi.nar.working.directory>./work/nar/</nifi.nar.working.directory>
<nifi.documentation.working.directory>./work/docs/components</nifi.documentation.working.directory>
<nifi.sensitive.props.algorithm>PBEWITHMD5AND256BITAES-CBC-OPENSSL</nifi.sensitive.props.algorithm>
<nifi.sensitive.props.provider>BC</nifi.sensitive.props.provider>
<nifi.h2.url.append>;LOCK_TIMEOUT=25000;WRITE_DELAY=0;AUTO_SERVER=FALSE</nifi.h2.url.append>
<nifi.remote.input.socket.port>9990</nifi.remote.input.socket.port>
<!-- persistent provenance repository properties -->
<nifi.provenance.repository.implementation>org.apache.nifi.provenance.PersistentProvenanceRepository</nifi.provenance.repository.implementation>
<nifi.provenance.repository.directory.default>./provenance_repository</nifi.provenance.repository.directory.default>
<nifi.provenance.repository.max.storage.time>24 hours</nifi.provenance.repository.max.storage.time>
<nifi.provenance.repository.max.storage.size>1 GB</nifi.provenance.repository.max.storage.size>
<nifi.provenance.repository.rollover.time>30 secs</nifi.provenance.repository.rollover.time>
<nifi.provenance.repository.rollover.size>100 MB</nifi.provenance.repository.rollover.size>
<nifi.provenance.repository.query.threads>2</nifi.provenance.repository.query.threads>
<nifi.provenance.repository.index.threads>1</nifi.provenance.repository.index.threads>
<nifi.provenance.repository.compress.on.rollover>true</nifi.provenance.repository.compress.on.rollover>
<nifi.provenance.repository.indexed.fields>EventType, FlowFileUUID, Filename, ProcessorID, Relationship</nifi.provenance.repository.indexed.fields>
<nifi.provenance.repository.indexed.attributes />
<nifi.provenance.repository.index.shard.size>500 MB</nifi.provenance.repository.index.shard.size>
<nifi.provenance.repository.always.sync>false</nifi.provenance.repository.always.sync>
<nifi.provenance.repository.journal.count>16</nifi.provenance.repository.journal.count>
<nifi.provenance.repository.max.attribute.length>65536</nifi.provenance.repository.max.attribute.length>
<!-- volatile provenance repository properties -->
<nifi.provenance.repository.buffer.size>100000</nifi.provenance.repository.buffer.size>
<!-- Component status repository properties -->
<nifi.components.status.repository.implementation>org.apache.nifi.controller.status.history.VolatileComponentStatusRepository</nifi.components.status.repository.implementation>
<nifi.components.status.repository.buffer.size>1440</nifi.components.status.repository.buffer.size>
<nifi.components.status.snapshot.frequency>1 min</nifi.components.status.snapshot.frequency>
<!-- nifi.properties: web properties -->
<nifi.web.war.directory>./lib</nifi.web.war.directory>
<nifi.web.http.host />
<nifi.web.http.port>8080</nifi.web.http.port>
<nifi.web.https.host />
<nifi.web.https.port />
<nifi.jetty.work.dir>./work/jetty</nifi.jetty.work.dir>
<nifi.web.jetty.threads>200</nifi.web.jetty.threads>
<!-- nifi.properties: security properties -->
<nifi.security.keystore />
<nifi.security.keystoreType />
<nifi.security.keystorePasswd />
<nifi.security.keyPasswd />
<nifi.security.truststore />
<nifi.security.truststoreType />
<nifi.security.truststorePasswd />
<nifi.security.needClientAuth />
<nifi.security.user.authorizer>file-provider</nifi.security.user.authorizer>
<nifi.security.user.login.identity.provider />
<nifi.security.x509.principal.extractor />
<nifi.security.ocsp.responder.url />
<nifi.security.ocsp.responder.certificate />
<!-- nifi.properties: cluster common properties (cluster manager and nodes must have same values) -->
<nifi.cluster.protocol.heartbeat.interval>5 sec</nifi.cluster.protocol.heartbeat.interval>
<nifi.cluster.protocol.is.secure>false</nifi.cluster.protocol.is.secure>
<!-- nifi.properties: cluster node properties (only configure for cluster nodes) -->
<nifi.cluster.is.node>false</nifi.cluster.is.node>
<nifi.cluster.node.address />
<nifi.cluster.node.protocol.port />
<nifi.cluster.node.protocol.threads>10</nifi.cluster.node.protocol.threads>
<nifi.cluster.node.event.history.size>25</nifi.cluster.node.event.history.size>
<nifi.cluster.node.connection.timeout>5 sec</nifi.cluster.node.connection.timeout>
<nifi.cluster.node.read.timeout>5 sec</nifi.cluster.node.read.timeout>
<nifi.cluster.firewall.file />
<nifi.cluster.request.replication.claim.timeout>15 secs</nifi.cluster.request.replication.claim.timeout>
<!-- nifi.properties: zookeeper properties -->
<nifi.zookeeper.connect.string></nifi.zookeeper.connect.string>
<nifi.zookeeper.connect.timeout>3 secs</nifi.zookeeper.connect.timeout>
<nifi.zookeeper.session.timeout>3 secs</nifi.zookeeper.session.timeout>
<nifi.zookeeper.root.node>/nifi</nifi.zookeeper.root.node>
<!-- nifi.properties: kerberos properties -->
<nifi.kerberos.krb5.file> </nifi.kerberos.krb5.file>
<nifi.kerberos.service.principal />
<nifi.kerberos.keytab.location />
<nifi.kerberos.authentication.expiration>12 hours</nifi.kerberos.authentication.expiration>
</properties>
<profiles>
<profile>
<id>rpm</id>

View File

@@ -30,7 +30,6 @@
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-datadog-reporting-task</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
</dependencies>
</project>

View File

@@ -34,7 +34,6 @@ import org.apache.nifi.controller.status.ConnectionStatus;
import org.apache.nifi.controller.status.PortStatus;
import org.apache.nifi.controller.status.ProcessGroupStatus;
import org.apache.nifi.controller.status.ProcessorStatus;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.AbstractReportingTask;
import org.apache.nifi.reporting.ReportingContext;
@@ -43,26 +42,29 @@ import org.coursera.metrics.datadog.DynamicTagsCallback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
import java.util.Map;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
@Tags({"reporting", "datadog", "metrics"})
@CapabilityDescription("Publishes metrics from NiFi to datadog")
@CapabilityDescription("Publishes metrics from NiFi to datadog. For accurate and informative reporting, components should have unique names.")
public class DataDogReportingTask extends AbstractReportingTask {
static final AllowableValue DATADOG_AGENT = new AllowableValue("DataDog Agent", "DataDog Agent",
"Metrics will be sent via locally installed DataDog agent. " +
"DataDog agent needs to be installed manually before using this option");
static final AllowableValue DATADOG_AGENT = new AllowableValue("Datadog Agent", "Datadog Agent",
"Metrics will be sent via locally installed Datadog agent. " +
"Datadog agent needs to be installed manually before using this option");
static final AllowableValue DATADOG_HTTP = new AllowableValue("Datadog HTTP", "Datadog HTTP",
"Metrics will be sent via HTTP transport with no need of Agent installed. " +
"DataDog API key needs to be set");
"Datadog API key needs to be set");
static final PropertyDescriptor DATADOG_TRANSPORT = new PropertyDescriptor.Builder()
.name("DataDog transport")
.description("Transport through which metrics will be sent to DataDog")
.name("Datadog transport")
.description("Transport through which metrics will be sent to Datadog")
.required(true)
.allowableValues(DATADOG_AGENT, DATADOG_HTTP)
.defaultValue(DATADOG_HTTP.getValue())
@@ -70,7 +72,7 @@ public class DataDogReportingTask extends AbstractReportingTask {
static final PropertyDescriptor API_KEY = new PropertyDescriptor.Builder()
.name("API key")
.description("DataDog API key. If specified value is 'agent', local DataDog agent will be used.")
.description("Datadog API key. If specified value is 'agent', local Datadog agent will be used.")
.expressionLanguageSupported(false)
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -148,8 +150,8 @@ public class DataDogReportingTask extends AbstractReportingTask {
protected void updateMetrics(Map<String, Double> metrics, Optional<String> processorName, Map<String, String> tags) {
for (Map.Entry<String, Double> entry : metrics.entrySet()) {
logger.info(entry.getKey() + ": " + entry.getValue());
final String metricName = buildMetricName(processorName, entry.getKey());
logger.debug(metricName + ": " + entry.getValue());
//if metric is not registered yet - register it
if (!metricsMap.containsKey(metricName)) {
metricsMap.put(metricName, new AtomicDouble(entry.getValue()));
@@ -160,7 +162,7 @@ public class DataDogReportingTask extends AbstractReportingTask {
}
}
private void updateAllMetricGroups (ProcessGroupStatus processGroupStatus) {
private void updateAllMetricGroups(ProcessGroupStatus processGroupStatus) {
final List<ProcessorStatus> processorStatuses = new ArrayList<>();
populateProcessorStatuses(processGroupStatus, processorStatuses);
for (final ProcessorStatus processorStatus : processorStatuses) {
@@ -191,7 +193,7 @@ public class DataDogReportingTask extends AbstractReportingTask {
portTags.putAll(metricsService.getPortStatusTags(portStatus));
updateMetrics(metricsService.getPortStatusMetrics(portStatus), Optional.<String>absent(), portTags);
}
updateMetrics(metricsService.getJVMMetrics(virtualMachineMetrics),
Optional.<String>absent(), defaultTags);
updateMetrics(metricsService.getDataFlowMetrics(processGroupStatus), Optional.<String>absent(), defaultTags);
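For readers skimming the updateMetrics hunk above: the task keeps one AtomicDouble holder per metric name and either registers a new holder on first sight or updates the existing one, so the registered Datadog gauges always report the latest value. A minimal compilable sketch of that register-or-update pattern, assuming Guava's Optional and AtomicDouble; the class name and the buildMetricName body below are illustrative, not the exact NiFi implementation:

import com.google.common.base.Optional;
import com.google.common.util.concurrent.AtomicDouble;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class GaugeUpdateSketch {

    // one mutable holder per metric name; registered gauges read their current value from here
    private final Map<String, AtomicDouble> metricsMap = new ConcurrentHashMap<>();

    protected void updateMetrics(final Map<String, Double> metrics, final Optional<String> processorName) {
        for (final Map.Entry<String, Double> entry : metrics.entrySet()) {
            final String metricName = buildMetricName(processorName, entry.getKey());
            // if the metric is not registered yet - register it; otherwise just update the holder
            if (!metricsMap.containsKey(metricName)) {
                metricsMap.put(metricName, new AtomicDouble(entry.getValue()));
            } else {
                metricsMap.get(metricName).set(entry.getValue());
            }
        }
    }

    // illustrative stand-in only: scope the metric key by processor name when one is present
    private String buildMetricName(final Optional<String> processorName, final String key) {
        return "nifi." + processorName.or("flow") + "." + key;
    }
}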

View File

@@ -108,7 +108,7 @@ public class MetricsService {
return metrics;
}
public List<String> getAllTagsList () {
public List<String> getAllTagsList() {
List<String> tagsList = new ArrayList<>();
tagsList.add("env");
tagsList.add("dataflow_id");

View File

@@ -25,7 +25,6 @@ import com.yammer.metrics.core.VirtualMachineMetrics;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.status.ProcessGroupStatus;
import org.apache.nifi.controller.status.ProcessorStatus;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.reporting.EventAccess;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.reporting.ReportingContext;

View File

@@ -31,6 +31,11 @@
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-datadog-reporting-task</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>

View File

@@ -1217,6 +1217,12 @@ language governing permissions and limitations under the License. -->
<version>1.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-datadog-nar</artifactId>
<version>1.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-properties</artifactId>