Matt Gilman 2015-04-11 07:55:28 -04:00
commit 7aca8eaeae
99 changed files with 6480 additions and 488 deletions

View File

@@ -501,6 +501,38 @@ The following binary components are provided under the Apache Software License v
Apache License Version 2.0 http://www.apache.org/licenses/.
(c) Daniel Lemire, http://lemire.me/en/
(ASLv2) Twitter4J
The following NOTICE information applies:
Copyright 2007 Yusuke Yamamoto
Twitter4J includes software from JSON.org to parse JSON response from the Twitter API. You can see the license term at http://www.JSON.org/license.html
(ASLv2) JOAuth
The following NOTICE information applies:
JOAuth
Copyright 2010-2013 Twitter, Inc
Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
(ASLv2) Hosebird Client
The following NOTICE information applies:
Hosebird Client (hbc)
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
(ASLv2) GeoIP2 Java API
The following NOTICE information applies:
GeoIP2 Java API
This software is Copyright (c) 2013 by MaxMind, Inc.
This is free software, licensed under the Apache License, Version 2.0.
(ASLv2) Google HTTP Client Library for Java
The following NOTICE information applies:
Google HTTP Client Library for Java
This is free software, licensed under the Apache License, Version 2.0.
************************
Common Development and Distribution License 1.1
@@ -540,6 +572,14 @@ The following binary components are provided under the Common Development and Di
(CDDL 1.0) JavaServer Pages(TM) API (javax.servlet.jsp:jsp-api:jar:2.1 - http://jsp.java.net)
(CDDL 1.0) JSR 250 Common Annotations For The JavaTM Platform (javax.annotation:jsr250-api:jar:1.0 - http://jcp.org/aboutJava/communityprocess/final/jsr250/index.html)
************************
Creative Commons Attribution-ShareAlike 3.0
************************
The following binary components are provided under the Creative Commons Attribution-ShareAlike 3.0. See project link for details.
(CCAS 3.0) MaxMind DB (https://github.com/maxmind/MaxMind-DB)
************************
Eclipse Public License 1.0
************************
@@ -559,6 +599,15 @@ The following binary components are provided under the Mozilla Public License v2
(MPL 2.0) Saxon HE (net.sf.saxon:Saxon-HE:jar:9.6.0-4 - http://www.saxonica.com/)
*****************
Mozilla Public License v1.1
*****************
The following binary components are provided under the Mozilla Public License v1.1. See project link for details.
(MPL 1.1) HAPI Base (ca.uhn.hapi:hapi-base:2.2 - http://hl7api.sourceforge.net/)
(MPL 1.1) HAPI Structures (ca.uhn.hapi:hapi-structures-v*:2.2 - http://hl7api.sourceforge.net/)
*****************
Public Domain
*****************

View File

@@ -1,457 +1,484 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-assembly</artifactId>
<packaging>pom</packaging>
<description>This is the assembly Apache NiFi (incubating)</description>
<build>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<finalName>nifi-${project.version}</finalName>
<attach>false</attach>
</configuration>
<executions>
<execution>
<id>make shared resource</id>
<goals>
<goal>single</goal>
</goals>
<phase>package</phase>
<configuration>
<descriptors>
<descriptor>src/main/assembly/dependencies.xml</descriptor>
</descriptors>
<tarLongFileMode>posix</tarLongFileMode>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-runtime</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-bootstrap</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-resources</artifactId>
<classifier>resources</classifier>
<scope>runtime</scope>
<type>zip</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-docs</artifactId>
<classifier>resources</classifier>
<scope>runtime</scope>
<type>zip</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-framework-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-provenance-repository-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-standard-services-api-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-ssl-context-service-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-distributed-cache-services-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-standard-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-jetty-bundle</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-update-attribute-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hadoop-libraries-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hadoop-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-kafka-nar</artifactId>
<type>nar</type>
</dependency>
<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
license agreements. See the NOTICE file distributed with this work for additional
information regarding copyright ownership. The ASF licenses this file to
You under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of
the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-assembly</artifactId>
<packaging>pom</packaging>
<description>This is the assembly Apache NiFi (incubating)</description>
<build>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<finalName>nifi-${project.version}</finalName>
<attach>false</attach>
</configuration>
<executions>
<execution>
<id>make shared resource</id>
<goals>
<goal>single</goal>
</goals>
<phase>package</phase>
<configuration>
<descriptors>
<descriptor>src/main/assembly/dependencies.xml</descriptor>
</descriptors>
<tarLongFileMode>posix</tarLongFileMode>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-http-context-map-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-kite-nar</artifactId>
<type>nar</type>
</dependency>
</dependencies>
<properties>
<!--Wrapper Properties-->
<nifi.wrapper.jvm.heap.initial.mb>256</nifi.wrapper.jvm.heap.initial.mb>
<nifi.wrapper.jvm.heap.max.mb>512</nifi.wrapper.jvm.heap.max.mb>
<nifi.initial.permgen.size.mb>128</nifi.initial.permgen.size.mb>
<nifi.max.permgen.size.mb>128</nifi.max.permgen.size.mb>
<nifi.wrapper.logfile.maxsize>10m</nifi.wrapper.logfile.maxsize>
<nifi.wrapper.logfile.maxfiles>10</nifi.wrapper.logfile.maxfiles>
<!-- nifi.properties: core properties -->
<nifi.version>${project.version}</nifi.version>
<nifi.flowcontroller.autoResumeState>true</nifi.flowcontroller.autoResumeState>
<nifi.flowcontroller.graceful.shutdown.period>10 sec</nifi.flowcontroller.graceful.shutdown.period>
<nifi.flowservice.writedelay.interval>500 ms</nifi.flowservice.writedelay.interval>
<nifi.administrative.yield.duration>30 sec</nifi.administrative.yield.duration>
<nifi.bored.yield.duration>10 millis</nifi.bored.yield.duration>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>log4j-over-slf4j</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-runtime</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-bootstrap</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-resources</artifactId>
<classifier>resources</classifier>
<scope>runtime</scope>
<type>zip</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-docs</artifactId>
<classifier>resources</classifier>
<scope>runtime</scope>
<type>zip</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-framework-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-provenance-repository-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-standard-services-api-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-ssl-context-service-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-distributed-cache-services-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-standard-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-jetty-bundle</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-update-attribute-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hadoop-libraries-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hadoop-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-kafka-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-http-context-map-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-kite-nar</artifactId>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-social-media-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hl7-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-language-translation-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-geo-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
</dependencies>
<properties>
<!--Wrapper Properties -->
<nifi.wrapper.jvm.heap.initial.mb>256</nifi.wrapper.jvm.heap.initial.mb>
<nifi.wrapper.jvm.heap.max.mb>512</nifi.wrapper.jvm.heap.max.mb>
<nifi.initial.permgen.size.mb>128</nifi.initial.permgen.size.mb>
<nifi.max.permgen.size.mb>128</nifi.max.permgen.size.mb>
<nifi.wrapper.logfile.maxsize>10m</nifi.wrapper.logfile.maxsize>
<nifi.wrapper.logfile.maxfiles>10</nifi.wrapper.logfile.maxfiles>
<nifi.flowfile.repository.implementation>org.apache.nifi.controller.repository.WriteAheadFlowFileRepository</nifi.flowfile.repository.implementation>
<nifi.flowfile.repository.directory>./flowfile_repository</nifi.flowfile.repository.directory>
<nifi.flowfile.repository.partitions>256</nifi.flowfile.repository.partitions>
<nifi.flowfile.repository.checkpoint.interval>2 mins</nifi.flowfile.repository.checkpoint.interval>
<nifi.flowfile.repository.always.sync>false</nifi.flowfile.repository.always.sync>
<nifi.swap.manager.implementation>org.apache.nifi.controller.FileSystemSwapManager</nifi.swap.manager.implementation>
<nifi.queue.swap.threshold>20000</nifi.queue.swap.threshold>
<nifi.swap.in.period>5 sec</nifi.swap.in.period>
<nifi.swap.in.threads>1</nifi.swap.in.threads>
<nifi.swap.out.period>5 sec</nifi.swap.out.period>
<nifi.swap.out.threads>4</nifi.swap.out.threads>
<nifi.content.repository.implementation>org.apache.nifi.controller.repository.FileSystemRepository</nifi.content.repository.implementation>
<nifi.content.claim.max.appendable.size>10 MB</nifi.content.claim.max.appendable.size>
<nifi.content.claim.max.flow.files>100</nifi.content.claim.max.flow.files>
<nifi.content.repository.directory.default>./content_repository</nifi.content.repository.directory.default>
<nifi.content.repository.archive.max.retention.period />
<nifi.content.repository.archive.max.usage.percentage />
<nifi.content.repository.archive.enabled>false</nifi.content.repository.archive.enabled>
<nifi.content.repository.always.sync>false</nifi.content.repository.always.sync>
<nifi.content.viewer.url />
<nifi.restore.directory />
<nifi.ui.banner.text />
<nifi.ui.autorefresh.interval>30 sec</nifi.ui.autorefresh.interval>
<nifi.nar.library.directory>./lib</nifi.nar.library.directory>
<nifi.nar.working.directory>./work/nar/</nifi.nar.working.directory>
<nifi.documentation.working.directory>./work/docs/components</nifi.documentation.working.directory>
<nifi.sensitive.props.algorithm>PBEWITHMD5AND256BITAES-CBC-OPENSSL</nifi.sensitive.props.algorithm>
<nifi.sensitive.props.provider>BC</nifi.sensitive.props.provider>
<nifi.h2.url.append>;LOCK_TIMEOUT=25000;WRITE_DELAY=0;AUTO_SERVER=FALSE</nifi.h2.url.append>
<!-- nifi.properties: core properties -->
<nifi.version>${project.version}</nifi.version>
<nifi.flowcontroller.autoResumeState>true</nifi.flowcontroller.autoResumeState>
<nifi.flowcontroller.graceful.shutdown.period>10 sec</nifi.flowcontroller.graceful.shutdown.period>
<nifi.flowservice.writedelay.interval>500 ms</nifi.flowservice.writedelay.interval>
<nifi.administrative.yield.duration>30 sec</nifi.administrative.yield.duration>
<nifi.bored.yield.duration>10 millis</nifi.bored.yield.duration>
<nifi.remote.input.socket.port>9990</nifi.remote.input.socket.port>
<!-- persistent provenance repository properties -->
<nifi.provenance.repository.implementation>org.apache.nifi.provenance.PersistentProvenanceRepository</nifi.provenance.repository.implementation>
<nifi.provenance.repository.directory.default>./provenance_repository</nifi.provenance.repository.directory.default>
<nifi.provenance.repository.max.storage.time>24 hours</nifi.provenance.repository.max.storage.time>
<nifi.provenance.repository.max.storage.size>1 GB</nifi.provenance.repository.max.storage.size>
<nifi.provenance.repository.rollover.time>5 mins</nifi.provenance.repository.rollover.time>
<nifi.provenance.repository.rollover.size>100 MB</nifi.provenance.repository.rollover.size>
<nifi.provenance.repository.query.threads>2</nifi.provenance.repository.query.threads>
<nifi.provenance.repository.compress.on.rollover>true</nifi.provenance.repository.compress.on.rollover>
<nifi.provenance.repository.indexed.fields>EventType, FlowFileUUID, Filename, ProcessorID</nifi.provenance.repository.indexed.fields>
<nifi.provenance.repository.indexed.attributes />
<nifi.provenance.repository.index.shard.size>500 MB</nifi.provenance.repository.index.shard.size>
<nifi.provenance.repository.always.sync>false</nifi.provenance.repository.always.sync>
<nifi.provenance.repository.journal.count>16</nifi.provenance.repository.journal.count>
<!-- volatile provenance repository properties -->
<nifi.provenance.repository.buffer.size>100000</nifi.provenance.repository.buffer.size>
<!-- Component status repository properties -->
<nifi.components.status.repository.implementation>org.apache.nifi.controller.status.history.VolatileComponentStatusRepository</nifi.components.status.repository.implementation>
<nifi.components.status.repository.buffer.size>288</nifi.components.status.repository.buffer.size>
<nifi.components.status.snapshot.frequency>5 mins</nifi.components.status.snapshot.frequency>
<!-- nifi.properties: web properties -->
<nifi.web.war.directory>./lib</nifi.web.war.directory>
<nifi.web.http.host />
<nifi.web.http.port>8080</nifi.web.http.port>
<nifi.web.https.host />
<nifi.web.https.port />
<nifi.jetty.work.dir>./work/jetty</nifi.jetty.work.dir>
<nifi.web.jetty.threads>200</nifi.web.jetty.threads>
<!-- nifi.properties: security properties -->
<nifi.security.keystore />
<nifi.security.keystoreType />
<nifi.security.keystorePasswd />
<nifi.security.keyPasswd />
<nifi.security.truststore />
<nifi.security.truststoreType />
<nifi.security.truststorePasswd />
<nifi.security.needClientAuth />
<nifi.security.authorizedUsers.file>./conf/authorized-users.xml</nifi.security.authorizedUsers.file>
<nifi.security.user.credential.cache.duration>24 hours</nifi.security.user.credential.cache.duration>
<nifi.security.user.authority.provider>file-provider</nifi.security.user.authority.provider>
<nifi.security.x509.principal.extractor />
<nifi.security.support.new.account.requests />
<nifi.security.ocsp.responder.url />
<nifi.security.ocsp.responder.certificate />
<!-- nifi.properties: cluster common properties (cluster manager and nodes must have same values) -->
<nifi.cluster.protocol.heartbeat.interval>5 sec</nifi.cluster.protocol.heartbeat.interval>
<nifi.cluster.protocol.is.secure>false</nifi.cluster.protocol.is.secure>
<nifi.cluster.protocol.socket.timeout>30 sec</nifi.cluster.protocol.socket.timeout>
<nifi.cluster.protocol.connection.handshake.timeout>45 sec</nifi.cluster.protocol.connection.handshake.timeout>
<nifi.cluster.protocol.use.multicast>false</nifi.cluster.protocol.use.multicast>
<nifi.cluster.protocol.multicast.address />
<nifi.cluster.protocol.multicast.port />
<nifi.cluster.protocol.multicast.service.broadcast.delay>500 ms</nifi.cluster.protocol.multicast.service.broadcast.delay>
<nifi.cluster.protocol.multicast.service.locator.attempts>3</nifi.cluster.protocol.multicast.service.locator.attempts>
<nifi.cluster.protocol.multicast.service.locator.attempts.delay>1 sec</nifi.cluster.protocol.multicast.service.locator.attempts.delay>
<nifi.flow.configuration.file>./conf/flow.xml.gz</nifi.flow.configuration.file>
<nifi.flow.configuration.archive.dir>./conf/archive/</nifi.flow.configuration.archive.dir>
<nifi.authority.provider.configuration.file>./conf/authority-providers.xml</nifi.authority.provider.configuration.file>
<nifi.templates.directory>./conf/templates</nifi.templates.directory>
<nifi.database.directory>./database_repository</nifi.database.directory>
<!-- nifi.properties: cluster node properties (only configure for cluster nodes) -->
<nifi.cluster.is.node>false</nifi.cluster.is.node>
<nifi.cluster.node.address />
<nifi.cluster.node.protocol.port />
<nifi.cluster.node.protocol.threads>2</nifi.cluster.node.protocol.threads>
<nifi.cluster.node.unicast.manager.address />
<nifi.cluster.node.unicast.manager.protocol.port />
<!-- nifi.properties: cluster manager properties (only configure for cluster manager) -->
<nifi.cluster.is.manager>false</nifi.cluster.is.manager>
<nifi.cluster.manager.address />
<nifi.cluster.manager.protocol.port />
<nifi.cluster.manager.node.firewall.file />
<nifi.cluster.manager.node.event.history.size>10</nifi.cluster.manager.node.event.history.size>
<nifi.cluster.manager.node.api.connection.timeout>30 sec</nifi.cluster.manager.node.api.connection.timeout>
<nifi.cluster.manager.node.api.read.timeout>30 sec</nifi.cluster.manager.node.api.read.timeout>
<nifi.cluster.manager.node.api.request.threads>10</nifi.cluster.manager.node.api.request.threads>
<nifi.cluster.manager.flow.retrieval.delay>5 sec</nifi.cluster.manager.flow.retrieval.delay>
<nifi.cluster.manager.protocol.threads>10</nifi.cluster.manager.protocol.threads>
<nifi.cluster.manager.safemode.duration>0 sec</nifi.cluster.manager.safemode.duration>
</properties>
<profiles>
<profile>
<id>rpm</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>unpack-shared-resources</id>
<goals>
<goal>unpack-dependencies</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<outputDirectory>${project.build.directory}/generated-resources</outputDirectory>
<includeArtifactIds>nifi-resources</includeArtifactIds>
<includeGroupIds>org.apache.nifi</includeGroupIds>
<excludeTransitive>false</excludeTransitive>
</configuration>
</execution>
<execution>
<id>unpack-docs</id>
<goals>
<goal>unpack-dependencies</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<outputDirectory>${project.build.directory}/generated-docs</outputDirectory>
<includeArtifactIds>nifi-docs</includeArtifactIds>
<includeGroupIds>org.apache.nifi</includeGroupIds>
<excludeTransitive>false</excludeTransitive>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<configuration>
<summary>Apache NiFi (incubating)</summary>
<description>Apache Nifi (incubating) is dataflow system based on the Flow-Based Programming concepts.</description>
<license>Apache License, Version 2.0 and others (see included LICENSE file)</license>
<url>http://nifi.incubator.apache.org</url>
<group>Utilities</group>
<prefix>/opt/nifi</prefix>
<defineStatements>
<defineStatement>_use_internal_dependency_generator 0</defineStatement>
</defineStatements>
<defaultDirmode>750</defaultDirmode>
<defaultFilemode>640</defaultFilemode>
<defaultUsername>root</defaultUsername>
<defaultGroupname>root</defaultGroupname>
</configuration>
<executions>
<execution>
<id>build-bin-rpm</id>
<goals>
<goal>attached-rpm</goal>
</goals>
<configuration>
<classifier>bin</classifier>
<provides>
<provide>nifi</provide>
</provides>
<mappings>
<mapping>
<directory>/opt/nifi/nifi-${project.version}</directory>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}</directory>
<sources>
<source>
<location>./LICENSE</location>
</source>
<source>
<location>./NOTICE</location>
</source>
<source>
<location>../DISCLAIMER</location>
</source>
<source>
<location>./README.md</location>
<destination>README</destination>
</source>
</sources>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/bin</directory>
<filemode>750</filemode>
<sources>
<source>
<location>${project.build.directory}/generated-resources/bin/nifi.sh</location>
<destination>nifi.sh</destination>
<filter>true</filter>
</source>
</sources>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/conf</directory>
<configuration>true</configuration>
<sources>
<source>
<location>${project.build.directory}/generated-resources/conf</location>
<filter>true</filter>
</source>
</sources>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/lib</directory>
<dependency>
<excludes>
<exclude>org.apache.nifi:nifi-bootstrap</exclude>
<exclude>org.apache.nifi:nifi-resources</exclude>
<exclude>org.apache.nifi:nifi-docs</exclude>
</excludes>
</dependency>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/lib/bootstrap</directory>
<dependency>
<includes>
<include>org.apache.nifi:nifi-bootstrap</include>
</includes>
</dependency>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/docs</directory>
<sources>
<source>
<location>${project.build.directory}/generated-docs</location>
</source>
</sources>
</mapping>
</mappings>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<nifi.flowfile.repository.implementation>org.apache.nifi.controller.repository.WriteAheadFlowFileRepository</nifi.flowfile.repository.implementation>
<nifi.flowfile.repository.directory>./flowfile_repository</nifi.flowfile.repository.directory>
<nifi.flowfile.repository.partitions>256</nifi.flowfile.repository.partitions>
<nifi.flowfile.repository.checkpoint.interval>2 mins</nifi.flowfile.repository.checkpoint.interval>
<nifi.flowfile.repository.always.sync>false</nifi.flowfile.repository.always.sync>
<nifi.swap.manager.implementation>org.apache.nifi.controller.FileSystemSwapManager</nifi.swap.manager.implementation>
<nifi.queue.swap.threshold>20000</nifi.queue.swap.threshold>
<nifi.swap.in.period>5 sec</nifi.swap.in.period>
<nifi.swap.in.threads>1</nifi.swap.in.threads>
<nifi.swap.out.period>5 sec</nifi.swap.out.period>
<nifi.swap.out.threads>4</nifi.swap.out.threads>
<nifi.content.repository.implementation>org.apache.nifi.controller.repository.FileSystemRepository</nifi.content.repository.implementation>
<nifi.content.claim.max.appendable.size>10 MB</nifi.content.claim.max.appendable.size>
<nifi.content.claim.max.flow.files>100</nifi.content.claim.max.flow.files>
<nifi.content.repository.directory.default>./content_repository</nifi.content.repository.directory.default>
<nifi.content.repository.archive.max.retention.period />
<nifi.content.repository.archive.max.usage.percentage />
<nifi.content.repository.archive.enabled>false</nifi.content.repository.archive.enabled>
<nifi.content.repository.always.sync>false</nifi.content.repository.always.sync>
<nifi.content.viewer.url />
<nifi.restore.directory />
<nifi.ui.banner.text />
<nifi.ui.autorefresh.interval>30 sec</nifi.ui.autorefresh.interval>
<nifi.nar.library.directory>./lib</nifi.nar.library.directory>
<nifi.nar.working.directory>./work/nar/</nifi.nar.working.directory>
<nifi.documentation.working.directory>./work/docs/components</nifi.documentation.working.directory>
<nifi.sensitive.props.algorithm>PBEWITHMD5AND256BITAES-CBC-OPENSSL</nifi.sensitive.props.algorithm>
<nifi.sensitive.props.provider>BC</nifi.sensitive.props.provider>
<nifi.h2.url.append>;LOCK_TIMEOUT=25000;WRITE_DELAY=0;AUTO_SERVER=FALSE</nifi.h2.url.append>
<nifi.remote.input.socket.port>9990</nifi.remote.input.socket.port>
<!-- persistent provenance repository properties -->
<nifi.provenance.repository.implementation>org.apache.nifi.provenance.PersistentProvenanceRepository</nifi.provenance.repository.implementation>
<nifi.provenance.repository.directory.default>./provenance_repository</nifi.provenance.repository.directory.default>
<nifi.provenance.repository.max.storage.time>24 hours</nifi.provenance.repository.max.storage.time>
<nifi.provenance.repository.max.storage.size>1 GB</nifi.provenance.repository.max.storage.size>
<nifi.provenance.repository.rollover.time>5 mins</nifi.provenance.repository.rollover.time>
<nifi.provenance.repository.rollover.size>100 MB</nifi.provenance.repository.rollover.size>
<nifi.provenance.repository.query.threads>2</nifi.provenance.repository.query.threads>
<nifi.provenance.repository.compress.on.rollover>true</nifi.provenance.repository.compress.on.rollover>
<nifi.provenance.repository.indexed.fields>EventType, FlowFileUUID,
Filename, ProcessorID</nifi.provenance.repository.indexed.fields>
<nifi.provenance.repository.indexed.attributes />
<nifi.provenance.repository.index.shard.size>500 MB</nifi.provenance.repository.index.shard.size>
<nifi.provenance.repository.always.sync>false</nifi.provenance.repository.always.sync>
<nifi.provenance.repository.journal.count>16</nifi.provenance.repository.journal.count>
<!-- volatile provenance repository properties -->
<nifi.provenance.repository.buffer.size>100000</nifi.provenance.repository.buffer.size>
<!-- Component status repository properties -->
<nifi.components.status.repository.implementation>org.apache.nifi.controller.status.history.VolatileComponentStatusRepository</nifi.components.status.repository.implementation>
<nifi.components.status.repository.buffer.size>288</nifi.components.status.repository.buffer.size>
<nifi.components.status.snapshot.frequency>5 mins</nifi.components.status.snapshot.frequency>
<!-- nifi.properties: web properties -->
<nifi.web.war.directory>./lib</nifi.web.war.directory>
<nifi.web.http.host />
<nifi.web.http.port>8080</nifi.web.http.port>
<nifi.web.https.host />
<nifi.web.https.port />
<nifi.jetty.work.dir>./work/jetty</nifi.jetty.work.dir>
<nifi.web.jetty.threads>200</nifi.web.jetty.threads>
<!-- nifi.properties: security properties -->
<nifi.security.keystore />
<nifi.security.keystoreType />
<nifi.security.keystorePasswd />
<nifi.security.keyPasswd />
<nifi.security.truststore />
<nifi.security.truststoreType />
<nifi.security.truststorePasswd />
<nifi.security.needClientAuth />
<nifi.security.authorizedUsers.file>./conf/authorized-users.xml</nifi.security.authorizedUsers.file>
<nifi.security.user.credential.cache.duration>24 hours</nifi.security.user.credential.cache.duration>
<nifi.security.user.authority.provider>file-provider</nifi.security.user.authority.provider>
<nifi.security.x509.principal.extractor />
<nifi.security.support.new.account.requests />
<nifi.security.ocsp.responder.url />
<nifi.security.ocsp.responder.certificate />
<!-- nifi.properties: cluster common properties (cluster manager and nodes
must have same values) -->
<nifi.cluster.protocol.heartbeat.interval>5 sec</nifi.cluster.protocol.heartbeat.interval>
<nifi.cluster.protocol.is.secure>false</nifi.cluster.protocol.is.secure>
<nifi.cluster.protocol.socket.timeout>30 sec</nifi.cluster.protocol.socket.timeout>
<nifi.cluster.protocol.connection.handshake.timeout>45 sec</nifi.cluster.protocol.connection.handshake.timeout>
<nifi.cluster.protocol.use.multicast>false</nifi.cluster.protocol.use.multicast>
<nifi.cluster.protocol.multicast.address />
<nifi.cluster.protocol.multicast.port />
<nifi.cluster.protocol.multicast.service.broadcast.delay>500 ms</nifi.cluster.protocol.multicast.service.broadcast.delay>
<nifi.cluster.protocol.multicast.service.locator.attempts>3</nifi.cluster.protocol.multicast.service.locator.attempts>
<nifi.cluster.protocol.multicast.service.locator.attempts.delay>1 sec</nifi.cluster.protocol.multicast.service.locator.attempts.delay>
<!-- nifi.properties: cluster node properties (only configure for cluster
nodes) -->
<nifi.cluster.is.node>false</nifi.cluster.is.node>
<nifi.cluster.node.address />
<nifi.cluster.node.protocol.port />
<nifi.cluster.node.protocol.threads>2</nifi.cluster.node.protocol.threads>
<nifi.cluster.node.unicast.manager.address />
<nifi.cluster.node.unicast.manager.protocol.port />
<!-- nifi.properties: cluster manager properties (only configure for cluster
manager) -->
<nifi.cluster.is.manager>false</nifi.cluster.is.manager>
<nifi.cluster.manager.address />
<nifi.cluster.manager.protocol.port />
<nifi.cluster.manager.node.firewall.file />
<nifi.cluster.manager.node.event.history.size>10</nifi.cluster.manager.node.event.history.size>
<nifi.cluster.manager.node.api.connection.timeout>30 sec</nifi.cluster.manager.node.api.connection.timeout>
<nifi.cluster.manager.node.api.read.timeout>30 sec</nifi.cluster.manager.node.api.read.timeout>
<nifi.cluster.manager.node.api.request.threads>10</nifi.cluster.manager.node.api.request.threads>
<nifi.cluster.manager.flow.retrieval.delay>5 sec</nifi.cluster.manager.flow.retrieval.delay>
<nifi.cluster.manager.protocol.threads>10</nifi.cluster.manager.protocol.threads>
<nifi.cluster.manager.safemode.duration>0 sec</nifi.cluster.manager.safemode.duration>
</properties>
<profiles>
<profile>
<id>rpm</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>unpack-shared-resources</id>
<goals>
<goal>unpack-dependencies</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<outputDirectory>${project.build.directory}/generated-resources</outputDirectory>
<includeArtifactIds>nifi-resources</includeArtifactIds>
<includeGroupIds>org.apache.nifi</includeGroupIds>
<excludeTransitive>false</excludeTransitive>
</configuration>
</execution>
<execution>
<id>unpack-docs</id>
<goals>
<goal>unpack-dependencies</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<outputDirectory>${project.build.directory}/generated-docs</outputDirectory>
<includeArtifactIds>nifi-docs</includeArtifactIds>
<includeGroupIds>org.apache.nifi</includeGroupIds>
<excludeTransitive>false</excludeTransitive>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<configuration>
<summary>Apache NiFi (incubating)</summary>
<description>Apache Nifi (incubating) is dataflow system based on
the Flow-Based Programming concepts.</description>
<license>Apache License, Version 2.0 and others (see included
LICENSE file)</license>
<url>http://nifi.incubator.apache.org</url>
<group>Utilities</group>
<prefix>/opt/nifi</prefix>
<defineStatements>
<defineStatement>_use_internal_dependency_generator 0</defineStatement>
</defineStatements>
<defaultDirmode>750</defaultDirmode>
<defaultFilemode>640</defaultFilemode>
<defaultUsername>root</defaultUsername>
<defaultGroupname>root</defaultGroupname>
</configuration>
<executions>
<execution>
<id>build-bin-rpm</id>
<goals>
<goal>attached-rpm</goal>
</goals>
<configuration>
<classifier>bin</classifier>
<provides>
<provide>nifi</provide>
</provides>
<mappings>
<mapping>
<directory>/opt/nifi/nifi-${project.version}</directory>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}</directory>
<sources>
<source>
<location>./LICENSE</location>
</source>
<source>
<location>./NOTICE</location>
</source>
<source>
<location>../DISCLAIMER</location>
</source>
<source>
<location>./README.md</location>
<destination>README</destination>
</source>
</sources>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/bin</directory>
<filemode>750</filemode>
<sources>
<source>
<location>${project.build.directory}/generated-resources/bin/nifi.sh</location>
<destination>nifi.sh</destination>
<filter>true</filter>
</source>
</sources>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/conf</directory>
<configuration>true</configuration>
<sources>
<source>
<location>${project.build.directory}/generated-resources/conf</location>
<filter>true</filter>
</source>
</sources>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/lib</directory>
<dependency>
<excludes>
<exclude>org.apache.nifi:nifi-bootstrap</exclude>
<exclude>org.apache.nifi:nifi-resources</exclude>
<exclude>org.apache.nifi:nifi-docs</exclude>
</excludes>
</dependency>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/lib/bootstrap</directory>
<dependency>
<includes>
<include>org.apache.nifi:nifi-bootstrap</include>
</includes>
</dependency>
</mapping>
<mapping>
<directory>/opt/nifi/nifi-${project.version}/docs</directory>
<sources>
<source>
<location>${project.build.directory}/generated-docs</location>
</source>
</sources>
</mapping>
</mappings>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@@ -0,0 +1,3 @@
/target/
/target/
/target/

View File

@@ -0,0 +1,125 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-commons</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-hl7-query-language</artifactId>
<packaging>jar</packaging>
<name>NiFi Health Level 7 (HL7) Query Language</name>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr3-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>antlr</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>src/test/resources/hypoglycemia</exclude>
<exclude>src/test/resources/hyperglycemia</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr-runtime</artifactId>
<version>3.5.2</version>
</dependency>
<!-- HAPI to parse v2 messages -->
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-base</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v21</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v22</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v23</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v231</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v24</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v25</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v251</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v26</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,173 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
lexer grammar HL7QueryLexer;
@header {
package org.apache.nifi.hl7.query.antlr;
import org.apache.nifi.hl7.query.exception.HL7QueryParsingException;
}
@rulecatch {
catch(final Exception e) {
throw new HL7QueryParsingException(e);
}
}
@members {
public void displayRecognitionError(String[] tokenNames, RecognitionException e) {
final StringBuilder sb = new StringBuilder();
if ( e.token == null ) {
sb.append("Unrecognized token ");
} else {
sb.append("Unexpected token '").append(e.token.getText()).append("' ");
}
sb.append("at line ").append(e.line);
if ( e.approximateLineInfo ) {
sb.append(" (approximately)");
}
sb.append(", column ").append(e.charPositionInLine);
sb.append(". Query: ").append(e.input.toString());
throw new HL7QueryParsingException(sb.toString());
}
public void recover(RecognitionException e) {
final StringBuilder sb = new StringBuilder();
if ( e.token == null ) {
sb.append("Unrecognized token ");
} else {
sb.append("Unexpected token '").append(e.token.getText()).append("' ");
}
sb.append("at line ").append(e.line);
if ( e.approximateLineInfo ) {
sb.append(" (approximately)");
}
sb.append(", column ").append(e.charPositionInLine);
sb.append(". Query: ").append(e.input.toString());
throw new HL7QueryParsingException(sb.toString());
}
}
// PUNCTUATION & SPECIAL CHARACTERS
WHITESPACE : (' '|'\t'|'\n'|'\r')+ { $channel = HIDDEN; };
COMMENT : '#' ( ~('\n') )* '\n' { $channel = HIDDEN; };
LPAREN : '(';
RPAREN : ')';
LBRACE : '{';
RBRACE : '}';
COLON : ':';
COMMA : ',';
DOT : '.';
SEMICOLON : ';';
// OPERATORS
EQUALS : '=';
NOT_EQUALS : '!=';
GT : '>';
GE : '>=';
LT : '<';
LE : '<=';
REGEX : 'MATCHES REGEX';
LIKE : 'LIKE';
IS_NULL : 'IS NULL';
NOT_NULL : 'NOT NULL';
// KEYWORDS
AND : 'AND';
OR : 'OR';
NOT : 'NOT';
TRUE : 'true';
FALSE : 'false';
SELECT : 'select' | 'SELECT';
DECLARE : 'declare' | 'DECLARE';
OPTIONAL : 'optional' | 'OPTIONAL';
REQUIRED : 'required' | 'REQUIRED';
AS : 'as' | 'AS';
WHERE : 'where' | 'WHERE';
MESSAGE : 'MESSAGE' | 'message';
SEGMENT : 'SEGMENT' | 'segment';
SEGMENT_NAME : LETTER ALPHA_NUMERIC ALPHA_NUMERIC;
NUMBER : ('0'..'9')+;
fragment LETTER : 'A'..'Z';
fragment ALPHA_NUMERIC : 'A'..'Z' | '0'..'9';
// STRINGS
STRING_LITERAL
@init{StringBuilder lBuf = new StringBuilder();}
:
(
'"'
(
escaped=ESC {lBuf.append(getText());} |
normal = ~( '"' | '\\' | '\n' | '\r' | '\t' ) { lBuf.appendCodePoint(normal);}
)*
'"'
)
{
setText(lBuf.toString());
}
|
(
'\''
(
escaped=ESC {lBuf.append(getText());} |
normal = ~( '\'' | '\\' | '\n' | '\r' | '\t' ) { lBuf.appendCodePoint(normal);}
)*
'\''
)
{
setText(lBuf.toString());
}
;
fragment
ESC
: '\\'
(
'"' { setText("\""); }
| '\'' { setText("\'"); }
| 'r' { setText("\r"); }
| 'n' { setText("\n"); }
| 't' { setText("\t"); }
| '\\' { setText("\\\\"); }
| nextChar = ~('"' | '\'' | 'r' | 'n' | 't' | '\\')
{
StringBuilder lBuf = new StringBuilder(); lBuf.append("\\\\").appendCodePoint(nextChar); setText(lBuf.toString());
}
)
;
IDENTIFIER : (
~('$' | '{' | '}' | '(' | ')' | '[' | ']' | ',' | ':' | ';' | '/' | '*' | '\'' | ' ' | '\t' | '\r' | '\n' | '0'..'9' | '.')
~('$' | '{' | '}' | '(' | ')' | '[' | ']' | ',' | ':' | ';' | '/' | '*' | '\'' | ' ' | '\t' | '\r' | '\n' | '.')*
);

View File

@@ -0,0 +1,108 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
parser grammar HL7QueryParser;
options {
output=AST;
tokenVocab=HL7QueryLexer;
}
tokens {
QUERY;
DECLARATION;
}
@header {
package org.apache.nifi.hl7.query.antlr;
import org.apache.nifi.hl7.query.exception.HL7QueryParsingException;
}
@members {
public void displayRecognitionError(String[] tokenNames, RecognitionException e) {
final StringBuilder sb = new StringBuilder();
if ( e.token == null ) {
sb.append("Unrecognized token ");
} else {
sb.append("Unexpected token '").append(e.token.getText()).append("' ");
}
sb.append("at line ").append(e.line);
if ( e.approximateLineInfo ) {
sb.append(" (approximately)");
}
sb.append(", column ").append(e.charPositionInLine);
sb.append(". Query: ").append(e.input.toString());
throw new HL7QueryParsingException(sb.toString());
}
public void recover(final RecognitionException e) {
final StringBuilder sb = new StringBuilder();
if ( e.token == null ) {
sb.append("Unrecognized token ");
} else {
sb.append("Unexpected token '").append(e.token.getText()).append("' ");
}
sb.append("at line ").append(e.line);
if ( e.approximateLineInfo ) {
sb.append(" (approximately)");
}
sb.append(", column ").append(e.charPositionInLine);
sb.append(". Query: ").append(e.input.toString());
throw new HL7QueryParsingException(sb.toString());
}
}
declareClause : DECLARE^ declaration (COMMA! declaration)*;
requiredOrOptional : REQUIRED | OPTIONAL;
declaration : IDENTIFIER AS requiredOrOptional SEGMENT_NAME ->
^(DECLARATION IDENTIFIER requiredOrOptional SEGMENT_NAME);
selectClause : SELECT^ selectableClause;
selectableClause : selectable (COMMA! selectable)*;
selectable : (MESSAGE | ref | field)^ (AS! IDENTIFIER^)?;
whereClause : WHERE^ conditions;
conditions : condition ((AND^ | OR^) condition)*;
condition : NOT^ condition | LPAREN! conditions RPAREN! | evaluation;
evaluation : expression
(
unaryOperator^
| (binaryOperator^ expression)
);
expression : (LPAREN! expr RPAREN!) | expr;
expr : ref | field | STRING_LITERAL | NUMBER;
unaryOperator : IS_NULL | NOT_NULL;
binaryOperator : EQUALS | NOT_EQUALS | LT | GT | LE | GE;
ref : (SEGMENT_NAME | IDENTIFIER);
field : ref DOT^ NUMBER
(DOT^ NUMBER (DOT^ NUMBER (DOT^ NUMBER)?)?)?;
query : declareClause? selectClause whereClause? EOF ->
^(QUERY declareClause? selectClause whereClause?);
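For orientation, one query this grammar accepts might look like the example below. It is invented for illustration and not taken from this commit: the alias name, segment, and field positions are assumptions (PID-5/7/8 follow common HL7 v2 usage for name, date of birth, and sex).
DECLARE patient AS REQUIRED PID
SELECT patient.5 AS name, patient.7 AS dob
WHERE patient.8 = 'F'
The declare clause binds the alias, the select clause picks fields (optionally renamed with AS), and the where clause filters on field comparisons, matching the declareClause, selectClause, and whereClause rules above.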

View File

@@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.hapi;
import java.util.Collections;
import java.util.List;
import org.apache.nifi.hl7.model.HL7Component;
import org.apache.nifi.hl7.model.HL7Field;
public class EmptyField implements HL7Field {
@Override
public String getValue() {
return null;
}
@Override
public List<HL7Component> getComponents() {
return Collections.emptyList();
}
}

View File

@@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.hapi;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.nifi.hl7.model.HL7Component;
import org.apache.nifi.hl7.model.HL7Field;
import ca.uhn.hl7v2.model.Composite;
import ca.uhn.hl7v2.model.ExtraComponents;
import ca.uhn.hl7v2.model.Primitive;
import ca.uhn.hl7v2.model.Type;
import ca.uhn.hl7v2.model.Varies;
import ca.uhn.hl7v2.parser.EncodingCharacters;
import ca.uhn.hl7v2.parser.PipeParser;
public class HapiField implements HL7Field, HL7Component {
private final String value;
private final List<HL7Component> components;
public HapiField(final Type type) {
this.value = PipeParser.encode(type, EncodingCharacters.defaultInstance());
final List<HL7Component> componentList = new ArrayList<>();
if ( type instanceof Composite ) {
final Composite composite = (Composite) type;
for ( final Type component : composite.getComponents() ) {
componentList.add(new HapiField(component));
}
}
final ExtraComponents extra = type.getExtraComponents();
if ( extra != null && extra.numComponents() > 0 ) {
final String singleFieldValue;
if ( type instanceof Primitive ) {
singleFieldValue = ((Primitive) type).getValue();
} else {
singleFieldValue = this.value;
}
componentList.add(new SingleValueField(singleFieldValue));
for (int i=0; i < extra.numComponents(); i++) {
final Varies varies = extra.getComponent(i);
componentList.add(new HapiField(varies));
}
}
this.components = Collections.unmodifiableList(componentList);
}
@Override
public String getValue() {
return value;
}
@Override
public List<HL7Component> getComponents() {
return components;
}
@Override
public String toString() {
return value;
}
}

View File

@@ -0,0 +1,94 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.hapi;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Message;
import org.apache.nifi.hl7.model.HL7Segment;
import ca.uhn.hl7v2.HL7Exception;
import ca.uhn.hl7v2.model.Group;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.model.Segment;
import ca.uhn.hl7v2.model.Structure;
public class HapiMessage implements HL7Message {
private final Message message;
private final List<HL7Segment> allSegments;
private final Map<String, List<HL7Segment>> segmentMap;
public HapiMessage(final Message message) throws HL7Exception {
this.message = message;
allSegments = new ArrayList<>();
populateSegments(message, allSegments);
segmentMap = new HashMap<>();
for ( final HL7Segment segment : allSegments ) {
final String segmentName = segment.getName();
List<HL7Segment> segmentList = segmentMap.get(segmentName);
if ( segmentList == null ) {
segmentList = new ArrayList<>();
segmentMap.put(segmentName, segmentList);
}
segmentList.add(segment);
}
}
private void populateSegments(final Group group, final List<HL7Segment> segments) throws HL7Exception {
for ( final String structureName : group.getNames() ) {
final Structure[] structures = group.getAll(structureName);
if ( group.isGroup(structureName) ) {
for ( final Structure structure : structures ) {
populateSegments((Group) structure, segments);
}
} else {
for ( final Structure structure : structures ) {
final Segment segment = (Segment) structure;
final HapiSegment hapiSegment = new HapiSegment(segment);
segments.add(hapiSegment);
}
}
}
}
@Override
public List<HL7Segment> getSegments() {
return Collections.unmodifiableList(allSegments);
}
@Override
public List<HL7Segment> getSegments(final String segmentType) {
final List<HL7Segment> segments = segmentMap.get(segmentType);
if ( segments == null ) {
return Collections.emptyList();
}
return Collections.unmodifiableList(segments);
}
@Override
public String toString() {
return message.toString();
}
}
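A minimal usage sketch, not part of this commit: it assumes HAPI 2.2's PipeParser.parse(String) and feeds the result to HapiMessage to look up segments by name. The sample message text and the example class are invented for illustration.
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.parser.PipeParser;
import org.apache.nifi.hl7.hapi.HapiMessage;
import org.apache.nifi.hl7.model.HL7Segment;
public class HapiMessageExample {
    public static void main(final String[] args) throws Exception {
        // Hypothetical ORU^R01 message; HL7 v2 segments are separated by carriage returns.
        final String text = "MSH|^~\\&|LAB|FAC|APP|FAC|20150411075500||ORU^R01|1234|P|2.3\r"
                + "PID|1||123456||DOE^JOHN\r"
                + "OBX|1|NM|GLU^Glucose||59|mg/dL|65-99|L|||F\r";
        final Message parsed = new PipeParser().parse(text);
        final HapiMessage message = new HapiMessage(parsed);
        // Look up all OBX segments by name and report how many fields each one exposes.
        for (final HL7Segment obx : message.getSegments("OBX")) {
            System.out.println(obx.getName() + ": " + obx.getFields().size() + " fields");
        }
    }
}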

View File

@@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.hapi;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.nifi.hl7.model.HL7Field;
import org.apache.nifi.hl7.model.HL7Segment;
import ca.uhn.hl7v2.HL7Exception;
import ca.uhn.hl7v2.model.Segment;
import ca.uhn.hl7v2.model.Type;
public class HapiSegment implements HL7Segment {
private final Segment segment;
private final List<HL7Field> fields;
public HapiSegment(final Segment segment) throws HL7Exception {
this.segment = segment;
final List<HL7Field> fieldList = new ArrayList<>();
for (int i=1; i <= segment.numFields(); i++) {
final Type[] types = segment.getField(i);
if ( types == null || types.length == 0 ) {
fieldList.add(new EmptyField());
continue;
}
for ( final Type type : types ) {
fieldList.add(new HapiField(type));
}
}
this.fields = Collections.unmodifiableList(fieldList);
}
@Override
public String getName() {
return segment.getName();
}
@Override
public List<HL7Field> getFields() {
return fields;
}
@Override
public String toString() {
return segment.toString();
}
}
View File
@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.hapi;
import java.util.Collections;
import java.util.List;
import org.apache.nifi.hl7.model.HL7Component;
import org.apache.nifi.hl7.model.HL7Field;
public class SingleValueField implements HL7Field {
private final String value;
public SingleValueField(final String value) {
this.value = value;
}
@Override
public String getValue() {
return value;
}
@Override
public List<HL7Component> getComponents() {
return Collections.emptyList();
}
}
View File
@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.io;
import java.io.IOException;
import org.apache.nifi.hl7.model.HL7Message;
public interface HL7Reader {
HL7Message nextMessage() throws IOException;
}
View File
@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.io.exception;
import java.io.IOException;
public class InvalidHL7Exception extends IOException {
private static final long serialVersionUID = -5675416667224562441L;
public InvalidHL7Exception() {
super();
}
public InvalidHL7Exception(String message, Throwable cause) {
super(message, cause);
}
public InvalidHL7Exception(String message) {
super(message);
}
public InvalidHL7Exception(Throwable cause) {
super(cause);
}
}
View File
@ -0,0 +1,24 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.model;
import java.util.List;
public interface HL7Component {
String getValue();
List<HL7Component> getComponents();
}
View File
@ -0,0 +1,21 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.model;
public interface HL7Field extends HL7Component {
}
View File
@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.model;
import java.util.List;
public interface HL7Message {
List<HL7Segment> getSegments();
List<HL7Segment> getSegments(String segmentType);
}
View File
@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.model;
import java.util.List;
public interface HL7Segment {
String getName();
List<HL7Field> getFields();
}
View File
@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query;
import org.apache.nifi.hl7.model.HL7Message;
public interface Declaration {
String getAlias();
boolean isRequired();
Object getDeclaredValue(HL7Message message);
}
View File
@ -0,0 +1,412 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query;
import static org.apache.nifi.hl7.query.antlr.HL7QueryParser.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.tree.Tree;
import org.apache.nifi.hl7.model.HL7Message;
import org.apache.nifi.hl7.query.evaluator.BooleanEvaluator;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
import org.apache.nifi.hl7.query.evaluator.IntegerEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.EqualsEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.GreaterThanEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.GreaterThanOrEqualEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.IsNullEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.LessThanEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.LessThanOrEqualEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.NotEqualsEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.NotEvaluator;
import org.apache.nifi.hl7.query.evaluator.comparison.NotNullEvaluator;
import org.apache.nifi.hl7.query.evaluator.literal.IntegerLiteralEvaluator;
import org.apache.nifi.hl7.query.evaluator.literal.StringLiteralEvaluator;
import org.apache.nifi.hl7.query.evaluator.logic.AndEvaluator;
import org.apache.nifi.hl7.query.evaluator.logic.OrEvaluator;
import org.apache.nifi.hl7.query.evaluator.message.DeclaredReferenceEvaluator;
import org.apache.nifi.hl7.query.evaluator.message.DotEvaluator;
import org.apache.nifi.hl7.query.evaluator.message.MessageEvaluator;
import org.apache.nifi.hl7.query.evaluator.message.SegmentEvaluator;
import org.apache.nifi.hl7.query.exception.HL7QueryParsingException;
import org.apache.nifi.hl7.query.result.MissedResult;
import org.apache.nifi.hl7.query.result.StandardQueryResult;
import org.apache.nifi.hl7.query.antlr.HL7QueryLexer;
import org.apache.nifi.hl7.query.antlr.HL7QueryParser;
public class HL7Query {
private final Tree tree;
private final String query;
private final Set<Declaration> declarations = new HashSet<>();
private final List<Selection> selections;
private final BooleanEvaluator whereEvaluator;
private HL7Query(final Tree tree, final String query) {
this.tree = tree;
this.query = query;
List<Selection> select = null;
BooleanEvaluator where = null;
for (int i=0; i < tree.getChildCount(); i++) {
final Tree child = tree.getChild(i);
switch (child.getType()) {
case DECLARE:
processDeclare(child);
break;
case SELECT:
select = processSelect(child);
break;
case WHERE:
where = processWhere(child);
break;
default:
throw new HL7QueryParsingException("Found unexpected clause at root level: " + tree.getText());
}
}
this.whereEvaluator = where;
this.selections = select;
}
private void processDeclare(final Tree declare) {
for (int i=0; i < declare.getChildCount(); i++) {
final Tree declarationTree = declare.getChild(i);
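// declaration subtree layout, as consumed below: child 0 = alias identifier,
// child 1 = REQUIRED/OPTIONAL marker, child 2 = segment name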
final String identifier = declarationTree.getChild(0).getText();
final Tree requiredOrOptionalTree = declarationTree.getChild(1);
final boolean required = requiredOrOptionalTree.getType() == REQUIRED;
final String segmentName = declarationTree.getChild(2).getText();
final Declaration declaration = new Declaration() {
@Override
public String getAlias() {
return identifier;
}
@Override
public boolean isRequired() {
return required;
}
@Override
public Object getDeclaredValue(final HL7Message message) {
if ( message == null ) {
return null;
}
return message.getSegments(segmentName);
}
};
declarations.add(declaration);
}
}
private List<Selection> processSelect(final Tree select) {
final List<Selection> selections = new ArrayList<>();
for (int i=0; i < select.getChildCount(); i++) {
final Tree selectable = select.getChild(i);
final String alias = getSelectedName(selectable);
final Evaluator<?> selectionEvaluator = buildReferenceEvaluator(selectable);
final Selection selection = new Selection(selectionEvaluator, alias);
selections.add(selection);
}
return selections;
}
private String getSelectedName(final Tree selectable) {
if ( selectable.getChildCount() == 0 ) {
return selectable.getText();
} else if (selectable.getType() == DOT ) {
return getSelectedName(selectable.getChild(0)) + "." + getSelectedName(selectable.getChild(1));
} else {
return selectable.getChild(selectable.getChildCount() - 1).getText();
}
}
private BooleanEvaluator processWhere(final Tree where) {
return buildBooleanEvaluator(where.getChild(0));
}
private Evaluator<?> buildReferenceEvaluator(final Tree tree) {
switch (tree.getType()) {
case MESSAGE:
return new MessageEvaluator();
case SEGMENT_NAME:
return new SegmentEvaluator(new StringLiteralEvaluator(tree.getText()));
case IDENTIFIER:
return new DeclaredReferenceEvaluator(new StringLiteralEvaluator(tree.getText()));
case DOT:
final Tree firstChild = tree.getChild(0);
final Tree secondChild = tree.getChild(1);
return new DotEvaluator(buildReferenceEvaluator(firstChild), buildIntegerEvaluator(secondChild));
case STRING_LITERAL:
return new StringLiteralEvaluator(tree.getText());
case NUMBER:
return new IntegerLiteralEvaluator(Integer.parseInt(tree.getText()));
default:
throw new HL7QueryParsingException("Failed to build evaluator for " + tree.getText());
}
}
private IntegerEvaluator buildIntegerEvaluator(final Tree tree) {
switch (tree.getType()) {
case NUMBER:
return new IntegerLiteralEvaluator(Integer.parseInt(tree.getText()));
default:
throw new HL7QueryParsingException("Failed to build Integer Evaluator for " + tree.getText());
}
}
private BooleanEvaluator buildBooleanEvaluator(final Tree tree) {
// TODO: add Date comparisons
// LT/GT/GE/GE should allow for dates based on Field's Type
// BETWEEN
// DATE('2015/01/01')
// DATE('2015/01/01 12:00:00')
// DATE('24 HOURS AGO')
// DATE('YESTERDAY')
switch (tree.getType()) {
case EQUALS:
return new EqualsEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case NOT_EQUALS:
return new NotEqualsEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case GT:
return new GreaterThanEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case LT:
return new LessThanEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case GE:
return new GreaterThanOrEqualEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case LE:
return new LessThanOrEqualEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case NOT:
return new NotEvaluator(buildBooleanEvaluator(tree.getChild(0)));
case AND:
return new AndEvaluator(buildBooleanEvaluator(tree.getChild(0)), buildBooleanEvaluator(tree.getChild(1)));
case OR:
return new OrEvaluator(buildBooleanEvaluator(tree.getChild(0)), buildBooleanEvaluator(tree.getChild(1)));
case IS_NULL:
return new IsNullEvaluator(buildReferenceEvaluator(tree.getChild(0)));
case NOT_NULL:
return new NotNullEvaluator(buildReferenceEvaluator(tree.getChild(0)));
default:
throw new HL7QueryParsingException("Cannot build boolean evaluator for '" + tree.getText() + "'");
}
}
Tree getTree() {
return tree;
}
public String getQuery() {
return query;
}
@Override
public String toString() {
return "HL7Query[" + query + "]";
}
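// A minimal usage sketch (the query text below is hypothetical; the accepted syntax is defined by the
// HL7QueryLexer/HL7QueryParser grammar, not by this comment):
//   final HL7Query query = HL7Query.compile("DECLARE pid AS REQUIRED PID SELECT pid.5 WHERE pid.8 = 'F'");
//   final QueryResult result = query.evaluate(message);
//   if (result.isMatch()) { final ResultHit hit = result.nextHit(); }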
public static HL7Query compile(final String query) {
try {
final CommonTokenStream lexerTokenStream = createTokenStream(query);
final HL7QueryParser parser = new HL7QueryParser(lexerTokenStream);
final Tree tree = (Tree) parser.query().getTree();
return new HL7Query(tree, query);
} catch (final HL7QueryParsingException e) {
throw e;
} catch (final Exception e) {
throw new HL7QueryParsingException(e);
}
}
private static CommonTokenStream createTokenStream(final String expression) throws HL7QueryParsingException {
final CharStream input = new ANTLRStringStream(expression);
final HL7QueryLexer lexer = new HL7QueryLexer(input);
return new CommonTokenStream(lexer);
}
public List<Class<?>> getReturnTypes() {
final List<Class<?>> returnTypes = new ArrayList<>();
for ( final Selection selection : selections ) {
returnTypes.add( selection.getEvaluator().getType() );
}
return returnTypes;
}
@SuppressWarnings("unchecked")
public QueryResult evaluate(final HL7Message message) {
int totalIterations = 1;
final LinkedHashMap<String, List<Object>> possibleValueMap = new LinkedHashMap<>();
for ( final Declaration declaration : declarations ) {
final Object value = declaration.getDeclaredValue(message);
if ( value == null && declaration.isRequired() ) {
return new MissedResult(selections);
}
final List<Object> possibleValues;
if ( value instanceof List ) {
possibleValues = (List<Object>) value;
} else if ( value instanceof Collection ) {
possibleValues = new ArrayList<Object>((Collection<Object>) value);
} else {
possibleValues = new ArrayList<>(1);
possibleValues.add(value);
}
if ( possibleValues.isEmpty() ) {
return new MissedResult(selections);
}
possibleValueMap.put(declaration.getAlias(), possibleValues);
totalIterations *= possibleValues.size();
}
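// at this point, totalIterations is the size of the cartesian product of all declared alias values;
// each iteration below binds one concrete combination of alias values and evaluates the WHERE clause
// and SELECT expressions against that binding.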
final Set<Map<String, Object>> resultSet = new HashSet<>();
for (int i=0; i < totalIterations; i++) {
final Map<String, Object> aliasValues = assignAliases(possibleValueMap, i);
aliasValues.put(Evaluator.MESSAGE_KEY, message);
if (whereEvaluator == null || Boolean.TRUE.equals(whereEvaluator.evaluate(aliasValues))) {
final Map<String, Object> resultMap = new HashMap<>();
for ( final Selection selection : selections ) {
final Object value = selection.getEvaluator().evaluate(aliasValues);
resultMap.put(selection.getName(), value);
}
resultSet.add(resultMap);
}
}
// for ( final Declaration declaration : declarations ) {
// final Object value = declaration.getDeclaredValue(message);
// if ( value == null && declaration.isRequired() ) {
// return new MissedResult(selections);
// }
// objectMap.put(declaration.getAlias(), value);
// }
//
// if (whereEvaluator == null || Boolean.TRUE.equals(whereEvaluator.evaluate(objectMap))) {
// for ( final Selection selection : selections ) {
// final Object value = selection.getEvaluator().evaluate(objectMap);
// resultMap.put(selection.getName(), value);
// }
// } else {
// return new MissedResult(selections);
// }
return new StandardQueryResult(selections, resultSet);
}
// assigns one of the possible values to each alias, based on which iteration this is.
// a LinkedHashMap is required to make explicit that the iteration order of the Map MUST be consistent
// across multiple invocations of this method.
// package protected for testing visibility
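// worked example (illustrative only): with possibleValues = { a -> [a0, a1], b -> [b0, b1, b2] },
// totalIterations is 6 and the iterations map to
//   0 -> {a=a0, b=b0}, 1 -> {a=a1, b=b0}, 2 -> {a=a0, b=b1},
//   3 -> {a=a1, b=b1}, 4 -> {a=a0, b=b2}, 5 -> {a=a1, b=b2}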
// static Map<String, Object> assignAliases(final LinkedHashMap<String, List<Object>> possibleValues, final int iteration) {
// final Map<String, Object> aliasMap = new HashMap<>();
//
// int aliasIndex = possibleValues.size() - 1;
// for ( final Map.Entry<String, List<Object>> entry : possibleValues.entrySet() ) {
// final String alias = entry.getKey();
// final List<Object> validValues = entry.getValue();
//
// final int validValueIndex;
// if (aliasIndex > 0) {
// validValueIndex = iteration / aliasIndex;
// } else {
// validValueIndex = iteration;
// }
//
// final Object obj = validValues.get(validValueIndex % validValues.size());
// aliasMap.put(alias, obj);
//
// aliasIndex--;
// }
//
// return aliasMap;
// }
//
static Map<String, Object> assignAliases(final LinkedHashMap<String, List<Object>> possibleValues, final int iteration) {
final Map<String, Object> aliasMap = new HashMap<>();
int divisor = 1;
for ( final Map.Entry<String, List<Object>> entry : possibleValues.entrySet() ) {
final String alias = entry.getKey();
final List<Object> validValues = entry.getValue();
final int idx = (iteration / divisor) % validValues.size();
final Object obj = validValues.get(idx);
aliasMap.put(alias, obj);
divisor *= validValues.size();
}
return aliasMap;
}
public String toTreeString() {
final StringBuilder sb = new StringBuilder();
toTreeString(tree, sb, 0);
return sb.toString();
}
private void toTreeString(final Tree tree, final StringBuilder sb, final int indentLevel) {
final String nodeName = tree.getText();
for (int i=0; i < indentLevel; i++) {
sb.append(" ");
}
sb.append(nodeName);
sb.append("\n");
for (int i=0; i < tree.getChildCount(); i++) {
final Tree child = tree.getChild(i);
toTreeString(child, sb, indentLevel + 2);
}
}
}
View File
@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query;
import java.util.List;
public interface QueryResult {
boolean isMatch();
List<String> getLabels();
int getHitCount();
ResultHit nextHit();
}
View File
@ -0,0 +1,25 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query;
import java.util.Map;
public interface ResultHit {
Object getValue(String label);
Map<String, Object> getSelectedValues();
}
View File
@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class Selection {
private final Evaluator<?> evaluator;
private final String name;
public Selection(final Evaluator<?> evaluator, final String name) {
this.evaluator = evaluator;
this.name = name;
}
public String getName() {
return name;
}
public Evaluator<?> getEvaluator() {
return evaluator;
}
}
View File
@ -0,0 +1,24 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator;
public abstract class BooleanEvaluator implements Evaluator<Boolean> {
public Class<? extends Boolean> getType() {
return Boolean.class;
}
}
View File
@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator;
import java.util.Map;
public interface Evaluator<T> {
public static final String MESSAGE_KEY = "message";
T evaluate(Map<String, Object> objectMap);
Class<? extends T> getType();
}
View File
@ -0,0 +1,26 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator;
public abstract class IntegerEvaluator implements Evaluator<Integer> {
public Class<? extends Integer> getType() {
return Integer.class;
}
}
View File
@ -0,0 +1,25 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator;
public abstract class StringEvaluator implements Evaluator<String> {
public Class<? extends String> getType() {
return String.class;
}
}
View File
@ -0,0 +1,106 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import java.util.Collection;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Field;
import org.apache.nifi.hl7.query.evaluator.BooleanEvaluator;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public abstract class AbstractComparisonEvaluator extends BooleanEvaluator {
private final Evaluator<?> lhs;
private final Evaluator<?> rhs;
public AbstractComparisonEvaluator(final Evaluator<?> lhs, final Evaluator<?> rhs) {
this.lhs = lhs;
this.rhs = rhs;
}
public final Boolean evaluate(final Map<String, Object> objectMap) {
final Object lhsValue = lhs.evaluate(objectMap);
if ( lhsValue == null ) {
return false;
}
final Object rhsValue = rhs.evaluate(objectMap);
if ( rhsValue == null ) {
return false;
}
return compareRaw(lhsValue, rhsValue);
}
private Boolean compareRaw(Object lhsValue, Object rhsValue) {
if ( lhsValue == null || rhsValue == null ) {
return false;
}
if ( lhsValue instanceof HL7Field ) {
lhsValue = ((HL7Field) lhsValue).getValue();
}
if ( rhsValue instanceof HL7Field ) {
rhsValue = ((HL7Field) rhsValue).getValue();
}
if ( lhsValue == null || rhsValue == null ) {
return false;
}
// if both sides are collections, there is no meaningful way to compare them, so the comparison is false.
// this would be the case, for instance, if we compared field 1 of one segment to
// a field in another segment, and both fields had components.
if ( lhsValue instanceof Collection && rhsValue instanceof Collection ) {
return false;
}
// if one side is a collection but the other is not, check if any element in that
// collection compares to the other element in a way that satisfies the condition.
// this would happen, for instance, if we check Segment1.Field5 = 'X' and field 5 repeats
// with a value "A~B~C~X~Y~Z"; in this case we do want to consider Field 5 = X as true.
if ( lhsValue instanceof Collection ) {
for ( final Object lhsObject : (Collection<?>) lhsValue ) {
if ( compareRaw(lhsObject, rhsValue) ) {
return true;
}
}
return false;
}
if ( rhsValue instanceof Collection ) {
for ( final Object rhsObject : (Collection<?>) rhsValue ) {
if ( compareRaw(rhsObject, lhsValue) ) {
return true;
}
}
return false;
}
if ( lhsValue != null && rhsValue != null && compare(lhsValue, rhsValue) ) {
return true;
}
return false;
}
protected abstract boolean compare(Object lhs, Object rhs);
}
View File
@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import java.util.regex.Pattern;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public abstract class AbstractNumericComparison extends AbstractComparisonEvaluator {
private static final Pattern NUMERIC_PATTERN = Pattern.compile("\\d+(\\.\\d+)?");
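// note: this pattern accepts only non-negative decimal values; strings with a sign or exponent
// (e.g. "-1", "1e3") are treated as non-numeric, so the comparison evaluates to false for them.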
public AbstractNumericComparison(final Evaluator<?> lhs, final Evaluator<?> rhs) {
super(lhs, rhs);
}
@Override
protected final boolean compare(final Object lhs, final Object rhs) {
final Double lhsDouble = toDouble(lhs);
if ( lhsDouble == null ) {
return false;
}
final Double rhsDouble = toDouble(rhs);
if ( rhsDouble == null ) {
return false;
}
return compareNumbers(lhsDouble, rhsDouble);
}
private Double toDouble(final Object value) {
if ( value == null ) {
return null;
}
if ( value instanceof Double ) {
return (Double) value;
}
if ( value instanceof Number ) {
return ((Number) value).doubleValue();
}
if ( value instanceof String ) {
if ( NUMERIC_PATTERN.matcher((String) value).matches() ) {
return Double.parseDouble((String) value);
}
}
return null;
}
protected abstract boolean compareNumbers(final Double lhs, final Double rhs);
}
View File
@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class EqualsEvaluator extends AbstractComparisonEvaluator {
public EqualsEvaluator(final Evaluator<?> lhs, final Evaluator<?> rhs) {
super(lhs, rhs);
}
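// two values are considered equal if they are the same reference, are equal via equals(), or have
// identical string representations (HL7 values frequently arrive as text, so "7" equals 7 here).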
@Override
protected boolean compare(final Object lhs, final Object rhs) {
return lhs != null && rhs != null && ((lhs == rhs) || (lhs.equals(rhs)) || lhs.toString().equals(rhs.toString()));
}
}
View File
@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class GreaterThanEvaluator extends AbstractNumericComparison {
public GreaterThanEvaluator(final Evaluator<?> lhs, final Evaluator<?> rhs) {
super(lhs, rhs);
}
@Override
protected boolean compareNumbers(final Double lhs, final Double rhs) {
return lhs > rhs;
}
}
View File
@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class GreaterThanOrEqualEvaluator extends AbstractNumericComparison {
public GreaterThanOrEqualEvaluator(final Evaluator<?> lhs, final Evaluator<?> rhs) {
super(lhs, rhs);
}
@Override
protected boolean compareNumbers(final Double lhs, final Double rhs) {
return lhs >= rhs;
}
}
View File
@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import java.util.Collection;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Component;
import org.apache.nifi.hl7.query.evaluator.BooleanEvaluator;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class IsNullEvaluator extends BooleanEvaluator {
private final Evaluator<?> subjectEvaluator;
public IsNullEvaluator(final Evaluator<?> subjectEvaluator) {
this.subjectEvaluator = subjectEvaluator;
}
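// a value is considered null if it is absent, if it is an HL7Component whose value is null,
// or if it is a collection that is empty or contains only null values (checked recursively).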
@Override
public Boolean evaluate(final Map<String, Object> objectMap) {
Object subjectValue = subjectEvaluator.evaluate(objectMap);
if ( subjectValue == null ) {
return true;
}
return isNull(subjectValue);
}
private boolean isNull(Object subjectValue) {
if ( subjectValue == null ) {
return true;
}
if ( subjectValue instanceof HL7Component ) {
subjectValue = ((HL7Component) subjectValue).getValue();
}
if ( subjectValue instanceof Collection ) {
final Collection<?> collection = (Collection<?>) subjectValue;
if ( collection.isEmpty() ) {
return true;
}
for ( final Object obj : collection ) {
if ( !isNull(obj) ) {
return false;
}
}
return true;
}
return subjectValue == null;
}
}
View File
@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class LessThanEvaluator extends AbstractNumericComparison {
public LessThanEvaluator(final Evaluator<?> lhs, final Evaluator<?> rhs) {
super(lhs, rhs);
}
@Override
protected boolean compareNumbers(final Double lhs, final Double rhs) {
return lhs < rhs;
}
}
View File
@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class LessThanOrEqualEvaluator extends AbstractNumericComparison {
public LessThanOrEqualEvaluator(final Evaluator<?> lhs, final Evaluator<?> rhs) {
super(lhs, rhs);
}
@Override
protected boolean compareNumbers(final Double lhs, final Double rhs) {
return lhs <= rhs;
}
}
View File
@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class NotEqualsEvaluator extends AbstractComparisonEvaluator {
public NotEqualsEvaluator(final Evaluator<?> lhs, final Evaluator<?> rhs) {
super(lhs, rhs);
}
@Override
protected boolean compare(final Object lhs, final Object rhs) {
return lhs != null && rhs != null && lhs != rhs && !lhs.equals(rhs) && !lhs.toString().equals(rhs.toString());
}
}
View File
@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import java.util.Map;
import org.apache.nifi.hl7.query.evaluator.BooleanEvaluator;
public class NotEvaluator extends BooleanEvaluator {
private final BooleanEvaluator subjectEvaluator;
public NotEvaluator(final BooleanEvaluator subjectEvaluator) {
this.subjectEvaluator = subjectEvaluator;
}
@Override
public Boolean evaluate(final Map<String, Object> objectMap) {
final Boolean subjectValue = subjectEvaluator.evaluate(objectMap);
// NOT negates its subject; a null subject is treated as false, so NOT(null) evaluates to true
return (subjectValue == null || Boolean.FALSE.equals(subjectValue));
}
}
View File
@ -0,0 +1,65 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.comparison;
import java.util.Collection;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Component;
import org.apache.nifi.hl7.query.evaluator.BooleanEvaluator;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class NotNullEvaluator extends BooleanEvaluator {
private final Evaluator<?> subjectEvaluator;
public NotNullEvaluator(final Evaluator<?> subjectEvaluator) {
this.subjectEvaluator = subjectEvaluator;
}
@Override
public Boolean evaluate(final Map<String, Object> objectMap) {
Object subjectValue = subjectEvaluator.evaluate(objectMap);
if ( subjectValue == null ) {
return false;
}
return isNotNull(subjectValue);
}
private boolean isNotNull(Object subjectValue) {
if ( subjectValue instanceof HL7Component ) {
subjectValue = ((HL7Component) subjectValue).getValue();
}
if ( subjectValue instanceof Collection ) {
final Collection<?> collection = (Collection<?>) subjectValue;
if ( collection.isEmpty() ) {
return false;
}
for ( final Object obj : collection ) {
if ( isNotNull(obj) ) {
return true;
}
}
return false;
}
return subjectValue != null;
}
}
View File
@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.literal;
import java.util.Map;
import org.apache.nifi.hl7.query.evaluator.IntegerEvaluator;
public class IntegerLiteralEvaluator extends IntegerEvaluator {
private final Integer value;
public IntegerLiteralEvaluator(final Integer value) {
this.value = value;
}
@Override
public Integer evaluate(final Map<String, Object> objectMap) {
return value;
}
}
View File
@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.literal;
import java.util.Map;
import org.apache.nifi.hl7.query.evaluator.StringEvaluator;
public class StringLiteralEvaluator extends StringEvaluator {
private final String value;
public StringLiteralEvaluator(final String value) {
this.value = value;
}
@Override
public String evaluate(final Map<String, Object> objectMap) {
return value;
}
}
View File
@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.logic;
import java.util.Map;
import org.apache.nifi.hl7.query.evaluator.BooleanEvaluator;
public class AndEvaluator extends BooleanEvaluator {
private final BooleanEvaluator lhs;
private final BooleanEvaluator rhs;
public AndEvaluator(final BooleanEvaluator lhs, final BooleanEvaluator rhs) {
this.lhs = lhs;
this.rhs = rhs;
}
@Override
public Boolean evaluate(final Map<String, Object> objectMap) {
final Boolean lhsValue = lhs.evaluate(objectMap);
if ( lhsValue == null || Boolean.FALSE.equals(lhsValue) ) {
return false;
}
final Boolean rhsValue = rhs.evaluate(objectMap);
return (rhsValue != null && Boolean.TRUE.equals(rhsValue));
}
}
View File
@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.logic;
import java.util.Map;
import org.apache.nifi.hl7.query.evaluator.BooleanEvaluator;
public class OrEvaluator extends BooleanEvaluator {
private final BooleanEvaluator lhs;
private final BooleanEvaluator rhs;
public OrEvaluator(final BooleanEvaluator lhs, final BooleanEvaluator rhs) {
this.lhs = lhs;
this.rhs = rhs;
}
@Override
public Boolean evaluate(final Map<String, Object> objectMap) {
final Boolean lhsValue = lhs.evaluate(objectMap);
if ( lhsValue != null && Boolean.TRUE.equals(lhsValue) ) {
return true;
}
final Boolean rhsValue = rhs.evaluate(objectMap);
return (rhsValue != null && Boolean.TRUE.equals(rhsValue));
}
}
View File
@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.message;
import java.util.Map;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
import org.apache.nifi.hl7.query.evaluator.StringEvaluator;
public class DeclaredReferenceEvaluator implements Evaluator<Object> {
private final StringEvaluator referenceNameEvaluator;
public DeclaredReferenceEvaluator(final StringEvaluator referenceNameEvaluator) {
this.referenceNameEvaluator = referenceNameEvaluator;
}
@Override
public Object evaluate(final Map<String, Object> objectMap) {
final String referenceName = referenceNameEvaluator.evaluate(objectMap);
return objectMap.get(referenceName);
}
@Override
public Class<? extends Object> getType() {
return Object.class;
}
}
View File
@ -0,0 +1,88 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.message;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Component;
import org.apache.nifi.hl7.model.HL7Message;
import org.apache.nifi.hl7.model.HL7Segment;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
import org.apache.nifi.hl7.query.evaluator.IntegerEvaluator;
public class DotEvaluator implements Evaluator<Object> {
private final Evaluator<?> lhs;
private final IntegerEvaluator rhs;
public DotEvaluator(final Evaluator<?> lhs, final IntegerEvaluator rhs) {
this.lhs = lhs;
this.rhs = rhs;
}
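// resolves the DOT operator: the right-hand integer selects the Nth (1-based) segment of a message,
// field of a segment, or component of a field; when the left-hand side is a collection, the index is
// applied to each element and a list of results is returned.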
@Override
public Object evaluate(final Map<String, Object> objectMap) {
final Object lhsValue = this.lhs.evaluate(objectMap);
final Integer rhsValue = this.rhs.evaluate(objectMap);
if ( lhsValue == null || rhsValue == null ) {
return null;
}
final List<Object> results = new ArrayList<>();
if ( lhsValue instanceof Collection ) {
final Collection<?> lhsCollection = (Collection<?>) lhsValue;
for ( final Object obj : lhsCollection ) {
final Object val = getValue(obj, rhsValue);
results.add(val);
}
} else {
final Object val = getValue(lhsValue, rhsValue);
return val;
}
return results;
}
private Object getValue(final Object lhsValue, final int rhsValue) {
final List<?> list;
if ( lhsValue instanceof HL7Message ) {
list = ((HL7Message) lhsValue).getSegments();
} else if ( lhsValue instanceof HL7Segment ) {
list = ((HL7Segment) lhsValue).getFields();
} else if ( lhsValue instanceof HL7Component ) {
list = ((HL7Component) lhsValue).getComponents();
} else {
return null;
}
if ( rhsValue < 1 || rhsValue > list.size() ) {
return null;
}
// HL7 indices are 1-based; convert to the 0-based index expected by the List
return list.get(rhsValue - 1);
}
@Override
public Class<? extends Object> getType() {
return Object.class;
}
}
View File
@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.message;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Field;
import org.apache.nifi.hl7.model.HL7Segment;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
import org.apache.nifi.hl7.query.evaluator.IntegerEvaluator;
@SuppressWarnings("rawtypes")
public class FieldEvaluator implements Evaluator<List> {
private final SegmentEvaluator segmentEvaluator;
private final IntegerEvaluator indexEvaluator;
public FieldEvaluator(final SegmentEvaluator segmentEvaluator, final IntegerEvaluator indexEvaluator) {
this.segmentEvaluator = segmentEvaluator;
this.indexEvaluator = indexEvaluator;
}
public List<HL7Field> evaluate(final Map<String, Object> objectMap) {
final List<HL7Segment> segments = segmentEvaluator.evaluate(objectMap);
if ( segments == null ) {
return Collections.emptyList();
}
final Integer index = indexEvaluator.evaluate(objectMap);
if ( index == null ) {
return Collections.emptyList();
}
final List<HL7Field> fields = new ArrayList<>();
for ( final HL7Segment segment : segments ) {
final List<HL7Field> segmentFields = segment.getFields();
if ( segmentFields.size() <= index ) {
continue;
}
fields.add(segmentFields.get(index));
}
return fields;
}
public Class<? extends List> getType() {
return List.class;
}
}

View File

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.message;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Message;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
public class MessageEvaluator implements Evaluator<HL7Message> {
public HL7Message evaluate(final Map<String, Object> objectMap) {
return (HL7Message) objectMap.get(Evaluator.MESSAGE_KEY);
}
public Class<? extends HL7Message> getType() {
return HL7Message.class;
}
}

View File

@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.evaluator.message;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.nifi.hl7.model.HL7Message;
import org.apache.nifi.hl7.model.HL7Segment;
import org.apache.nifi.hl7.query.evaluator.Evaluator;
import org.apache.nifi.hl7.query.evaluator.StringEvaluator;
@SuppressWarnings("rawtypes")
public class SegmentEvaluator implements Evaluator<List> {
private final StringEvaluator segmentTypeEvaluator;
public SegmentEvaluator(final StringEvaluator segmentTypeEvaluator) {
this.segmentTypeEvaluator = segmentTypeEvaluator;
}
public List<HL7Segment> evaluate(final Map<String, Object> objectMap) {
final String segmentType = segmentTypeEvaluator.evaluate(objectMap);
if ( segmentType == null ) {
return Collections.emptyList();
}
final HL7Message message = (HL7Message) objectMap.get(Evaluator.MESSAGE_KEY);
final List<HL7Segment> segments = message.getSegments(segmentType);
return (segments == null) ? Collections.<HL7Segment>emptyList() : segments;
}
public Class<? extends List> getType() {
return List.class;
}
}
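Taken together, these evaluators compose to resolve a reference such as OBX.5: a SegmentEvaluator selects the segments by type and a DotEvaluator indexes into each one (1-based, as in getValue above). A minimal sketch, assuming hypothetical literal evaluators that are not defined in this commit:
// Sketch only; StringLiteralEvaluator and IntegerLiteralEvaluator are assumed stand-ins
// for whatever literal evaluators the query grammar produces.
final Evaluator<?> obxSegments = new SegmentEvaluator(new StringLiteralEvaluator("OBX"));
final Evaluator<Object> obx5 = new DotEvaluator(obxSegments, new IntegerLiteralEvaluator(5));
final Map<String, Object> objectMap = new HashMap<>();
objectMap.put(Evaluator.MESSAGE_KEY, message);   // message: a previously parsed HL7Message
final Object fifthFields = obx5.evaluate(objectMap);   // a List containing the 5th field of each OBX segment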

View File

@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.exception;
public class HL7QueryParsingException extends RuntimeException {
private static final long serialVersionUID = 1L;
public HL7QueryParsingException() {
super();
}
public HL7QueryParsingException(final Throwable cause) {
super(cause);
}
public HL7QueryParsingException(final String message) {
super(message);
}
public HL7QueryParsingException(final String message, final Throwable cause) {
super(message, cause);
}
}

View File

@ -0,0 +1,56 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.result;
import java.util.ArrayList;
import java.util.List;
import org.apache.nifi.hl7.query.QueryResult;
import org.apache.nifi.hl7.query.ResultHit;
import org.apache.nifi.hl7.query.Selection;
public class MissedResult implements QueryResult {
private final List<Selection> selections;
public MissedResult(final List<Selection> selections) {
this.selections = selections;
}
@Override
public List<String> getLabels() {
final List<String> labels = new ArrayList<>();
for ( final Selection selection : selections ) {
labels.add(selection.getName());
}
return labels;
}
@Override
public boolean isMatch() {
return false;
}
@Override
public ResultHit nextHit() {
return null;
}
@Override
public int getHitCount() {
return 0;
}
}

View File

@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.result;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.nifi.hl7.query.QueryResult;
import org.apache.nifi.hl7.query.ResultHit;
import org.apache.nifi.hl7.query.Selection;
public class StandardQueryResult implements QueryResult {
private final List<Selection> selections;
private final Set<Map<String, Object>> hits;
private final Iterator<Map<String, Object>> hitIterator;
public StandardQueryResult(final List<Selection> selections, final Set<Map<String, Object>> hits) {
this.selections = selections;
this.hits = hits;
hitIterator = hits.iterator();
}
@Override
public boolean isMatch() {
return !hits.isEmpty();
}
@Override
public List<String> getLabels() {
final List<String> labels = new ArrayList<>();
for ( final Selection selection : selections ) {
labels.add(selection.getName());
}
return labels;
}
@Override
public int getHitCount() {
return hits.size();
}
@Override
public ResultHit nextHit() {
if ( hitIterator.hasNext() ) {
return new StandardResultHit(hitIterator.next());
} else {
return null;
}
}
}
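As a usage sketch (the same pattern the TestHL7Query class later in this change exercises), a caller checks isMatch() and then drains the result by calling nextHit() once per hit:
// Sketch only: compile a query, evaluate it against a previously parsed HL7Message, and walk the hits.
final HL7Query query = HL7Query.compile("SELECT PID.5");
final QueryResult result = query.evaluate(message);
if (result.isMatch()) {
    for (int i = 0; i < result.getHitCount(); i++) {
        final ResultHit hit = result.nextHit();
        System.out.println(hit.getSelectedValues());   // label -> value, e.g. "PID.5" -> the selected fields
    }
}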

View File

@ -0,0 +1,41 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query.result;
import java.util.Collections;
import java.util.Map;
import org.apache.nifi.hl7.query.ResultHit;
public class StandardResultHit implements ResultHit {
private final Map<String, Object> values;
public StandardResultHit(final Map<String, Object> values) {
this.values = values;
}
@Override
public Object getValue(final String label) {
return values.get(label);
}
@Override
public Map<String, Object> getSelectedValues() {
return Collections.unmodifiableMap(values);
}
}

View File

@ -0,0 +1,310 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hl7.query;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.nifi.hl7.hapi.HapiMessage;
import org.apache.nifi.hl7.model.HL7Field;
import org.apache.nifi.hl7.model.HL7Message;
import org.junit.Test;
import ca.uhn.hl7v2.DefaultHapiContext;
import ca.uhn.hl7v2.HL7Exception;
import ca.uhn.hl7v2.HapiContext;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.parser.PipeParser;
import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
@SuppressWarnings("resource")
public class TestHL7Query {
@Test
public void testAssignAliases() {
final LinkedHashMap<String, List<Object>> possibleValueMap = new LinkedHashMap<>();
final List<Object> valuesA = new ArrayList<>();
valuesA.add("a");
valuesA.add("b");
valuesA.add("c");
final List<Object> valuesB = new ArrayList<>();
valuesB.add("d");
final List<Object> valuesC = new ArrayList<>();
valuesC.add("e");
valuesC.add("f");
final List<Object> valuesD = new ArrayList<>();
valuesD.add("g");
valuesD.add("h");
possibleValueMap.put("A", valuesA);
possibleValueMap.put("B", valuesB);
possibleValueMap.put("C", valuesC);
possibleValueMap.put("D", valuesD);
for (int i=0; i < valuesA.size() * valuesB.size() * valuesC.size() * valuesD.size(); i++) {
System.out.println(i + " : " + HL7Query.assignAliases(possibleValueMap, i));
}
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 0), "a", "d", "e", "g");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 1), "b", "d", "e", "g");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 2), "c", "d", "e", "g");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 3), "a", "d", "f", "g");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 4), "b", "d", "f", "g");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 5), "c", "d", "f", "g");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 6), "a", "d", "e", "h");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 7), "b", "d", "e", "h");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 8), "c", "d", "e", "h");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 9), "a", "d", "f", "h");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 10), "b", "d", "f", "h");
verifyAssignments(HL7Query.assignAliases(possibleValueMap, 11), "c", "d", "f", "h");
}
private void verifyAssignments(final Map<String, Object> map, final String a, final String b, final String c, final String d) {
assertEquals(a, map.get("A"));
assertEquals(b, map.get("B"));
assertEquals(c, map.get("C"));
assertEquals(d, map.get("D"));
}
@Test
public void testSelectMessage() throws HL7Exception, IOException {
final HL7Query query = HL7Query.compile("SELECT MESSAGE");
final HL7Message msg = createMessage(new File("src/test/resources/hypoglycemia"));
final QueryResult result = query.evaluate(msg);
assertTrue(result.isMatch());
final List<String> labels = result.getLabels();
assertEquals(1, labels.size());
assertEquals("MESSAGE", labels.get(0));
assertEquals(1, result.getHitCount());
assertEquals(msg, result.nextHit().getValue("MESSAGE"));
}
@Test
@SuppressWarnings({ "unchecked", "rawtypes" })
public void testSelectField() throws HL7Exception, IOException {
final HL7Query query = HL7Query.compile("SELECT PID.5");
final HL7Message msg = createMessage(new File("src/test/resources/hypoglycemia"));
final QueryResult result = query.evaluate(msg);
assertTrue(result.isMatch());
final List<String> labels = result.getLabels();
assertEquals(1, labels.size());
assertEquals(1, result.getHitCount());
final Object names = result.nextHit().getValue("PID.5");
assertTrue(names instanceof List);
final List<Object> nameList = (List) names;
assertEquals(1, nameList.size());
final HL7Field nameField = (HL7Field) nameList.get(0);
assertEquals("SMITH^JOHN", nameField.getValue());
}
@Test
public void testSelectAbnormalTestResult() throws HL7Exception, IOException {
final String query = "DECLARE result AS REQUIRED OBX SELECT result WHERE result.7 != 'N' AND result.1 = 1";
final HL7Query hl7Query = HL7Query.compile(query);
final QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
}
@Test
public void testFieldEqualsString() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.7 = 'L'");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.7 = 'H'");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
}
@Test
public void testLessThan() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 < 600");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 < 59");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
}
@Test
public void testCompareTwoFields() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 < result.6.2");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE NOT(result.4 > result.6.3)");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
}
@Test
public void testLessThanOrEqual() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 <= 59");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 <= 600");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 <= 58");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
}
@Test
public void testGreaterThanOrEqual() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 >= 59");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 >= 6");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 >= 580");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
}
@Test
public void testGreaterThan() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 > 58");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 > 6");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.4 > 580");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
}
@Test
public void testDistinctValuesReturned() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result1 AS REQUIRED OBX, result2 AS REQUIRED OBX SELECT MESSAGE WHERE result1.7 = 'L' OR result2.7 != 'H'");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
assertEquals(1, result.getHitCount());
}
@Test
public void testAndWithParens() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.7 = 'L' AND result.3.1 = 'GLU'");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.7 = 'L' AND result.3.1 = 'GLU'");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hyperglycemia")));
assertFalse( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.7 = 'H' AND result.3.1 = 'GLU'");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.7 = 'H' AND result.3.1 = 'GLU'");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hyperglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE (result.7 = 'H') AND (result.3.1 = 'GLU')");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hyperglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE ((result.7 = 'H') AND (result.3.1 = 'GLU'))");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hyperglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE (( ((result.7 = 'H')) AND ( ((result.3.1 = 'GLU')) )))");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hyperglycemia")));
assertTrue( result.isMatch() );
}
@Test
public void testIsNull() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.999 IS NULL");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.1 IS NULL");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
hl7Query = HL7Query.compile("SELECT MESSAGE WHERE ZZZ IS NULL");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("SELECT MESSAGE WHERE OBX IS NULL");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
}
@Test
public void testNotNull() throws HL7Exception, IOException {
HL7Query hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.999 NOT NULL");
QueryResult result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
hl7Query = HL7Query.compile("DECLARE result AS REQUIRED OBX SELECT MESSAGE WHERE result.1 NOT NULL");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
hl7Query = HL7Query.compile("SELECT MESSAGE WHERE ZZZ NOT NULL");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertFalse( result.isMatch() );
hl7Query = HL7Query.compile("SELECT MESSAGE WHERE OBX NOT NULL");
result = hl7Query.evaluate(createMessage(new File("src/test/resources/hypoglycemia")));
assertTrue( result.isMatch() );
}
private HL7Message createMessage(final File file) throws HL7Exception, IOException {
final byte[] bytes = Files.readAllBytes(file.toPath());
final String msgText = new String(bytes, "UTF-8");
final HapiContext hapiContext = new DefaultHapiContext();
hapiContext.setValidationContext(ValidationContextFactory.noValidation());
final PipeParser parser = hapiContext.getPipeParser();
final Message message = parser.parse(msgText);
return new HapiMessage(message);
}
}

View File

@ -0,0 +1,5 @@
MSH|^~\&|XXXXXXXX||HealthProvider||||ORU^R01|Q1111111111111111111|P|2.3|
PID|||111111111||SMITH^JOHN||19700100|M||||||||||111111111111|123456789|
PD1||||1234567890^LAST^FIRST^M^^^^^NPI|
OBR|1|341856649^HNAM_ORDERID|000000000000000000|648088^Basic Metabolic Panel|||20150101000000|||||||||1620^Johnson^Corey^A||||||20150101000000|||F|||||||||||20150101000000|
OBX|1|NM|GLU^Glucose Lvl|159|mg/dL|65-99^65^99|H|||F|||20150102000000|

View File

@ -0,0 +1,5 @@
MSH|^~\&|XXXXXXXX||HealthProvider||||ORU^R01|Q1111111111111111111|P|2.3|
PID|||111111111||SMITH^JOHN||19700100|M||||||||||111111111111|123456789|
PD1||||1234567890^LAST^FIRST^M^^^^^NPI|
OBR|1|341856649^HNAM_ORDERID|000000000000000000|648088^Basic Metabolic Panel|||20150101000000|||||||||1620^Johnson^Corey^A||||||20150101000000|||F|||||||||||20150101000000|
OBX|1|NM|GLU^Glucose Lvl|59|mg/dL|65-99^65^99|L|||F|||20150102000000|
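For orientation, the OBX segment in the sample above breaks down (using the 1-based field and component numbering of the query language) as OBX.1 = 1, OBX.3.1 = GLU, OBX.4 = 59, OBX.6.2 = 65 and OBX.7 = L; these are exactly the positions the WHERE clauses in TestHL7Query above compare against.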

View File

@ -88,6 +88,7 @@ import org.apache.nifi.web.api.dto.ControllerDTO;
import org.apache.nifi.web.api.dto.PortDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.helpers.MessageFormatter;
public class EndpointConnectionPool {
public static final long PEER_REFRESH_PERIOD = 60000L;
@ -202,6 +203,28 @@ public class EndpointConnectionPool {
}, 5, 5, TimeUnit.SECONDS);
}
void warn(final String msg, final Object... args) {
logger.warn(msg, args);
if ( eventReporter != null ) {
eventReporter.reportEvent(Severity.WARNING, "Site-to-Site", MessageFormatter.arrayFormat(msg, args).getMessage());
}
}
void warn(final String msg, final Throwable t) {
logger.warn(msg, t);
if ( eventReporter != null ) {
eventReporter.reportEvent(Severity.WARNING, "Site-to-Site", msg + ": " + t.toString());
}
}
void error(final String msg, final Object... args) {
logger.error(msg, args);
if ( eventReporter != null ) {
eventReporter.reportEvent(Severity.ERROR, "Site-to-Site", MessageFormatter.arrayFormat(msg, args).getMessage());
}
}
private String getPortIdentifier(final TransferDirection transferDirection) throws IOException {
if ( remoteDestination.getIdentifier() != null ) {
return remoteDestination.getIdentifier();
@ -271,6 +294,7 @@ public class EndpointConnectionPool {
logger.debug("{} No Connection available for Port {}; creating new Connection", this, portId);
protocol = new SocketClientProtocol();
protocol.setDestination(new IdEnrichedRemoteDestination(remoteDestination, portId));
protocol.setEventReporter(eventReporter);
final long penalizationMillis = remoteDestination.getYieldPeriod(TimeUnit.MILLISECONDS);
try {
@ -312,8 +336,15 @@ public class EndpointConnectionPool {
// handle error cases
if ( protocol.isDestinationFull() ) {
logger.warn("{} {} indicates that port's destination is full; penalizing peer", this, peer);
logger.warn("{} {} indicates that port {}'s destination is full; penalizing peer",
this, peer, config.getPortName() == null ? config.getPortIdentifier() : config.getPortName());
penalize(peer, penalizationMillis);
try {
peer.close();
} catch (final IOException ioe) {
}
continue;
} else if ( protocol.isPortInvalid() ) {
penalize(peer, penalizationMillis);
@ -336,7 +367,7 @@ public class EndpointConnectionPool {
cleanup(protocol, peer);
final String message = String.format("%s failed to communicate with %s due to %s", this, peer == null ? clusterUrl : peer, e.toString());
logger.error(message);
error(message);
if ( logger.isDebugEnabled() ) {
logger.error("", e);
}
@ -359,6 +390,15 @@ public class EndpointConnectionPool {
}
}
} while ( connection == null || codec == null || commsSession == null || protocol == null );
} catch (final Throwable t) {
if ( commsSession != null ) {
try {
commsSession.close();
} catch (final IOException ioe) {
}
}
throw t;
} finally {
if ( !addBack.isEmpty() ) {
connectionQueue.addAll(addBack);
@ -449,7 +489,7 @@ public class EndpointConnectionPool {
peerList = createPeerStatusList(direction);
} catch (final Exception e) {
final String message = String.format("%s Failed to update list of peers due to %s", this, e.toString());
logger.warn(message);
warn(message);
if ( logger.isDebugEnabled() ) {
logger.warn("", e);
}
@ -489,7 +529,7 @@ public class EndpointConnectionPool {
}
private boolean isPenalized(final PeerStatus peerStatus) {
final Long expirationEnd = peerTimeoutExpirations.get(peerStatus);
final Long expirationEnd = peerTimeoutExpirations.get(peerStatus.getPeerDescription());
return (expirationEnd == null ? false : expirationEnd > System.currentTimeMillis() );
}
@ -573,7 +613,7 @@ public class EndpointConnectionPool {
clientProtocol.shutdown(peer);
} catch (final IOException e) {
final String message = String.format("%s Failed to shutdown protocol when updating list of peers due to %s", this, e.toString());
logger.warn(message);
warn(message);
if (logger.isDebugEnabled()) {
logger.warn("", e);
}
@ -583,7 +623,7 @@ public class EndpointConnectionPool {
peer.close();
} catch (final IOException e) {
final String message = String.format("%s Failed to close resources when updating list of peers due to %s", this, e.toString());
logger.warn(message);
warn(message);
if (logger.isDebugEnabled()) {
logger.warn("", e);
}
@ -608,7 +648,8 @@ public class EndpointConnectionPool {
}
} catch (final IOException e) {
logger.error("Failed to persist list of Peers due to {}; if restarted and peer's NCM is down, may be unable to transfer data until communications with NCM are restored", e.toString(), e);
error("Failed to persist list of Peers due to {}; if restarted and peer's NCM is down, may be unable to transfer data until communications with NCM are restored", e.toString());
logger.error("", e);
}
}
@ -804,7 +845,7 @@ public class EndpointConnectionPool {
peerStatusCache = new PeerStatusCache(statuses);
logger.info("{} Successfully refreshed Peer Status; remote instance consists of {} peers", this, statuses.size());
} catch (Exception e) {
logger.warn("{} Unable to refresh Remote Group's peers due to {}", this, e);
warn("{} Unable to refresh Remote Group's peers due to {}", this, e);
if (logger.isDebugEnabled()) {
logger.warn("", e);
}

View File

@ -84,6 +84,7 @@ public class SocketClient implements SiteToSiteClient {
logger.debug("Unable to resolve port [{}] to an identifier", portName);
} else {
logger.debug("Resolved port [{}] to identifier [{}]", portName, portId);
this.portIdentifier = portId;
}
return portId;
@ -136,7 +137,7 @@ public class SocketClient implements SiteToSiteClient {
connectionState.getPeer(), connectionState.getCodec(), direction);
} catch (final Throwable t) {
pool.terminate(connectionState);
throw t;
throw new IOException("Unable to create Transaction to communicate with " + connectionState.getPeer(), t);
}
// Wrap the transaction in a new one that will return the EndpointConnectionState back to the pool whenever

View File

@ -27,6 +27,7 @@ import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.ProcessContext;
@ -75,6 +76,7 @@ public class SocketClientProtocol implements ClientProtocol {
private int batchCount;
private long batchSize;
private long batchMillis;
private EventReporter eventReporter;
private static final long BATCH_SEND_NANOS = TimeUnit.SECONDS.toNanos(5L); // send batches of up to 5 seconds
@ -93,6 +95,10 @@ public class SocketClientProtocol implements ClientProtocol {
this.batchMillis = millis;
}
public void setEventReporter(final EventReporter eventReporter) {
this.eventReporter = eventReporter;
}
public void setDestination(final RemoteDestination destination) {
this.destination = destination;
this.useCompression = destination.isUseCompression();
@ -272,7 +278,7 @@ public class SocketClientProtocol implements ClientProtocol {
}
return new SocketClientTransaction(versionNegotiator.getVersion(), destination.getIdentifier(), peer, codec,
direction, useCompression, (int) destination.getYieldPeriod(TimeUnit.MILLISECONDS));
direction, useCompression, (int) destination.getYieldPeriod(TimeUnit.MILLISECONDS), eventReporter);
}

View File

@ -27,6 +27,7 @@ import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;
import java.util.zip.CheckedOutputStream;
import org.apache.nifi.events.EventReporter;
import org.apache.nifi.remote.Communicant;
import org.apache.nifi.remote.Peer;
import org.apache.nifi.remote.Transaction;
@ -39,6 +40,7 @@ import org.apache.nifi.remote.io.CompressionOutputStream;
import org.apache.nifi.remote.protocol.DataPacket;
import org.apache.nifi.remote.protocol.RequestType;
import org.apache.nifi.remote.util.StandardDataPacket;
import org.apache.nifi.reporting.Severity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -56,6 +58,7 @@ public class SocketClientTransaction implements Transaction {
private final Peer peer;
private final int penaltyMillis;
private final String destinationId;
private final EventReporter eventReporter;
private boolean dataAvailable = false;
private int transfers = 0;
@ -63,7 +66,7 @@ public class SocketClientTransaction implements Transaction {
private TransactionState state;
SocketClientTransaction(final int protocolVersion, final String destinationId, final Peer peer, final FlowFileCodec codec,
final TransferDirection direction, final boolean useCompression, final int penaltyMillis) throws IOException {
final TransferDirection direction, final boolean useCompression, final int penaltyMillis, final EventReporter eventReporter) throws IOException {
this.protocolVersion = protocolVersion;
this.destinationId = destinationId;
this.peer = peer;
@ -74,6 +77,7 @@ public class SocketClientTransaction implements Transaction {
this.compress = useCompression;
this.state = TransactionState.TRANSACTION_STARTED;
this.penaltyMillis = penaltyMillis;
this.eventReporter = eventReporter;
initialize();
}
@ -116,11 +120,11 @@ public class SocketClientTransaction implements Transaction {
try {
try {
if ( state != TransactionState.DATA_EXCHANGED && state != TransactionState.TRANSACTION_STARTED) {
throw new IllegalStateException("Cannot receive data because Transaction State is " + state);
throw new IllegalStateException("Cannot receive data from " + peer + " because Transaction State is " + state);
}
if ( direction == TransferDirection.SEND ) {
throw new IllegalStateException("Attempting to receive data but started a SEND Transaction");
throw new IllegalStateException("Attempting to receive data from " + peer + " but started a SEND Transaction");
}
// if we already know there's no data, just return null
@ -142,7 +146,7 @@ public class SocketClientTransaction implements Transaction {
this.dataAvailable = false;
break;
default:
throw new ProtocolException("Got unexpected response when asking for data: " + dataAvailableCode);
throw new ProtocolException("Got unexpected response from " + peer + " when asking for data: " + dataAvailableCode);
}
}
@ -184,11 +188,11 @@ public class SocketClientTransaction implements Transaction {
try {
try {
if ( state != TransactionState.DATA_EXCHANGED && state != TransactionState.TRANSACTION_STARTED) {
throw new IllegalStateException("Cannot send data because Transaction State is " + state);
throw new IllegalStateException("Cannot send data to " + peer + " because Transaction State is " + state);
}
if ( direction == TransferDirection.RECEIVE ) {
throw new IllegalStateException("Attempting to send data but started a RECEIVE Transaction");
throw new IllegalStateException("Attempting to send data to " + peer + " but started a RECEIVE Transaction");
}
if ( transfers > 0 ) {
@ -242,7 +246,7 @@ public class SocketClientTransaction implements Transaction {
try {
try {
if ( state != TransactionState.TRANSACTION_CONFIRMED ) {
throw new IllegalStateException("Cannot complete transaction because state is " + state +
throw new IllegalStateException("Cannot complete transaction with " + peer + " because state is " + state +
"; Transaction can only be completed when state is " + TransactionState.TRANSACTION_CONFIRMED);
}
@ -272,7 +276,7 @@ public class SocketClientTransaction implements Transaction {
peer.penalize(destinationId, penaltyMillis);
backoff = true;
} else if ( transactionResponse.getCode() != ResponseCode.TRANSACTION_FINISHED ) {
throw new ProtocolException("After sending data, expected TRANSACTION_FINISHED response but got " + transactionResponse);
throw new ProtocolException("After sending data to " + peer + ", expected TRANSACTION_FINISHED response but got " + transactionResponse);
}
state = TransactionState.TRANSACTION_COMPLETED;
@ -324,7 +328,10 @@ public class SocketClientTransaction implements Transaction {
try {
confirmTransactionResponse = Response.read(dis);
} catch (final IOException ioe) {
logger.error("Failed to receive response code from {} when expected confirmation of transaction", peer);
logger.error("Failed to receive response code from {} when expecting confirmation of transaction", peer);
if ( eventReporter != null ) {
eventReporter.reportEvent(Severity.ERROR, "Site-to-Site", "Failed to receive response code from " + peer + " when expecting confirmation of transaction");
}
throw ioe;
}

View File

@ -36,5 +36,6 @@
<module>nifi-processor-utilities</module>
<module>nifi-write-ahead-log</module>
<module>nifi-site-to-site-client</module>
<module>nifi-hl7-query-language</module>
</modules>
</project>

View File

@ -32,14 +32,6 @@
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-properties</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-processor-utils</artifactId>

View File

@ -26,7 +26,6 @@ import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.DynamicProperties;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
@ -212,13 +211,23 @@ public class HtmlDocumentationWriter implements DocumentationWriter {
xmlStreamWriter.writeEndElement();
xmlStreamWriter.writeStartElement("p");
if (tags != null) {
final String tagString = StringUtils.join(tags.value(), ", ");
final String tagString = join(tags.value(), ", ");
xmlStreamWriter.writeCharacters(tagString);
} else {
xmlStreamWriter.writeCharacters("None.");
}
xmlStreamWriter.writeEndElement();
}
static String join(final String[] toJoin, final String delimiter) {
final StringBuilder sb = new StringBuilder();
for (int i=0; i < toJoin.length; i++) {
sb.append(toJoin[i]);
if ( i < toJoin.length - 1 ) {
sb.append(delimiter);
}
}
return sb.toString();
}
/**

View File

@ -23,7 +23,6 @@ import java.util.List;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.DynamicRelationship;
import org.apache.nifi.annotation.behavior.ReadsAttribute;
import org.apache.nifi.annotation.behavior.ReadsAttributes;
@ -67,6 +66,13 @@ public class HtmlProcessorDocumentationWriter extends HtmlDocumentationWriter {
handleWritesAttributes(xmlStreamWriter, processor);
}
private String defaultIfBlank(final String test, final String defaultValue) {
if ( test == null || test.trim().isEmpty() ) {
return defaultValue;
}
return test;
}
/**
* Writes out just the attributes that are being read in a table form.
*
@ -91,10 +97,10 @@ public class HtmlProcessorDocumentationWriter extends HtmlDocumentationWriter {
for (ReadsAttribute attribute : attributesRead) {
xmlStreamWriter.writeStartElement("tr");
writeSimpleElement(xmlStreamWriter, "td",
StringUtils.defaultIfBlank(attribute.attribute(), "Not Specified"));
defaultIfBlank(attribute.attribute(), "Not Specified"));
// TODO allow for HTML characters here.
writeSimpleElement(xmlStreamWriter, "td",
StringUtils.defaultIfBlank(attribute.description(), "Not Specified"));
defaultIfBlank(attribute.description(), "Not Specified"));
xmlStreamWriter.writeEndElement();
}
@ -129,10 +135,10 @@ public class HtmlProcessorDocumentationWriter extends HtmlDocumentationWriter {
for (WritesAttribute attribute : attributesRead) {
xmlStreamWriter.writeStartElement("tr");
writeSimpleElement(xmlStreamWriter, "td",
StringUtils.defaultIfBlank(attribute.attribute(), "Not Specified"));
defaultIfBlank(attribute.attribute(), "Not Specified"));
// TODO allow for HTML characters here.
writeSimpleElement(xmlStreamWriter, "td",
StringUtils.defaultIfBlank(attribute.description(), "Not Specified"));
defaultIfBlank(attribute.description(), "Not Specified"));
xmlStreamWriter.writeEndElement();
}
xmlStreamWriter.writeEndElement();

View File

@ -30,9 +30,17 @@ import org.apache.nifi.reporting.ReportingTask;
import org.junit.Test;
import static org.apache.nifi.documentation.html.XmlValidator.assertContains;
import static org.junit.Assert.assertEquals;
public class HtmlDocumentationWriterTest {
@Test
public void testJoin() {
assertEquals("a, b, c", HtmlDocumentationWriter.join(new String[] {"a", "b", "c"}, ", "));
assertEquals("a, b", HtmlDocumentationWriter.join(new String[] {"a", "b"}, ", "));
assertEquals("a", HtmlDocumentationWriter.join(new String[] {"a"}, ", "));
}
@Test
public void testDocumentControllerService() throws InitializationException, IOException {

View File

@ -116,12 +116,23 @@ public class StandardRepositoryRecord implements RepositoryRecord {
public void setWorking(final FlowFileRecord flowFile, final String attributeKey, final String attributeValue) {
workingFlowFileRecord = flowFile;
updatedAttributes.put(attributeKey, attributeValue);
// If setting attribute to same value as original, don't add to updated attributes
final String currentValue = originalAttributes.get(attributeKey);
if ( currentValue == null || !currentValue.equals(attributeValue) ) {
updatedAttributes.put(attributeKey, attributeValue);
}
}
public void setWorking(final FlowFileRecord flowFile, final Map<String, String> updatedAttribs) {
workingFlowFileRecord = flowFile;
updatedAttributes.putAll(updatedAttribs);
for ( final Map.Entry<String, String> entry : updatedAttribs.entrySet() ) {
final String currentValue = originalAttributes.get(entry.getKey());
if ( currentValue == null || !currentValue.equals(entry.getValue()) ) {
updatedAttributes.put(entry.getKey(), entry.getValue());
}
}
}
@Override

View File

@ -107,6 +107,6 @@ public class ContinuallyRunConnectableTask implements Callable<Boolean> {
return true;
}
return true;
return false; // do not yield
}
}

View File

@ -171,6 +171,7 @@ public class StandardRemoteGroupPort extends RemoteGroupPort {
this.targetRunning.set(false);
final String message = String.format("%s failed to communicate with %s because the remote instance indicates that the port is not in a valid state", this, url);
logger.error(message);
session.rollback();
remoteGroup.getEventReporter().reportEvent(Severity.ERROR, CATEGORY, message);
return;
} catch (final UnknownPortException e) {
@ -178,21 +179,24 @@ public class StandardRemoteGroupPort extends RemoteGroupPort {
this.targetExists.set(false);
final String message = String.format("%s failed to communicate with %s because the remote instance indicates that the port no longer exists", this, url);
logger.error(message);
session.rollback();
remoteGroup.getEventReporter().reportEvent(Severity.ERROR, CATEGORY, message);
return;
} catch (final IOException e) {
context.yield();
final String message = String.format("%s failed to communicate with %s due to %s", this, url, e.toString());
logger.error(message);
if ( logger.isDebugEnabled() ) {
logger.error("", e);
}
session.rollback();
remoteGroup.getEventReporter().reportEvent(Severity.ERROR, CATEGORY, message);
session.rollback();
return;
}
if ( transaction == null ) {
logger.debug("{} Unable to create transaction to communicate with; all peers must be penalized, so yielding context", this);
session.rollback();
context.yield();
return;
}

View File

@ -0,0 +1,33 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-geo-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-geo-nar</artifactId>
<packaging>nar</packaging>
<description>NiFi Geo Enrichment NAR</description>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-geo-processors</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1 @@
/target/

View File

@ -0,0 +1,43 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-geo-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-geo-processors</artifactId>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-processor-utils</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-utils</artifactId>
</dependency>
<dependency>
<groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId>
<version>2.1.0</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,210 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.maxmind.DatabaseReader;
import org.apache.nifi.util.StopWatch;
import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.maxmind.geoip2.model.CityResponse;
import com.maxmind.geoip2.record.Subdivision;
@EventDriven
@SideEffectFree
@SupportsBatching
@Tags({"geo", "enrich", "ip", "maxmind"})
@CapabilityDescription("Looks up geolocation information for an IP address and adds the geo information to FlowFile attributes. The "
+ "geo data is provided as a MaxMind database. The attribute that contains the IP address to lookup is provided by the "
+ "'IP Address Attribute' property. If the name of the attribute provided is 'X', then the the attributes added by enrichment "
+ "will take the form X.geo.<fieldName>")
@WritesAttributes({
@WritesAttribute(attribute="X.geo.lookup.micros", description="The number of microseconds that the geo lookup took"),
@WritesAttribute(attribute="X.geo.city", description="The city identified for the IP address"),
@WritesAttribute(attribute="X.geo.latitude", description="The latitude identified for this IP address"),
@WritesAttribute(attribute="X.geo.longitude", description="The longitude identified for this IP address"),
@WritesAttribute(attribute="X.geo.subdivision.N", description="Each subdivision that is identified for this IP address is added with a one-up number appended to the attribute name, starting with 0"),
@WritesAttribute(attribute="X.geo.subdivision.isocode.N", description="The ISO code for the subdivision that is identified by X.geo.subdivision.N"),
@WritesAttribute(attribute="X.geo.country", description="The country identified for this IP address"),
@WritesAttribute(attribute="X.geo.country.isocode", description="The ISO Code for the country identified"),
@WritesAttribute(attribute="X.geo.postalcode", description="The postal code for the country identified"),
})
public class GeoEnrichIP extends AbstractProcessor {
public static final PropertyDescriptor GEO_DATABASE_FILE = new PropertyDescriptor.Builder()
.name("Geo Database File")
.description("Path to Maxmind Geo Enrichment Database File")
.required(true)
.addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
.build();
public static final PropertyDescriptor IP_ADDRESS_ATTRIBUTE = new PropertyDescriptor.Builder()
.name("IP Address Attribute")
.required(true)
.description("The name of an attribute whose value is a dotted decimal IP address for which enrichment should occur")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final Relationship REL_FOUND = new Relationship.Builder()
.name("found")
.description("Where to route flow files after successfully enriching attributes with geo data")
.build();
public static final Relationship REL_NOT_FOUND = new Relationship.Builder()
.name("not found")
.description("Where to route flow files after unsuccessfully enriching attributes because no geo data was found")
.build();
private Set<Relationship> relationships;
private List<PropertyDescriptor> propertyDescriptors;
private final AtomicReference<DatabaseReader> databaseReaderRef = new AtomicReference<>(null);
@Override
public Set<Relationship> getRelationships() {
return relationships;
}
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return propertyDescriptors;
}
@OnScheduled
public final void onScheduled(final ProcessContext context) throws IOException {
final String dbFileString = context.getProperty(GEO_DATABASE_FILE).getValue();
final File dbFile = new File(dbFileString);
final StopWatch stopWatch = new StopWatch(true);
final DatabaseReader reader = new DatabaseReader.Builder(dbFile).build();
stopWatch.stop();
getLogger().info("Completed loading of Maxmind Geo Database. Elapsed time was {} milliseconds.", new Object[]{stopWatch.getDuration(TimeUnit.MILLISECONDS)});
databaseReaderRef.set(reader);
}
@OnStopped
public void closeReader() throws IOException {
final DatabaseReader reader = databaseReaderRef.get();
if ( reader != null ) {
reader.close();
}
}
@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> rels = new HashSet<>();
rels.add(REL_FOUND);
rels.add(REL_NOT_FOUND);
this.relationships = Collections.unmodifiableSet(rels);
final List<PropertyDescriptor> props = new ArrayList<>();
props.add(GEO_DATABASE_FILE);
props.add(IP_ADDRESS_ATTRIBUTE);
this.propertyDescriptors = Collections.unmodifiableList(props);
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile flowFile = session.get();
if (flowFile == null) {
return;
}
final DatabaseReader dbReader = databaseReaderRef.get();
final String ipAttributeName = context.getProperty(IP_ADDRESS_ATTRIBUTE).getValue();
final String ipAttributeValue = flowFile.getAttribute(ipAttributeName);
if (StringUtils.isEmpty(ipAttributeValue)) { //TODO need to add additional validation - should look like an IPv4 or IPv6 addr for instance
session.transfer(flowFile, REL_NOT_FOUND);
getLogger().warn("Unable to find ip address for {}", new Object[]{flowFile});
return;
}
InetAddress inetAddress = null;
CityResponse response = null;
try {
inetAddress = InetAddress.getByName(ipAttributeValue);
} catch (final IOException ioe) {
session.transfer(flowFile, REL_NOT_FOUND);
getLogger().warn("Could not resolve {} to ip address for {}", new Object[]{ipAttributeValue, flowFile}, ioe);
return;
}
final StopWatch stopWatch = new StopWatch(true);
try {
response = dbReader.city(inetAddress);
stopWatch.stop();
} catch (final IOException | GeoIp2Exception ex) {
session.transfer(flowFile, REL_NOT_FOUND);
getLogger().warn("Failure while trying to find enrichment data for {} due to {}", new Object[]{flowFile, ex}, ex);
return;
}
if (response == null) {
session.transfer(flowFile, REL_NOT_FOUND);
getLogger().warn("No enrichment data found for ip {} of {}", new Object[]{ipAttributeValue, flowFile});
return;
}
final Map<String, String> attrs = new HashMap<>();
attrs.put(new StringBuilder(ipAttributeName).append(".geo.lookup.micros").toString(), String.valueOf(stopWatch.getDuration(TimeUnit.MICROSECONDS)));
attrs.put(new StringBuilder(ipAttributeName).append(".geo.city").toString(), response.getCity().getName());
attrs.put(new StringBuilder(ipAttributeName).append(".geo.latitude").toString(), response.getLocation().getLatitude().toString());
attrs.put(new StringBuilder(ipAttributeName).append(".geo.longitude").toString(), response.getLocation().getLongitude().toString());
int i = 0;
for (final Subdivision subd : response.getSubdivisions()) {
attrs.put(new StringBuilder(ipAttributeName).append(".geo.subdivision.").append(i).toString(), subd.getName());
attrs.put(new StringBuilder(ipAttributeName).append(".geo.subdivision.isocode.").append(i).toString(), subd.getIsoCode());
i++;
}
attrs.put(new StringBuilder(ipAttributeName).append(".geo.country").toString(), response.getCountry().getName());
attrs.put(new StringBuilder(ipAttributeName).append(".geo.country.isocode").toString(), response.getCountry().getIsoCode());
attrs.put(new StringBuilder(ipAttributeName).append(".geo.postalcode").toString(), response.getPostal().getCode());
flowFile = session.putAllAttributes(flowFile, attrs);
session.transfer(flowFile, REL_FOUND);
getLogger().info("Completed lookup of IP geo information for {}", new Object[]{flowFile});
}
}

View File

@ -0,0 +1,286 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.maxmind;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.util.Arrays;
import java.util.List;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.maxmind.db.Metadata;
import com.maxmind.db.Reader;
import com.maxmind.db.Reader.FileMode;
import com.maxmind.geoip2.GeoIp2Provider;
import com.maxmind.geoip2.exception.AddressNotFoundException;
import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.maxmind.geoip2.model.AnonymousIpResponse;
import com.maxmind.geoip2.model.CityResponse;
import com.maxmind.geoip2.model.ConnectionTypeResponse;
import com.maxmind.geoip2.model.CountryResponse;
import com.maxmind.geoip2.model.DomainResponse;
import com.maxmind.geoip2.model.IspResponse;
/**
* <p>
* This class was copied from
* https://raw.githubusercontent.com/maxmind/GeoIP2-java/master/src/main/java/com/maxmind/geoip2/DatabaseReader.java
* It was written by MaxMind and is available under the Apache Software License V2.
*
* The modification we're making to the code below is to stop using exceptions for
* mainline flow control. Specifically we don't want to throw an exception
* simply because an address was not found.
* </p>
*
* Instances of this class provide a reader for the GeoIP2 database format. IP
* addresses can be looked up using the <code>get</code> method.
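* <p>
* A minimal usage sketch (illustrative only; the database path and IP address below are
* placeholders, not values referenced elsewhere in this code):
* </p>
* <pre>{@code
* DatabaseReader reader = new DatabaseReader.Builder(new File("/path/to/GeoLite2-City.mmdb")).build();
* CityResponse response = reader.city(InetAddress.getByName("203.0.113.7")); // may be null in this modified reader
* reader.close();
* }</pre>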
*/
public class DatabaseReader implements GeoIp2Provider, Closeable {
private final Reader reader;
private final ObjectMapper om;
private DatabaseReader(Builder builder) throws IOException {
if (builder.stream != null) {
this.reader = new Reader(builder.stream);
} else if (builder.database != null) {
this.reader = new Reader(builder.database, builder.mode);
} else {
// This should never happen. If it does, review the Builder class
// constructors for errors.
throw new IllegalArgumentException(
"Unsupported Builder configuration: expected either File or InputStream");
}
this.om = new ObjectMapper();
this.om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
this.om.configure(
DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL, true);
InjectableValues inject = new InjectableValues.Std().addValue(
"locales", builder.locales);
this.om.setInjectableValues(inject);
}
/**
* <p>
* Constructs a Builder for the DatabaseReader. The file passed to it must
* be a valid GeoIP2 database file.
* </p>
* <p>
* <code>Builder</code> creates instances of <code>DatabaseReader</code>
* from values set by the methods.
* </p>
* <p>
* Only the values set in the <code>Builder</code> constructor are required.
* </p>
*/
public final static class Builder {
final File database;
final InputStream stream;
List<String> locales = Arrays.asList("en");
FileMode mode = FileMode.MEMORY_MAPPED;
/**
* @param stream the stream containing the GeoIP2 database to use.
*/
public Builder(InputStream stream) {
this.stream = stream;
this.database = null;
}
/**
* @param database the GeoIP2 database file to use.
*/
public Builder(File database) {
this.database = database;
this.stream = null;
}
/**
* @param val List of locale codes to use in name property from most
* preferred to least preferred.
* @return Builder object
*/
public Builder locales(List<String> val) {
this.locales = val;
return this;
}
/**
* @param val The file mode used to open the GeoIP2 database
* @return Builder object
* @throws java.lang.IllegalArgumentException if you initialized the Builder with an InputStream, which requires
* {@link FileMode#MEMORY}, but you provided a different
* FileMode to this method.
*/
public Builder fileMode(FileMode val) {
if (this.stream != null && !FileMode.MEMORY.equals(val)) {
throw new IllegalArgumentException(
"Only FileMode.MEMORY is supported when using an InputStream.");
}
this.mode = val;
return this;
}
/**
* @return an instance of <code>DatabaseReader</code> created from the
* fields set on this builder.
* @throws IOException if there is an error reading the database
*/
public DatabaseReader build() throws IOException {
return new DatabaseReader(this);
}
}
/**
* @param ipAddress IPv4 or IPv6 address to lookup.
* @return A <T> object with the data for the IP address or null if no
* information could be found for the given IP address
* @throws IOException if there is an error opening or reading from the file.
*/
private <T> T get(InetAddress ipAddress, Class<T> cls, boolean hasTraits,
String type) throws IOException, AddressNotFoundException {
String databaseType = this.getMetadata().getDatabaseType();
if (!databaseType.contains(type)) {
String caller = Thread.currentThread().getStackTrace()[2]
.getMethodName();
throw new UnsupportedOperationException(
"Invalid attempt to open a " + databaseType
+ " database using the " + caller + " method");
}
ObjectNode node = (ObjectNode) this.reader.get(ipAddress);
if (node == null) {
return null;
}
ObjectNode ipNode;
if (hasTraits) {
if (!node.has("traits")) {
node.set("traits", this.om.createObjectNode());
}
ipNode = (ObjectNode) node.get("traits");
} else {
ipNode = node;
}
ipNode.put("ip_address", ipAddress.getHostAddress());
return this.om.treeToValue(node, cls);
}
/**
* <p>
* Closes the database.
* </p>
* <p>
* If you are using <code>FileMode.MEMORY_MAPPED</code>, this will
* <em>not</em> unmap the underlying file due to a limitation in Java's
* <code>MappedByteBuffer</code>. It will however set the reference to
* the buffer to <code>null</code>, allowing the garbage collector to
* collect it.
* </p>
*
* @throws IOException if an I/O error occurs.
*/
@Override
public void close() throws IOException {
this.reader.close();
}
@Override
public CountryResponse country(InetAddress ipAddress) throws IOException,
GeoIp2Exception {
return this.get(ipAddress, CountryResponse.class, true, "Country");
}
@Override
public CityResponse city(InetAddress ipAddress) throws IOException,
GeoIp2Exception {
return this.get(ipAddress, CityResponse.class, true, "City");
}
/**
* Look up an IP address in a GeoIP2 Anonymous IP database.
*
* @param ipAddress IPv4 or IPv6 address to lookup.
* @return an AnonymousIpResponse for the requested IP address.
* @throws GeoIp2Exception if there is an error looking up the IP
* @throws IOException if there is an IO error
*/
public AnonymousIpResponse anonymousIp(InetAddress ipAddress) throws IOException,
GeoIp2Exception {
return this.get(ipAddress, AnonymousIpResponse.class, false, "GeoIP2-Anonymous-IP");
}
/**
* Look up an IP address in a GeoIP2 Connection Type database.
*
* @param ipAddress IPv4 or IPv6 address to lookup.
* @return a ConnectionTypeResponse for the requested IP address.
* @throws GeoIp2Exception if there is an error looking up the IP
* @throws IOException if there is an IO error
*/
public ConnectionTypeResponse connectionType(InetAddress ipAddress)
throws IOException, GeoIp2Exception {
return this.get(ipAddress, ConnectionTypeResponse.class, false,
"GeoIP2-Connection-Type");
}
/**
* Look up an IP address in a GeoIP2 Domain database.
*
* @param ipAddress IPv4 or IPv6 address to lookup.
* @return a DomainResponse for the requested IP address.
* @throws GeoIp2Exception if there is an error looking up the IP
* @throws IOException if there is an IO error
*/
public DomainResponse domain(InetAddress ipAddress) throws IOException,
GeoIp2Exception {
return this
.get(ipAddress, DomainResponse.class, false, "GeoIP2-Domain");
}
/**
* Look up an IP address in a GeoIP2 ISP database.
*
* @param ipAddress IPv4 or IPv6 address to lookup.
* @return an IspResponse for the requested IP address.
* @throws GeoIp2Exception if there is an error looking up the IP
* @throws IOException if there is an IO error
*/
public IspResponse isp(InetAddress ipAddress) throws IOException,
GeoIp2Exception {
return this.get(ipAddress, IspResponse.class, false, "GeoIP2-ISP");
}
/**
* @return the metadata for the open MaxMind DB file.
*/
public Metadata getMetadata() {
return this.reader.getMetadata();
}
}

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.nifi.processors.GeoEnrichIP

View File

@ -0,0 +1,42 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-nar-bundles</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-geo-bundle</artifactId>
<packaging>pom</packaging>
<description>NiFi Geo Enrichment Capability Set</description>
<modules>
<module>nifi-geo-processors</module>
<module>nifi-geo-nar</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-geo-processors</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

View File

@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hl7-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-hl7-nar</artifactId>
<packaging>nar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hl7-processors</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1 @@
/target/

View File

@ -0,0 +1,120 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hl7-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-hl7-processors</artifactId>
<packaging>jar</packaging>
<build>
<plugins>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>src/test/resources/hypoglycemia.hl7</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-processor-utils</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hl7-query-language</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-base</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v21</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v22</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v23</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v231</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v24</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v25</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v251</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi</groupId>
<artifactId>hapi-structures-v26</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-mock</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,247 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.hl7;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.stream.io.StreamUtils;
import ca.uhn.hl7v2.DefaultHapiContext;
import ca.uhn.hl7v2.HL7Exception;
import ca.uhn.hl7v2.HapiContext;
import ca.uhn.hl7v2.model.Composite;
import ca.uhn.hl7v2.model.Group;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.model.Primitive;
import ca.uhn.hl7v2.model.Segment;
import ca.uhn.hl7v2.model.Structure;
import ca.uhn.hl7v2.model.Type;
import ca.uhn.hl7v2.model.Varies;
import ca.uhn.hl7v2.parser.PipeParser;
import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
@SideEffectFree
@SupportsBatching
@Tags({"HL7", "health level 7", "healthcare", "extract", "attributes"})
@CapabilityDescription("Extracts information from an HL7 (Health Level 7) formatted FlowFile and adds the information as FlowFile Attributes. "
+ "The attributes are named as <Segment Name> <dot> <Field Index>. If the segment is repeating, the naming will be "
+ "<Segment Name> <underscore> <Segment Index> <dot> <Field Index>. For example, we may have an attribute named \"MHS.12\" with "
+ "a value of \"2.1\" and an attribute named \"OBX_11.3\" with a value of \"93000^CPT4\".")
public class ExtractHL7Attributes extends AbstractProcessor {
public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
.name("Character Encoding")
.description("The Character Encoding that is used to encode the HL7 data")
.required(true)
.expressionLanguageSupported(true)
.addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
.defaultValue("UTF-8")
.build();
public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("A FlowFile is routed to this relationship if it is properly parsed as HL7 and its attributes extracted")
.build();
public static final Relationship REL_FAILURE = new Relationship.Builder()
.name("failure")
.description("A FlowFile is routed to this relationship if it cannot be mapped to FlowFile Attributes. This would happen if the FlowFile does not contain valid HL7 data")
.build();
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(CHARACTER_SET);
return properties;
}
@Override
public Set<Relationship> getRelationships() {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_FAILURE);
return relationships;
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile flowFile = session.get();
if ( flowFile == null ) {
return;
}
final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue());
final byte[] buffer = new byte[(int) flowFile.getSize()];
session.read(flowFile, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
StreamUtils.fillBuffer(in, buffer);
}
});
@SuppressWarnings("resource")
final HapiContext hapiContext = new DefaultHapiContext();
hapiContext.setValidationContext(ValidationContextFactory.noValidation());
final PipeParser parser = hapiContext.getPipeParser();
final String hl7Text = new String(buffer, charset);
final Message message;
try {
message = parser.parse(hl7Text);
final Group group = message.getParent();
final Map<String, String> attributes = new HashMap<>();
extractAttributes(group, attributes);
flowFile = session.putAllAttributes(flowFile, attributes);
getLogger().info("Successfully extracted {} attributes for {}; routing to success", new Object[] {attributes.size(), flowFile});
getLogger().debug("Added the following attributes for {}: {}", new Object[] {flowFile, attributes});
session.transfer(flowFile, REL_SUCCESS);
} catch (final HL7Exception e) {
getLogger().error("Failed to extract attributes from {} due to {}", new Object[] {flowFile, e});
session.transfer(flowFile, REL_FAILURE);
return;
}
}
private void extractAttributes(final Group group, final Map<String, String> attributes) throws HL7Exception {
extractAttributes(group, attributes, new HashMap<String, Integer>());
}
private void extractAttributes(final Group group, final Map<String, String> attributes, final Map<String, Integer> segmentCounts) throws HL7Exception {
if ( group.isEmpty() ) {
return;
}
final String[] structureNames = group.getNames();
for ( final String structName : structureNames ) {
final Structure[] subStructures = group.getAll(structName);
if ( group.isGroup(structName) ) {
for ( final Structure subStructure : subStructures ) {
final Group subGroup = (Group) subStructure;
extractAttributes(subGroup, attributes, segmentCounts);
}
} else {
for ( final Structure structure : subStructures ) {
final Segment segment = (Segment) structure;
final String segmentName = segment.getName();
Integer segmentNum = segmentCounts.get(segmentName);
if (segmentNum == null) {
segmentNum = 1;
segmentCounts.put(segmentName, 1);
} else {
segmentNum++;
segmentCounts.put(segmentName, segmentNum);
}
final boolean segmentRepeating = segment.getParent().isRepeating(segment.getName());
final boolean parentRepeating = (segment.getParent().getParent() != segment.getParent() && segment.getParent().getParent().isRepeating(segment.getParent().getName()));
final boolean useSegmentIndex = segmentRepeating || parentRepeating;
final Map<String, String> attributeMap = getAttributes(segment, useSegmentIndex ? segmentNum : null);
attributes.putAll(attributeMap);
}
}
}
}
private Map<String, String> getAttributes(final Segment segment, final Integer segmentNum) throws HL7Exception {
final Map<String, String> attributes = new HashMap<>();
for (int i=1; i <= segment.numFields(); i++) {
final String fieldName = segment.getName() + (segmentNum == null ? "" : "_" + segmentNum) + "." + i;
final Type[] types = segment.getField(i);
final StringBuilder sb = new StringBuilder();
for ( final Type type : types ) {
final String typeValue = getValue(type);
if ( !typeValue.isEmpty() ) {
sb.append(typeValue).append("^");
}
}
if ( sb.length() == 0 ) {
continue;
}
String typeVal = sb.toString();
if ( typeVal.endsWith("^") ) {
typeVal = typeVal.substring(0, typeVal.length() - 1);
}
attributes.put(fieldName, typeVal);
}
return attributes;
}
private String getValue(final Type type) {
if ( type == null ) {
return "";
}
if ( type instanceof Primitive ) {
final String value = ((Primitive) type).getValue();
return value == null ? "" : value;
} else if ( type instanceof Composite ) {
final StringBuilder sb = new StringBuilder();
final Composite composite = (Composite) type;
for ( final Type component : composite.getComponents() ) {
final String componentValue = getValue(component);
if ( !componentValue.isEmpty() ) {
sb.append(componentValue).append("^");
}
}
final String value = sb.toString();
if ( value.endsWith("^") ) {
return value.substring(0, value.length() - 1);
}
return value;
} else if ( type instanceof Varies ) {
final Varies varies = (Varies) type;
return getValue(varies.getData());
}
return "";
}
}

View File

@ -0,0 +1,217 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.hl7;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.nifi.annotation.behavior.DynamicProperties;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.hl7.hapi.HapiMessage;
import org.apache.nifi.hl7.model.HL7Message;
import org.apache.nifi.hl7.query.HL7Query;
import org.apache.nifi.hl7.query.QueryResult;
import org.apache.nifi.hl7.query.exception.HL7QueryParsingException;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.stream.io.StreamUtils;
import ca.uhn.hl7v2.DefaultHapiContext;
import ca.uhn.hl7v2.HapiContext;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.parser.PipeParser;
import ca.uhn.hl7v2.validation.impl.ValidationContextFactory;
@EventDriven
@SideEffectFree
@SupportsBatching
@Tags({"HL7", "healthcare", "route", "Health Level 7"})
@DynamicProperties({@DynamicProperty(name="Name of a Relationship", value="An HL7 Query Language query", description="If a FlowFile matches the query, it will be routed to a relationship with the name of the property")})
@WritesAttributes({@WritesAttribute(attribute="RouteHL7.Route", description="The name of the relationship to which the FlowFile was routed")})
@CapabilityDescription("Routes incoming HL7 data according to user-defined queries. To add a query, add a new property to the processor."
+ " The name of the property will become a new relationship for the processor, and the value is an HL7 Query Language query. If"
+ " a FlowFile matches the query, a copy of the FlowFile will be routed to the associated relationship.")
public class RouteHL7 extends AbstractProcessor {
public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
.name("Character Encoding")
.description("The Character Encoding that is used to encode the HL7 data")
.required(true)
.expressionLanguageSupported(true)
.addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
.defaultValue("UTF-8")
.build();
static final Relationship REL_FAILURE = new Relationship.Builder()
.name("failure")
.description("Any FlowFile that cannot be parsed as HL7 will be routed to this relationship")
.build();
static final Relationship REL_ORIGINAL = new Relationship.Builder()
.name("original")
.description("The original FlowFile that comes into this processor will be routed to this relationship, unless it is routed to 'failure'")
.build();
private volatile Map<Relationship, HL7Query> queries = new HashMap<>();
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
return new PropertyDescriptor.Builder()
.name(propertyDescriptorName)
.description("Specifies a query that will cause any HL7 message matching the query to be routed to the '" + propertyDescriptorName + "' relationship")
.required(false)
.dynamic(true)
.addValidator(new HL7QueryValidator())
.build();
}
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(CHARACTER_SET);
return properties;
}
@Override
public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
if ( !descriptor.isDynamic() ) {
return;
}
final Map<Relationship, HL7Query> updatedQueryMap = new HashMap<>(queries);
final Relationship relationship = new Relationship.Builder().name(descriptor.getName()).build();
if ( newValue == null ) {
updatedQueryMap.remove(relationship);
} else {
final HL7Query query = HL7Query.compile(newValue);
updatedQueryMap.put(relationship, query);
}
this.queries = updatedQueryMap;
}
@Override
public Set<Relationship> getRelationships() {
final Set<Relationship> relationships = new HashSet<>(queries.keySet());
relationships.add(REL_FAILURE);
relationships.add(REL_ORIGINAL);
return relationships;
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile flowFile = session.get();
if ( flowFile == null ) {
return;
}
final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue());
final byte[] buffer = new byte[(int) flowFile.getSize()];
session.read(flowFile, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
StreamUtils.fillBuffer(in, buffer);
}
});
@SuppressWarnings("resource")
final HapiContext hapiContext = new DefaultHapiContext();
hapiContext.setValidationContext(ValidationContextFactory.noValidation());
final PipeParser parser = hapiContext.getPipeParser();
final String hl7Text = new String(buffer, charset);
final HL7Message message;
try {
final Message hapiMessage = parser.parse(hl7Text);
message = new HapiMessage(hapiMessage);
} catch (final Exception e) {
getLogger().error("Failed to parse {} as HL7 due to {}; routing to failure", new Object[] {flowFile, e});
session.transfer(flowFile, REL_FAILURE);
return;
}
final Set<String> matchingRels = new HashSet<>();
final Map<Relationship, HL7Query> queryMap = queries;
for ( final Map.Entry<Relationship, HL7Query> entry : queryMap.entrySet() ) {
final Relationship relationship = entry.getKey();
final HL7Query query = entry.getValue();
final QueryResult result = query.evaluate(message);
if ( result.isMatch() ) {
FlowFile clone = session.clone(flowFile);
clone = session.putAttribute(clone, "RouteHL7.Route", relationship.getName());
session.transfer(clone, relationship);
session.getProvenanceReporter().route(clone, relationship);
matchingRels.add(relationship.getName());
}
}
session.transfer(flowFile, REL_ORIGINAL);
getLogger().info("Routed a copy of {} to {} relationships: {}", new Object[] {flowFile, matchingRels.size(), matchingRels});
}
private static class HL7QueryValidator implements Validator {
@Override
public ValidationResult validate(final String subject, final String input, final ValidationContext context) {
String error = null;
try {
final HL7Query hl7Query = HL7Query.compile(input);
final List<Class<?>> returnTypes = hl7Query.getReturnTypes();
if ( returnTypes.size() != 1 ) {
error = "RouteHL7 requires that the HL7 Query return exactly 1 element of type MESSAGE";
} else if ( !HL7Message.class.isAssignableFrom(returnTypes.get(0)) ) {
error = "RouteHL7 requires that the HL7 Query return exactly 1 element of type MESSAGE";
}
} catch (final HL7QueryParsingException e) {
error = e.toString();
}
if ( error == null ) {
return new ValidationResult.Builder().subject(subject).input(input).valid(true).build();
} else {
return new ValidationResult.Builder().subject(subject).input(input).valid(false).explanation(error).build();
}
}
}
}

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.nifi.processors.hl7.ExtractHL7Attributes
org.apache.nifi.processors.hl7.RouteHL7

View File

@ -0,0 +1,48 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.hl7;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.nifi.processors.hl7.ExtractHL7Attributes;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;
public class TestExtractHL7Attributes {
@Test
public void testExtract() throws IOException {
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi", "DEBUG");
final TestRunner runner = TestRunners.newTestRunner(ExtractHL7Attributes.class);
runner.enqueue(Paths.get("src/test/resources/hypoglycemia.hl7"));
runner.run();
runner.assertAllFlowFilesTransferred(ExtractHL7Attributes.REL_SUCCESS, 1);
final MockFlowFile out = runner.getFlowFilesForRelationship(ExtractHL7Attributes.REL_SUCCESS).get(0);
final SortedMap<String, String> sortedAttrs = new TreeMap<>(out.getAttributes());
for (final Map.Entry<String, String> entry : sortedAttrs.entrySet()) {
System.out.println(entry.getKey() + " : " + entry.getValue());
}
}
}

View File

@ -0,0 +1,5 @@
MSH|^~\&|XXXXXXXX||HealthProvider||||ORU^R01|Q1111111111111111111|P|2.3|
PID|||111111111||SMITH^JOHN||19700100|M||||||||||111111111111|123456789|
PD1||||1234567890^LAST^FIRST^M^^^^^NPI|
OBR|1|341856649^HNAM_ORDERID|000000000000000000|648088^Basic Metabolic Panel|||20150101000000|||||||||1620^Johnson^Corey^A||||||20150101000000|||F|||||||||||20150101000000|
OBX|1|NM|GLU^Glucose Lvl|59|mg/dL|65-99^65^99|L|||F|||20150102000000|

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-nar-bundles</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-hl7-bundle</artifactId>
<packaging>pom</packaging>
<modules>
<module>nifi-hl7-processors</module>
<module>nifi-hl7-nar</module>
</modules>
</project>

View File

@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-language-translation-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-language-translation-nar</artifactId>
<packaging>nar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-yandex-processors</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,63 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-language-translation-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-yandex-processors</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-processor-utils</artifactId>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-mock</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,333 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.yandex;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.yandex.model.Translation;
import org.apache.nifi.processors.yandex.util.Languages;
import org.apache.nifi.processors.yandex.util.ObjectMapperResolver;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.util.StopWatch;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.api.json.JSONConfiguration;
import com.sun.jersey.core.util.MultivaluedMapImpl;
@SupportsBatching
@Tags({"yandex", "translate", "translation", "language"})
@CapabilityDescription("Translates content and attributes from one language to another")
@WritesAttributes({
@WritesAttribute(attribute="yandex.translate.failure.reason", description="If the text cannot be translated, this attribute will be set indicating the reason for the failure"),
@WritesAttribute(attribute="language", description="When the translation succeeds, if the content was translated, this attribute will be set indicating the new language of the content")
})
@DynamicProperty(name="The name of an attribute to set that will contain the translated text of the value",
value="The value to translate",
supportsExpressionLanguage=true,
description="User-defined properties are used to translate arbitrary text based on attributes.")
public class YandexTranslate extends AbstractProcessor {
public static final PropertyDescriptor KEY = new PropertyDescriptor.Builder()
.name("Yandex API Key")
.description("The API Key that is registered with Yandex")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.required(true)
.build();
public static final PropertyDescriptor SOURCE_LANGUAGE = new PropertyDescriptor.Builder()
.name("Input Language")
.description("The language of incoming data")
.required(true)
.defaultValue("es")
.expressionLanguageSupported(true)
.addValidator(new LanguageNameValidator())
.build();
public static final PropertyDescriptor TARGET_LANGUAGE = new PropertyDescriptor.Builder()
.name("Target Language")
.description("The language to translate the text into")
.required(true)
.defaultValue("en")
.expressionLanguageSupported(true)
.addValidator(new LanguageNameValidator())
.build();
public static final PropertyDescriptor TRANSLATE_CONTENT = new PropertyDescriptor.Builder()
.name("Translate Content")
.description("Specifies whether or not the content should be translated. If false, only the text specified by user-defined properties will be translated.")
.required(true)
.allowableValues("true", "false")
.defaultValue("false")
.build();
public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
.name("Character Set")
.description("Specifies the character set of the data to be translated")
.required(true)
.defaultValue("UTF-8")
.expressionLanguageSupported(true)
.addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
.build();
public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("This relationship is used when the translation is successful")
.build();
public static final Relationship REL_COMMS_FAILURE = new Relationship.Builder()
.name("comms.failure")
.description("This relationship is used when the translation fails due to a problem such as a network failure, and for which the translation should be attempted again")
.build();
public static final Relationship REL_TRANSLATION_FAILED = new Relationship.Builder()
.name("translation.failure")
.description("This relationship is used if the translation cannot be performed for some reason other than communications failure")
.build();
private List<PropertyDescriptor> descriptors;
private Set<Relationship> relationships;
private volatile Client client;
private static final String URL = "https://translate.yandex.net/api/v1.5/tr.json/translate";
@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
descriptors.add(KEY);
descriptors.add(SOURCE_LANGUAGE);
descriptors.add(TARGET_LANGUAGE);
descriptors.add(TRANSLATE_CONTENT);
descriptors.add(CHARACTER_SET);
this.descriptors = Collections.unmodifiableList(descriptors);
final Set<Relationship> relationships = new HashSet<Relationship>();
relationships.add(REL_SUCCESS);
relationships.add(REL_COMMS_FAILURE);
relationships.add(REL_TRANSLATION_FAILED);
this.relationships = Collections.unmodifiableSet(relationships);
}
@Override
public Set<Relationship> getRelationships() {
return this.relationships;
}
@Override
public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return descriptors;
}
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
return new PropertyDescriptor.Builder()
.name(propertyDescriptorName)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(true)
.dynamic(true)
.build();
}
@Override
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
final List<ValidationResult> results = new ArrayList<>();
if ( validationContext.getProperty(TRANSLATE_CONTENT).asBoolean().equals(Boolean.FALSE) ) {
boolean foundDynamic = false;
for ( final PropertyDescriptor descriptor : validationContext.getProperties().keySet() ) {
if ( descriptor.isDynamic() ) {
foundDynamic = true;
break;
}
}
if ( !foundDynamic ) {
results.add(new ValidationResult.Builder().subject("Text to translate").input("<none>").valid(false).explanation("Must either set 'Translate Content' to true or add at least one user-defined property").build());
}
}
return results;
}
@OnScheduled
public void onScheduled(final ProcessContext context) {
final ClientConfig config = new DefaultClientConfig();
config.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
config.getClasses().add(ObjectMapperResolver.class);
client = Client.create(config);
}
@OnStopped
public void destroyClient() {
if ( client != null ) {
client.destroy();
}
}
protected WebResource.Builder prepareResource(final String key, final List<String> text, final String sourceLanguage, final String destLanguage) {
WebResource webResource = client.resource(URL);
final MultivaluedMap<String, String> paramMap = new MultivaluedMapImpl();
paramMap.put("text", text);
paramMap.add("key", key);
paramMap.add("lang", sourceLanguage + "-" + destLanguage);
WebResource.Builder builder = webResource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_FORM_URLENCODED);
builder = builder.entity(paramMap);
return builder;
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile flowFile = session.get();
if ( flowFile == null ) {
return;
}
final StopWatch stopWatch = new StopWatch(true);
final String key = context.getProperty(KEY).getValue();
final String sourceLanguage = context.getProperty(SOURCE_LANGUAGE).evaluateAttributeExpressions(flowFile).getValue();
final String targetLanguage = context.getProperty(TARGET_LANGUAGE).evaluateAttributeExpressions(flowFile).getValue();
final String encoding = context.getProperty(CHARACTER_SET).evaluateAttributeExpressions(flowFile).getValue();
final List<String> attributeNames = new ArrayList<>();
final List<String> textValues = new ArrayList<>();
for ( final PropertyDescriptor descriptor : context.getProperties().keySet() ) {
if ( descriptor.isDynamic() ) {
attributeNames.add(descriptor.getName()); // add to list so that we know the order when the translations come back.
textValues.add(context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue());
}
}
if ( context.getProperty(TRANSLATE_CONTENT).asBoolean() ) {
final byte[] buff = new byte[(int) flowFile.getSize()];
session.read(flowFile, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
StreamUtils.fillBuffer(in, buff);
}
});
final String content = new String(buff, Charset.forName(encoding));
textValues.add(content);
}
final WebResource.Builder builder = prepareResource(key, textValues, sourceLanguage, targetLanguage);
final ClientResponse response;
try {
response = builder.post(ClientResponse.class);
} catch (final Exception e) {
getLogger().error("Failed to make request to Yandex to transate text for {} due to {}; routing to comms.failure", new Object[] {flowFile, e});
session.transfer(flowFile, REL_COMMS_FAILURE);
return;
}
if ( response.getStatus() != Status.OK.getStatusCode() ) {
getLogger().error("Failed to translate text using Yandex for {}; response was {}: {}; routing to {}", new Object[] {
flowFile, response.getStatus(), response.getStatusInfo().getReasonPhrase(), REL_TRANSLATION_FAILED.getName()});
flowFile = session.putAttribute(flowFile, "yandex.translate.failure.reason", response.getStatusInfo().getReasonPhrase());
session.transfer(flowFile, REL_TRANSLATION_FAILED);
return;
}
final Map<String, String> newAttributes = new HashMap<>();
final Translation translation = response.getEntity(Translation.class);
final List<String> texts = translation.getText();
for (int i=0; i < texts.size(); i++) {
final String text = texts.get(i);
if ( i < attributeNames.size() ) {
final String attributeName = attributeNames.get(i);
newAttributes.put(attributeName, text);
} else {
flowFile = session.write(flowFile, new OutputStreamCallback() {
@Override
public void process(final OutputStream out) throws IOException {
out.write(text.getBytes(encoding));
}
});
newAttributes.put("language", targetLanguage);
}
}
if ( !newAttributes.isEmpty() ) {
flowFile = session.putAllAttributes(flowFile, newAttributes);
}
stopWatch.stop();
session.transfer(flowFile, REL_SUCCESS);
getLogger().info("Successfully translated {} items for {} from {} to {} in {}; routing to success", new Object[] {texts.size(), flowFile, sourceLanguage, targetLanguage, stopWatch.getDuration()});
}
private static class LanguageNameValidator implements Validator {
@Override
public ValidationResult validate(final String subject, final String input, final ValidationContext context) {
if ( context.isExpressionLanguagePresent(input) ) {
return new ValidationResult.Builder().subject(subject).input(input).valid(true).explanation("Expression Language Present").build();
}
if ( Languages.getLanguageMap().containsKey(input.toLowerCase()) ) {
return new ValidationResult.Builder().subject(subject).input(input).valid(true).build();
}
return new ValidationResult.Builder().subject(subject).input(input).valid(false).explanation(input + " is not a language that is supported by Yandex").build();
}
}
}

View File

@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.yandex.model;
import java.util.List;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement(name = "translation")
public class Translation {
private int code;
private String lang;
private List<String> text;
public int getCode() {
return code;
}
public void setCode(final int code) {
this.code = code;
}
public String getLang() {
return lang;
}
public void setLang(final String lang) {
this.lang = lang;
}
public List<String> getText() {
return text;
}
public void setText(final List<String> text) {
this.text = text;
}
}

View File

@ -0,0 +1,86 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.yandex.util;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class Languages {
private static final Map<String, String> languageAbbreviationMap = new HashMap<>();
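// Maps ISO 639-1 codes to the language names supported by Yandex; the static block below also adds the reverse mapping.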
static {
languageAbbreviationMap.put("ar", "arabic");
languageAbbreviationMap.put("az", "azerbaijani");
languageAbbreviationMap.put("be", "belarusian");
languageAbbreviationMap.put("bg", "bulgarian");
languageAbbreviationMap.put("bs", "bosnian");
languageAbbreviationMap.put("ca", "catalan");
languageAbbreviationMap.put("cs", "czech");
languageAbbreviationMap.put("da", "danish");
languageAbbreviationMap.put("de", "german");
languageAbbreviationMap.put("el", "greek");
languageAbbreviationMap.put("en", "english");
languageAbbreviationMap.put("es", "spanish");
languageAbbreviationMap.put("et", "estonian");
languageAbbreviationMap.put("fi", "finnish");
languageAbbreviationMap.put("fr", "french");
languageAbbreviationMap.put("he", "hebrew");
languageAbbreviationMap.put("hr", "croatian");
languageAbbreviationMap.put("hu", "hungarian");
languageAbbreviationMap.put("hy", "armenian");
languageAbbreviationMap.put("id", "indonesian");
languageAbbreviationMap.put("is", "icelandic");
languageAbbreviationMap.put("it", "italian");
languageAbbreviationMap.put("ja", "japanese");
languageAbbreviationMap.put("ka", "georgian");
languageAbbreviationMap.put("ko", "korean");
languageAbbreviationMap.put("lt", "lithuanian");
languageAbbreviationMap.put("lv", "latvian");
languageAbbreviationMap.put("mk", "macedonian");
languageAbbreviationMap.put("ms", "malay");
languageAbbreviationMap.put("mt", "maltese");
languageAbbreviationMap.put("nl", "dutch");
languageAbbreviationMap.put("no", "norwegian");
languageAbbreviationMap.put("pl", "polish");
languageAbbreviationMap.put("pt", "portuguese");
languageAbbreviationMap.put("ro", "romanian");
languageAbbreviationMap.put("ru", "russian");
languageAbbreviationMap.put("sk", "slovak");
languageAbbreviationMap.put("sl", "slovenian");
languageAbbreviationMap.put("sq", "albanian");
languageAbbreviationMap.put("sr", "serbian");
languageAbbreviationMap.put("sv", "swedish");
languageAbbreviationMap.put("th", "thai");
languageAbbreviationMap.put("tr", "turkish");
languageAbbreviationMap.put("uk", "ukrainian");
languageAbbreviationMap.put("vi", "vietnamese");
languageAbbreviationMap.put("zh", "chinese");
final Map<String, String> reverseMap = new HashMap<>();
for ( final Map.Entry<String, String> entry : languageAbbreviationMap.entrySet() ) {
reverseMap.put(entry.getValue(), entry.getKey());
}
languageAbbreviationMap.putAll(reverseMap);
}
public static Map<String, String> getLanguageMap() {
return Collections.unmodifiableMap(languageAbbreviationMap);
}
}

View File

@ -0,0 +1,48 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.yandex.util;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import org.codehaus.jackson.map.AnnotationIntrospector;
import org.codehaus.jackson.map.DeserializationConfig;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
@Provider
public class ObjectMapperResolver implements ContextResolver<ObjectMapper> {
private final ObjectMapper mapper;
public ObjectMapperResolver() throws Exception {
mapper = new ObjectMapper();
final AnnotationIntrospector jaxbIntrospector = new JaxbAnnotationIntrospector();
final SerializationConfig serializationConfig = mapper.getSerializationConfig();
final DeserializationConfig deserializationConfig = mapper.getDeserializationConfig();
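// Use JAXB annotations for (de)serialization and omit null fields from serialized output.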
mapper.setSerializationConfig(serializationConfig.withSerializationInclusion(Inclusion.NON_NULL).withAnnotationIntrospector(jaxbIntrospector));
mapper.setDeserializationConfig(deserializationConfig.withAnnotationIntrospector(jaxbIntrospector));
}
@Override
public ObjectMapper getContext(Class<?> objectType) {
return mapper;
}
}

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.nifi.processors.yandex.YandexTranslate

View File

@ -0,0 +1,226 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.yandex;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response.Status.Family;
import javax.ws.rs.core.Response.StatusType;
import org.apache.nifi.processors.yandex.model.Translation;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.WebResource.Builder;
public class TestYandexTranslate {
private static final Map<String, String> translations = new HashMap<>();
@BeforeClass
public static void setupTranslationMap() {
translations.put("bonjour", "hello");
translations.put("traduire", "translate");
translations.put("amusant", "fun");
translations.put("ordinateur", "computer");
}
private TestRunner createTestRunner(final int statusCode) {
return TestRunners.newTestRunner(new YandexTranslate() {
@Override
protected Builder prepareResource(final String key, final List<String> text, final String sourceLanguage, final String destLanguage) {
final WebResource.Builder builder = Mockito.mock(WebResource.Builder.class);
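// Stub the POST call to return a canned response with the requested status code; for 200, also return a canned Translation entity.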
Mockito.doAnswer(new Answer<ClientResponse>() {
@Override
public ClientResponse answer(final InvocationOnMock invocation) throws Throwable {
final ClientResponse response = Mockito.mock(ClientResponse.class);
final StatusType statusType = new StatusType() {
@Override
public int getStatusCode() {
return statusCode;
}
@Override
public String getReasonPhrase() {
return String.valueOf(statusCode);
}
@Override
public Family getFamily() {
return statusCode == 200 ? Family.SUCCESSFUL : Family.SERVER_ERROR;
}
};
Mockito.when(response.getStatus()).thenReturn(statusCode);
Mockito.when(response.getStatusInfo()).thenReturn(statusType);
if ( statusCode == 200 ) {
final Translation translation = new Translation();
translation.setCode(statusCode);
translation.setLang(destLanguage);
final List<String> translationList = new ArrayList<>();
for ( final String original : text ) {
final String translated = translations.get(original);
translationList.add(translated == null ? original : translated);
}
translation.setText(translationList);
Mockito.when(response.getEntity(Translation.class)).thenReturn(translation);
}
return response;
}
}).when(builder).post(ClientResponse.class);
return builder;
}
});
}
@Test
public void testTranslateContent() {
final TestRunner testRunner = createTestRunner(200);
testRunner.setProperty(YandexTranslate.KEY, "a");
testRunner.setProperty(YandexTranslate.SOURCE_LANGUAGE, "fr");
testRunner.setProperty(YandexTranslate.TARGET_LANGUAGE, "en");
testRunner.setProperty(YandexTranslate.TRANSLATE_CONTENT, "true");
testRunner.setProperty(YandexTranslate.CHARACTER_SET, "UTF-8");
testRunner.enqueue("bonjour".getBytes());
testRunner.run();
testRunner.assertAllFlowFilesTransferred(YandexTranslate.REL_SUCCESS, 1);
final MockFlowFile out = testRunner.getFlowFilesForRelationship(YandexTranslate.REL_SUCCESS).get(0);
final String outText = new String(out.toByteArray());
assertEquals("hello", outText);
}
@Test
public void testTranslateSingleAttribute() {
final TestRunner testRunner = createTestRunner(200);
testRunner.setProperty(YandexTranslate.KEY, "A");
testRunner.setProperty(YandexTranslate.SOURCE_LANGUAGE, "fr");
testRunner.setProperty(YandexTranslate.TARGET_LANGUAGE, "en");
testRunner.setProperty(YandexTranslate.TRANSLATE_CONTENT, "false");
testRunner.setProperty(YandexTranslate.CHARACTER_SET, "UTF-8");
testRunner.setProperty("translated", "bonjour");
testRunner.enqueue(new byte[0]);
testRunner.run();
testRunner.assertAllFlowFilesTransferred(YandexTranslate.REL_SUCCESS, 1);
final MockFlowFile out = testRunner.getFlowFilesForRelationship(YandexTranslate.REL_SUCCESS).get(0);
assertEquals(0, out.toByteArray().length);
out.assertAttributeEquals("translated", "hello");
}
@Test
public void testTranslateMultipleAttributes() {
final TestRunner testRunner = createTestRunner(200);
testRunner.setProperty(YandexTranslate.KEY, "A");
testRunner.setProperty(YandexTranslate.SOURCE_LANGUAGE, "fr");
testRunner.setProperty(YandexTranslate.TARGET_LANGUAGE, "en");
testRunner.setProperty(YandexTranslate.TRANSLATE_CONTENT, "false");
testRunner.setProperty(YandexTranslate.CHARACTER_SET, "UTF-8");
testRunner.setProperty("hello", "bonjour");
testRunner.setProperty("translate", "traduire");
testRunner.setProperty("fun", "amusant");
testRunner.enqueue(new byte[0]);
testRunner.run();
testRunner.assertAllFlowFilesTransferred(YandexTranslate.REL_SUCCESS, 1);
final MockFlowFile out = testRunner.getFlowFilesForRelationship(YandexTranslate.REL_SUCCESS).get(0);
assertEquals(0, out.toByteArray().length);
out.assertAttributeEquals("hello", "hello");
out.assertAttributeEquals("translate", "translate");
out.assertAttributeEquals("fun", "fun");
}
@Test
public void testTranslateContentAndMultipleAttributes() {
final TestRunner testRunner = createTestRunner(200);
testRunner.setProperty(YandexTranslate.KEY, "A");
testRunner.setProperty(YandexTranslate.SOURCE_LANGUAGE, "fr");
testRunner.setProperty(YandexTranslate.TARGET_LANGUAGE, "en");
testRunner.setProperty(YandexTranslate.TRANSLATE_CONTENT, "true");
testRunner.setProperty(YandexTranslate.CHARACTER_SET, "UTF-8");
testRunner.setProperty("hello", "bonjour");
testRunner.setProperty("translate", "traduire");
testRunner.setProperty("fun", "amusant");
testRunner.setProperty("nifi", "nifi");
testRunner.enqueue("ordinateur".getBytes());
testRunner.run();
testRunner.assertAllFlowFilesTransferred(YandexTranslate.REL_SUCCESS, 1);
final MockFlowFile out = testRunner.getFlowFilesForRelationship(YandexTranslate.REL_SUCCESS).get(0);
out.assertContentEquals("computer");
out.assertAttributeEquals("hello", "hello");
out.assertAttributeEquals("translate", "translate");
out.assertAttributeEquals("fun", "fun");
out.assertAttributeEquals("nifi", "nifi");
}
@Test
public void testFailureResponse() {
final TestRunner testRunner = createTestRunner(403);
testRunner.setProperty(YandexTranslate.KEY, "A");
testRunner.setProperty(YandexTranslate.SOURCE_LANGUAGE, "fr");
testRunner.setProperty(YandexTranslate.TARGET_LANGUAGE, "en");
testRunner.setProperty(YandexTranslate.TRANSLATE_CONTENT, "true");
testRunner.setProperty(YandexTranslate.CHARACTER_SET, "UTF-8");
testRunner.setProperty("hello", "bonjour");
testRunner.setProperty("translate", "traduire");
testRunner.setProperty("fun", "amusant");
testRunner.setProperty("nifi", "nifi");
testRunner.enqueue("ordinateur".getBytes());
testRunner.run();
testRunner.assertAllFlowFilesTransferred(YandexTranslate.REL_TRANSLATION_FAILED, 1);
}
}

View File

@ -0,0 +1,48 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-nar-bundles</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-language-translation-bundle</artifactId>
<packaging>pom</packaging>
<modules>
<module>nifi-yandex-processors</module>
<module>nifi-language-translation-nar</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
<version>${jersey.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

View File

@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-social-media-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-social-media-nar</artifactId>
<packaging>nar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-twitter-processors</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1 @@
/target/

View File

@ -0,0 +1,60 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-social-media-bundle</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-twitter-processors</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-processor-utils</artifactId>
</dependency>
<dependency>
<groupId>com.twitter</groupId>
<artifactId>hbc-twitter4j</artifactId>
<version>2.2.0</version>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-mock</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,360 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.twitter;
import java.io.IOException;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.regex.Pattern;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import com.twitter.hbc.ClientBuilder;
import com.twitter.hbc.core.Client;
import com.twitter.hbc.core.Constants;
import com.twitter.hbc.core.endpoint.StatusesFilterEndpoint;
import com.twitter.hbc.core.endpoint.StatusesFirehoseEndpoint;
import com.twitter.hbc.core.endpoint.StatusesSampleEndpoint;
import com.twitter.hbc.core.endpoint.StreamingEndpoint;
import com.twitter.hbc.core.event.Event;
import com.twitter.hbc.core.processor.StringDelimitedProcessor;
import com.twitter.hbc.httpclient.auth.Authentication;
import com.twitter.hbc.httpclient.auth.OAuth1;
@SupportsBatching
@Tags({"twitter", "tweets", "social media", "status", "json"})
@CapabilityDescription("Pulls status changes from Twitter's streaming API")
@WritesAttribute(attribute="mime.type", description="Sets mime type to application/json")
public class GetTwitter extends AbstractProcessor {
static final AllowableValue ENDPOINT_SAMPLE = new AllowableValue("Sample Endpoint", "Sample Endpoint", "The endpoint that provides public data, aka a 'garden hose'");
static final AllowableValue ENDPOINT_FIREHOSE = new AllowableValue("Firehose Endpoint", "Firehose Endpoint", "The endpoint that provides access to all tweets");
static final AllowableValue ENDPOINT_FILTER = new AllowableValue("Filter Endpoint", "Filter Endpoint", "Endpoint that allows the stream to be filtered by specific terms or User IDs");
public static final PropertyDescriptor ENDPOINT = new PropertyDescriptor.Builder()
.name("Twitter Endpoint")
.description("Specifies which endpoint data should be pulled from")
.required(true)
.allowableValues(ENDPOINT_SAMPLE, ENDPOINT_FIREHOSE, ENDPOINT_FILTER)
.defaultValue(ENDPOINT_SAMPLE.getValue())
.build();
public static final PropertyDescriptor CONSUMER_KEY = new PropertyDescriptor.Builder()
.name("Consumer Key")
.description("The Consumer Key provided by Twitter")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor CONSUMER_SECRET = new PropertyDescriptor.Builder()
.name("Consumer Secret")
.description("The Consumer Secret provided by Twitter")
.required(true)
.sensitive(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor ACCESS_TOKEN = new PropertyDescriptor.Builder()
.name("Access Token")
.description("The Acces Token provided by Twitter")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor ACCESS_TOKEN_SECRET = new PropertyDescriptor.Builder()
.name("Access Token Secret")
.description("The Access Token Secret provided by Twitter")
.required(true)
.sensitive(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor LANGUAGES = new PropertyDescriptor.Builder()
.name("Languages")
.description("A comma-separated list of languages for which tweets should be fetched")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor FOLLOWING = new PropertyDescriptor.Builder()
.name("IDs to Follow")
.description("A comma-separated list of Twitter User ID's to follow. Ignored unless Endpoint is set to 'Filter Endpoint'.")
.required(false)
.addValidator(new FollowingValidator())
.build();
public static final PropertyDescriptor TERMS = new PropertyDescriptor.Builder()
.name("Terms to Filter On")
.description("A comma-separated list of terms to filter on. Ignored unless Endpoint is set to 'Filter Endpoint'. The filter works such that if any term matches, the status update will be retrieved; multiple terms separated by a space function as an 'AND'. I.e., 'it was, hello' will retrieve status updates that have either 'hello' or both 'it' AND 'was'")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("All status updates will be routed to this relationship")
.build();
private List<PropertyDescriptor> descriptors;
private Set<Relationship> relationships;
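// Holds connection/lifecycle events reported by the Hosebird client (capacity 1,000).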
private final BlockingQueue<Event> eventQueue = new LinkedBlockingQueue<Event>(1000);
private volatile Client client;
private volatile BlockingQueue<String> messageQueue;
@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
descriptors.add(ENDPOINT);
descriptors.add(CONSUMER_KEY);
descriptors.add(CONSUMER_SECRET);
descriptors.add(ACCESS_TOKEN);
descriptors.add(ACCESS_TOKEN_SECRET);
descriptors.add(LANGUAGES);
descriptors.add(TERMS);
descriptors.add(FOLLOWING);
this.descriptors = Collections.unmodifiableList(descriptors);
final Set<Relationship> relationships = new HashSet<Relationship>();
relationships.add(REL_SUCCESS);
this.relationships = Collections.unmodifiableSet(relationships);
}
@Override
public Set<Relationship> getRelationships() {
return this.relationships;
}
@Override
public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return descriptors;
}
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
return new PropertyDescriptor.Builder()
.name(propertyDescriptorName)
.description("Adds a query parameter with name '" + propertyDescriptorName + "' to the Twitter query")
.required(false)
.dynamic(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
}
@Override
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
final List<ValidationResult> results = new ArrayList<>();
final String endpointName = validationContext.getProperty(ENDPOINT).getValue();
if ( ENDPOINT_FILTER.getValue().equals(endpointName) ) {
if ( !validationContext.getProperty(TERMS).isSet() && !validationContext.getProperty(FOLLOWING).isSet() ) {
results.add(new ValidationResult.Builder().input("").subject(FOLLOWING.getName()).valid(false).explanation("When using the 'Filter Endpoint', at least one of '" + TERMS.getName() + "' or '" + FOLLOWING.getName() + "' must be set").build());
}
}
return results;
}
@Override
public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
// If any property is modified, the queued messages are no longer valid. Destroy all messages in the queue.
if ( messageQueue != null ) {
messageQueue.clear();
}
}
@OnScheduled
public void onScheduled(final ProcessContext context) throws MalformedURLException {
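// Buffer up to 100,000 raw messages between the Hosebird client and onTrigger.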
messageQueue = new LinkedBlockingQueue<>(100000);
final String endpointName = context.getProperty(ENDPOINT).getValue();
final Authentication oauth = new OAuth1(context.getProperty(CONSUMER_KEY).getValue(),
context.getProperty(CONSUMER_SECRET).getValue(),
context.getProperty(ACCESS_TOKEN).getValue(),
context.getProperty(ACCESS_TOKEN_SECRET).getValue());
final ClientBuilder clientBuilder = new ClientBuilder();
clientBuilder.name("GetTwitter[id=" + getIdentifier() + "]")
.authentication(oauth)
.eventMessageQueue(eventQueue)
.processor(new StringDelimitedProcessor(messageQueue));
final String languageString = context.getProperty(LANGUAGES).getValue();
final List<String> languages;
if ( languageString == null ) {
languages = null;
} else {
languages = new ArrayList<>();
for ( final String language : context.getProperty(LANGUAGES).getValue().split(",") ) {
languages.add(language.trim());
}
}
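// Select the streaming endpoint that corresponds to the configured 'Twitter Endpoint' property.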
final String host;
final StreamingEndpoint streamingEndpoint;
if ( ENDPOINT_SAMPLE.getValue().equals(endpointName) ) {
host = Constants.STREAM_HOST;
final StatusesSampleEndpoint sse = new StatusesSampleEndpoint();
streamingEndpoint = sse;
if ( languages != null ) {
sse.languages(languages);
}
} else if ( ENDPOINT_FIREHOSE.getValue().equals(endpointName) ) {
host = Constants.STREAM_HOST;
final StatusesFirehoseEndpoint firehoseEndpoint = new StatusesFirehoseEndpoint();
streamingEndpoint = firehoseEndpoint;
if ( languages != null ) {
firehoseEndpoint.languages(languages);
}
} else if ( ENDPOINT_FILTER.getValue().equals(endpointName) ) {
host = Constants.STREAM_HOST;
final StatusesFilterEndpoint filterEndpoint = new StatusesFilterEndpoint();
final String followingString = context.getProperty(FOLLOWING).getValue();
final List<Long> followingIds;
if ( followingString == null ) {
followingIds = Collections.emptyList();
} else {
followingIds = new ArrayList<>();
for ( final String split : followingString.split(",") ) {
final Long id = Long.parseLong(split.trim());
followingIds.add(id);
}
}
final String termString = context.getProperty(TERMS).getValue();
final List<String> terms;
if ( termString == null ) {
terms = Collections.emptyList();
} else {
terms = new ArrayList<>();
for ( final String split : termString.split(",") ) {
terms.add(split.trim());
}
}
if ( !terms.isEmpty() ) {
filterEndpoint.trackTerms(terms);
}
if ( !followingIds.isEmpty() ) {
filterEndpoint.followings(followingIds);
}
if ( languages != null ) {
filterEndpoint.languages(languages);
}
streamingEndpoint = filterEndpoint;
} else {
throw new AssertionError("Endpoint was invalid value: " + endpointName);
}
clientBuilder.hosts(host).endpoint(streamingEndpoint);
client = clientBuilder.build();
client.connect();
}
@OnStopped
public void shutdownClient() {
if ( client != null ) {
client.stop();
}
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
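// Check for client lifecycle events first so connection errors are logged (and reconnects attempted) promptly.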
final Event event = eventQueue.poll();
if ( event != null ) {
switch (event.getEventType()) {
case STOPPED_BY_ERROR:
getLogger().error("Received error {}: {} due to {}. Will not attempt to reconnect", new Object[] {event.getEventType(), event.getMessage(), event.getUnderlyingException()});
break;
case CONNECTION_ERROR:
case HTTP_ERROR:
getLogger().error("Received error {}: {}. Will attempt to reconnect", new Object[] {event.getEventType(), event.getMessage()});
client.reconnect();
break;
default:
break;
}
}
final String tweet = messageQueue.poll();
if ( tweet == null ) {
context.yield();
return;
}
FlowFile flowFile = session.create();
flowFile = session.write(flowFile, new OutputStreamCallback() {
@Override
public void process(final OutputStream out) throws IOException {
out.write(tweet.getBytes(StandardCharsets.UTF_8));
}
});
final Map<String, String> attributes = new HashMap<>();
attributes.put(CoreAttributes.MIME_TYPE.key(), "application/json");
attributes.put(CoreAttributes.FILENAME.key(), flowFile.getAttribute(CoreAttributes.FILENAME.key()) + ".json");
flowFile = session.putAllAttributes(flowFile, attributes);
session.transfer(flowFile, REL_SUCCESS);
session.getProvenanceReporter().receive(flowFile, Constants.STREAM_HOST + client.getEndpoint().getURI().toString());
}
private static class FollowingValidator implements Validator {
private static final Pattern NUMBER_PATTERN = Pattern.compile("\\d+");
@Override
public ValidationResult validate(final String subject, final String input, final ValidationContext context) {
final String[] splits = input.split(",");
for ( final String split : splits ) {
if ( !NUMBER_PATTERN.matcher(split.trim()).matches() ) {
return new ValidationResult.Builder().input(input).subject(subject).valid(false).explanation("Must be comma-separted list of User ID's").build();
}
}
return new ValidationResult.Builder().subject(subject).input(input).valid(true).build();
}
}
}

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.nifi.processors.twitter.GetTwitter

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-nar-bundles</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
</parent>
<artifactId>nifi-social-media-bundle</artifactId>
<packaging>pom</packaging>
<modules>
<module>nifi-twitter-processors</module>
<module>nifi-social-media-nar</module>
</modules>
</project>

View File

@ -35,6 +35,10 @@
<module>nifi-update-attribute-bundle</module>
<module>nifi-kafka-bundle</module>
<module>nifi-kite-bundle</module>
<module>nifi-social-media-bundle</module>
<module>nifi-geo-bundle</module>
<module>nifi-hl7-bundle</module>
<module>nifi-language-translation-bundle</module>
</modules>
<dependencyManagement>
<dependencies>

View File

@ -798,6 +798,30 @@
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-social-media-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hl7-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-language-translation-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-geo-nar</artifactId>
<version>0.1.0-incubating-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-properties</artifactId>