mirror of https://github.com/apache/nifi.git

NIFI-403 addressed licensing/notice and trivial pom/source issues

This commit is contained in:
  parent 4f009fc3fb
  commit 4e73125441
@@ -589,8 +589,8 @@ This product bundles 'reset.css' which is available in the 'public domain'.
  OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

-The binary distribution of this product bundles 'Paranamer Core' which is available
-under a BSD style license.
+The binary distribution of this product bundles 'ParaNamer' and 'Paranamer Core'
+which is available under a BSD style license.

  Copyright (c) 2006 Paul Hammant & ThoughtWorks Inc
  All rights reserved.

@@ -90,6 +90,11 @@ The following binary components are provided under the Apache Software License v2
  This product includes software from the Spring Framework,
  under the Apache License 2.0 (see: StringUtils.containsWhitespace())

  (ASLv2) Apache Commons JEXL
    The following NOTICE information applies:
      Apache Commons JEXL
      Copyright 2001-2011 The Apache Software Foundation

  (ASLv2) Spring Framework
    The following NOTICE information applies:
      Spring Framework 4.1.4.RELEASE

@@ -470,6 +475,28 @@ The following binary components are provided under the Apache Software License v2
    The following NOTICE information applies:
      Copyright 2011 JsonPath authors

  (ASLv2) Kite SDK
    The following NOTICE information applies:
      This product includes software developed by Cloudera, Inc.
      (http://www.cloudera.com/).

      This product includes software developed at
      The Apache Software Foundation (http://www.apache.org/).

      This product includes software developed by
      Saxonica (http://www.saxonica.com/).

  (ASLv2) Parquet MR
    The following NOTICE information applies:
      Parquet MR
      Copyright 2012 Twitter, Inc.

      This project includes code from https://github.com/lemire/JavaFastPFOR
      parquet-column/src/main/java/parquet/column/values/bitpacking/LemireBitPacking.java
      Apache License Version 2.0 http://www.apache.org/licenses/.
      (c) Daniel Lemire, http://lemire.me/en/


  ************************
  Common Development and Distribution License 1.1
  ************************
@@ -1,47 +1,45 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <!--
      Licensed to the Apache Software Foundation (ASF) under one or more
      contributor license agreements. See the NOTICE file distributed with
      this work for additional information regarding copyright ownership.
      The ASF licenses this file to You under the Apache License, Version 2.0
      (the "License"); you may not use this file except in compliance with
      the License. You may obtain a copy of the License at
          http://www.apache.org/licenses/LICENSE-2.0
      Unless required by applicable law or agreed to in writing, software
      distributed under the License is distributed on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      See the License for the specific language governing permissions and
      limitations under the License.
    -->
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.apache.nifi</groupId>
        <artifactId>nifi-kite-bundle</artifactId>
        <version>0.0.2-incubating-SNAPSHOT</version>
    </parent>

    <artifactId>nifi-kite-nar</artifactId>
    <packaging>nar</packaging>

    <name>Kite NAR</name>

    <dependencies>
        <dependency>
            <groupId>org.apache.nifi</groupId>
            <artifactId>nifi-hadoop-libraries-nar</artifactId>
            <type>nar</type>
        </dependency>
        <dependency>
            <groupId>org.apache.nifi</groupId>
            <artifactId>nifi-kite-processors</artifactId>
            <!-- Hadoop client is inherited from nifi-hadoop-libraries-nar -->
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-client</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
    </dependencies>
</project>
@@ -0,0 +1,240 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

APACHE NIFI SUBCOMPONENTS:

The Apache NiFi project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for these
subcomponents is subject to the terms and conditions of the following
licenses.

This product bundles 'ParaNamer' which is available under a BSD style license.
For details see http://asm.ow2.org/asmdex-license.html

  Copyright (c) 2006 Paul Hammant & ThoughtWorks Inc
  All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:
  1. Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
  2. Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
  3. Neither the name of the copyright holders nor the names of its
     contributors may be used to endorse or promote products derived from
     this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
  AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
  LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
  CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
  ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
  THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,78 @@
nifi-kite-nar
Copyright 2014-2015 The Apache Software Foundation

This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).

******************
Apache Software License v2
******************

The following binary components are provided under the Apache Software License v2

  (ASLv2) Apache Avro
    The following NOTICE information applies:
      Apache Avro
      Copyright 2009-2013 The Apache Software Foundation

  (ASLv2) Apache Commons Codec
    The following NOTICE information applies:
      Apache Commons Codec
      Copyright 2002-2014 The Apache Software Foundation

      src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java
      contains test data from http://aspell.net/test/orig/batch0.tab.
      Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org)

      ===============================================================================

      The content of package org.apache.commons.codec.language.bm has been translated
      from the original php source code available at http://stevemorse.org/phoneticinfo.htm
      with permission from the original authors.
      Original source copyright:
      Copyright (c) 2008 Alexander Beider & Stephen P. Morse.

  (ASLv2) Apache Commons JEXL
    The following NOTICE information applies:
      Apache Commons JEXL
      Copyright 2001-2011 The Apache Software Foundation

  (ASLv2) Apache Commons Logging
    The following NOTICE information applies:
      Apache Commons Logging
      Copyright 2003-2013 The Apache Software Foundation

  (ASLv2) Kite SDK
    The following NOTICE information applies:
      This product includes software developed by Cloudera, Inc.
      (http://www.cloudera.com/).

      This product includes software developed at
      The Apache Software Foundation (http://www.apache.org/).

      This product includes software developed by
      Saxonica (http://www.saxonica.com/).

  (ASLv2) Snappy Java
    The following NOTICE information applies:
      This product includes software developed by Google
      Snappy: http://code.google.com/p/snappy/ (New BSD License)

      This product includes software developed by Apache
      PureJavaCrc32C from apache-hadoop-common http://hadoop.apache.org/
      (Apache 2.0 license)

      This library contains statically linked libstdc++. This inclusion is allowed by
      "GCC Runtime Library Exception"
      http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html

  (ASLv2) Parquet MR
    The following NOTICE information applies:
      Parquet MR
      Copyright 2012 Twitter, Inc.

      This project includes code from https://github.com/lemire/JavaFastPFOR
      parquet-column/src/main/java/parquet/column/values/bitpacking/LemireBitPacking.java
      Apache License Version 2.0 http://www.apache.org/licenses/.
      (c) Daniel Lemire, http://lemire.me/en/
@@ -1,148 +1,147 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <!--
      Licensed to the Apache Software Foundation (ASF) under one or more
      contributor license agreements. See the NOTICE file distributed with
      this work for additional information regarding copyright ownership.
      The ASF licenses this file to You under the Apache License, Version 2.0
      (the "License"); you may not use this file except in compliance with
      the License. You may obtain a copy of the License at
          http://www.apache.org/licenses/LICENSE-2.0
      Unless required by applicable law or agreed to in writing, software
      distributed under the License is distributed on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      See the License for the specific language governing permissions and
      limitations under the License.
    -->
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.apache.nifi</groupId>
        <artifactId>nifi-kite-bundle</artifactId>
        <version>0.0.2-incubating-SNAPSHOT</version>
    </parent>

    <artifactId>nifi-kite-processors</artifactId>
    <packaging>jar</packaging>
    <name>Kite Hadoop Processors</name>

    <properties>
        <kite.version>0.18.0</kite.version>
        <guava.version>11.0.2</guava.version>
        <junit.version>4.10</junit.version>
        <findbugs-annotations.version>1.3.9-1</findbugs-annotations.version>
    </properties>

    <dependencies>
        <!-- NiFi -->

        <dependency>
            <groupId>org.apache.nifi</groupId>
            <artifactId>nifi-api</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.nifi</groupId>
            <artifactId>nifi-utils</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.nifi</groupId>
            <artifactId>nifi-processor-utils</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.nifi</groupId>
            <artifactId>nifi-flowfile-packager</artifactId>
        </dependency>

        <!-- Kite -->

        <dependency>
            <groupId>org.kitesdk</groupId>
            <artifactId>kite-data-core</artifactId>
            <version>${kite.version}</version>
            <exclusions>
                <exclusion>
                    <!-- Use findbugs-annotations instead -->
                    <groupId>com.google.code.findbugs</groupId>
                    <artifactId>jsr305</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.kitesdk</groupId>
            <artifactId>kite-hadoop-dependencies</artifactId>
            <type>pom</type>
            <version>${kite.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-mapreduce-client-app</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>${guava.version}</version>
            <scope>compile</scope>
        </dependency>

        <dependency>
            <!-- avoid warnings by bundling annotations -->
            <groupId>com.github.stephenc.findbugs</groupId>
            <artifactId>findbugs-annotations</artifactId>
            <scope>compile</scope>
            <version>${findbugs-annotations.version}</version>
        </dependency>

        <!-- Test dependencies -->

        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
            <version>${junit.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.nifi</groupId>
            <artifactId>nifi-mock</artifactId>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.kitesdk</groupId>
            <artifactId>kite-minicluster</artifactId>
            <version>${kite.version}</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>com.sun.jersey</groupId>
            <artifactId>jersey-servlet</artifactId>
            <version>1.14</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.kitesdk</groupId>
            <artifactId>kite-data-core</artifactId>
            <version>${kite.version}</version>
            <type>test-jar</type>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.kitesdk</groupId>
            <artifactId>kite-hadoop-test-dependencies</artifactId>
            <type>pom</type>
            <scope>test</scope>
            <version>${kite.version}</version>
        </dependency>

    </dependencies>
</project>
@@ -16,7 +16,6 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.nifi.processors.kite;

import com.google.common.base.Splitter;

@@ -47,171 +46,171 @@ import org.kitesdk.data.spi.DefaultConfiguration;

abstract class AbstractKiteProcessor extends AbstractProcessor {

    private static final Splitter COMMA = Splitter.on(',').trimResults();

    protected static final Validator FILES_EXIST = new Validator() {
        @Override
        public ValidationResult validate(String subject, String configFiles,
                ValidationContext context) {
            if (configFiles != null && !configFiles.isEmpty()) {
                for (String file : COMMA.split(configFiles)) {
                    ValidationResult result = StandardValidators.FILE_EXISTS_VALIDATOR
                            .validate(subject, file, context);
                    if (!result.isValid()) {
                        return result;
                    }
                }
            }
            return new ValidationResult.Builder()
                    .subject(subject)
                    .input(configFiles)
                    .explanation("Files exist")
                    .valid(true)
                    .build();
        }
    };

    protected static final PropertyDescriptor CONF_XML_FILES
            = new PropertyDescriptor.Builder()
            .name("Hadoop configuration files")
            .description("A comma-separated list of Hadoop configuration files")
            .addValidator(FILES_EXIST)
            .build();

    protected static final Validator RECOGNIZED_URI = new Validator() {
        @Override
        public ValidationResult validate(String subject, String uri,
                ValidationContext context) {
            String message = "not set";
            boolean isValid = true;
            if (uri == null || uri.isEmpty()) {
                isValid = false;
            } else {
                try {
                    new URIBuilder(URI.create(uri)).build();
                } catch (RuntimeException e) {
                    message = e.getMessage();
                    isValid = false;
                }
            }
            return new ValidationResult.Builder()
                    .subject(subject)
                    .input(uri)
                    .explanation("Dataset URI is invalid: " + message)
                    .valid(isValid)
                    .build();
        }
    };

    /**
     * Resolves a {@link Schema} for the given string, either a URI or a JSON
     * literal.
     */
    protected static Schema getSchema(String uriOrLiteral, Configuration conf) {
        URI uri;
        try {
            uri = new URI(uriOrLiteral);
        } catch (URISyntaxException e) {
            // try to parse the schema as a literal
            return parseSchema(uriOrLiteral);
        }

        try {
            if ("dataset".equals(uri.getScheme()) || "view".equals(uri.getScheme())) {
                return Datasets.load(uri).getDataset().getDescriptor().getSchema();
            } else if ("resource".equals(uri.getScheme())) {
                InputStream in = Resources.getResource(uri.getSchemeSpecificPart())
                        .openStream();
                return parseSchema(uri, in);
            } else {
                // try to open the file
                Path schemaPath = new Path(uri);
                FileSystem fs = schemaPath.getFileSystem(conf);
                return parseSchema(uri, fs.open(schemaPath));
            }

        } catch (DatasetNotFoundException e) {
            throw new SchemaNotFoundException(
                    "Cannot read schema of missing dataset: " + uri, e);
        } catch (IOException e) {
            throw new SchemaNotFoundException(
                    "Failed while reading " + uri + ": " + e.getMessage(), e);
        }
    }

    private static Schema parseSchema(String literal) {
        try {
            return new Schema.Parser().parse(literal);
        } catch (RuntimeException e) {
            throw new SchemaNotFoundException(
                    "Failed to parse schema: " + literal, e);
        }
    }

    private static Schema parseSchema(URI uri, InputStream in) throws IOException {
        try {
            return new Schema.Parser().parse(in);
        } catch (RuntimeException e) {
            throw new SchemaNotFoundException("Failed to parse schema at " + uri, e);
        }
    }

    protected static final Validator SCHEMA_VALIDATOR = new Validator() {
        @Override
        public ValidationResult validate(String subject, String uri, ValidationContext context) {
            Configuration conf = getConfiguration(
                    context.getProperty(CONF_XML_FILES).getValue());

            String error = null;
            try {
                getSchema(uri, conf);
            } catch (SchemaNotFoundException e) {
                error = e.getMessage();
            }
            return new ValidationResult.Builder()
                    .subject(subject)
                    .input(uri)
                    .explanation(error)
                    .valid(error == null)
                    .build();
        }
    };

    protected static final List<PropertyDescriptor> ABSTRACT_KITE_PROPS
            = ImmutableList.<PropertyDescriptor>builder()
            .add(CONF_XML_FILES)
            .build();

    static List<PropertyDescriptor> getProperties() {
        return ABSTRACT_KITE_PROPS;
    }

    @OnScheduled
    protected void setDefaultConfiguration(ProcessContext context)
            throws IOException {
        DefaultConfiguration.set(getConfiguration(
                context.getProperty(CONF_XML_FILES).getValue()));
    }

    protected static Configuration getConfiguration(String configFiles) {
        Configuration conf = DefaultConfiguration.get();

        if (configFiles == null || configFiles.isEmpty()) {
            return conf;
        }

        for (String file : COMMA.split(configFiles)) {
            // process each resource only once
            if (conf.getResource(file) == null) {
                // use Path instead of String to get the file from the FS
                conf.addResource(new Path(file));
            }
        }

        return conf;
    }

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return ABSTRACT_KITE_PROPS;
    }
}
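The getConfiguration() helper above is the most reusable piece of this class. A minimal standalone sketch of the same idea follows; it is illustrative only — it uses a plain `new Configuration()` rather than Kite's `DefaultConfiguration`, and the file paths in `main` are made-up examples.

```java
import com.google.common.base.Splitter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class HadoopConfigurationSketch {

    // Same idea as AbstractKiteProcessor.getConfiguration(): every entry in the
    // comma-separated list becomes a Hadoop resource, added at most once.
    static Configuration resolve(String configFiles) {
        Configuration conf = new Configuration(); // assumption: plain Configuration, not Kite's DefaultConfiguration
        if (configFiles == null || configFiles.isEmpty()) {
            return conf;
        }
        for (String file : Splitter.on(',').trimResults().split(configFiles)) {
            if (conf.getResource(file) == null) {
                // use Path rather than String so the value is read from the filesystem
                conf.addResource(new Path(file));
            }
        }
        return conf;
    }

    public static void main(String[] args) {
        // hypothetical paths, in the same format as the "Hadoop configuration files" property
        Configuration conf = resolve("/etc/hadoop/conf/core-site.xml, /etc/hadoop/conf/hdfs-site.xml");
        System.out.println(conf.get("fs.defaultFS"));
    }
}
```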
@@ -20,13 +20,10 @@
package org.apache.nifi.processors.kite;

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;

import static org.apache.avro.generic.GenericData.StringType;

class AvroUtil {
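The hunk above stops at the class declaration, so AvroUtil's method bodies are not part of this diff. Purely as a hedged sketch of what the newDatumWriter helper used by ConvertJSONToAvro could look like — an assumption, not the project's actual implementation — a GenericData-backed version would be:

```java
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;

class AvroUtilSketch {

    // Hypothetical stand-in for AvroUtil.newDatumWriter(schema, Record.class):
    // delegates to GenericData so generic records are written with the given schema.
    @SuppressWarnings("unchecked")
    static <D> DatumWriter<D> newDatumWriter(Schema schema, Class<D> dClass) {
        return (DatumWriter<D>) GenericData.get().createDatumWriter(schema);
    }

    // Matching reader-side helper, also hypothetical.
    @SuppressWarnings("unchecked")
    static <D> DatumReader<D> newDatumReader(Schema schema, Class<D> dClass) {
        return (DatumReader<D>) GenericData.get().createDatumReader(schema);
    }
}
```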
@@ -42,7 +42,6 @@ import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.processor.exception.FlowFileAccessException;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.processor.util.StandardValidators;

@@ -59,9 +58,9 @@ import static org.apache.nifi.processor.util.StandardValidators.createLongValidator
@CapabilityDescription(
        "Converts CSV files to Avro according to an Avro Schema")
public class ConvertCSVToAvro extends AbstractKiteProcessor {
-    private static CSVProperties DEFAULTS = new CSVProperties.Builder().build();
+    private static final CSVProperties DEFAULTS = new CSVProperties.Builder().build();

-    private static Validator CHAR_VALIDATOR = new Validator() {
+    private static final Validator CHAR_VALIDATOR = new Validator() {
        @Override
        public ValidationResult validate(String subject, String input,
                ValidationContext context) {

@@ -74,12 +73,12 @@ public class ConvertCSVToAvro extends AbstractKiteProcessor {
        }
    };

-    private static Relationship SUCCESS = new Relationship.Builder()
+    private static final Relationship SUCCESS = new Relationship.Builder()
            .name("success")
            .description("FlowFile content has been successfully saved")
            .build();

-    private static Relationship FAILURE = new Relationship.Builder()
+    private static final Relationship FAILURE = new Relationship.Builder()
            .name("failure")
            .description("FlowFile content could not be processed")
            .build();
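CHAR_VALIDATOR is cut off by the hunk boundary above. For reference, a self-contained validator in the same style — a sketch, not the project's exact code — that accepts only single-character values looks like this:

```java
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;

public class SingleCharValidatorSketch {

    // Accepts a property value only when it is exactly one character long,
    // mirroring the shape of CHAR_VALIDATOR and FILES_EXIST above.
    static final Validator SINGLE_CHAR = new Validator() {
        @Override
        public ValidationResult validate(String subject, String input, ValidationContext context) {
            boolean valid = input != null && input.length() == 1;
            return new ValidationResult.Builder()
                    .subject(subject)
                    .input(input)
                    .explanation(valid ? "Single character" : "Value must be exactly one character")
                    .valid(valid)
                    .build();
        }
    };
}
```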
@@ -16,7 +16,6 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.nifi.processors.kite;

import com.google.common.annotations.VisibleForTesting;

@@ -38,7 +37,6 @@ import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.processor.exception.FlowFileAccessException;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.StreamCallback;
import org.kitesdk.data.DatasetException;

@@ -48,110 +46,110 @@ import org.kitesdk.data.spi.DefaultConfiguration;

@Tags({"kite", "json", "avro"})
@CapabilityDescription(
        "Converts JSON files to Avro according to an Avro Schema")
public class ConvertJSONToAvro extends AbstractKiteProcessor {

    private static final Relationship SUCCESS = new Relationship.Builder()
            .name("success")
            .description("FlowFile content has been successfully saved")
            .build();

    private static final Relationship FAILURE = new Relationship.Builder()
            .name("failure")
            .description("FlowFile content could not be processed")
            .build();

    @VisibleForTesting
    static final PropertyDescriptor SCHEMA
            = new PropertyDescriptor.Builder()
            .name("Record schema")
            .description(
                    "Outgoing Avro schema for each record created from a JSON object")
            .addValidator(SCHEMA_VALIDATOR)
            .required(true)
            .build();

    private static final List<PropertyDescriptor> PROPERTIES
            = ImmutableList.<PropertyDescriptor>builder()
            .addAll(AbstractKiteProcessor.getProperties())
            .add(SCHEMA)
            .build();

    private static final Set<Relationship> RELATIONSHIPS
            = ImmutableSet.<Relationship>builder()
            .add(SUCCESS)
            .add(FAILURE)
            .build();

    public ConvertJSONToAvro() {
    }

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return PROPERTIES;
    }

    @Override
    public Set<Relationship> getRelationships() {
        return RELATIONSHIPS;
    }

    @Override
    public void onTrigger(ProcessContext context, final ProcessSession session)
            throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }

        final Schema schema = getSchema(
                context.getProperty(SCHEMA).getValue(),
                DefaultConfiguration.get());

        final DataFileWriter<Record> writer = new DataFileWriter<>(
                AvroUtil.newDatumWriter(schema, Record.class));
        writer.setCodec(CodecFactory.snappyCodec());

        try {
            flowFile = session.write(flowFile, new StreamCallback() {
                @Override
                public void process(InputStream in, OutputStream out) throws IOException {
                    long written = 0L;
                    long errors = 0L;
                    try (JSONFileReader<Record> reader = new JSONFileReader<>(
                            in, schema, Record.class)) {
                        reader.initialize();
                        try (DataFileWriter<Record> w = writer.create(schema, out)) {
                            while (reader.hasNext()) {
                                try {
                                    Record record = reader.next();
                                    w.append(record);
                                    written += 1;
                                } catch (DatasetRecordException e) {
                                    errors += 1;
                                }
                            }
                        }
                        session.adjustCounter("Converted records", written,
                                false /* update only if file transfer is successful */);
                        session.adjustCounter("Conversion errors", errors,
                                false /* update only if file transfer is successful */);
                    }
                }
            });

            session.transfer(flowFile, SUCCESS);

            //session.getProvenanceReporter().send(flowFile, target.getUri().toString());
        } catch (ProcessException | DatasetIOException e) {
            getLogger().error("Failed reading or writing", e);
            session.transfer(flowFile, FAILURE);

        } catch (DatasetException e) {
            getLogger().error("Failed to read FlowFile", e);
            session.transfer(flowFile, FAILURE);

        }
    }

}
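Since the processors module declares nifi-mock as a test dependency (see the pom above), this converter can be exercised without a running NiFi instance. Below is a minimal sketch, assuming the standard nifi-mock TestRunner API; the schema literal and JSON record are made-up sample data.

```java
import java.nio.charset.StandardCharsets;

import org.apache.nifi.processors.kite.ConvertJSONToAvro;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;

public class ConvertJSONToAvroSketch {

    public static void main(String[] args) {
        TestRunner runner = TestRunners.newTestRunner(ConvertJSONToAvro.class);

        // "Record schema" is the property name declared by the SCHEMA descriptor above;
        // the schema itself is an arbitrary example.
        runner.setProperty("Record schema",
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
                + "[{\"name\":\"name\",\"type\":\"string\"}]}");

        // Enqueue one made-up JSON record and run the processor once.
        runner.enqueue("{\"name\":\"nifi\"}".getBytes(StandardCharsets.UTF_8));
        runner.run();

        // One flowfile is expected on the "success" relationship.
        runner.assertTransferCount("success", 1);
    }
}
```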
@@ -50,17 +50,17 @@ import org.kitesdk.data.spi.SchemaValidationUtil;
@Tags({"kite", "avro", "parquet", "hive", "hdfs", "hbase"})
@CapabilityDescription("Stores Avro records in a Kite dataset")
public class StoreInKiteDataset extends AbstractKiteProcessor {
-    private static Relationship SUCCESS = new Relationship.Builder()
+    private static final Relationship SUCCESS = new Relationship.Builder()
            .name("success")
            .description("FlowFile content has been successfully saved")
            .build();

-    private static Relationship INCOMPATIBLE = new Relationship.Builder()
+    private static final Relationship INCOMPATIBLE = new Relationship.Builder()
            .name("incompatible")
            .description("FlowFile content is not compatible with the target dataset")
            .build();

-    private static Relationship FAILURE = new Relationship.Builder()
+    private static final Relationship FAILURE = new Relationship.Builder()
            .name("failure")
            .description("FlowFile content could not be processed")
            .build();
@@ -1,59 +1,58 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <!--
      Licensed to the Apache Software Foundation (ASF) under one or more
      contributor license agreements. See the NOTICE file distributed with
      this work for additional information regarding copyright ownership.
      The ASF licenses this file to You under the Apache License, Version 2.0
      (the "License"); you may not use this file except in compliance with
      the License. You may obtain a copy of the License at
          http://www.apache.org/licenses/LICENSE-2.0
      Unless required by applicable law or agreed to in writing, software
      distributed under the License is distributed on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      See the License for the specific language governing permissions and
      limitations under the License.
    -->
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.apache.nifi</groupId>
        <artifactId>nifi-nar-bundles</artifactId>
        <version>0.0.2-incubating-SNAPSHOT</version>
    </parent>

    <artifactId>nifi-kite-bundle</artifactId>
    <packaging>pom</packaging>

    <name>Kite Bundle</name>
    <description>A bundle of processors that use Kite to store data in Hadoop</description>

    <modules>
        <module>nifi-kite-processors</module>
        <module>nifi-kite-nar</module>
    </modules>

    <build>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <configuration>
                        <redirectTestOutputToFile>true</redirectTestOutputToFile>
                    </configuration>
                </plugin>
            </plugins>
        </pluginManagement>
    </build>

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.apache.nifi</groupId>
                <artifactId>nifi-kite-processors</artifactId>
                <version>0.0.2-incubating-SNAPSHOT</version>
            </dependency>
        </dependencies>
    </dependencyManagement>

</project>