NIFI-13150 Removed Couchbase Components

This closes #8753

Signed-off-by: David Handermann <exceptionfactory@apache.org>
Authored by Joseph Witt on 2024-05-06 11:06:24 -07:00; committed by exceptionfactory
parent e7facda912
commit 2711f9d8cf
33 changed files with 0 additions and 3248 deletions


@@ -428,18 +428,6 @@ language governing permissions and limitations under the License. -->
<version>2.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-nar</artifactId>
<version>2.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-services-api-nar</artifactId>
<version>2.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-hl7-nar</artifactId>


@@ -91,12 +91,6 @@
<artifactId>reactor-netty-http</artifactId>
<version>1.0.44</version>
</dependency>
<!-- core-io from Couchbase -->
<dependency>
<groupId>com.couchbase.client</groupId>
<artifactId>core-io</artifactId>
<version>1.7.24</version>
</dependency>
<!-- SSHD from Registry and other modules -->
<dependency>
<groupId>org.apache.sshd</groupId>
@@ -857,16 +851,6 @@
<artifactId>nifi-confluent-schema-registry-service</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-processors</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-services-api</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-dropbox-processors</artifactId>

nifi-couchbase-nar/pom.xml

@@ -1,41 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-bundle</artifactId>
<version>2.0.0-SNAPSHOT</version>
</parent>
<artifactId>nifi-couchbase-nar</artifactId>
<packaging>nar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-services-api-nar</artifactId>
<version>2.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-processors</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
</dependencies>
</project>

nifi-couchbase-nar/NOTICE

@@ -1,30 +0,0 @@
nifi-couchbase-nar
Copyright 2014-2024 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
******************
Apache Software License v2
******************
The following binary components are provided under the Apache Software License v2
(ASLv2) Couchbase Java SDK
The following NOTICE information applies:
Couchbase Java SDK
Copyright 2014 Couchbase, Inc.
(ASLv2) RxJava
The following NOTICE information applies:
RxJava
Copyright 2012 Netflix, Inc.
(ASLv2) Apache Commons Lang
The following NOTICE information applies:
Apache Commons Lang
Copyright 2001-2017 The Apache Software Foundation
This product includes software from the Spring Framework,
under the Apache License 2.0 (see: StringUtils.containsWhitespace())

nifi-couchbase-processors/pom.xml

@@ -1,79 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-bundle</artifactId>
<version>2.0.0-SNAPSHOT</version>
</parent>
<artifactId>nifi-couchbase-processors</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-utils</artifactId>
</dependency>
<!-- Included for StringUtils -->
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-services-api</artifactId>
<version>2.0.0-SNAPSHOT</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-distributed-cache-client-service-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-lookup-service-api</artifactId>
</dependency>
<dependency>
<groupId>com.couchbase.client</groupId>
<artifactId>java-client</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.couchbase.client</groupId>
<artifactId>core-io</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-record</artifactId>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-expression-language</artifactId>
<version>2.0.0-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-record-serialization-service-api</artifactId>
</dependency>
</dependencies>
</project>

AbstractCouchbaseLookupService.java

@@ -1,75 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.ControllerServiceInitializationContext;
import org.apache.nifi.reporting.InitializationException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
public class AbstractCouchbaseLookupService extends AbstractControllerService {
protected static final String KEY = "key";
protected static final Set<String> REQUIRED_KEYS = Collections.unmodifiableSet(Stream.of(KEY).collect(Collectors.toSet()));
protected List<PropertyDescriptor> properties;
protected volatile CouchbaseClusterControllerService couchbaseClusterService;
protected volatile String bucketName;
@Override
protected void init(final ControllerServiceInitializationContext context) throws InitializationException {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(COUCHBASE_CLUSTER_SERVICE);
properties.add(BUCKET_NAME);
addProperties(properties);
this.properties = Collections.unmodifiableList(properties);
}
protected void addProperties(List<PropertyDescriptor> properties) {
}
@OnEnabled
public void onEnabled(final ConfigurationContext context) throws InitializationException {
couchbaseClusterService = context.getProperty(COUCHBASE_CLUSTER_SERVICE)
.asControllerService(CouchbaseClusterControllerService.class);
bucketName = context.getProperty(BUCKET_NAME).evaluateAttributeExpressions().getValue();
}
public Set<String> getRequiredKeys() {
return REQUIRED_KEYS;
}
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
}
}

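For context on the extension point above: init() pins COUCHBASE_CLUSTER_SERVICE and BUCKET_NAME as the first two properties and lets subclasses contribute more through addProperties(). A minimal sketch of a hypothetical subclass (the class and property names below are illustrative, not part of the removed code):

package org.apache.nifi.couchbase;

import java.util.List;

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.util.StandardValidators;

// Hypothetical subclass, for illustration only; not part of the removed bundle.
public class ExampleCouchbaseLookupService extends AbstractCouchbaseLookupService {

    static final PropertyDescriptor EXAMPLE_PROPERTY = new PropertyDescriptor.Builder()
            .name("example-property")
            .displayName("Example Property")
            .description("Illustrates how subclasses extend the base property list.")
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    @Override
    protected void addProperties(final List<PropertyDescriptor> properties) {
        // Invoked from init() after the two base properties have been added.
        properties.add(EXAMPLE_PROPERTY);
    }
}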
CouchbaseClusterService.java

@@ -1,205 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.InitializationException;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.CouchbaseCluster;
import org.apache.nifi.util.StringUtils;
/**
* Provides centralized Couchbase connection and bucket password management.
*/
@CapabilityDescription("Provides a centralized Couchbase connection and bucket passwords management."
+ " Bucket passwords can be specified via dynamic properties.")
@Tags({ "nosql", "couchbase", "database", "connection" })
@DynamicProperty(name = "Bucket Password for BUCKET_NAME", value = "bucket password",
description = "Specify bucket password if necessary." +
" Couchbase Server 5.0 or later should use 'User Name' and 'User Password' instead.")
public class CouchbaseClusterService extends AbstractControllerService implements CouchbaseClusterControllerService {
public static final PropertyDescriptor CONNECTION_STRING = new PropertyDescriptor
.Builder()
.name("Connection String")
.description("The hostnames or ip addresses of the bootstraping nodes and optional parameters."
+ " Syntax) couchbase://node1,node2,nodeN?param1=value1&param2=value2&paramN=valueN")
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor USER_NAME = new PropertyDescriptor
.Builder()
.name("user-name")
.displayName("User Name")
.description("The user name to authenticate NiFi as a Couchbase client." +
" This configuration can be used against Couchbase Server 5.0 or later" +
" supporting Roll-Based Access Control.")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor USER_PASSWORD = new PropertyDescriptor
.Builder()
.name("user-password")
.displayName("User Password")
.description("The user password to authenticate NiFi as a Couchbase client." +
" This configuration can be used against Couchbase Server 5.0 or later" +
" supporting Roll-Based Access Control.")
.required(false)
.sensitive(true)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
private static final List<PropertyDescriptor> properties;
static {
final List<PropertyDescriptor> props = new ArrayList<>();
props.add(CONNECTION_STRING);
props.add(USER_NAME);
props.add(USER_PASSWORD);
properties = Collections.unmodifiableList(props);
}
private static final String DYNAMIC_PROP_BUCKET_PASSWORD = "Bucket Password for ";
private Map<String, String> bucketPasswords;
private volatile CouchbaseCluster cluster;
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
}
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(
String propertyDescriptorName) {
if (propertyDescriptorName.startsWith(DYNAMIC_PROP_BUCKET_PASSWORD)) {
return new PropertyDescriptor
.Builder().name(propertyDescriptorName)
.description("Bucket password.")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.dynamic(true)
.sensitive(true)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.build();
}
return null;
}
@Override
protected Collection<ValidationResult> customValidate(ValidationContext context) {
final Collection<ValidationResult> results = new ArrayList<>();
final boolean isUserNameSet = context.getProperty(USER_NAME).isSet();
final boolean isUserPasswordSet = context.getProperty(USER_PASSWORD).isSet();
if ((isUserNameSet && !isUserPasswordSet) || (!isUserNameSet && isUserPasswordSet)) {
results.add(new ValidationResult.Builder()
.subject("User Name and Password")
.explanation("Both User Name and Password are required to use.")
.build());
}
final boolean isBucketPasswordSet = context.getProperties().keySet().stream()
.anyMatch(p -> p.isDynamic() && p.getName().startsWith(DYNAMIC_PROP_BUCKET_PASSWORD));
if (isUserNameSet && isUserPasswordSet && isBucketPasswordSet) {
results.add(new ValidationResult.Builder()
.subject("Authentication methods")
.explanation("Different authentication methods can not be used at the same time," +
" Use either one of User Name and Password, or Bucket Password.")
.build());
}
return results;
}
/**
* Establish a connection to a Couchbase cluster.
* @param context the configuration context
* @throws InitializationException if unable to connect to a Couchbase cluster
*/
@OnEnabled
public void onConfigured(final ConfigurationContext context) throws InitializationException {
bucketPasswords = new HashMap<>();
for(PropertyDescriptor p : context.getProperties().keySet()){
if(p.isDynamic() && p.getName().startsWith(DYNAMIC_PROP_BUCKET_PASSWORD)){
String bucketName = p.getName().substring(DYNAMIC_PROP_BUCKET_PASSWORD.length());
String password = context.getProperty(p).evaluateAttributeExpressions().getValue();
bucketPasswords.put(bucketName, password);
}
}
final String userName = context.getProperty(USER_NAME).evaluateAttributeExpressions().getValue();
final String userPassword = context.getProperty(USER_PASSWORD).evaluateAttributeExpressions().getValue();
try {
cluster = CouchbaseCluster.fromConnectionString(context.getProperty(CONNECTION_STRING).evaluateAttributeExpressions().getValue());
if (!StringUtils.isEmpty(userName) && !StringUtils.isEmpty(userPassword)) {
cluster.authenticate(userName, userPassword);
}
} catch(CouchbaseException e) {
throw new InitializationException(e);
}
}
@Override
public Bucket openBucket(String bucketName){
if (bucketPasswords.containsKey(bucketName)) {
return cluster.openBucket(bucketName, bucketPasswords.get(bucketName));
}
return cluster.openBucket(bucketName);
}
/**
* Disconnect from the Couchbase cluster.
*/
@OnDisabled
public void shutdown() {
if(cluster != null){
cluster.disconnect();
cluster = null;
}
}
}

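onConfigured() above is a thin wrapper around the Couchbase Java SDK 2.x bootstrap calls. A standalone sketch of the same sequence against the bare SDK, assuming java-client 2.x on the classpath; the connection string, credentials, and bucket name are placeholders:

import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.CouchbaseCluster;

public class ClusterConnectionSketch {
    public static void main(final String[] args) {
        // Equivalent of the Connection String property, e.g. couchbase://node1,node2
        final CouchbaseCluster cluster = CouchbaseCluster.fromConnectionString("couchbase://localhost");
        // Equivalent of User Name / User Password (Couchbase Server 5.0+ RBAC)
        cluster.authenticate("exampleUser", "examplePassword");
        final Bucket bucket = cluster.openBucket("exampleBucket");
        System.out.println("Opened bucket: " + bucket.name());
        cluster.disconnect();
    }
}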
CouchbaseKeyValueLookupService.java

@@ -1,83 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.error.DocumentDoesNotExistException;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.lookup.LookupFailureException;
import org.apache.nifi.lookup.StringLookupService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.StringUtils;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.LOOKUP_SUB_DOC_PATH;
@Tags({"lookup", "enrich", "key", "value", "couchbase"})
@CapabilityDescription("Lookup a string value from Couchbase Server associated with the specified key."
+ " The coordinates that are passed to the lookup must contain the key 'key'.")
public class CouchbaseKeyValueLookupService extends AbstractCouchbaseLookupService implements StringLookupService {
private volatile String subDocPath;
@Override
protected void addProperties(List<PropertyDescriptor> properties) {
properties.add(LOOKUP_SUB_DOC_PATH);
}
@OnEnabled
public void onEnabled(final ConfigurationContext context) throws InitializationException {
super.onEnabled(context);
subDocPath = context.getProperty(LOOKUP_SUB_DOC_PATH).evaluateAttributeExpressions().getValue();
}
@Override
public Optional<String> lookup(Map<String, Object> coordinates) throws LookupFailureException {
try {
final Bucket bucket = couchbaseClusterService.openBucket(bucketName);
final Optional<String> docId = Optional.ofNullable(coordinates.get(KEY)).map(Object::toString);
if (!StringUtils.isBlank(subDocPath)) {
return docId.map(key -> {
try {
return bucket.lookupIn(key).get(subDocPath).execute();
} catch (DocumentDoesNotExistException e) {
getLogger().debug("Document was not found for {}", new Object[]{key});
return null;
}
}).map(fragment -> fragment.content(0)).map(Object::toString);
} else {
return docId.map(key -> CouchbaseUtils.getStringContent(bucket, key));
}
} catch (CouchbaseException e) {
throw new LookupFailureException("Failed to lookup from Couchbase using this coordinates: " + coordinates);
}
}
}

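Per the capability description, the coordinates passed to lookup() must carry the entry named "key". A hedged usage sketch against the StringLookupService interface (the document id is a placeholder):

import java.util.Map;
import java.util.Optional;

import org.apache.nifi.lookup.LookupFailureException;
import org.apache.nifi.lookup.StringLookupService;

public class KeyValueLookupSketch {
    static Optional<String> lookupByKey(final StringLookupService service) throws LookupFailureException {
        // "key" matches AbstractCouchbaseLookupService.KEY; "example-doc-id" is illustrative.
        final Map<String, Object> coordinates = Map.of("key", "example-doc-id");
        return service.lookup(coordinates);
    }
}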
CouchbaseMapCacheClient.java

@@ -1,201 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
import com.couchbase.client.deps.io.netty.buffer.Unpooled;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.BinaryDocument;
import com.couchbase.client.java.document.Document;
import com.couchbase.client.java.error.CASMismatchException;
import com.couchbase.client.java.error.DocumentAlreadyExistsException;
import com.couchbase.client.java.error.DocumentDoesNotExistException;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.distributed.cache.client.AtomicCacheEntry;
import org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.Serializer;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
@Tags({"distributed", "cache", "map", "cluster", "couchbase"})
@CapabilityDescription("Provides the ability to communicate with a Couchbase Server cluster as a DistributedMapCacheServer." +
" This can be used in order to share a Map between nodes in a NiFi cluster." +
" Couchbase Server cluster can provide a high available and persistent cache storage.")
public class CouchbaseMapCacheClient extends AbstractControllerService implements AtomicDistributedMapCacheClient<Long> {
private CouchbaseClusterControllerService clusterService;
private Bucket bucket;
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(COUCHBASE_CLUSTER_SERVICE);
descriptors.add(BUCKET_NAME);
return descriptors;
}
@OnEnabled
public void configure(final ConfigurationContext context) {
clusterService = context.getProperty(COUCHBASE_CLUSTER_SERVICE).asControllerService(CouchbaseClusterControllerService.class);
final String bucketName = context.getProperty(BUCKET_NAME).evaluateAttributeExpressions().getValue();
bucket = clusterService.openBucket(bucketName);
}
private <V> Document toDocument(String docId, V value, Serializer<V> valueSerializer) throws IOException {
return toDocument(docId, value, valueSerializer, 0);
}
private <V> Document toDocument(String docId, V value, Serializer<V> valueSerializer, long revision) throws IOException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
valueSerializer.serialize(value, bos);
final ByteBuf byteBuf = Unpooled.wrappedBuffer(bos.toByteArray());
return BinaryDocument.create(docId, byteBuf, revision);
}
private <K> String toDocumentId(K key, Serializer<K> keySerializer) throws IOException {
final String docId;
if (key instanceof String) {
docId = (String) key;
} else {
// Coerce conversion from byte[] to String; this may generate an unreadable String or exceed the max key size.
ByteArrayOutputStream bos = new ByteArrayOutputStream();
keySerializer.serialize(key, bos);
final byte[] keyBytes = bos.toByteArray();
docId = new String(keyBytes);
}
return docId;
}
@Override
public <K, V> boolean putIfAbsent(K key, V value, Serializer<K> keySerializer, Serializer<V> valueSerializer) throws IOException {
final String docId = toDocumentId(key, keySerializer);
final Document doc = toDocument(docId, value, valueSerializer);
try {
bucket.insert(doc);
return true;
} catch (DocumentAlreadyExistsException e) {
return false;
}
}
@Override
public <K, V> AtomicCacheEntry<K, V, Long> fetch(K key, Serializer<K> keySerializer, Deserializer<V> valueDeserializer) throws IOException {
final String docId = toDocumentId(key, keySerializer);
final BinaryDocument doc = bucket.get(BinaryDocument.create(docId));
if (doc == null) {
return null;
}
final V value = deserialize(doc, valueDeserializer);
return new AtomicCacheEntry<>(key, value, doc.cas());
}
@Override
public <K, V> V getAndPutIfAbsent(K key, V value, Serializer<K> keySerializer, Serializer<V> valueSerializer, Deserializer<V> valueDeserializer) throws IOException {
final V existing = get(key, keySerializer, valueDeserializer);
if (existing != null) {
return existing;
}
// If there's no existing value, put this value.
if (!putIfAbsent(key, value, keySerializer, valueSerializer)) {
// If putting this value failed, it's possible that another client has put a different doc, so return that.
return get(key, keySerializer, valueDeserializer);
}
// If successfully put this value, return this.
return value;
}
@Override
public <K, V> boolean replace(AtomicCacheEntry<K, V, Long> entry, Serializer<K> keySerializer, Serializer<V> valueSerializer) throws IOException {
final Long revision = entry.getRevision().orElse(-1L);
final String docId = toDocumentId(entry.getKey(), keySerializer);
final Document doc = toDocument(docId, entry.getValue(), valueSerializer, revision);
try {
if (revision < 0) {
// If the document does not exist yet, try to create one.
try {
bucket.insert(doc);
return true;
} catch (DocumentAlreadyExistsException e) {
return false;
}
}
bucket.replace(doc);
return true;
} catch (DocumentDoesNotExistException|CASMismatchException e) {
return false;
}
}
@Override
public <K> boolean containsKey(K key, Serializer<K> keySerializer) throws IOException {
return bucket.exists(toDocumentId(key, keySerializer));
}
@Override
public <K, V> void put(K key, V value, Serializer<K> keySerializer, Serializer<V> valueSerializer) throws IOException {
final String docId = toDocumentId(key, keySerializer);
final Document doc = toDocument(docId, value, valueSerializer);
bucket.upsert(doc);
}
@Override
public <K, V> V get(K key, Serializer<K> keySerializer, Deserializer<V> valueDeserializer) throws IOException {
final String docId = toDocumentId(key, keySerializer);
final BinaryDocument doc = bucket.get(BinaryDocument.create(docId));
return deserialize(doc, valueDeserializer);
}
private <V> V deserialize(BinaryDocument doc, Deserializer<V> valueDeserializer) throws IOException {
if (doc == null) {
return null;
}
final ByteBuf byteBuf = doc.content();
final byte[] bytes = new byte[byteBuf.readableBytes()];
byteBuf.readBytes(bytes);
byteBuf.release();
return valueDeserializer.deserialize(bytes);
}
@Override
public void close() {
}
@Override
public <K> boolean remove(K key, Serializer<K> serializer) throws IOException {
try {
bucket.remove(toDocumentId(key, serializer));
return true;
} catch (DocumentDoesNotExistException e) {
return false;
}
}
}

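fetch() surfaces the document CAS value as the cache revision, and replace() returns false on a CAS mismatch, which gives callers optimistic concurrency. A sketch of that contract under an assumed String-based cache entry (the serializers and the "counter" key are illustrative):

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.nifi.distributed.cache.client.AtomicCacheEntry;
import org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.Serializer;

public class CasUpdateSketch {
    static final Serializer<String> STRING_SERIALIZER =
            (value, out) -> out.write(value.getBytes(StandardCharsets.UTF_8));
    static final Deserializer<String> STRING_DESERIALIZER =
            bytes -> new String(bytes, StandardCharsets.UTF_8);

    static boolean incrementCounter(final AtomicDistributedMapCacheClient<Long> cache) throws IOException {
        final AtomicCacheEntry<String, String, Long> entry =
                cache.fetch("counter", STRING_SERIALIZER, STRING_DESERIALIZER);
        if (entry == null) {
            return cache.putIfAbsent("counter", "1", STRING_SERIALIZER, STRING_SERIALIZER);
        }
        final String next = String.valueOf(Long.parseLong(entry.getValue()) + 1);
        // replace() returns false on a CAS mismatch, i.e. a concurrent update won.
        return cache.replace(new AtomicCacheEntry<>("counter", next, entry.getRevision().orElse(-1L)),
                STRING_SERIALIZER, STRING_SERIALIZER);
    }
}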
CouchbaseRecordLookupService.java

@@ -1,139 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.deps.io.netty.buffer.ByteBufInputStream;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.BinaryDocument;
import com.couchbase.client.java.document.RawJsonDocument;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.lookup.LookupFailureException;
import org.apache.nifi.lookup.RecordLookupService;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.serialization.RecordReader;
import org.apache.nifi.serialization.RecordReaderFactory;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.util.Tuple;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.DOCUMENT_TYPE;
@Tags({"lookup", "enrich", "couchbase"})
@CapabilityDescription("Lookup a record from Couchbase Server associated with the specified key."
+ " The coordinates that are passed to the lookup must contain the key 'key'.")
public class CouchbaseRecordLookupService extends AbstractCouchbaseLookupService implements RecordLookupService {
private volatile RecordReaderFactory readerFactory;
private volatile DocumentType documentType;
private static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder()
.name("record-reader")
.displayName("Record Reader")
.description("The Record Reader to use for parsing fetched document from Couchbase Server.")
.identifiesControllerService(RecordReaderFactory.class)
.required(true)
.build();
@Override
protected void addProperties(List<PropertyDescriptor> properties) {
properties.add(DOCUMENT_TYPE);
properties.add(RECORD_READER);
}
@OnEnabled
public void onEnabled(final ConfigurationContext context) throws InitializationException {
super.onEnabled(context);
readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
}
@Override
public Optional<Record> lookup(Map<String, Object> coordinates) throws LookupFailureException {
final Bucket bucket = couchbaseClusterService.openBucket(bucketName);
final Optional<String> docId = Optional.ofNullable(coordinates.get(KEY)).map(Object::toString);
final Optional<InputStream> inputStream;
try {
switch (documentType) {
case Binary:
inputStream = docId
.map(key -> bucket.get(key, BinaryDocument.class))
.map(doc -> new ByteBufInputStream(doc.content()));
break;
case Json:
inputStream = docId
.map(key -> bucket.get(key, RawJsonDocument.class))
.map(doc -> new ByteArrayInputStream(doc.content().getBytes(StandardCharsets.UTF_8)));
break;
default:
return Optional.empty();
}
} catch (CouchbaseException e) {
throw new LookupFailureException("Failed to lookup from Couchbase using these coordinates: " + coordinates, e);
}
final Optional<Tuple<Exception, RecordReader>> errOrReader = inputStream.map(in -> {
try {
// Pass coordinates to initialize the RecordReader, so that the reader can resolve the schema dynamically.
// This allows using the same RecordReader service with different schemas if the RecordReader is configured to
// access the schema based on Expression Language.
final Map<String, String> recordReaderVariables = new HashMap<>(coordinates.size());
coordinates.keySet().forEach(k -> {
final Object value = coordinates.get(k);
if (value != null) {
recordReaderVariables.put(k, value.toString());
}
});
return new Tuple<>(null, readerFactory.createRecordReader(recordReaderVariables, in, -1, getLogger()));
} catch (Exception e) {
return new Tuple<>(e, null);
}
});
if (!errOrReader.isPresent()) {
return Optional.empty();
}
final Exception exception = errOrReader.get().getKey();
if (exception != null) {
throw new LookupFailureException(String.format("Failed to lookup with %s", coordinates), exception);
}
try {
return Optional.ofNullable(errOrReader.get().getValue().nextRecord());
} catch (Exception e) {
throw new LookupFailureException(String.format("Failed to read Record when looking up with %s", coordinates), e);
}
}
}

CouchbaseUtils.java

@@ -1,58 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.LegacyDocument;
import java.nio.charset.StandardCharsets;
public class CouchbaseUtils {
/**
* A convenience method to retrieve a String value when the Document type is unknown.
* This method gets the stored value as a LegacyDocument, then tries to convert the content based on its class.
* @param bucket the bucket to get a document from
* @param id the id of the target document
* @return String representation of the stored value, or null if not found
*/
public static String getStringContent(Bucket bucket, String id) {
final LegacyDocument doc = bucket.get(LegacyDocument.create(id));
if (doc == null) {
return null;
}
final Object content = doc.content();
return getStringContent(content);
}
public static String getStringContent(Object content) {
if (content instanceof String) {
return (String) content;
} else if (content instanceof byte[]) {
return new String((byte[]) content, StandardCharsets.UTF_8);
} else if (content instanceof ByteBuf) {
final ByteBuf byteBuf = (ByteBuf) content;
byte[] bytes = new byte[byteBuf.readableBytes()];
byteBuf.readBytes(bytes);
byteBuf.release();
return new String(bytes, StandardCharsets.UTF_8);
}
return content.toString();
}
}

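A short sketch of the coercion rules in getStringContent(Object); note that the ByteBuf branch above additionally releases the buffer:

import java.nio.charset.StandardCharsets;

import org.apache.nifi.couchbase.CouchbaseUtils;

public class StringContentSketch {
    public static void main(final String[] args) {
        System.out.println(CouchbaseUtils.getStringContent("already a String"));
        System.out.println(CouchbaseUtils.getStringContent("utf-8 bytes".getBytes(StandardCharsets.UTF_8)));
        System.out.println(CouchbaseUtils.getStringContent(42)); // falls through to toString()
    }
}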
AbstractCouchbaseProcessor.java

@@ -1,194 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.java.Bucket;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
/**
* Provides common functionality for Couchbase processors.
*/
public abstract class AbstractCouchbaseProcessor extends AbstractProcessor {
static final PropertyDescriptor DOC_ID = new PropertyDescriptor.Builder()
.name("document-id")
.displayName("Document Id")
.description("A static, fixed Couchbase document id, or an expression to construct the Couchbase document id.")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
static final Relationship REL_ORIGINAL = new Relationship.Builder().name("original").build();
static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();
static final Relationship REL_RETRY = new Relationship.Builder().name("retry").build();
static final Relationship REL_FAILURE = new Relationship.Builder().name("failure").build();
private List<PropertyDescriptor> descriptors;
private Set<Relationship> relationships;
private CouchbaseClusterControllerService clusterService;
@Override
protected final void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(COUCHBASE_CLUSTER_SERVICE);
descriptors.add(BUCKET_NAME);
addSupportedProperties(descriptors);
this.descriptors = Collections.unmodifiableList(descriptors);
final Set<Relationship> relationships = new HashSet<>();
addSupportedRelationships(relationships);
this.relationships = Collections.unmodifiableSet(relationships);
}
/**
* Add processor specific properties.
*
* @param descriptors add properties to this list
*/
protected void addSupportedProperties(List<PropertyDescriptor> descriptors) {
}
/**
* Add processor specific relationships.
*
* @param relationships add relationships to this list
*/
protected void addSupportedRelationships(Set<Relationship> relationships) {
}
@Override
public final Set<Relationship> getRelationships() {
return filterRelationships(this.relationships);
}
protected Set<Relationship> filterRelationships(Set<Relationship> rels) {
return rels;
}
@Override
public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return descriptors;
}
private CouchbaseClusterControllerService getClusterService(final ProcessContext context) {
synchronized (AbstractCouchbaseProcessor.class) {
if (clusterService == null) {
clusterService = context.getProperty(COUCHBASE_CLUSTER_SERVICE)
.asControllerService(CouchbaseClusterControllerService.class);
}
}
return clusterService;
}
/**
* Open a bucket connection using a CouchbaseClusterControllerService.
*
* @param context a process context
* @return a bucket instance
*/
protected final Bucket openBucket(final ProcessContext context) {
return getClusterService(context).openBucket(context.getProperty(BUCKET_NAME).evaluateAttributeExpressions().getValue());
}
/**
* Generate a transit url.
*
* @param bucket the target bucket
* @param docId the id of the target document
* @return a transit url based on the bucket name and the CouchbaseClusterControllerService name
*/
protected String getTransitUrl(final Bucket bucket, final String docId) {
return "couchbase://" + bucket.name() + "/" + docId;
}
/**
* Handles the thrown CouchbaseException accordingly.
*
* @param context a process context
* @param session a process session
* @param logger a logger
* @param inFile an input FlowFile
* @param e the thrown CouchbaseException
* @param errMsg a message to be logged
*/
protected void handleCouchbaseException(final ProcessContext context, final ProcessSession session,
final ComponentLog logger, FlowFile inFile, CouchbaseException e,
String errMsg) {
logger.error(errMsg, e);
if (inFile != null) {
ErrorHandlingStrategy strategy = CouchbaseExceptionMappings.getStrategy(e);
switch (strategy.penalty()) {
case Penalize:
if (logger.isDebugEnabled()) {
logger.debug("Penalized: {}", new Object[] {inFile});
}
inFile = session.penalize(inFile);
break;
case Yield:
if (logger.isDebugEnabled()) {
logger.debug("Yielded context: {}", new Object[] {inFile});
}
context.yield();
break;
case None:
break;
}
switch (strategy.result()) {
case ProcessException:
throw new ProcessException(errMsg, e);
case Failure:
inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), e.getClass().getName());
session.transfer(inFile, REL_FAILURE);
break;
case Retry:
inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), e.getClass().getName());
session.transfer(inFile, REL_RETRY);
break;
}
}
}
}

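The base class leaves addSupportedProperties(), addSupportedRelationships(), and onTrigger() to subclasses. A hypothetical no-op subclass showing the wiring (illustrative only; not one of the removed processors):

package org.apache.nifi.processors.couchbase;

import java.util.List;
import java.util.Set;

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;

public class NoopCouchbaseProcessor extends AbstractCouchbaseProcessor {

    @Override
    protected void addSupportedProperties(final List<PropertyDescriptor> descriptors) {
        // Appended after COUCHBASE_CLUSTER_SERVICE and BUCKET_NAME from the base class.
        descriptors.add(DOC_ID);
    }

    @Override
    protected void addSupportedRelationships(final Set<Relationship> relationships) {
        relationships.add(REL_SUCCESS);
        relationships.add(REL_FAILURE);
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // openBucket(context) would connect through the shared cluster service here.
    }
}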
CouchbaseAttributes.java

@@ -1,63 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import org.apache.nifi.flowfile.attributes.FlowFileAttributeKey;
/**
* Couchbase related attribute keys.
*/
public enum CouchbaseAttributes implements FlowFileAttributeKey {
/**
* A reference to the related cluster.
*/
Cluster("couchbase.cluster"),
/**
* A related bucket name.
*/
Bucket("couchbase.bucket"),
/**
* The id of a related document.
*/
DocId("couchbase.doc.id"),
/**
* The CAS value of a related document.
*/
Cas("couchbase.doc.cas"),
/**
* The expiration of a related document.
*/
Expiry("couchbase.doc.expiry"),
/**
* The thrown CouchbaseException class.
*/
Exception("couchbase.exception"),
;
private final String key;
private CouchbaseAttributes(final String key) {
this.key = key;
}
@Override
public String key() {
return key;
}
}

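A small sketch of reading these keys back from a FlowFile produced by one of the Couchbase processors:

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processors.couchbase.CouchbaseAttributes;

public class AttributeReadSketch {
    static String documentIdOf(final FlowFile flowFile) {
        // Resolves to the literal attribute key "couchbase.doc.id"
        return flowFile.getAttribute(CouchbaseAttributes.DocId.key());
    }
}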
CouchbaseExceptionMappings.java

@@ -1,128 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.ConfigurationError;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Fatal;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.InvalidInput;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.TemporalClusterError;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.TemporalFlowFileError;
import java.util.HashMap;
import java.util.Map;
import com.couchbase.client.core.BackpressureException;
import com.couchbase.client.core.BucketClosedException;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.core.DocumentConcurrentlyModifiedException;
import com.couchbase.client.core.DocumentMutationLostException;
import com.couchbase.client.core.ReplicaNotConfiguredException;
import com.couchbase.client.core.RequestCancelledException;
import com.couchbase.client.core.ServiceNotAvailableException;
import com.couchbase.client.core.config.ConfigurationException;
import com.couchbase.client.core.endpoint.SSLException;
import com.couchbase.client.core.endpoint.kv.AuthenticationException;
import com.couchbase.client.core.env.EnvironmentException;
import com.couchbase.client.core.state.NotConnectedException;
import com.couchbase.client.java.error.BucketDoesNotExistException;
import com.couchbase.client.java.error.CannotRetryException;
import com.couchbase.client.java.error.CouchbaseOutOfMemoryException;
import com.couchbase.client.java.error.DurabilityException;
import com.couchbase.client.java.error.InvalidPasswordException;
import com.couchbase.client.java.error.RequestTooBigException;
import com.couchbase.client.java.error.TemporaryFailureException;
import com.couchbase.client.java.error.TranscodingException;
public class CouchbaseExceptionMappings {
private static final Map<Class<? extends CouchbaseException>, ErrorHandlingStrategy> mapping = new HashMap<>();
/*
* - Won't happen
* BucketAlreadyExistsException: never create a bucket
* CASMismatchException: cas-id and replace is not used yet
* DesignDocumentException: View is not used yet
* DocumentAlreadyExistsException: insert is not used yet
* DocumentDoesNotExistException: replace is not used yet
* FlushDisabledException: never call flush
* RepositoryMappingException: EntityDocument is not used
* TemporaryLockFailureException: we don't obtain locks
* ViewDoesNotExistException: View is not used yet
* NamedPreparedStatementException: N1QL is not used yet
* QueryExecutionException: N1QL is not used yet
*/
static {
/*
* ConfigurationError
*/
mapping.put(AuthenticationException.class, ConfigurationError);
mapping.put(BucketDoesNotExistException.class, ConfigurationError);
mapping.put(ConfigurationException.class, ConfigurationError);
mapping.put(InvalidPasswordException.class, ConfigurationError);
mapping.put(EnvironmentException.class, ConfigurationError);
// when Couchbase doesn't have enough replica
mapping.put(ReplicaNotConfiguredException.class, ConfigurationError);
// when a particular Service(KV, View, Query, DCP) isn't running in a cluster
mapping.put(ServiceNotAvailableException.class, ConfigurationError);
// SSL configuration error, such as key store misconfiguration.
mapping.put(SSLException.class, ConfigurationError);
/*
* InvalidInput
*/
mapping.put(RequestTooBigException.class, InvalidInput);
mapping.put(TranscodingException.class, InvalidInput);
/*
* Temporal Cluster Error
*/
mapping.put(BackpressureException.class, TemporalClusterError);
mapping.put(CouchbaseOutOfMemoryException.class, TemporalClusterError);
mapping.put(TemporaryFailureException.class, TemporalClusterError);
// occurs when a connection gets lost
mapping.put(RequestCancelledException.class, TemporalClusterError);
/*
* Temporal FlowFile Error
*/
mapping.put(DocumentConcurrentlyModifiedException.class, TemporalFlowFileError);
mapping.put(DocumentMutationLostException.class, TemporalFlowFileError);
mapping.put(DurabilityException.class, TemporalFlowFileError);
/*
* Fatal
*/
mapping.put(BucketClosedException.class, Fatal);
mapping.put(CannotRetryException.class, Fatal);
mapping.put(NotConnectedException.class, Fatal);
}
/**
* Returns a registered error handling strategy.
* @param e the CouchbaseException
* @return the registered strategy; if none is registered, Fatal is returned
*/
public static ErrorHandlingStrategy getStrategy(CouchbaseException e){
ErrorHandlingStrategy strategy = mapping.get(e.getClass());
if(strategy == null) {
// Treat unknown Exception as Fatal.
return ErrorHandlingStrategy.Fatal;
}
return strategy;
}
}

ErrorHandlingStrategy.java

@@ -1,81 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.None;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.Penalize;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Penalty.Yield;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.Failure;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.ProcessException;
import static org.apache.nifi.processors.couchbase.ErrorHandlingStrategy.Result.Retry;
public enum ErrorHandlingStrategy {
/**
* The processor configuration has to be fixed. In order not to trigger the
* failing processor frequently, the context should be yielded.
*/
ConfigurationError(ProcessException, Yield),
/**
* The input FlowFile will be sent to the failure relationship for further
* processing without penalizing. Basically, the FlowFile shouldn't be sent
* to this processor again unless the issue has been solved.
*/
InvalidInput(Failure, None),
/**
* The Couchbase cluster is in an unhealthy state. Retrying may be successful,
* but processing should be yielded for a while.
*/
TemporalClusterError(Retry, Yield),
/**
* The FlowFile was not processed successfully due to some temporal error
* related to this specific FlowFile or document. Retrying may be successful,
* but the FlowFile should be penalized for a while.
*/
TemporalFlowFileError(Retry, Penalize),
/**
* The error can't be recovered without DataFlow Manager intervention.
*/
Fatal(Retry, Yield);
private final Result result;
private final Penalty penalty;
ErrorHandlingStrategy(Result result, Penalty penalty){
this.result = result;
this.penalty = penalty;
}
public enum Result {
ProcessException, Failure, Retry
}
/**
* Indicates whether to yield processing or penalize the input FlowFile when transferring it.
*/
public enum Penalty {
Yield, Penalize, None
}
public Result result(){
return this.result;
}
public Penalty penalty(){
return this.penalty;
}
}

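Taken together with CouchbaseExceptionMappings above, strategy resolution is a plain map lookup with a Fatal fallback. A sketch, assuming the SDK's TemporaryFailureException exposes a no-argument constructor:

import com.couchbase.client.java.error.TemporaryFailureException;

import org.apache.nifi.processors.couchbase.CouchbaseExceptionMappings;
import org.apache.nifi.processors.couchbase.ErrorHandlingStrategy;

public class StrategyLookupSketch {
    public static void main(final String[] args) {
        final ErrorHandlingStrategy strategy =
                CouchbaseExceptionMappings.getStrategy(new TemporaryFailureException());
        System.out.println(strategy);           // TemporalClusterError
        System.out.println(strategy.result());  // Retry
        System.out.println(strategy.penalty()); // Yield
    }
}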
GetCouchbaseKey.java

@@ -1,235 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.nifi.annotation.behavior.SystemResourceConsideration;
import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SystemResource;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.couchbase.CouchbaseUtils;
import org.apache.nifi.couchbase.DocumentType;
import org.apache.nifi.expression.AttributeExpression;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.stream.io.StreamUtils;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.BinaryDocument;
import com.couchbase.client.java.document.Document;
import com.couchbase.client.java.document.RawJsonDocument;
import com.couchbase.client.java.error.DocumentDoesNotExistException;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.DOCUMENT_TYPE;
@Tags({"nosql", "couchbase", "database", "get"})
@InputRequirement(Requirement.INPUT_REQUIRED)
@CapabilityDescription("Get a document from Couchbase Server via Key/Value access. The ID of the document to fetch may be supplied by setting the <Document Id> property. "
+ "NOTE: if the Document Id property is not set, the contents of the FlowFile will be read to determine the Document Id, which means that the contents of the entire "
+ "FlowFile will be buffered in memory.")
@WritesAttributes({
@WritesAttribute(attribute = "couchbase.cluster", description = "Cluster where the document was retrieved from."),
@WritesAttribute(attribute = "couchbase.bucket", description = "Bucket where the document was retrieved from."),
@WritesAttribute(attribute = "couchbase.doc.id", description = "Id of the document."),
@WritesAttribute(attribute = "couchbase.doc.cas", description = "CAS of the document."),
@WritesAttribute(attribute = "couchbase.doc.expiry", description = "Expiration of the document."),
@WritesAttribute(attribute = "couchbase.exception", description = "If Couchbase related error occurs the CouchbaseException class name will be captured here.")
})
@SystemResourceConsideration(resource = SystemResource.MEMORY)
public class GetCouchbaseKey extends AbstractCouchbaseProcessor {
public static final PropertyDescriptor PUT_VALUE_TO_ATTRIBUTE = new PropertyDescriptor.Builder()
.name("put-to-attribute")
.displayName("Put Value to Attribute")
.description("If set, the retrieved value will be put into an attribute of the FlowFile instead of a the content of the FlowFile." +
" The attribute key to put to is determined by evaluating value of this property.")
.addValidator(StandardValidators.createAttributeExpressionLanguageValidator(AttributeExpression.ResultType.STRING))
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();
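// Usage note (attribute names here are illustrative): setting this property to e.g. "couchbase.value"
// stores the fetched document in that attribute; an Expression Language value such as "${attrPrefix}.value"
// is evaluated against the incoming FlowFile's attributes to determine the target attribute key.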
private volatile boolean putToAttribute = false;
@Override
protected void addSupportedProperties(final List<PropertyDescriptor> descriptors) {
descriptors.add(DOCUMENT_TYPE);
descriptors.add(DOC_ID);
descriptors.add(PUT_VALUE_TO_ATTRIBUTE);
}
@Override
protected void addSupportedRelationships(final Set<Relationship> relationships) {
relationships.add(new Relationship.Builder().name(REL_ORIGINAL.getName())
.description("The original input FlowFile is routed to this relationship" +
" when the value is retrieved from Couchbase Server and routed to 'success'.").build());
relationships.add(new Relationship.Builder().name(REL_SUCCESS.getName())
.description("Values retrieved from Couchbase Server are written as outgoing FlowFiles content" +
" or put into an attribute of the incoming FlowFile and routed to this relationship.").build());
relationships.add(new Relationship.Builder().name(REL_RETRY.getName())
.description("All FlowFiles failed to fetch from Couchbase Server but can be retried are routed to this relationship.").build());
relationships.add(new Relationship.Builder().name(REL_FAILURE.getName())
.description("All FlowFiles failed to fetch from Couchbase Server and not retry-able are routed to this relationship.").build());
}
@Override
protected Set<Relationship> filterRelationships(Set<Relationship> rels) {
// If destination is attribute, then success == original.
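// Keep REL_ORIGINAL only when the value is written to FlowFile content; when it is put into
// an attribute, the incoming FlowFile itself is routed to 'success' instead.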
return rels.stream().filter(rel -> !REL_ORIGINAL.equals(rel) || !putToAttribute).collect(Collectors.toSet());
}
@Override
public void onPropertyModified(PropertyDescriptor descriptor, String oldValue, String newValue) {
if (PUT_VALUE_TO_ATTRIBUTE.equals(descriptor)) {
putToAttribute = !isEmpty(newValue);
}
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile inFile = session.get();
if (inFile == null) {
return;
}
final long startNanos = System.nanoTime();
final ComponentLog logger = getLogger();
String docId = null;
if (context.getProperty(DOC_ID).isSet()) {
docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(inFile).getValue();
} else {
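// No Document Id property set: buffer the entire FlowFile content in memory and use it as the id.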
final byte[] content = new byte[(int) inFile.getSize()];
session.read(inFile, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
StreamUtils.fillBuffer(in, content, true);
}
});
docId = new String(content, StandardCharsets.UTF_8);
}
if (isEmpty(docId)) {
throw new ProcessException("Please check 'Document Id' setting. Couldn't get document id from " + inFile);
}
String putTargetAttr = null;
if (context.getProperty(PUT_VALUE_TO_ATTRIBUTE).isSet()) {
putTargetAttr = context.getProperty(PUT_VALUE_TO_ATTRIBUTE).evaluateAttributeExpressions(inFile).getValue();
if (isEmpty(putTargetAttr)) {
inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), "InvalidPutTargetAttributeName");
session.transfer(inFile, REL_FAILURE);
return;
}
}
try {
final Bucket bucket = openBucket(context);
final DocumentType documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
Document<?> doc = null;
// A function to write a document into outgoing FlowFile content.
OutputStreamCallback outputStreamCallback = null;
final Map<String, String> updatedAttrs = new HashMap<>();
switch (documentType) {
case Json: {
RawJsonDocument document = bucket.get(docId, RawJsonDocument.class);
if (document != null) {
outputStreamCallback = out -> {
final byte[] content = document.content().getBytes(StandardCharsets.UTF_8);
out.write(content);
updatedAttrs.put(CoreAttributes.MIME_TYPE.key(), "application/json");
};
doc = document;
}
break;
}
case Binary: {
BinaryDocument document = bucket.get(docId, BinaryDocument.class);
if (document != null) {
outputStreamCallback = out -> {
// Write to the OutputStream without copying the buffer contents onto the heap.
final ByteBuf byteBuf = document.content();
byteBuf.getBytes(byteBuf.readerIndex(), out, byteBuf.readableBytes());
byteBuf.release();
};
doc = document;
}
break;
}
}
if (doc == null) {
logger.warn("Document {} was not found in {}; routing {} to failure", new Object[] {docId, getTransitUrl(bucket, docId), inFile});
inFile = session.putAttribute(inFile, CouchbaseAttributes.Exception.key(), DocumentDoesNotExistException.class.getName());
session.transfer(inFile, REL_FAILURE);
return;
}
FlowFile outFile;
if (putToAttribute) {
outFile = inFile;
updatedAttrs.put(putTargetAttr, CouchbaseUtils.getStringContent(doc.content()));
} else {
outFile = session.create(inFile);
outFile = session.write(outFile, outputStreamCallback);
session.transfer(inFile, REL_ORIGINAL);
}
updatedAttrs.put(CouchbaseAttributes.Cluster.key(), context.getProperty(COUCHBASE_CLUSTER_SERVICE).getValue());
updatedAttrs.put(CouchbaseAttributes.Bucket.key(), bucket.name());
updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
updatedAttrs.put(CouchbaseAttributes.Expiry.key(), String.valueOf(doc.expiry()));
outFile = session.putAllAttributes(outFile, updatedAttrs);
final long fetchMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
session.getProvenanceReporter().fetch(outFile, getTransitUrl(bucket, docId), fetchMillis);
session.transfer(outFile, REL_SUCCESS);
} catch (final CouchbaseException e) {
String errMsg = String.format("Getting document %s from Couchbase Server using %s failed due to %s", docId, inFile, e);
handleCouchbaseException(context, session, logger, inFile, e, errMsg);
}
}
}

@@ -1,170 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.ByteArrayDocument;
import org.apache.nifi.annotation.behavior.SystemResourceConsideration;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.ReadsAttribute;
import org.apache.nifi.annotation.behavior.ReadsAttributes;
import org.apache.nifi.annotation.behavior.SystemResource;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.couchbase.DocumentType;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.stream.io.StreamUtils;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.java.PersistTo;
import com.couchbase.client.java.ReplicateTo;
import com.couchbase.client.java.document.Document;
import com.couchbase.client.java.document.RawJsonDocument;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.DOCUMENT_TYPE;
@Tags({"nosql", "couchbase", "database", "put"})
@CapabilityDescription("Put a document to Couchbase Server via Key/Value access.")
@InputRequirement(Requirement.INPUT_REQUIRED)
@ReadsAttributes({
@ReadsAttribute(attribute = "uuid", description = "Used as a document id if 'Document Id' is not specified"),
})
@WritesAttributes({
@WritesAttribute(attribute = "couchbase.cluster", description = "Cluster where the document was stored."),
@WritesAttribute(attribute = "couchbase.bucket", description = "Bucket where the document was stored."),
@WritesAttribute(attribute = "couchbase.doc.id", description = "Id of the document."),
@WritesAttribute(attribute = "couchbase.doc.cas", description = "CAS of the document."),
@WritesAttribute(attribute = "couchbase.doc.expiry", description = "Expiration of the document."),
@WritesAttribute(attribute = "couchbase.exception", description = "If Couchbase related error occurs the CouchbaseException class name will be captured here.")
})
@SystemResourceConsideration(resource = SystemResource.MEMORY)
public class PutCouchbaseKey extends AbstractCouchbaseProcessor {
public static final PropertyDescriptor PERSIST_TO = new PropertyDescriptor.Builder()
.name("persist-to")
.displayName("Persist To")
.description("Durability constraint about disk persistence.")
.required(true)
.allowableValues(PersistTo.values())
.defaultValue(PersistTo.NONE.toString())
.build();
public static final PropertyDescriptor REPLICATE_TO = new PropertyDescriptor.Builder()
.name("replicate-to")
.displayName("Replicate To")
.description("Durability constraint about replication.")
.required(true)
.allowableValues(ReplicateTo.values())
.defaultValue(ReplicateTo.NONE.toString())
.build();
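// PersistTo and ReplicateTo are the Couchbase Java SDK durability constraints: PersistTo requires the
// mutation to be persisted to disk on the given number of nodes, and ReplicateTo requires it to be
// replicated into memory on the given number of replicas, before the upsert is acknowledged.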
@Override
protected void addSupportedProperties(List<PropertyDescriptor> descriptors) {
descriptors.add(DOCUMENT_TYPE);
descriptors.add(DOC_ID);
descriptors.add(PERSIST_TO);
descriptors.add(REPLICATE_TO);
}
@Override
protected void addSupportedRelationships(Set<Relationship> relationships) {
relationships.add(new Relationship.Builder().name(REL_SUCCESS.getName())
.description("All FlowFiles that are written to Couchbase Server are routed to this relationship.").build());
relationships.add(new Relationship.Builder().name(REL_RETRY.getName())
.description("All FlowFiles failed to be written to Couchbase Server but can be retried are routed to this relationship.").build());
relationships.add(new Relationship.Builder().name(REL_FAILURE.getName())
.description("All FlowFiles failed to be written to Couchbase Server and not retry-able are routed to this relationship.").build());
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
final ComponentLog logger = getLogger();
FlowFile flowFile = session.get();
if (flowFile == null) {
return;
}
final byte[] content = new byte[(int) flowFile.getSize()];
session.read(flowFile, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
StreamUtils.fillBuffer(in, content, true);
}
});
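// Default to the FlowFile's uuid attribute as the document id; an explicit 'Document Id' property takes precedence.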
String docId = flowFile.getAttribute(CoreAttributes.UUID.key());
if (context.getProperty(DOC_ID).isSet()) {
docId = context.getProperty(DOC_ID).evaluateAttributeExpressions(flowFile).getValue();
}
try {
Document<?> doc = null;
final DocumentType documentType = DocumentType.valueOf(context.getProperty(DOCUMENT_TYPE).getValue());
switch (documentType) {
case Json: {
doc = RawJsonDocument.create(docId, new String(content, StandardCharsets.UTF_8));
break;
}
case Binary: {
doc = ByteArrayDocument.create(docId, content);
break;
}
}
final PersistTo persistTo = PersistTo.valueOf(context.getProperty(PERSIST_TO).getValue());
final ReplicateTo replicateTo = ReplicateTo.valueOf(context.getProperty(REPLICATE_TO).getValue());
final Bucket bucket = openBucket(context);
doc = bucket.upsert(doc, persistTo, replicateTo);
final Map<String, String> updatedAttrs = new HashMap<>();
updatedAttrs.put(CouchbaseAttributes.Cluster.key(), context.getProperty(COUCHBASE_CLUSTER_SERVICE).getValue());
updatedAttrs.put(CouchbaseAttributes.Bucket.key(), bucket.name());
updatedAttrs.put(CouchbaseAttributes.DocId.key(), docId);
updatedAttrs.put(CouchbaseAttributes.Cas.key(), String.valueOf(doc.cas()));
updatedAttrs.put(CouchbaseAttributes.Expiry.key(), String.valueOf(doc.expiry()));
flowFile = session.putAllAttributes(flowFile, updatedAttrs);
session.getProvenanceReporter().send(flowFile, getTransitUrl(bucket, docId));
session.transfer(flowFile, REL_SUCCESS);
} catch (final CouchbaseException e) {
String errMsg = String.format("Writing document %s to Couchbase Server using %s failed due to %s", docId, flowFile, e);
handleCouchbaseException(context, session, logger, flowFile, e, errMsg);
}
}
}

@@ -1,18 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.nifi.couchbase.CouchbaseClusterService
org.apache.nifi.couchbase.CouchbaseMapCacheClient
org.apache.nifi.couchbase.CouchbaseKeyValueLookupService
org.apache.nifi.couchbase.CouchbaseRecordLookupService

@@ -1,16 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.nifi.processors.couchbase.GetCouchbaseKey
org.apache.nifi.processors.couchbase.PutCouchbaseKey

@@ -1,35 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<head>
<meta charset="utf-8" />
<title>CouchbaseMapCacheClient</title>
<link rel="stylesheet" href="/nifi-docs/css/component-usage.css" type="text/css" />
</head>
<body>
<h2>CouchbaseMapCacheClient</h2>
<h3>Requirements</h3>
<h4>Couchbase Server 4.0 or higher is required for some operations using N1QL</h4>
The following cache operations require N1QL queries, so you need to deploy Couchbase Server 4.0 or higher for those operations. However, as of this writing (May 2017) only a few processors use these operations. Most cache APIs are implemented using document id lookups and should work with older versions of Couchbase Server.
In order to make N1QL work correctly you need to create a <a href="https://docs.couchbase.com/server/current/n1ql/n1ql-language-reference/createprimaryindex.html">Primary index</a> or an index covering the N1QL queries performed by CouchbaseMapCacheClient. Please refer to the Couchbase Server documentation for how to create them.
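For example, a primary index can be created with a N1QL statement like the following (the bucket name <code>myBucket</code> is illustrative):
<pre>
CREATE PRIMARY INDEX ON `myBucket`;
</pre>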
</body>
</html>

@@ -1,64 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class TestCouchbaseClusterService {
private static final String SERVICE_ID = "couchbaseClusterService";
private TestRunner testRunner;
public static class SampleProcessor extends AbstractProcessor {
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
}
}
@BeforeEach
public void init() {
System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.PutCouchbaseKey", "debug");
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.couchbase.CouchbaseClusterService", "debug");
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.couchbase.TestCouchbaseClusterService", "debug");
testRunner = TestRunners.newTestRunner(SampleProcessor.class);
testRunner.setValidateExpressionUsage(false);
}
@Test
public void testConnectionFailure() throws InitializationException {
String connectionString = "invalid-protocol://invalid-hostname";
CouchbaseClusterControllerService service = new CouchbaseClusterService();
testRunner.addControllerService(SERVICE_ID, service);
testRunner.setProperty(service, CouchbaseClusterService.CONNECTION_STRING, connectionString);
assertThrows(AssertionError.class, () -> testRunner.enableControllerService(service));
}
}

@@ -1,72 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
import com.couchbase.client.deps.io.netty.buffer.Unpooled;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.BinaryDocument;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.Serializer;
import org.apache.nifi.util.MockConfigurationContext;
import org.apache.nifi.util.MockControllerServiceInitializationContext;
import org.junit.jupiter.api.Test;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestCouchbaseMapCacheClient {
private final Serializer<String> stringSerializer = (value, output) -> output.write(value.getBytes(StandardCharsets.UTF_8));
private final Deserializer<String> stringDeserializer = input -> new String(input, StandardCharsets.UTF_8);
// TODO: Add more tests
@Test
public void testGet() throws Exception {
final CouchbaseMapCacheClient client = new CouchbaseMapCacheClient();
final CouchbaseClusterControllerService couchbaseService = mock(CouchbaseClusterControllerService.class);
final Bucket bucket = mock(Bucket.class);
final MockControllerServiceInitializationContext serviceInitializationContext
= new MockControllerServiceInitializationContext(couchbaseService, "couchbaseService");
final Map<PropertyDescriptor, String> properties = new HashMap<>();
properties.put(COUCHBASE_CLUSTER_SERVICE, "couchbaseService");
properties.put(BUCKET_NAME, "bucketA");
final ByteBuf contents = Unpooled.copiedBuffer("value".getBytes(StandardCharsets.UTF_8));
final BinaryDocument doc = BinaryDocument.create("key", contents);
when(couchbaseService.openBucket(eq("bucketA"))).thenReturn(bucket);
when(bucket.get(any(BinaryDocument.class))).thenReturn(doc);
final MockConfigurationContext context = new MockConfigurationContext(properties, serviceInitializationContext, null);
client.configure(context);
final String cacheEntry = client.get("key", stringSerializer, stringDeserializer);
assertEquals("value", cacheEntry);
}
}

@@ -1,92 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import com.couchbase.client.deps.io.netty.buffer.Unpooled;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.CouchbaseCluster;
import com.couchbase.client.java.document.BinaryDocument;
import com.couchbase.client.java.document.ByteArrayDocument;
import com.couchbase.client.java.document.JsonArrayDocument;
import com.couchbase.client.java.document.JsonBooleanDocument;
import com.couchbase.client.java.document.JsonDocument;
import com.couchbase.client.java.document.JsonDoubleDocument;
import com.couchbase.client.java.document.JsonLongDocument;
import com.couchbase.client.java.document.JsonStringDocument;
import com.couchbase.client.java.document.LegacyDocument;
import com.couchbase.client.java.document.RawJsonDocument;
import com.couchbase.client.java.document.StringDocument;
import com.couchbase.client.java.document.json.JsonArray;
import com.couchbase.client.java.document.json.JsonObject;
import com.couchbase.client.java.error.TranscodingException;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.nio.charset.StandardCharsets;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestCouchbaseUtils {
@Disabled("This test method requires a live Couchbase Server instance")
@Test
public void testDocumentTypesAndStringConversion() {
final CouchbaseCluster cluster = CouchbaseCluster.fromConnectionString("couchbase://192.168.99.100:8091");
final Bucket bucket = cluster.openBucket("b1", "b1password");
bucket.upsert(JsonDocument.create("JsonDocument", JsonObject.create().put("one", 1)));
bucket.upsert(JsonArrayDocument.create("JsonArray", JsonArray.create().add(1).add(2).add(3)));
bucket.upsert(JsonDoubleDocument.create("JsonDouble", 0.123));
bucket.upsert(JsonStringDocument.create("JsonString", "value"));
bucket.upsert(JsonBooleanDocument.create("JsonBoolean", true));
bucket.upsert(JsonLongDocument.create("JsonLong", 123L));
bucket.upsert(RawJsonDocument.create("RawJsonDocument", "value"));
bucket.upsert(StringDocument.create("StringDocument", "value"));
bucket.upsert(BinaryDocument.create("BinaryDocument", Unpooled.copiedBuffer("value".getBytes(StandardCharsets.UTF_8))));
bucket.upsert(ByteArrayDocument.create("ByteArrayDocument", "value".getBytes(StandardCharsets.UTF_8)));
final String[][] expectations = {
{"JsonDocument", "String", "{\"one\":1}"},
{"JsonArray", "String", "[1,2,3]"},
{"JsonDouble", "String", "0.123"},
{"JsonString", "String", "\"value\""},
{"JsonBoolean", "String", "true"},
{"JsonLong", "String", "123"},
{"RawJsonDocument", "String", "value"},
{"StringDocument", "String", "value"},
{"BinaryDocument", "byte[]", "value"},
{"ByteArrayDocument", "byte[]", "value"},
};
for (String[] expectation : expectations) {
final LegacyDocument document = bucket.get(LegacyDocument.create(expectation[0]));
assertEquals(expectation[1], document.content().getClass().getSimpleName());
assertEquals(expectation[2], CouchbaseUtils.getStringContent(document.content()));
}
final BinaryDocument binaryDocument = bucket.get(BinaryDocument.create("BinaryDocument"));
final String stringFromByteBuff = CouchbaseUtils.getStringContent(binaryDocument.content());
assertEquals("value", stringFromByteBuff);
TranscodingException e = assertThrows(TranscodingException.class, () -> bucket.get(BinaryDocument.create("JsonDocument")));
assertTrue(e.getMessage().contains("Flags (0x2000000) indicate non-binary document for id JsonDocument"));
}
}

@@ -1,529 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import com.couchbase.client.core.BackpressureException;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.core.ServiceNotAvailableException;
import com.couchbase.client.core.endpoint.kv.AuthenticationException;
import com.couchbase.client.core.state.NotConnectedException;
import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
import com.couchbase.client.deps.io.netty.buffer.Unpooled;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.document.BinaryDocument;
import com.couchbase.client.java.document.RawJsonDocument;
import com.couchbase.client.java.error.DocumentDoesNotExistException;
import com.couchbase.client.java.error.DurabilityException;
import com.couchbase.client.java.error.RequestTooBigException;
import org.apache.nifi.attribute.expression.language.exception.AttributeExpressionLanguageException;
import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
import org.apache.nifi.couchbase.DocumentType;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.provenance.ProvenanceEventType;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.DOCUMENT_TYPE;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_FAILURE;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_ORIGINAL;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_RETRY;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_SUCCESS;
import static org.apache.nifi.processors.couchbase.CouchbaseAttributes.Exception;
import static org.apache.nifi.processors.couchbase.GetCouchbaseKey.PUT_VALUE_TO_ATTRIBUTE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestGetCouchbaseKey {
private static final String SERVICE_ID = "couchbaseClusterService";
private TestRunner testRunner;
@BeforeEach
public void init() throws Exception {
System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.GetCouchbaseKey", "debug");
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.TestGetCouchbaseKey", "debug");
testRunner = TestRunners.newTestRunner(GetCouchbaseKey.class);
}
private void setupMockBucket(Bucket bucket) throws InitializationException {
CouchbaseClusterControllerService service = mock(CouchbaseClusterControllerService.class);
when(service.getIdentifier()).thenReturn(SERVICE_ID);
when(service.openBucket(anyString())).thenReturn(bucket);
when(bucket.name()).thenReturn("bucket-1");
testRunner.addControllerService(SERVICE_ID, service);
testRunner.enableControllerService(service);
testRunner.setProperty(COUCHBASE_CLUSTER_SERVICE, SERVICE_ID);
}
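// The cluster controller service and bucket are mocked so the processor can be exercised without a
// live Couchbase Server; each test stubs bucket.get(...) for the scenario under test.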
@Test
public void testStaticDocId() throws Exception {
String bucketName = "bucket-1";
String docId = "doc-a";
Bucket bucket = mock(Bucket.class);
String content = "{\"key\":\"value\"}";
int expiry = 100;
long cas = 200L;
when(bucket.get(docId, RawJsonDocument.class)).thenReturn(RawJsonDocument.create(docId, expiry, content, cas));
setupMockBucket(bucket);
testRunner.setProperty(BUCKET_NAME, bucketName);
testRunner.setProperty(DOC_ID, docId);
testRunner.enqueue(new byte[0]);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_ORIGINAL, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(content);
outFile.assertAttributeEquals(CouchbaseAttributes.Cluster.key(), SERVICE_ID);
outFile.assertAttributeEquals(CouchbaseAttributes.Bucket.key(), bucketName);
outFile.assertAttributeEquals(CouchbaseAttributes.DocId.key(), docId);
outFile.assertAttributeEquals(CouchbaseAttributes.Cas.key(), String.valueOf(cas));
outFile.assertAttributeEquals(CouchbaseAttributes.Expiry.key(), String.valueOf(expiry));
}
@Test
public void testDocIdExp() throws Exception {
String docIdExp = "${'someProperty'}";
String somePropertyValue = "doc-p";
Bucket bucket = mock(Bucket.class);
String content = "{\"key\":\"value\"}";
when(bucket.get(somePropertyValue, RawJsonDocument.class))
.thenReturn(RawJsonDocument.create(somePropertyValue, content));
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
byte[] inFileData = "input FlowFile data".getBytes(StandardCharsets.UTF_8);
Map<String, String> properties = new HashMap<>();
properties.put("someProperty", somePropertyValue);
testRunner.enqueue(inFileData, properties);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_ORIGINAL, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(content);
}
@Test
public void testDocIdExpWithEmptyFlowFile() throws Exception {
String docIdExp = "doc-s";
String docId = "doc-s";
Bucket bucket = mock(Bucket.class);
String content = "{\"key\":\"value\"}";
when(bucket.get(docId, RawJsonDocument.class))
.thenReturn(RawJsonDocument.create(docId, content));
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
testRunner.enqueue(new byte[0]);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_ORIGINAL, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(content);
}
@Test
public void testDocIdExpWithInvalidExpression() throws Exception {
String docIdExp = "${nonExistingFunction('doc-s')}";
String docId = "doc-s";
Bucket bucket = mock(Bucket.class);
String content = "{\"key\":\"value\"}";
when(bucket.get(docId, RawJsonDocument.class))
.thenReturn(RawJsonDocument.create(docId, content));
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
testRunner.enqueue(new byte[0]);
AssertionError e = assertThrows(AssertionError.class, () -> testRunner.run());
assertEquals(AttributeExpressionLanguageException.class, e.getCause().getClass());
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
}
@Test
public void testDocIdExpWithInvalidExpressionOnFlowFile() throws Exception {
String docIdExp = "${nonExistingFunction(someProperty)}";
Bucket bucket = mock(Bucket.class);
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
String inputFileDataStr = "input FlowFile data";
byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
Map<String, String> properties = new HashMap<>();
properties.put("someProperty", "someValue");
testRunner.enqueue(inFileData, properties);
AssertionError e = assertThrows(AssertionError.class, () -> testRunner.run());
assertEquals(AttributeExpressionLanguageException.class, e.getCause().getClass());
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
}
@Test
public void testInputFlowFileContent() throws Exception {
Bucket bucket = mock(Bucket.class);
String inFileDataStr = "doc-in";
String content = "{\"key\":\"value\"}";
when(bucket.get(inFileDataStr, RawJsonDocument.class))
.thenReturn(RawJsonDocument.create(inFileDataStr, content));
setupMockBucket(bucket);
byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_ORIGINAL, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(content);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_ORIGINAL).get(0);
orgFile.assertContentEquals(inFileDataStr);
}
@Test
public void testPutToAttribute() throws Exception {
Bucket bucket = mock(Bucket.class);
String inFileDataStr = "doc-in";
String content = "some-value";
when(bucket.get(inFileDataStr, RawJsonDocument.class))
.thenReturn(RawJsonDocument.create(inFileDataStr, content));
setupMockBucket(bucket);
byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.setProperty(PUT_VALUE_TO_ATTRIBUTE, "targetAttribute");
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
// The result is put into an attribute, so there is no need to route the input FlowFile to 'original'.
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(inFileDataStr);
outFile.assertAttributeEquals("targetAttribute", content);
assertEquals(1, testRunner.getProvenanceEvents().size());
assertEquals(ProvenanceEventType.FETCH, testRunner.getProvenanceEvents().get(0).getEventType());
}
@Test
public void testPutToAttributeNoTargetAttribute() throws Exception {
Bucket bucket = mock(Bucket.class);
String inFileDataStr = "doc-in";
String content = "some-value";
when(bucket.get(inFileDataStr, RawJsonDocument.class))
.thenReturn(RawJsonDocument.create(inFileDataStr, content));
setupMockBucket(bucket);
byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.setProperty(PUT_VALUE_TO_ATTRIBUTE, "${expressionReturningNoValue}");
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 1);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
outFile.assertContentEquals(inFileDataStr);
}
@Test
public void testBinaryDocument() throws Exception {
Bucket bucket = mock(Bucket.class);
String inFileDataStr = "doc-in";
String content = "binary";
ByteBuf buf = Unpooled.copiedBuffer(content.getBytes(StandardCharsets.UTF_8));
when(bucket.get(inFileDataStr, BinaryDocument.class))
.thenReturn(BinaryDocument.create(inFileDataStr, buf));
setupMockBucket(bucket);
byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.setProperty(DOCUMENT_TYPE, DocumentType.Binary.toString());
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_ORIGINAL, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(content);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_ORIGINAL).get(0);
orgFile.assertContentEquals(inFileDataStr);
}
@Test
public void testBinaryDocumentToAttribute() throws Exception {
Bucket bucket = mock(Bucket.class);
String inFileDataStr = "doc-in";
String content = "binary";
ByteBuf buf = Unpooled.copiedBuffer(content.getBytes(StandardCharsets.UTF_8));
when(bucket.get(inFileDataStr, BinaryDocument.class))
.thenReturn(BinaryDocument.create(inFileDataStr, buf));
setupMockBucket(bucket);
byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.setProperty(DOCUMENT_TYPE, DocumentType.Binary.toString());
testRunner.setProperty(PUT_VALUE_TO_ATTRIBUTE, "targetAttribute");
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(inFileDataStr);
outFile.assertAttributeEquals("targetAttribute", "binary");
}
@Test
public void testCouchbaseFailure() throws Exception {
Bucket bucket = mock(Bucket.class);
String inFileDataStr = "doc-in";
when(bucket.get(inFileDataStr, RawJsonDocument.class))
.thenThrow(new ServiceNotAvailableException());
setupMockBucket(bucket);
byte[] inFileData = inFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
AssertionError e = assertThrows(AssertionError.class, () -> testRunner.run());
assertEquals(ProcessException.class, e.getCause().getClass());
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
}
@Test
public void testCouchbaseConfigurationError() throws Exception {
String docIdExp = "doc-c";
Bucket bucket = mock(Bucket.class);
when(bucket.get(docIdExp, RawJsonDocument.class))
.thenThrow(new AuthenticationException());
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
String inputFileDataStr = "input FlowFile data";
byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
AssertionError e = assertThrows(AssertionError.class, () -> testRunner.run());
assertEquals(ProcessException.class, e.getCause().getClass());
assertEquals(AuthenticationException.class, e.getCause().getCause().getClass());
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
}
@Test
public void testCouchbaseInvalidInputError() throws Exception {
String docIdExp = "doc-c";
Bucket bucket = mock(Bucket.class);
CouchbaseException exception = new RequestTooBigException();
when(bucket.get(docIdExp, RawJsonDocument.class))
.thenThrow(exception);
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
String inputFileDataStr = "input FlowFile data";
byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 1);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
orgFile.assertContentEquals(inputFileDataStr);
orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
}
@Test
public void testCouchbaseTempClusterError() throws Exception {
String docIdExp = "doc-c";
Bucket bucket = mock(Bucket.class);
CouchbaseException exception = new BackpressureException();
when(bucket.get(docIdExp, RawJsonDocument.class))
.thenThrow(exception);
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
String inputFileDataStr = "input FlowFile data";
byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 1);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
orgFile.assertContentEquals(inputFileDataStr);
orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
}
@Test
public void testCouchbaseTempFlowFileError() throws Exception {
String docIdExp = "doc-c";
Bucket bucket = mock(Bucket.class);
// There is currently no suitable CouchbaseException for a temporary FlowFile error.
CouchbaseException exception = new DurabilityException();
when(bucket.get(docIdExp, RawJsonDocument.class))
.thenThrow(exception);
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
String inputFileDataStr = "input FlowFile data";
byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 1);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
orgFile.assertContentEquals(inputFileDataStr);
orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
assertTrue(orgFile.isPenalized());
}
@Test
public void testCouchbaseFatalError() throws Exception {
String docIdExp = "doc-c";
Bucket bucket = mock(Bucket.class);
CouchbaseException exception = new NotConnectedException();
when(bucket.get(docIdExp, RawJsonDocument.class))
.thenThrow(exception);
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
String inputFileDataStr = "input FlowFile data";
byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 1);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
orgFile.assertContentEquals(inputFileDataStr);
orgFile.assertAttributeEquals(Exception.key(), exception.getClass().getName());
}
@Test
public void testDocumentNotFound() throws Exception {
String docIdExp = "doc-n";
Bucket bucket = mock(Bucket.class);
when(bucket.get(docIdExp, RawJsonDocument.class))
.thenReturn(null);
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
String inputFileDataStr = "input FlowFile data";
byte[] inFileData = inputFileDataStr.getBytes(StandardCharsets.UTF_8);
testRunner.enqueue(inFileData);
testRunner.run();
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_ORIGINAL, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 1);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_FAILURE).get(0);
orgFile.assertContentEquals(inputFileDataStr);
orgFile.assertAttributeEquals(Exception.key(), DocumentDoesNotExistException.class.getName());
}
}

@@ -1,333 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.couchbase;
import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.core.ServiceNotAvailableException;
import com.couchbase.client.deps.io.netty.buffer.Unpooled;
import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.PersistTo;
import com.couchbase.client.java.ReplicateTo;
import com.couchbase.client.java.document.ByteArrayDocument;
import com.couchbase.client.java.document.RawJsonDocument;
import com.couchbase.client.java.error.DurabilityException;
import org.apache.nifi.attribute.expression.language.exception.AttributeExpressionLanguageException;
import org.apache.nifi.couchbase.CouchbaseClusterControllerService;
import org.apache.nifi.couchbase.DocumentType;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE;
import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.DOCUMENT_TYPE;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.DOC_ID;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_FAILURE;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_RETRY;
import static org.apache.nifi.processors.couchbase.AbstractCouchbaseProcessor.REL_SUCCESS;
import static org.apache.nifi.processors.couchbase.CouchbaseAttributes.Exception;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TestPutCouchbaseKey {
private static final String SERVICE_ID = "couchbaseClusterService";
private TestRunner testRunner;
@BeforeEach
public void init() throws Exception {
System.setProperty("org.slf4j.simpleLogger.defaultLogLevel", "info");
System.setProperty("org.slf4j.simpleLogger.showDateTime", "true");
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.PutCouchbaseKey", "debug");
System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.processors.couchbase.TestPutCouchbaseKey", "debug");
testRunner = TestRunners.newTestRunner(PutCouchbaseKey.class);
}
private void setupMockBucket(Bucket bucket) throws InitializationException {
CouchbaseClusterControllerService service = mock(CouchbaseClusterControllerService.class);
when(service.getIdentifier()).thenReturn(SERVICE_ID);
when(service.openBucket(anyString())).thenReturn(bucket);
when(bucket.name()).thenReturn("bucket-1");
testRunner.addControllerService(SERVICE_ID, service);
testRunner.enableControllerService(service);
testRunner.setProperty(COUCHBASE_CLUSTER_SERVICE, SERVICE_ID);
}
@Test
public void testStaticDocId() throws Exception {
String bucketName = "bucket-1";
String docId = "doc-a";
int expiry = 100;
long cas = 200L;
String inFileData = "{\"key\":\"value\"}";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
.thenReturn(RawJsonDocument.create(docId, expiry, inFileData, cas));
setupMockBucket(bucket);
testRunner.enqueue(inFileDataBytes);
testRunner.setProperty(BUCKET_NAME, bucketName);
testRunner.setProperty(DOC_ID, docId);
testRunner.run();
verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(inFileData);
outFile.assertAttributeEquals(CouchbaseAttributes.Cluster.key(), SERVICE_ID);
outFile.assertAttributeEquals(CouchbaseAttributes.Bucket.key(), bucketName);
outFile.assertAttributeEquals(CouchbaseAttributes.DocId.key(), docId);
outFile.assertAttributeEquals(CouchbaseAttributes.Cas.key(), String.valueOf(cas));
outFile.assertAttributeEquals(CouchbaseAttributes.Expiry.key(), String.valueOf(expiry));
}
@Test
public void testBinaryDoc() throws Exception {
String bucketName = "bucket-1";
String docId = "doc-a";
int expiry = 100;
long cas = 200L;
String inFileData = "12345";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
when(bucket.upsert(any(ByteArrayDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
.thenReturn(ByteArrayDocument.create(docId, expiry, Unpooled.copiedBuffer(inFileData.getBytes(StandardCharsets.UTF_8)).array(), cas));
setupMockBucket(bucket);
testRunner.enqueue(inFileDataBytes);
testRunner.setProperty(BUCKET_NAME, bucketName);
testRunner.setProperty(DOC_ID, docId);
testRunner.setProperty(DOCUMENT_TYPE, DocumentType.Binary.name());
testRunner.run();
verify(bucket, times(1)).upsert(any(ByteArrayDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(inFileData);
outFile.assertAttributeEquals(CouchbaseAttributes.Cluster.key(), SERVICE_ID);
outFile.assertAttributeEquals(CouchbaseAttributes.Bucket.key(), bucketName);
outFile.assertAttributeEquals(CouchbaseAttributes.DocId.key(), docId);
outFile.assertAttributeEquals(CouchbaseAttributes.Cas.key(), String.valueOf(cas));
outFile.assertAttributeEquals(CouchbaseAttributes.Expiry.key(), String.valueOf(expiry));
}
@Test
public void testDurabilityConstraint() throws Exception {
String docId = "doc-a";
String inFileData = "{\"key\":\"value\"}";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.MASTER), eq(ReplicateTo.ONE)))
.thenReturn(RawJsonDocument.create(docId, inFileData));
setupMockBucket(bucket);
testRunner.enqueue(inFileDataBytes);
testRunner.setProperty(DOC_ID, docId);
testRunner.setProperty(PutCouchbaseKey.PERSIST_TO, PersistTo.MASTER.toString());
testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
testRunner.run();
verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.MASTER), eq(ReplicateTo.ONE));
testRunner.assertAllFlowFilesTransferred(REL_SUCCESS);
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(inFileData);
}
@Test
public void testDocIdExp() throws Exception {
String docIdExp = "${'someProperty'}";
String somePropertyValue = "doc-p";
String inFileData = "{\"key\":\"value\"}";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
.thenReturn(RawJsonDocument.create(somePropertyValue, inFileData));
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
Map<String, String> properties = new HashMap<>();
properties.put("someProperty", somePropertyValue);
testRunner.enqueue(inFileDataBytes, properties);
testRunner.run();
ArgumentCaptor<RawJsonDocument> capture = ArgumentCaptor.forClass(RawJsonDocument.class);
verify(bucket, times(1)).upsert(capture.capture(), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
assertEquals(somePropertyValue, capture.getValue().id());
assertEquals(inFileData, capture.getValue().content());
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(inFileData);
}
@Test
public void testInvalidDocIdExp() throws Exception {
String docIdExp = "${invalid_function(someProperty)}";
String somePropertyValue = "doc-p";
String inFileData = "{\"key\":\"value\"}";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
.thenReturn(RawJsonDocument.create(somePropertyValue, inFileData));
setupMockBucket(bucket);
testRunner.setProperty(DOC_ID, docIdExp);
Map<String, String> properties = new HashMap<>();
properties.put("someProperty", somePropertyValue);
testRunner.enqueue(inFileDataBytes, properties);
AssertionError e = assertThrows(AssertionError.class, () -> testRunner.run());
assertEquals(AttributeExpressionLanguageException.class, e.getCause().getClass());
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
}
@Test
public void testInputFlowFileUuid() throws Exception {
String uuid = "00029362-5106-40e8-b8a9-bf2cecfbc0d7";
String inFileData = "{\"key\":\"value\"}";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.NONE)))
.thenReturn(RawJsonDocument.create(uuid, inFileData));
setupMockBucket(bucket);
Map<String, String> properties = new HashMap<>();
properties.put(CoreAttributes.UUID.key(), uuid);
testRunner.enqueue(inFileDataBytes, properties);
testRunner.run();
ArgumentCaptor<RawJsonDocument> capture = ArgumentCaptor.forClass(RawJsonDocument.class);
verify(bucket, times(1)).upsert(capture.capture(), eq(PersistTo.NONE), eq(ReplicateTo.NONE));
assertEquals(inFileData, capture.getValue().content());
testRunner.assertTransferCount(REL_SUCCESS, 1);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile outFile = testRunner.getFlowFilesForRelationship(REL_SUCCESS).get(0);
outFile.assertContentEquals(inFileData);
}
@Test
public void testCouchbaseFailure() throws Exception {
String docId = "doc-a";
String inFileData = "{\"key\":\"value\"}";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE)))
.thenThrow(new ServiceNotAvailableException());
setupMockBucket(bucket);
testRunner.enqueue(inFileDataBytes);
testRunner.setProperty(DOC_ID, docId);
testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
AssertionError e = assertThrows(AssertionError.class, () -> testRunner.run());
assertEquals(ProcessException.class, e.getCause().getClass());
verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE));
testRunner.assertAllFlowFilesTransferred(REL_FAILURE);
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_RETRY, 0);
testRunner.assertTransferCount(REL_FAILURE, 1);
}
@Test
public void testCouchbaseTempFlowFileError() throws Exception {
String docId = "doc-a";
String inFileData = "{\"key\":\"value\"}";
byte[] inFileDataBytes = inFileData.getBytes(StandardCharsets.UTF_8);
Bucket bucket = mock(Bucket.class);
CouchbaseException exception = new DurabilityException();
when(bucket.upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE)))
.thenThrow(exception);
setupMockBucket(bucket);
testRunner.enqueue(inFileDataBytes);
testRunner.setProperty(DOC_ID, docId);
testRunner.setProperty(PutCouchbaseKey.REPLICATE_TO, ReplicateTo.ONE.toString());
testRunner.run();
verify(bucket, times(1)).upsert(any(RawJsonDocument.class), eq(PersistTo.NONE), eq(ReplicateTo.ONE));
testRunner.assertTransferCount(REL_SUCCESS, 0);
testRunner.assertTransferCount(REL_RETRY, 1);
testRunner.assertTransferCount(REL_FAILURE, 0);
MockFlowFile orgFile = testRunner.getFlowFilesForRelationship(REL_RETRY).get(0);
orgFile.assertContentEquals(inFileData);
orgFile.assertAttributeEquals(CouchbaseAttributes.Exception.key(), exception.getClass().getName());
}
}
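
The two failure tests above exercise opposite outcomes: a fatal ServiceNotAvailableException lands on the failure relationship, while a temporary DurabilityException lands on retry. A minimal sketch of that routing decision follows; the helper class and method names are hypothetical, not the processor's actual implementation, and only the exception types used in the tests are mapped.

import com.couchbase.client.core.CouchbaseException;
import com.couchbase.client.java.error.DurabilityException;
import com.couchbase.client.java.error.TemporaryFailureException;

// Hypothetical helper: maps a Couchbase exception to a relationship name.
final class CouchbaseErrorRouting {
    static String relationshipFor(final CouchbaseException e) {
        // Transient conditions may succeed on a later attempt, so retry the FlowFile.
        if (e instanceof DurabilityException || e instanceof TemporaryFailureException) {
            return "retry";
        }
        // Everything else (e.g. ServiceNotAvailableException) is treated as fatal.
        return "failure";
    }
}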

View File

@ -1,41 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-bundle</artifactId>
<version>2.0.0-SNAPSHOT</version>
</parent>
<artifactId>nifi-couchbase-services-api-nar</artifactId>
<packaging>nar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-standard-services-api-nar</artifactId>
<version>2.0.0-SNAPSHOT</version>
<type>nar</type>
</dependency>
<dependency>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-services-api</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
</dependencies>
</project>

View File

@ -1,21 +0,0 @@
nifi-couchbase-services-api-nar
Copyright 2014-2024 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
******************
Apache Software License v2
******************
The following binary components are provided under the Apache Software License v2
(ASLv2) Couchbase Java SDK
The following NOTICE information applies:
Couchbase Java SDK
Copyright 2014 Couchbase, Inc.
(ASLv2) RxJava
The following NOTICE information applies:
RxJava
Copyright 2012 Netflix, Inc.

View File

@ -1,35 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-couchbase-bundle</artifactId>
<version>2.0.0-SNAPSHOT</version>
</parent>
<artifactId>nifi-couchbase-services-api</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.couchbase.client</groupId>
<artifactId>java-client</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -1,36 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import org.apache.nifi.controller.ControllerService;
import com.couchbase.client.java.Bucket;
/**
* Provides a connection to a Couchbase Server cluster for use throughout a
* NiFi data flow.
*/
public interface CouchbaseClusterControllerService extends ControllerService {
/**
* Open a bucket connection.
* @param bucketName the bucket name to access
* @return a connected bucket instance
*/
Bucket openBucket(String bucketName);
}
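
A minimal usage sketch follows, assuming a processor that exposes the COUCHBASE_CLUSTER_SERVICE descriptor from CouchbaseConfigurationProperties (shown later in this commit); the helper class and method name are illustrative.

import com.couchbase.client.java.Bucket;
import org.apache.nifi.processor.ProcessContext;

// Illustrative only: resolves the configured controller service and opens a bucket.
final class CouchbaseBucketSupport {
    static Bucket openConfiguredBucket(final ProcessContext context, final String bucketName) {
        final CouchbaseClusterControllerService clusterService = context
                .getProperty(CouchbaseConfigurationProperties.COUCHBASE_CLUSTER_SERVICE)
                .asControllerService(CouchbaseClusterControllerService.class);
        return clusterService.openBucket(bucketName);
    }
}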

View File

@ -1,60 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.Validator;
import org.apache.nifi.expression.ExpressionLanguageScope;
public class CouchbaseConfigurationProperties {
public static final PropertyDescriptor COUCHBASE_CLUSTER_SERVICE = new PropertyDescriptor.Builder()
.name("cluster-controller-service")
.displayName("Couchbase Cluster Controller Service")
.description("A Couchbase Cluster Controller Service which manages connections to a Couchbase cluster.")
.required(true)
.identifiesControllerService(CouchbaseClusterControllerService.class)
.build();
public static final PropertyDescriptor BUCKET_NAME = new PropertyDescriptor.Builder()
.name("bucket-name")
.displayName("Bucket Name")
.description("The name of bucket to access.")
.required(true)
.addValidator(Validator.VALID)
.defaultValue("default")
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.build();
public static final PropertyDescriptor DOCUMENT_TYPE = new PropertyDescriptor.Builder()
.name("document-type")
.displayName("Document Type")
.description("The type of contents.")
.required(true)
.allowableValues(DocumentType.values())
.defaultValue(DocumentType.Json.toString())
.build();
public static final PropertyDescriptor LOOKUP_SUB_DOC_PATH = new PropertyDescriptor.Builder()
.name("lookup-sub-doc-path")
.displayName("Lookup Sub-Document Path")
.description("The Sub-Document lookup path within the target JSON document.")
.required(false)
.addValidator(Validator.VALID)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.build();
}
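
A hedged sketch of how a processor might consume these shared descriptors; the helper class is hypothetical. Since BUCKET_NAME supports environment-scoped Expression Language, it should be evaluated before use.

import org.apache.nifi.processor.ProcessContext;

import static org.apache.nifi.couchbase.CouchbaseConfigurationProperties.BUCKET_NAME;

// Illustrative only: reads the bucket name, evaluating Expression Language first.
final class BucketNameResolver {
    static String resolveBucketName(final ProcessContext context) {
        return context.getProperty(BUCKET_NAME).evaluateAttributeExpressions().getValue();
    }
}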

View File

@ -1,36 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.couchbase;
/**
* Supported Couchbase document types.
*
* In order to handle a variety of document classes such as JsonDocument,
* JsonLongDocument or JsonStringDocument, Couchbase processors use
* RawJsonDocument for the Json type.
*
* The distinction between Json and Binary exists because BinaryDocument doesn't
* set the JSON flag when it is stored on Couchbase Server, even if the content
* byte array represents a JSON string, so it can't be retrieved as a JSON document.
*/
public enum DocumentType {
Json,
Binary
}
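
To make the Json/Binary distinction concrete, here is an illustrative mapping from DocumentType to the SDK 2.x document classes used by the tests earlier in this commit; the helper class and method are hypothetical.

import com.couchbase.client.java.document.ByteArrayDocument;
import com.couchbase.client.java.document.Document;
import com.couchbase.client.java.document.RawJsonDocument;

import java.nio.charset.StandardCharsets;

// Illustrative only: pairs DocumentType.Json with RawJsonDocument (JSON flag set)
// and DocumentType.Binary with ByteArrayDocument (no JSON flag).
final class DocumentFactory {
    static Document<?> toDocument(final DocumentType type, final String id, final byte[] content) {
        if (type == DocumentType.Json) {
            return RawJsonDocument.create(id, new String(content, StandardCharsets.UTF_8));
        }
        return ByteArrayDocument.create(id, content);
    }
}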

View File

@ -1,49 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.nifi</groupId>
<artifactId>nifi-standard-services-api-bom</artifactId>
<version>2.0.0-SNAPSHOT</version>
<relativePath>../nifi-standard-services-api-bom</relativePath>
</parent>
<artifactId>nifi-couchbase-bundle</artifactId>
<packaging>pom</packaging>
<modules>
<module>nifi-couchbase-services-api</module>
<module>nifi-couchbase-services-api-nar</module>
<module>nifi-couchbase-processors</module>
<module>nifi-couchbase-nar</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.couchbase.client</groupId>
<artifactId>java-client</artifactId>
<version>2.7.23</version>
</dependency>
<dependency>
<groupId>com.couchbase.client</groupId>
<artifactId>core-io</artifactId>
<version>1.7.24</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

View File

@ -45,7 +45,6 @@
<module>nifi-asana-bundle</module>
<module>nifi-media-bundle</module>
<module>nifi-avro-bundle</module>
<module>nifi-couchbase-bundle</module>
<module>nifi-azure-bundle</module>
<module>nifi-ldap-iaa-providers-bundle</module>
<module>nifi-kerberos-iaa-providers-bundle</module>