NIFI-1055: Fixed checkstyle violations

Mark Payne 2015-10-23 09:59:24 -04:00
parent 5d90c9be07
commit 0fc5d30461
5 changed files with 140 additions and 138 deletions

ConvertAvroToJSON.java

@@ -50,7 +50,7 @@ import org.apache.nifi.processor.io.StreamCallback;
@SideEffectFree
@SupportsBatching
@Tags({ "json", "avro", "binary" })
@Tags({"json", "avro", "binary"})
@CapabilityDescription("Converts a Binary Avro record into a JSON object. This processor provides a direct mapping of an Avro field to a JSON field, such "
+ "that the resulting JSON will have the same hierarchical structure as the Avro document. Note that the Avro schema information will be lost, as this "
+ "is not a translation from binary Avro to JSON formatted Avro. The output JSON is encoded the UTF-8 encoding. If an incoming FlowFile contains a stream of "
@@ -60,10 +60,10 @@ public class ConvertAvroToJSON extends AbstractProcessor {
protected static final String CONTAINER_ARRAY = "array";
protected static final String CONTAINER_NONE = "none";
static final PropertyDescriptor CONTAINER_OPTIONS
= new PropertyDescriptor.Builder()
static final PropertyDescriptor CONTAINER_OPTIONS = new PropertyDescriptor.Builder()
.name("JSON container options")
.description("Determines how stream of records is exposed: either as a sequence of single Objects (" + CONTAINER_NONE + ") (i.e. writing every Object to a new line), or as an array of Objects (" + CONTAINER_ARRAY + ").")
.description("Determines how stream of records is exposed: either as a sequence of single Objects (" + CONTAINER_NONE
+ ") (i.e. writing every Object to a new line), or as an array of Objects (" + CONTAINER_ARRAY + ").")
.allowableValues(CONTAINER_NONE, CONTAINER_ARRAY)
.required(true)
.defaultValue(CONTAINER_ARRAY)
@@ -79,7 +79,6 @@ public class ConvertAvroToJSON extends AbstractProcessor {
.build();
private List<PropertyDescriptor> properties;
@Override
@@ -89,12 +88,13 @@ public class ConvertAvroToJSON extends AbstractProcessor {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(CONTAINER_OPTIONS);
this.properties = Collections.unmodifiableList(properties);
}
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
}
@Override
public Set<Relationship> getRelationships() {
final Set<Relationship> rels = new HashSet<>();

TestCSVToAvroProcessor.java

@@ -58,7 +58,6 @@ public class TestCSVToAvroProcessor {
/**
* Basic test for tab separated files, similar to #test
* @throws IOException
*/
@Test
public void testTabSeparatedConversion() throws IOException {

ListenHTTP.java

@@ -31,10 +31,12 @@ import java.util.regex.Pattern;
import javax.servlet.Servlet;
import javax.ws.rs.Path;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
import org.apache.nifi.stream.io.StreamThrottler;
import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
@@ -42,15 +44,12 @@ import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.standard.servlets.ContentAcknowledgmentServlet;
import org.apache.nifi.processors.standard.servlets.ListenHTTPServlet;
import org.apache.nifi.ssl.SSLContextService;
import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
import org.apache.nifi.stream.io.StreamThrottler;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
@@ -173,7 +172,7 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
toShutdown.stop();
toShutdown.destroy();
} catch (final Exception ex) {
getLogger().warn("unable to cleanly shutdown embedded server due to {}", new Object[]{ex});
getLogger().warn("unable to cleanly shutdown embedded server due to {}", new Object[] {ex});
this.server = null;
}
}
@@ -235,18 +234,17 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
connector.setPort(port);
// add the connector to the server
server.setConnectors(new Connector[]{connector});
server.setConnectors(new Connector[] {connector});
final ServletContextHandler contextHandler = new ServletContextHandler(server, "/", true, (keystorePath != null));
for (final Class<? extends Servlet> cls : getServerClasses()) {
final Path path = cls.getAnnotation(Path.class);
// Note: servlets must have a path annotation - this will NPE otherwise
// also, servlets other than ListenHttpServlet must have a path starting with /
if(basePath.isEmpty() && !path.value().isEmpty()){
if (basePath.isEmpty() && !path.value().isEmpty()) {
// Note: this is to handle the condition of an empty uri, otherwise pathSpec would start with //
contextHandler.addServlet(cls, path.value());
}
else{
} else {
contextHandler.addServlet(cls, "/" + basePath + path.value());
}
}
@@ -304,7 +302,7 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
for (final String id : findOldFlowFileIds(context)) {
final FlowFileEntryTimeWrapper wrapper = flowFileMap.remove(id);
if (wrapper != null) {
getLogger().warn("failed to received acknowledgment for HOLD with ID {}; rolling back session", new Object[]{id});
getLogger().warn("failed to received acknowledgment for HOLD with ID {}; rolling back session", new Object[] {id});
wrapper.session.rollback();
}
}

PutDistributedMapCache.java

@@ -16,6 +16,16 @@
*/
package org.apache.nifi.processors.standard;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.SupportsBatching;
@@ -33,16 +43,14 @@ import org.apache.nifi.distributed.cache.client.exception.SerializationException
import org.apache.nifi.expression.AttributeExpression.ResultType;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ProcessorLog;
import org.apache.nifi.processor.*;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.*;
@EventDriven
@SupportsBatching
@Tags({"map", "cache", "put", "distributed"})
@@ -207,7 +215,7 @@ public class PutDistributedMapCache extends AbstractProcessor {
} catch (final IOException e) {
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
logger.error("Unable to communicate with cache when processing {} due to {}", new Object[]{flowFile, e});
logger.error("Unable to communicate with cache when processing {} due to {}", new Object[] {flowFile, e});
}
}

TestPutDistributedMapCache.java

@@ -16,6 +16,14 @@
*/
package org.apache.nifi.processors.standard;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
@@ -26,22 +34,11 @@ import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static org.junit.Assert.assertEquals;
public class TestPutDistributedMapCache {
private TestRunner runner;
private MockCacheClient service;
private PutDistributedMapCache processor;
@Before
public void setup() throws InitializationException {
@@ -57,7 +54,7 @@ public class TestPutDistributedMapCache {
public void testNoCacheKey() throws InitializationException {
runner.setProperty(PutDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${caheKeyAttribute}");
runner.enqueue(new byte[]{});
runner.enqueue(new byte[] {});
runner.run();
@@ -99,7 +96,7 @@ public class TestPutDistributedMapCache {
props.put("caheKeyAttribute", "2");
// flow file without content
runner.enqueue(new byte[]{}, props);
runner.enqueue(new byte[] {}, props);
runner.run();
@@ -171,7 +168,7 @@ public class TestPutDistributedMapCache {
runner.clearTransferState();
//we expect that the cache entry is replaced
// we expect that the cache entry is replaced
value = service.get("replaceme", new PutDistributedMapCache.StringSerializer(), new PutDistributedMapCache.CacheValueDeserializer());
assertEquals(replaced, new String(value, "UTF-8"));
}
@@ -215,7 +212,7 @@ public class TestPutDistributedMapCache {
runner.clearTransferState();
//we expect that the cache entry is NOT replaced
// we expect that the cache entry is NOT replaced
value = service.get("replaceme", new PutDistributedMapCache.StringSerializer(), new PutDistributedMapCache.CacheValueDeserializer());
assertEquals(original, new String(value, "UTF-8"));
}
@@ -225,7 +222,7 @@ public class TestPutDistributedMapCache {
private boolean failOnCalls = false;
private void verifyNotFail() throws IOException {
if ( failOnCalls ) {
if (failOnCalls) {
throw new IOException("Could not call to remote service because Unit Test marked service unavailable");
}
}