NIFI-9881 Refactored the JSON services to use Jackson 2

This closes #5934

Signed-off-by: David Handermann <exceptionfactory@apache.org>
Mike Thomsen, 2022-04-06 09:18:13 -04:00, committed by exceptionfactory
parent 74a4104259
commit fdca3dd6b0
9 changed files with 39 additions and 39 deletions

@@ -17,6 +17,13 @@
 package org.apache.nifi.json;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.serialization.MalformedRecordException;
 import org.apache.nifi.serialization.RecordReader;
@@ -31,13 +38,6 @@ import org.apache.nifi.serialization.record.type.ChoiceDataType;
 import org.apache.nifi.serialization.record.type.MapDataType;
 import org.apache.nifi.serialization.record.type.RecordDataType;
 import org.apache.nifi.serialization.record.util.DataTypeUtils;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonToken;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ArrayNode;
 import java.io.IOException;
 import java.io.InputStream;
@@ -136,19 +136,19 @@ public abstract class AbstractJsonRowRecordReader implements RecordReader {
         }
         if (fieldNode.isNumber()) {
-            return fieldNode.getNumberValue();
+            return fieldNode.numberValue();
         }
         if (fieldNode.isBinary()) {
-            return fieldNode.getBinaryValue();
+            return fieldNode.binaryValue();
         }
         if (fieldNode.isBoolean()) {
-            return fieldNode.getBooleanValue();
+            return fieldNode.booleanValue();
         }
         if (fieldNode.isTextual()) {
-            final String textValue = fieldNode.getTextValue();
+            final String textValue = fieldNode.textValue();
             if (dataType == null) {
                 return textValue;
             }
@@ -235,7 +235,7 @@ public abstract class AbstractJsonRowRecordReader implements RecordReader {
         final Map<String, Object> mapValue = new HashMap<>();
-        final Iterator<Map.Entry<String, JsonNode>> fieldItr = fieldNode.getFields();
+        final Iterator<Map.Entry<String, JsonNode>> fieldItr = fieldNode.fields();
         while (fieldItr.hasNext()) {
             final Map.Entry<String, JsonNode> entry = fieldItr.next();
             final String elementName = entry.getKey();
@@ -299,7 +299,7 @@ public abstract class AbstractJsonRowRecordReader implements RecordReader {
     }
     private Record createRecordFromRawValue(final JsonNode fieldNode, final RecordSchema childSchema) throws IOException {
-        final Iterator<String> fieldNames = fieldNode.getFieldNames();
+        final Iterator<String> fieldNames = fieldNode.fieldNames();
         final Map<String, Object> childValues = new HashMap<>();
         while (fieldNames.hasNext()) {
             final String childFieldName = fieldNames.next();
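
The hunks above are representative of the whole commit: the org.codehaus.jackson imports are swapped for their com.fasterxml.jackson equivalents, and the Jackson 1 accessors lose their get prefix (getTextValue() becomes textValue(), getFields() becomes fields(), and so on). A minimal standalone sketch of the renamed accessors; the class name and sample JSON are illustrative and not taken from the NiFi codebase:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.util.Iterator;
    import java.util.Map;

    public class JacksonAccessorSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical payload, for illustration only
            final JsonNode root = new ObjectMapper().readTree("{\"name\":\"apple\",\"count\":3,\"ripe\":true}");

            System.out.println(root.get("name").textValue());     // Jackson 1: getTextValue()
            System.out.println(root.get("count").numberValue());  // Jackson 1: getNumberValue()
            System.out.println(root.get("ripe").booleanValue());  // Jackson 1: getBooleanValue()

            // Jackson 1: getFields(); Jackson 2 drops the prefix
            final Iterator<Map.Entry<String, JsonNode>> fields = root.fields();
            while (fields.hasNext()) {
                final Map.Entry<String, JsonNode> entry = fields.next();
                System.out.println(entry.getKey() + " -> " + entry.getValue());
            }
        }
    }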

@@ -17,6 +17,7 @@
 package org.apache.nifi.json;
+import com.fasterxml.jackson.databind.JsonNode;
 import com.jayway.jsonpath.JsonPath;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
@@ -33,8 +34,8 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.schema.access.SchemaAccessStrategy;
 import org.apache.nifi.schema.access.SchemaNotFoundException;
-import org.apache.nifi.schema.inference.SchemaInferenceEngine;
 import org.apache.nifi.schema.inference.RecordSourceFactory;
+import org.apache.nifi.schema.inference.SchemaInferenceEngine;
 import org.apache.nifi.schema.inference.SchemaInferenceUtil;
 import org.apache.nifi.schema.inference.TimeValueInference;
 import org.apache.nifi.schemaregistry.services.SchemaRegistry;
@@ -44,7 +45,6 @@ import org.apache.nifi.serialization.RecordReader;
 import org.apache.nifi.serialization.RecordReaderFactory;
 import org.apache.nifi.serialization.SchemaRegistryService;
 import org.apache.nifi.serialization.record.RecordSchema;
-import org.codehaus.jackson.JsonNode;
 import java.io.IOException;
 import java.io.InputStream;
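
This file and several of the ones below only move imports; the method bodies are untouched. For reference, the package-level mapping applied throughout the commit (streaming types move to jackson-core, tree and mapper types to jackson-databind), assembled from the removed and added import lines in this diff:

    org.codehaus.jackson.JsonFactory                ->  com.fasterxml.jackson.core.JsonFactory
    org.codehaus.jackson.JsonParseException         ->  com.fasterxml.jackson.core.JsonParseException
    org.codehaus.jackson.JsonParser                 ->  com.fasterxml.jackson.core.JsonParser
    org.codehaus.jackson.JsonGenerator              ->  com.fasterxml.jackson.core.JsonGenerator
    org.codehaus.jackson.JsonToken                  ->  com.fasterxml.jackson.core.JsonToken
    org.codehaus.jackson.util.MinimalPrettyPrinter  ->  com.fasterxml.jackson.core.util.MinimalPrettyPrinter
    org.codehaus.jackson.JsonNode                   ->  com.fasterxml.jackson.databind.JsonNode
    org.codehaus.jackson.map.ObjectMapper           ->  com.fasterxml.jackson.databind.ObjectMapper
    org.codehaus.jackson.node.ArrayNode             ->  com.fasterxml.jackson.databind.node.ArrayNode
    org.codehaus.jackson.node.DecimalNode           ->  com.fasterxml.jackson.databind.node.DecimalNode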

@@ -17,6 +17,7 @@
 package org.apache.nifi.json;
+import com.fasterxml.jackson.databind.JsonNode;
 import com.jayway.jsonpath.Configuration;
 import com.jayway.jsonpath.DocumentContext;
 import com.jayway.jsonpath.JsonPath;
@@ -35,7 +36,6 @@ import org.apache.nifi.serialization.record.type.ArrayDataType;
 import org.apache.nifi.serialization.record.type.RecordDataType;
 import org.apache.nifi.serialization.record.util.DataTypeUtils;
 import org.apache.nifi.serialization.record.util.IllegalTypeConversionException;
-import org.codehaus.jackson.JsonNode;
 import java.io.IOException;
 import java.io.InputStream;

@@ -16,12 +16,12 @@
  */
 package org.apache.nifi.json;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.nifi.schema.inference.RecordSource;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonToken;
-import org.codehaus.jackson.map.ObjectMapper;
 import java.io.IOException;
 import java.io.InputStream;
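
Here the streaming classes (JsonFactory, JsonParser, JsonToken) move to jackson-core. The body of this RecordSource is not shown in the diff, so the following is only a rough, self-contained sketch of the Jackson 2 streaming pattern such a source typically relies on, reading one tree per top-level array element; the class name and sample input are illustrative:

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.core.JsonToken;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    public class StreamingRecordsSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical payload: a JSON array of records
            final InputStream in = new ByteArrayInputStream(
                "[{\"id\":1},{\"id\":2}]".getBytes(StandardCharsets.UTF_8));

            final JsonFactory factory = new JsonFactory();
            factory.setCodec(new ObjectMapper());               // needed so readValueAsTree() can build JsonNodes
            final JsonParser parser = factory.createParser(in); // Jackson 1 used createJsonParser(in)

            // Step past the enclosing START_ARRAY, then emit each element as a tree
            if (parser.nextToken() == JsonToken.START_ARRAY) {
                while (parser.nextToken() == JsonToken.START_OBJECT) {
                    final JsonNode record = parser.readValueAsTree();
                    System.out.println(record.get("id").intValue());
                }
            }
            parser.close();
        }
    }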

@@ -16,14 +16,14 @@
  */
 package org.apache.nifi.json;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.DecimalNode;
 import org.apache.nifi.schema.inference.HierarchicalSchemaInference;
 import org.apache.nifi.schema.inference.TimeValueInference;
 import org.apache.nifi.serialization.record.DataType;
 import org.apache.nifi.serialization.record.RecordFieldType;
 import org.apache.nifi.serialization.record.RecordSchema;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.node.ArrayNode;
-import org.codehaus.jackson.node.DecimalNode;
 import java.math.BigDecimal;
 import java.util.Iterator;
@@ -43,7 +43,7 @@ public class JsonSchemaInference extends HierarchicalSchemaInference<JsonNode> {
     protected DataType getDataType(final JsonNode jsonNode) {
         if (jsonNode.isTextual()) {
-            final String text = jsonNode.getTextValue();
+            final String text = jsonNode.textValue();
             if (text == null) {
                 return RecordFieldType.STRING.getDataType();
             }
@@ -69,7 +69,7 @@ public class JsonSchemaInference extends HierarchicalSchemaInference<JsonNode> {
         if (jsonNode.isBigDecimal()) {
             final DecimalNode decimalNode = (DecimalNode) jsonNode;
-            final BigDecimal value = decimalNode.getDecimalValue();
+            final BigDecimal value = decimalNode.decimalValue();
             return RecordFieldType.DECIMAL.getDecimalDataType(value.precision(), value.scale());
         }
@@ -98,7 +98,7 @@ public class JsonSchemaInference extends HierarchicalSchemaInference<JsonNode> {
     @Override
     protected void forEachFieldInRecord(final JsonNode rawRecord, final BiConsumer<String, JsonNode> fieldConsumer) {
-        final Iterator<Map.Entry<String, JsonNode>> itr = rawRecord.getFields();
+        final Iterator<Map.Entry<String, JsonNode>> itr = rawRecord.fields();
         while (itr.hasNext()) {
             final Map.Entry<String, JsonNode> entry = itr.next();
             final String fieldName = entry.getKey();
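
The JsonSchemaInference changes are again accessor renames (textValue(), decimalValue(), fields()). A small sketch of the decimal branch from the second hunk, built directly on a DecimalNode so it runs without any NiFi classes; the class name and the sample value are illustrative:

    import com.fasterxml.jackson.databind.node.DecimalNode;

    import java.math.BigDecimal;

    public class DecimalInferenceSketch {
        public static void main(String[] args) {
            // In the inference engine this node would come from parsed JSON; built directly here for illustration
            final DecimalNode decimalNode = DecimalNode.valueOf(new BigDecimal("12345.6789"));

            // Jackson 1: getDecimalValue(); Jackson 2: decimalValue()
            final BigDecimal value = decimalNode.decimalValue();

            // precision=9, scale=4 -> what RecordFieldType.DECIMAL.getDecimalDataType(precision, scale) receives above
            System.out.println("precision=" + value.precision() + ", scale=" + value.scale());
        }
    }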

@@ -17,6 +17,7 @@
 package org.apache.nifi.json;
+import com.fasterxml.jackson.databind.JsonNode;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;
 import org.apache.nifi.annotation.documentation.SeeAlso;
 import org.apache.nifi.annotation.documentation.Tags;
@@ -39,7 +40,6 @@ import org.apache.nifi.serialization.RecordReader;
 import org.apache.nifi.serialization.RecordReaderFactory;
 import org.apache.nifi.serialization.SchemaRegistryService;
 import org.apache.nifi.serialization.record.RecordSchema;
-import org.codehaus.jackson.JsonNode;
 import java.io.IOException;
 import java.io.InputStream;

@@ -17,6 +17,8 @@
 package org.apache.nifi.json;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.serialization.MalformedRecordException;
 import org.apache.nifi.serialization.SimpleRecordSchema;
@@ -31,8 +33,6 @@ import org.apache.nifi.serialization.record.type.ArrayDataType;
 import org.apache.nifi.serialization.record.type.MapDataType;
 import org.apache.nifi.serialization.record.type.RecordDataType;
 import org.apache.nifi.serialization.record.util.DataTypeUtils;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.node.ArrayNode;
 import java.io.IOException;
 import java.io.InputStream;
@@ -110,7 +110,7 @@ public class JsonTreeRowRecordReader extends AbstractJsonRowRecordReader {
                 values.put(fieldName, value);
             }
         } else {
-            final Iterator<String> fieldNames = jsonNode.getFieldNames();
+            final Iterator<String> fieldNames = jsonNode.fieldNames();
             while (fieldNames.hasNext()) {
                 final String fieldName = fieldNames.next();
                 final JsonNode childNode = jsonNode.get(fieldName);
@@ -164,7 +164,7 @@ public class JsonTreeRowRecordReader extends AbstractJsonRowRecordReader {
            final DataType valueType = ((MapDataType) desiredType).getValueType();
            final Map<String, Object> map = new HashMap<>();
-           final Iterator<String> fieldNameItr = fieldNode.getFieldNames();
+           final Iterator<String> fieldNameItr = fieldNode.fieldNames();
            while (fieldNameItr.hasNext()) {
                final String childName = fieldNameItr.next();
                final JsonNode childNode = fieldNode.get(childName);
@@ -198,7 +198,7 @@ public class JsonTreeRowRecordReader extends AbstractJsonRowRecordReader {
            if (childSchema == null) {
                final List<RecordField> fields = new ArrayList<>();
-               final Iterator<String> fieldNameItr = fieldNode.getFieldNames();
+               final Iterator<String> fieldNameItr = fieldNode.fieldNames();
                while (fieldNameItr.hasNext()) {
                    fields.add(new RecordField(fieldNameItr.next(), RecordFieldType.STRING.getDataType()));
                }
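
The JsonTreeRowRecordReader hunks all replace getFieldNames() with fieldNames(). A compact sketch of that iteration pattern outside NiFi, building a plain Map from an object node; the class name and sample JSON are illustrative:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    public class FieldNamesSketch {
        public static void main(String[] args) throws Exception {
            final JsonNode fieldNode = new ObjectMapper().readTree("{\"city\":\"Lisbon\",\"zip\":\"1000-001\"}");

            final Map<String, Object> map = new HashMap<>();
            final Iterator<String> fieldNameItr = fieldNode.fieldNames(); // Jackson 1: getFieldNames()
            while (fieldNameItr.hasNext()) {
                final String childName = fieldNameItr.next();
                map.put(childName, fieldNode.get(childName).textValue());
            }
            System.out.println(map); // prints both entries (HashMap order not guaranteed)
        }
    }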

@@ -17,6 +17,10 @@
 package org.apache.nifi.json;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.record.NullSuppression;
 import org.apache.nifi.schema.access.SchemaAccessWriter;
@@ -35,10 +39,6 @@ import org.apache.nifi.serialization.record.type.ChoiceDataType;
 import org.apache.nifi.serialization.record.type.MapDataType;
 import org.apache.nifi.serialization.record.type.RecordDataType;
 import org.apache.nifi.serialization.record.util.DataTypeUtils;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.util.MinimalPrettyPrinter;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -93,7 +93,7 @@ public class WriteJsonResult extends AbstractRecordSetWriter implements RecordSe
         final JsonFactory factory = new JsonFactory();
         factory.setCodec(objectMapper);
-        this.generator = factory.createJsonGenerator(out);
+        this.generator = factory.createGenerator(out);
         if (prettyPrint) {
             generator.useDefaultPrettyPrinter();
         } else if (OutputGrouping.OUTPUT_ONELINE.equals(outputGrouping)) {
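
Besides the import moves, WriteJsonResult switches from the old factory.createJsonGenerator(out) call to Jackson 2's createGenerator(out). The OUTPUT_ONELINE branch body is not shown, so the following is only a standalone sketch of the Jackson 2 generator setup, including a newline root-value separator via MinimalPrettyPrinter (which the added import suggests, but is not confirmed by this diff); output destination and field names are illustrative:

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.ByteArrayOutputStream;

    public class GeneratorSketch {
        public static void main(String[] args) throws Exception {
            final ByteArrayOutputStream out = new ByteArrayOutputStream();

            final JsonFactory factory = new JsonFactory();
            factory.setCodec(new ObjectMapper()); // lets the generator write trees and POJOs if needed

            // Jackson 1: factory.createJsonGenerator(out); Jackson 2: createGenerator(out)
            final JsonGenerator generator = factory.createGenerator(out);

            // One record per line: separate root-level values with a newline instead of pretty printing;
            // generator.useDefaultPrettyPrinter() would give the indented form instead
            final MinimalPrettyPrinter printer = new MinimalPrettyPrinter();
            printer.setRootValueSeparator("\n");
            generator.setPrettyPrinter(printer);

            generator.writeStartObject();
            generator.writeStringField("status", "ok");
            generator.writeEndObject();
            generator.close();

            System.out.println(out);
        }
    }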

@@ -16,6 +16,7 @@
  */
 package org.apache.nifi.json;
+import com.fasterxml.jackson.databind.JsonNode;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.schema.inference.InferSchemaAccessStrategy;
 import org.apache.nifi.schema.inference.SchemaInferenceEngine;
@@ -26,7 +27,6 @@ import org.apache.nifi.serialization.record.RecordFieldType;
 import org.apache.nifi.serialization.record.RecordSchema;
 import org.apache.nifi.serialization.record.type.ChoiceDataType;
 import org.apache.nifi.serialization.record.type.RecordDataType;
-import org.codehaus.jackson.JsonNode;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;