NIFI-3948: This closes #1834. Added flush() method to RecordWriter and call it when writing a single record to OutputStream for PublishKafkaRecord. Also removed no-longer-used class WriteAvroResult

Signed-off-by: joewitt <joewitt@apache.org>
This commit is contained in:
Mark Payne (authored 2017-05-19 20:52:26 -04:00); committed by joewitt
parent 58ce52d5d6
commit c49933f03d
13 changed files with 61 additions and 69 deletions

View File

@@ -39,6 +39,11 @@ public abstract class AbstractRecordSetWriter implements RecordSetWriter {
this.out.close();
}
@Override
public void flush() throws IOException {
// Push any bytes buffered in the wrapped OutputStream through to its destination.
out.flush();
}
@Override
public WriteResult write(final RecordSet recordSet) throws IOException {
beginRecordSet();

View File

@@ -37,4 +37,11 @@ public interface RecordWriter extends Closeable {
* the mime.type attribute.
*/
String getMimeType();
/**
 * Flushes any buffered data to the underlying storage mechanism. This does
 * not close the writer; use {@link java.io.Closeable#close()} for that.
 *
 * @throws IOException if unable to write to the underlying storage mechanism
 */
void flush() throws IOException;
}

View File

@@ -65,6 +65,11 @@ public class MockRecordWriter extends AbstractControllerService implements Recor
private int recordCount = 0;
private boolean headerWritten = false;
@Override
public void flush() throws IOException {
// Mock writer delegates directly to the underlying output stream.
out.flush();
}
@Override
public WriteResult write(final RecordSet rs) throws IOException {
if (header != null && !headerWritten) {

View File

@@ -113,6 +113,7 @@ public class PublisherLease implements Closeable {
recordCount++;
baos.reset();
writer.write(record);
writer.flush();
final byte[] messageContent = baos.toByteArray();
final String key = messageKeyField == null ? null : record.getAsString(messageKeyField);

View File

@@ -60,6 +60,12 @@ public class MockRecordWriter extends AbstractControllerService implements Recor
@Override
public RecordSetWriter createWriter(final ComponentLog logger, final RecordSchema schema, final FlowFile flowFile, final OutputStream out) {
return new RecordSetWriter() {
@Override
public void flush() throws IOException {
// Flush straight through to the OutputStream handed to createWriter().
out.flush();
}
@Override
public WriteResult write(final RecordSet rs) throws IOException {
out.write(header.getBytes());

View File

@@ -93,6 +93,10 @@ class GroovyRecordSetWriter implements RecordSetWriter {
@Override
public void close() throws IOException {
// Intentionally a no-op: this writer holds no resources of its own to release.
}
@Override
public void flush() throws IOException {
// Intentionally a no-op. NOTE(review): assumes this writer performs no
// buffering of its own — confirm against its write() implementation.
}
}
class GroovyRecordSetWriterFactory extends AbstractControllerService implements RecordSetWriterFactory {

View File

@@ -271,6 +271,12 @@ public class TestQueryRecord {
@Override
public RecordSetWriter createWriter(final ComponentLog logger, final RecordSchema schema, final FlowFile flowFile, final OutputStream out) {
return new RecordSetWriter() {
@Override
public void flush() throws IOException {
// Flush straight through to the OutputStream handed to createWriter().
out.flush();
}
@Override
public WriteResult write(final RecordSet rs) throws IOException {
final int colCount = rs.getSchema().getFieldCount();

View File

@@ -1,63 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.avro;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.nifi.serialization.RecordSetWriter;
import org.apache.nifi.serialization.WriteResult;
import org.apache.nifi.serialization.record.Record;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collections;
/**
 * Writes a single NiFi Record to an OutputStream as a self-contained Avro
 * data file. NOTE(review): this class is the one deleted by this commit
 * ("no-longer-used class WriteAvroResult" per the commit message above);
 * comments added here document it for the historical record only.
 */
public abstract class WriteAvroResult implements RecordSetWriter {
// Avro schema used both to convert the Record and to create the container file.
private final Schema schema;
// Destination for the encoded Avro bytes.
private final OutputStream out;
public WriteAvroResult(final Schema schema, final OutputStream out) {
this.schema = schema;
this.out = out;
}
protected Schema getSchema() {
return schema;
}
@Override
public WriteResult write(final Record record) throws IOException {
final GenericRecord rec = AvroTypeUtil.createAvroRecord(record, schema);
final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
// A fresh DataFileWriter is created per call, so every write() emits a
// complete Avro container (file header + one record) to the stream.
// NOTE(review): DataFileWriter.close() appears to also close the
// underlying stream, which would make a second write() fail — likely a
// reason this class was retired.
try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
dataFileWriter.create(schema, out);
dataFileWriter.append(rec);
}
// Always reports exactly one record written, with no extra attributes.
return WriteResult.of(1, Collections.emptyMap());
}
@Override
public String getMimeType() {
return "application/avro-binary";
}
}

View File

@@ -61,8 +61,7 @@ public class WriteAvroResultWithExternalSchema extends AbstractRecordSetWriter {
@Override
protected Map<String, String> onFinishRecordSet() throws IOException {
encoder.flush();
buffered.flush();
flush();
return schemaAccessWriter.getAttributes(recordSchema);
}
@@ -73,6 +72,12 @@ public class WriteAvroResultWithExternalSchema extends AbstractRecordSetWriter {
return schemaAccessWriter.getAttributes(recordSchema);
}
@Override
public void flush() throws IOException {
// Flush the Avro encoder's internal buffer first, then the buffered stream
// beneath it, so all encoded bytes reach the destination in order.
encoder.flush();
buffered.flush();
}
@Override
public String getMimeType() {
return "application/avro-binary";

View File

@@ -48,6 +48,11 @@ public class WriteAvroResultWithSchema extends AbstractRecordSetWriter {
dataFileWriter.close();
}
@Override
public void flush() throws IOException {
// Delegate to the Avro container writer, which writes out its current data block.
dataFileWriter.flush();
}
@Override
public Map<String, String> writeRecord(final Record record) throws IOException {
final GenericRecord rec = AvroTypeUtil.createAvroRecord(record, schema);

View File

@@ -88,6 +88,11 @@ public class WriteCSVResult extends AbstractRecordSetWriter implements RecordSet
printer.close();
}
@Override
public void flush() throws IOException {
// Flush the Commons CSV printer (and the Appendable it wraps).
printer.flush();
}
@Override
public Map<String, String> writeRecord(final Record record) throws IOException {
int i = 0;

View File

@@ -95,6 +95,13 @@ public class WriteJsonResult extends AbstractRecordSetWriter implements RecordSe
super.close();
}
@Override
public void flush() throws IOException {
// NOTE(review): generator appears to be created lazily; the null check
// avoids an NPE when flush() is called before anything has been written.
if (generator != null) {
generator.flush();
}
}
@Override
public Map<String, String> writeRecord(final Record record) throws IOException {
writeRecord(record, recordSchema, generator, g -> g.writeStartObject(), g -> g.writeEndObject());

View File

@@ -17,6 +17,7 @@
package org.apache.nifi.text;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
@@ -37,13 +38,11 @@ public class FreeFormTextWriter extends AbstractRecordSetWriter implements Recor
private static final byte NEW_LINE = (byte) '\n';
private final PropertyValue propertyValue;
private final Charset charset;
private final OutputStream out;
public FreeFormTextWriter(final PropertyValue textPropertyValue, final Charset characterSet, final OutputStream out) {
super(out);
super(new BufferedOutputStream(out));
this.propertyValue = textPropertyValue;
this.charset = characterSet;
this.out = out;
}
private List<String> getColumnNames(final RecordSchema schema) {
@@ -60,7 +59,7 @@ public class FreeFormTextWriter extends AbstractRecordSetWriter implements Recor
@Override
public Map<String, String> writeRecord(final Record record) throws IOException {
write(record, out, getColumnNames(record.getSchema()));
write(record, getOutputStream(), getColumnNames(record.getSchema()));
return Collections.emptyMap();
}