mirror of https://github.com/apache/nifi.git
NIFI-3413: Standardize 'write' event to 'insert'
This closes #1675. Signed-off-by: ijokarumawak <ijokarumawak@apache.org>
parent 5f73ba7f2c
commit edb9c644dd
@@ -18,14 +18,14 @@ package org.apache.nifi.cdc.event;
 
 /**
- * An interface representing a data structure containing event information, and serialization/deserlization methods.
+ * An interface representing a data structure containing event information, and serialization/deserialization methods.
  */
 public interface EventInfo {
 
     // Event type constants
     String BEGIN_EVENT = "begin";
     String COMMIT_EVENT = "commit";
-    String WRITE_EVENT = "write";
+    String INSERT_EVENT = "insert";
     String DELETE_EVENT = "delete";
     String UPDATE_EVENT = "update";
     String SCHEMA_CHANGE = "schema_change";
 
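For readers tracking the rename, these constants are what NiFi CDC consumers compare event types against, so code compiled against the removed WRITE_EVENT must now use INSERT_EVENT. The sketch below is illustrative only; the EventTypeFilter class and isRowMutation helper are hypothetical and not part of this commit, while the EventInfo constants are the ones shown in the hunk above.

    import org.apache.nifi.cdc.event.EventInfo;

    // Hypothetical consumer of the EventInfo constants, shown only to illustrate the rename.
    public final class EventTypeFilter {

        // Returns true for row-mutation events. WRITE_EVENT no longer exists, so the
        // first case now matches INSERT_EVENT ("insert") instead of "write".
        public static boolean isRowMutation(String eventType) {
            switch (eventType) {
                case EventInfo.INSERT_EVENT:
                case EventInfo.UPDATE_EVENT:
                case EventInfo.DELETE_EVENT:
                    return true;
                default: // begin, commit, schema_change, ...
                    return false;
            }
        }
    }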
@@ -29,7 +29,7 @@ public class InsertRowsEventInfo extends BaseBinlogRowEventInfo<Serializable[]>
     private WriteRowsEventData data;
 
     public InsertRowsEventInfo(TableInfo tableInfo, Long timestamp, String binlogFilename, Long binlogPosition, WriteRowsEventData data) {
-        super(tableInfo, WRITE_EVENT, timestamp, binlogFilename, binlogPosition, data.getIncludedColumns(), data.getRows());
+        super(tableInfo, INSERT_EVENT, timestamp, binlogFilename, binlogPosition, data.getIncludedColumns(), data.getRows());
         this.data = data;
     }
 }
@@ -128,7 +128,7 @@ import static com.github.shyiko.mysql.binlog.event.EventType.WRITE_ROWS;
         @WritesAttribute(attribute = "cdc.sequence.id", description = "A sequence identifier (i.e. strictly increasing integer value) specifying the order "
                 + "of the CDC event flow file relative to the other event flow file(s)."),
         @WritesAttribute(attribute = "cdc.event.type", description = "A string indicating the type of CDC event that occurred, including (but not limited to) "
-                + "'begin', 'write', 'update', 'delete', 'schema_change' and 'commit'."),
+                + "'begin', 'insert', 'update', 'delete', 'schema_change' and 'commit'."),
         @WritesAttribute(attribute = "mime.type", description = "The processor outputs flow file content in JSON format, and sets the mime.type attribute to "
                 + "application/json")
 })
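Because downstream flows identify event kinds through the cdc.event.type attribute documented above, anything that matched the old 'write' value needs to match 'insert' once this change is deployed. A minimal sketch of such a downstream check follows; the CdcEventTypeCheck class is hypothetical, while FlowFile.getAttribute is the standard NiFi API call for reading a flow file attribute.

    import org.apache.nifi.flowfile.FlowFile;

    // Hypothetical downstream helper: reads the attribute written by CaptureChangeMySQL
    // and branches on the standardized value. A check against the old 'write' value
    // would silently stop matching after this commit.
    public final class CdcEventTypeCheck {

        public static boolean isInsert(FlowFile flowFile) {
            return "insert".equals(flowFile.getAttribute("cdc.event.type"));
        }
    }

The same adjustment applies to Expression Language in existing flows, e.g. a RouteOnAttribute property such as ${cdc.event.type:equals('write')} would need to become ${cdc.event.type:equals('insert')}.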
@@ -347,7 +347,7 @@ class CaptureChangeMySQLTest {
         testRunner.run(1, true, false)
 
         def resultFiles = testRunner.getFlowFilesForRelationship(CaptureChangeMySQL.REL_SUCCESS)
-        List<String> expectedEventTypes = ([] + 'begin' + Collections.nCopies(3, 'write') + 'commit' + 'begin' + 'update' + 'commit'
+        List<String> expectedEventTypes = ([] + 'begin' + Collections.nCopies(3, 'insert') + 'commit' + 'begin' + 'update' + 'commit'
                 + 'begin' + 'schema_change' + Collections.nCopies(2, 'delete') + 'commit')
 
         resultFiles.eachWithIndex { e, i ->