Merge pull request #1922 from metamx/jsonIgnoresFinalFields

Change DefaultObjectMapper to NOT overwrite final fields unless explicitly asked to
Fangjin Yang 2015-12-18 15:38:32 -08:00
commit 14229ba0f2
14 changed files with 45 additions and 23 deletions
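The one-line change to DefaultObjectMapper (the last file in this commit) stops Jackson from reflectively writing into final fields, so JSON keys that previously bound to a property only through such a field would silently stop deserializing. Every affected constructor therefore re-adds the legacy key as an explicit *COMPAT creator parameter that is consulted only when the current key is absent. Below is a self-contained sketch of that pattern, not taken from the commit; the names Settings, endpoint and endpointUrl are illustrative stand-ins for pairs like connection/connectionFactory, maxRowsInMemory/rowFlushBoundary and target/targetLoadSpec.

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RenamedPropertyDemo
{
  static class Settings
  {
    private final String endpoint;

    @JsonCreator
    public Settings(
        @JsonProperty("endpoint") String endpoint,
        // Legacy key, kept only so that old documents still deserialize.
        @JsonProperty("endpointUrl") String endpointCOMPAT
    )
    {
      // Prefer the current key; fall back to the legacy one.
      this.endpoint = endpoint == null ? endpointCOMPAT : endpoint;
    }

    @JsonProperty("endpoint")
    public String getEndpoint()
    {
      return endpoint;
    }
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();

    // A document written with the legacy key still binds, via the COMPAT parameter...
    Settings legacy = mapper.readValue("{\"endpointUrl\":\"http://legacy\"}", Settings.class);

    // ...while serialization only ever emits the current key.
    System.out.println(mapper.writeValueAsString(legacy)); // {"endpoint":"http://legacy"}
  }
}

Serialization, for its part, now always emits the current key, because the getters in this commit gain explicit names such as @JsonProperty("connection"), @JsonProperty("maxRowsInMemory") and @JsonProperty("target").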


@@ -109,11 +109,13 @@ public class RabbitMQFirehoseFactory implements FirehoseFactory<StringInputRowPa
 @JsonCreator
 public RabbitMQFirehoseFactory(
 @JsonProperty("connection") JacksonifiedConnectionFactory connectionFactory,
-@JsonProperty("config") RabbitMQFirehoseConfig config
+@JsonProperty("config") RabbitMQFirehoseConfig config,
+// See https://github.com/druid-io/druid/pull/1922
+@JsonProperty("connectionFactory") JacksonifiedConnectionFactory connectionFactoryCOMPAT
 ) throws Exception
 {
 this.connectionFactory = connectionFactory == null
-? JacksonifiedConnectionFactory.makeDefaultConnectionFactory()
+? connectionFactoryCOMPAT == null ? JacksonifiedConnectionFactory.makeDefaultConnectionFactory() : connectionFactoryCOMPAT
 : connectionFactory;
 this.config = config == null ? RabbitMQFirehoseConfig.makeDefaultConfig() : config;
@@ -125,7 +127,7 @@ public class RabbitMQFirehoseFactory implements FirehoseFactory<StringInputRowPa
 return config;
 }
-@JsonProperty
+@JsonProperty("connection")
 public JacksonifiedConnectionFactory getConnectionFactory()
 {
 return connectionFactory;


@@ -66,7 +66,8 @@ public class RabbitMQFirehoseFactoryTest
 RabbitMQFirehoseFactory factory = new RabbitMQFirehoseFactory(
 connectionFactory,
-config
+config,
+null
 );
 byte[] bytes = mapper.writeValueAsBytes(factory);
@@ -88,7 +89,8 @@ public class RabbitMQFirehoseFactoryTest
 RabbitMQFirehoseFactory factory = new RabbitMQFirehoseFactory(
 connectionFactory,
-config
+config,
+null
 );
 byte[] bytes = mapper.writeValueAsBytes(factory);


@@ -58,7 +58,8 @@ public class HadoopTuningConfig implements TuningConfig
 false,
 null,
 false,
-false
+false,
+null
 );
 }
@@ -90,7 +91,9 @@ public class HadoopTuningConfig implements TuningConfig
 final @JsonProperty("ignoreInvalidRows") boolean ignoreInvalidRows,
 final @JsonProperty("jobProperties") Map<String, String> jobProperties,
 final @JsonProperty("combineText") boolean combineText,
-final @JsonProperty("useCombiner") Boolean useCombiner
+final @JsonProperty("useCombiner") Boolean useCombiner,
+// See https://github.com/druid-io/druid/pull/1922
+final @JsonProperty("rowFlushBoundary") Integer maxRowsInMemoryCOMPAT
 )
 {
 this.workingPath = workingPath;
@@ -98,7 +101,7 @@ public class HadoopTuningConfig implements TuningConfig
 this.partitionsSpec = partitionsSpec == null ? DEFAULT_PARTITIONS_SPEC : partitionsSpec;
 this.shardSpecs = shardSpecs == null ? DEFAULT_SHARD_SPECS : shardSpecs;
 this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec;
-this.rowFlushBoundary = maxRowsInMemory == null ? DEFAULT_ROW_FLUSH_BOUNDARY : maxRowsInMemory;
+this.rowFlushBoundary = maxRowsInMemory == null ? maxRowsInMemoryCOMPAT == null ? DEFAULT_ROW_FLUSH_BOUNDARY : maxRowsInMemoryCOMPAT : maxRowsInMemory;
 this.leaveIntermediate = leaveIntermediate;
 this.cleanupOnFailure = cleanupOnFailure == null ? true : cleanupOnFailure;
 this.overwriteFiles = overwriteFiles;
@@ -140,7 +143,7 @@ public class HadoopTuningConfig implements TuningConfig
 return indexSpec;
 }
-@JsonProperty
+@JsonProperty("maxRowsInMemory")
 public int getRowFlushBoundary()
 {
 return rowFlushBoundary;
@@ -203,7 +206,8 @@ public class HadoopTuningConfig implements TuningConfig
 ignoreInvalidRows,
 jobProperties,
 combineText,
-useCombiner
+useCombiner,
+null
 );
 }
@@ -222,7 +226,8 @@ public class HadoopTuningConfig implements TuningConfig
 ignoreInvalidRows,
 jobProperties,
 combineText,
-useCombiner
+useCombiner,
+null
 );
 }
@@ -241,7 +246,8 @@ public class HadoopTuningConfig implements TuningConfig
 ignoreInvalidRows,
 jobProperties,
 combineText,
-useCombiner
+useCombiner,
+null
 );
 }
 }


@@ -380,7 +380,8 @@ public class BatchDeltaIngestionTest
 false,
 null,
 false,
-false
+false,
+null
 )
 )
 );


@@ -159,7 +159,8 @@ public class DetermineHashedPartitionsJobTest
 false,
 null,
 false,
-false
+false,
+null
 )
 );
 this.indexerConfig = new HadoopDruidIndexerConfig(ingestionSpec);


@@ -263,7 +263,8 @@ public class DeterminePartitionsJobTest
 false,
 null,
 false,
-false
+false,
+null
 )
 )
 );


@@ -206,7 +206,8 @@ public class HadoopDruidIndexerConfigTest
 false,
 null,
 false,
-false
+false,
+null
 )
 );
 HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(spec);


@@ -52,7 +52,8 @@ public class HadoopTuningConfigTest
 true,
 null,
 true,
-true
+true,
+null
 );
 HadoopTuningConfig actual = jsonReadWriteRead(jsonMapper.writeValueAsString(expected), HadoopTuningConfig.class);


@@ -394,7 +394,8 @@ public class IndexGeneratorJobTest
 false,
 ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
 false,
-useCombiner
+useCombiner,
+null
 )
 )
 );


@@ -114,7 +114,8 @@ public class JobHelperTest
 "THISISMYACCESSKEY"
 ),
 false,
-false
+false,
+null
 )
 )
 );


@@ -200,7 +200,8 @@ public class HadoopConverterJobTest
 false,
 null,
 false,
-false
+false,
+null
 )
 )
 );


@@ -48,7 +48,9 @@ public class MoveTask extends AbstractFixedIntervalTask
 @JsonProperty("dataSource") String dataSource,
 @JsonProperty("interval") Interval interval,
 @JsonProperty("target") Map<String, Object> targetLoadSpec,
-@JsonProperty("context") Map<String, Object> context
+@JsonProperty("context") Map<String, Object> context,
+// See https://github.com/druid-io/druid/pull/1922
+@JsonProperty("targetLoadSpec") Map<String, Object> targetLoadSpecCOMPAT
 )
 {
 super(
@@ -57,7 +59,7 @@ public class MoveTask extends AbstractFixedIntervalTask
 interval,
 context
 );
-this.targetLoadSpec = targetLoadSpec;
+this.targetLoadSpec = targetLoadSpec == null ? targetLoadSpecCOMPAT : targetLoadSpec;
 }
 @Override
@@ -108,7 +110,7 @@ public class MoveTask extends AbstractFixedIntervalTask
 return TaskStatus.success(getId());
 }
-@JsonProperty
+@JsonProperty("target")
 public Map<String, Object> getTargetLoadSpec()
 {
 return targetLoadSpec;


@@ -545,6 +545,7 @@ public class TaskSerdeTest
 "foo",
 new Interval("2010-01-01/P1D"),
 ImmutableMap.<String, Object>of("bucket", "hey", "baseKey", "what"),
+null,
 null
 );


@@ -55,6 +55,7 @@ public class DefaultObjectMapper extends ObjectMapper
 configure(MapperFeature.AUTO_DETECT_FIELDS, false);
 configure(MapperFeature.AUTO_DETECT_IS_GETTERS, false);
 configure(MapperFeature.AUTO_DETECT_SETTERS, false);
+configure(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS, false);
 configure(SerializationFeature.INDENT_OUTPUT, false);
 }
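This added line is the behavioral core of the pull request. A minimal, self-contained sketch of what the flag controls, using plain Jackson mappers rather than Druid's DefaultObjectMapper (the Box class and both mappers below are illustrative, not Druid code): with Jackson's default settings a final field may be used as a mutator and overwritten from JSON, while with ALLOW_FINAL_FIELDS_AS_MUTATORS disabled the field is left at the value its constructor assigned, which is exactly why the compatibility creator parameters in the files above became necessary.

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class FinalFieldDemo
{
  static class Box
  {
    // No annotations, no setter, no creator parameter: the only way Jackson
    // can populate this property is by writing into the final field itself.
    public final String value;

    public Box()
    {
      this.value = "default";
    }
  }

  public static void main(String[] args) throws Exception
  {
    String json = "{\"value\":\"from-json\"}";

    // Jackson's out-of-the-box behavior: the final field is used as a mutator.
    ObjectMapper lenient = new ObjectMapper();
    System.out.println(lenient.readValue(json, Box.class).value);   // from-json

    // With the feature disabled, as DefaultObjectMapper now does, the final
    // field is no longer a write target; the key is skipped instead of failing.
    ObjectMapper strict = new ObjectMapper();
    strict.configure(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS, false);
    strict.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    System.out.println(strict.readValue(json, Box.class).value);    // default
  }
}

Disabling the feature keeps deserialization explicit: anything DefaultObjectMapper reads must now arrive through an annotated creator parameter or setter rather than by mutating fields the class declared immutable.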