Fix typos in the batch ingestion docs

This commit is contained in:
fjy 2015-05-07 14:46:17 -07:00
parent 8fcc5a2d51
commit b19435d172
2 changed files with 4 additions and 4 deletions

View File

@ -126,13 +126,13 @@ This field is required.
|Field|Type|Description|Required|
|-----|----|-----------|--------|
|type|String|This should always be 'hadoop'.|yes|
|pathSpec|Object|a specification of where to pull the data in from|yes|
|inputSpec|Object|a specification of where to pull the data in from. See below.|yes|
|segmentOutputPath|String|the path to dump segments into.|yes|
|metadataUpdateSpec|Object|a specification of how to update the metadata for the druid cluster these segments belong to.|yes|
#### Path specification
#### InputSpec specification
There are multiple types of path specification:
There are multiple types of inputSpecs:
##### `static`

View File

@ -551,7 +551,7 @@ public class HadoopDruidIndexerConfig
Preconditions.checkNotNull(schema.getDataSchema().getParser().getParseSpec(), "parseSpec");
Preconditions.checkNotNull(schema.getDataSchema().getParser().getParseSpec().getTimestampSpec(), "timestampSpec");
Preconditions.checkNotNull(schema.getDataSchema().getGranularitySpec(), "granularitySpec");
Preconditions.checkNotNull(pathSpec, "pathSpec");
Preconditions.checkNotNull(pathSpec, "inputSpec");
Preconditions.checkNotNull(schema.getTuningConfig().getWorkingPath(), "workingPath");
Preconditions.checkNotNull(schema.getIOConfig().getSegmentOutputPath(), "segmentOutputPath");
Preconditions.checkNotNull(schema.getTuningConfig().getVersion(), "version");