mirror of https://github.com/apache/druid.git
Support HDFS firehose (#8752)
parent ec8ce74f1c
commit 11230dff52
(binary image changed in this commit: 34 KiB before, 34 KiB after)
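At a glance: the console changes below teach the data loader about the new HDFS firehose, which is addressed as type 'hdfs' with a single 'paths' string inside the ioConfig of a native batch spec. A minimal sketch of the ioConfig this UI ends up producing (shape inferred from the diff below; the 'index' type and the path are illustrative):

    // Sketch only: the path is a made-up example.
    const ioConfig = {
      type: 'index',
      firehose: {
        type: 'hdfs',
        paths: '/druid/example/wikipedia.json.gz',
      },
    };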
@@ -60,7 +60,8 @@ export type IngestionComboType =
   | 'index:ingestSegment'
   | 'index:inline'
   | 'index:static-s3'
-  | 'index:static-google-blobstore';
+  | 'index:static-google-blobstore'
+  | 'index:hdfs';

 // Some extra values that can be selected in the initial screen
 export type IngestionComboTypeWithExtra = IngestionComboType | 'hadoop' | 'example' | 'other';
@@ -99,6 +100,7 @@ export function getIngestionComboType(spec: IngestionSpec): IngestionComboType |
       case 'inline':
       case 'static-s3':
       case 'static-google-blobstore':
+      case 'hdfs':
         return `index:${firehose.type}` as IngestionComboType;
     }
   }
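With the added case, a spec whose firehose is hdfs now resolves to the 'index:hdfs' combo type. A usage sketch (the spec below is hypothetical and abridged to just the field the function inspects):

    const spec = {
      ioConfig: { type: 'index', firehose: { type: 'hdfs', paths: '/tmp/data.json' } },
    } as IngestionSpec;
    getIngestionComboType(spec); // => 'index:hdfs'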
@@ -126,6 +128,9 @@ export function getIngestionTitle(ingestionType: IngestionComboTypeWithExtra): s
     case 'index:static-google-blobstore':
       return 'Google Cloud Storage';

+    case 'index:hdfs':
+      return 'HDFS';
+
     case 'kafka':
       return 'Apache Kafka';
@@ -175,6 +180,9 @@ export function getRequiredModule(ingestionType: IngestionComboTypeWithExtra): s
     case 'index:static-google-blobstore':
       return 'druid-google-extensions';

+    case 'index:hdfs':
+      return 'druid-hdfs-storage';
+
     case 'kafka':
       return 'druid-kafka-indexing-service';
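getRequiredModule is what lets the console warn when the extension backing a given source is not loaded on the cluster; for HDFS that is druid-hdfs-storage, typically enabled via druid.extensions.loadList. Together with the title mapping above:

    getIngestionTitle('index:hdfs');  // => 'HDFS'
    getRequiredModule('index:hdfs');  // => 'druid-hdfs-storage'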
@@ -798,6 +806,9 @@ export interface Firehose {

   // inline
   data?: string;
+
+  // hdfs
+  paths?: string;
 }

 export function getIoConfigFormFields(ingestionComboType: IngestionComboType): Field<IoConfig>[] {
@@ -805,7 +816,7 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
       name: 'firehose.type',
       label: 'Firehose type',
       type: 'string',
-      suggestions: ['local', 'http', 'inline', 'static-s3', 'static-google-blobstore'],
+      suggestions: ['local', 'http', 'inline', 'static-s3', 'static-google-blobstore', 'hdfs'],
       info: (
         <p>
           Druid connects to raw data through{' '}
@@ -1025,6 +1036,18 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
       },
     ];

+    case 'index:hdfs':
+      return [
+        firehoseType,
+        {
+          name: 'firehose.paths',
+          label: 'Paths',
+          type: 'string',
+          placeholder: '/path/to/file.ext',
+          required: true,
+        },
+      ];
+
    case 'kafka':
      return [
        {
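Filled in, the firehose-type and Paths fields above write a firehose object like the following into the spec (values are illustrative; 'Paths' is the only hdfs-specific field the form adds):

    const firehose: Firehose = {
      type: 'hdfs',
      paths: '/user/druid/logs/part-0001.json',
    };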
@@ -1172,6 +1195,12 @@ function issueWithFirehose(firehose: Firehose | undefined): string | undefined {
         return 'must have at least one blob';
       }
       break;
+
+    case 'hdfs':
+      if (!firehose.paths) {
+        return 'must have paths';
+      }
+      break;
   }
   return;
 }
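The validator mirrors the form's required flag: an hdfs firehose with no paths is flagged before the spec can be submitted. For example:

    issueWithFirehose({ type: 'hdfs' });                             // => 'must have paths'
    issueWithFirehose({ type: 'hdfs', paths: '/data/events.json' }); // => undefined (no issue)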
@@ -1206,6 +1235,7 @@ export function getIoConfigTuningFormFields(
     case 'index:http':
     case 'index:static-s3':
     case 'index:static-google-blobstore':
+    case 'index:hdfs':
       return [
         {
           name: 'firehose.fetchTimeout',
@@ -692,7 +692,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           {this.renderIngestionCard('kinesis')}
           {this.renderIngestionCard('index:static-s3')}
           {this.renderIngestionCard('index:static-google-blobstore')}
-          {this.renderIngestionCard('hadoop')}
+          {this.renderIngestionCard('index:hdfs')}
           {this.renderIngestionCard('index:ingestSegment')}
           {this.renderIngestionCard('index:http')}
           {this.renderIngestionCard('index:local')}
@@ -771,32 +771,15 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
      case 'index:static-google-blobstore':
        return <p>Load text based data from the Google Blobstore.</p>;

+      case 'index:hdfs':
+        return <p>Load text based data from HDFS.</p>;
+
      case 'kafka':
        return <p>Load streaming data in real-time from Apache Kafka.</p>;

      case 'kinesis':
        return <p>Load streaming data in real-time from Amazon Kinesis.</p>;

-      case 'hadoop':
-        return (
-          <>
-            <p>
-              <em>Data loader support coming soon!</em>
-            </p>
-            <p>
-              You can not ingest data from HDFS via the data loader at this time, however you can
-              ingest it through a Druid task.
-            </p>
-            <p>
-              Please follow{' '}
-              <ExternalLink href="https://druid.apache.org/docs/latest/ingestion/hadoop.html">
-                the hadoop docs
-              </ExternalLink>{' '}
-              and submit a JSON spec to start the task.
-            </p>
-          </>
-        );
-
      case 'example':
        if (exampleManifests && exampleManifests.length) {
          return; // Yield to example picker controls
@@ -834,6 +817,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
      case 'index:inline':
      case 'index:static-s3':
      case 'index:static-google-blobstore':
+      case 'index:hdfs':
      case 'kafka':
      case 'kinesis':
        return (
@@ -850,18 +834,6 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
          </FormGroup>
        );

-      case 'hadoop':
-        return (
-          <FormGroup>
-            <Button
-              text="Submit task"
-              rightIcon={IconNames.ARROW_RIGHT}
-              intent={Intent.PRIMARY}
-              onClick={() => goToTask(undefined, 'task')}
-            />
-          </FormGroup>
-        );
-
      case 'example':
        if (!exampleManifests) return;
        return (