druid/distribution/pom.xml


<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<packaging>pom</packaging>
<artifactId>distribution</artifactId>
<name>distribution</name>
<description>distribution</description>
<parent>
<artifactId>druid</artifactId>
<groupId>org.apache.druid</groupId>
<version>31.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>druid-services</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<!-- This is needed to bundle the web console -->
<groupId>org.apache.druid</groupId>
<artifactId>web-console</artifactId>
<version>${project.parent.version}</version>
</dependency>
</dependencies>
<properties>
<!-- The default duplicates a flag the pull-deps command line already passes (a harmless repeat), because the property is not allowed to be blank. -->
<druid.distribution.pulldeps.opts>--clean</druid.distribution.pulldeps.opts>
</properties>
<build>
<plugins>
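<!--
Generate SHA-512 checksum files for the source and binary tarballs under
target/ (ASF releases ship .sha512 files alongside each artifact).
-->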
<plugin>
<groupId>net.nicoulaj.maven.plugins</groupId>
<artifactId>checksum-maven-plugin</artifactId>
<version>1.7</version>
<executions>
<execution>
<id>dist-checksum</id>
<goals>
<goal>files</goal>
</goals>
</execution>
</executions>
<configuration>
<algorithms>
<algorithm>SHA-512</algorithm>
</algorithms>
<csvSummary>false</csvSummary>
<fileSets>
<fileSet>
<directory>${project.build.directory}</directory>
<includes>
<include>*-src.tar.gz</include>
<include>*-bin.tar.gz</include>
</includes>
</fileSet>
</fileSets>
<failIfNoFiles>false</failIfNoFiles>
</configuration>
</plugin>
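<!--
Concatenate the repository-level README and LABELS files into README.BINARY
at prepare-package time so the combined file can be bundled into the binary tarball.
-->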
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<version>3.0.0</version>
<executions>
<execution>
<phase>prepare-package</phase>
<configuration>
<target>
<concat destfile="${project.build.directory}/../../README.BINARY">
<fileset file="${project.build.directory}/../../README" />
<fileset file="${project.build.directory}/../../LABELS" />
</concat>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
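<!-- Skip the OWASP dependency scan here; this module only assembles artifacts built by the other modules. -->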
<plugin>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
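<!--
Build profiles. A typical binary-distribution build looks like the following
(a sketch; exact flags may vary by branch):

mvn clean install -Pdist -DskipTests

Add -Pbundle-contrib-exts to also pull the community-contributed extensions listed further below.
-->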
<profile>
<id>dist</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
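<!--
The scripts under distribution/bin/ generate the plain-text README and the
LICENSE.BINARY / NOTICE.BINARY files that ship in the binary tarball.
-->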
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>generate-readme</id>
<phase>initialize</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.basedir}/bin/build-textfile-readme.sh</executable>
<arguments>
<argument>${project.basedir}/../</argument>
<argument>${project.parent.version}</argument>
</arguments>
</configuration>
</execution>
<execution>
<id>generate-binary-license</id>
<phase>initialize</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.basedir}/bin/generate-binary-license.py</executable>
<arguments>
<argument>${project.parent.basedir}/licenses/APACHE2</argument>
<argument>${project.parent.basedir}/licenses.yaml</argument>
<argument>${project.parent.basedir}/LICENSE.BINARY</argument>
</arguments>
</configuration>
</execution>
<execution>
<id>generate-binary-notice</id>
<phase>initialize</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.basedir}/bin/generate-binary-notice.py</executable>
<arguments>
<argument>${project.parent.basedir}/NOTICE</argument>
<argument>${project.parent.basedir}/licenses.yaml</argument>
<argument>${project.parent.basedir}/NOTICE.BINARY</argument>
</arguments>
</configuration>
</execution>
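<!--
Run Druid's own pull-deps tool (org.apache.druid.cli.Main tools pull-deps) to
stage bundled dependencies: each "-h" coordinate lands in target/hadoop-dependencies
and each "-c" coordinate in target/extensions; "-l" resolves from the local Maven
repository. The clean flag wipes those directories first, and the trailing
${druid.distribution.pulldeps.opts} property repeats it by default (see the property above).
-->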
<execution>
<id>pull-deps</id>
<phase>package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.parent.basedir}/examples/bin/run-java</executable>
<arguments>
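<!-- The empty <classpath /> element below is expanded by exec-maven-plugin into the project's runtime classpath. -->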
<argument>-classpath</argument>
<classpath />
<argument>-Ddruid.extensions.loadList=[]</argument>
<argument>-Ddruid.extensions.directory=${project.build.directory}/extensions
</argument>
<argument>
-Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies
</argument>
<argument>org.apache.druid.cli.Main</argument>
<argument>tools</argument>
<argument>pull-deps</argument>
<argument>--clean</argument>
<argument>--defaultVersion</argument>
<argument>${project.parent.version}</argument>
<argument>-l</argument>
<argument>${settings.localRepository}</argument>
<argument>-h</argument>
<argument>org.apache.hadoop:hadoop-client-api:${hadoop.compile.version}</argument>
<argument>-h</argument>
<argument>org.apache.hadoop:hadoop-client-runtime:${hadoop.compile.version}</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-avro-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-azure-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-bloom-filter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-datasketches</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-hdfs-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-histogram</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kafka-extraction-namespace</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kafka-indexing-service</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kinesis-indexing-service</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-lookups-cached-global</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-lookups-cached-single</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-multi-stage-query</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-protobuf-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:mysql-metadata-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-orc-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-parquet-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:postgresql-metadata-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kerberos</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-s3-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-aws-rds-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-ec2-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-google-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-stats</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:simple-client-sslcontext</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-basic-security</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-pac4j</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-ranger-security</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kubernetes-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-catalog</argument>
<argument>${druid.distribution.pulldeps.opts}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
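<!--
Package everything staged above into the binary tarball
(apache-druid-<version>-bin.tar.gz, matching the checksum includes earlier)
using src/assembly/assembly.xml.
-->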
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>distro-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<finalName>apache-druid-${project.parent.version}</finalName>
<tarLongFileMode>posix</tarLongFileMode>
<descriptors>
<descriptor>src/assembly/assembly.xml</descriptor>
</descriptors>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>license-maven-plugin</artifactId>
<executions>
<execution>
<id>download-licenses</id>
<goals>
<goal>download-licenses</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>apache-release</id>
<build>
<plugins>
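<!--
Active during ASF release builds. Generates per-dependency license reports,
checks them against licenses.yaml, and assembles the source tarball
(apache-druid-<version>-src.tar.gz).
-->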
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>generate-licenses-report</id>
<phase>initialize</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.basedir}/bin/generate-license-dependency-reports.py</executable>
<arguments>
<argument>${project.basedir}/../</argument>
<argument>${project.basedir}/target</argument>
<argument>--clean-maven-artifact-transfer</argument>
<argument>--parallel</argument>
<argument>2</argument>
</arguments>
</configuration>
</execution>
<execution>
<id>check-licenses</id>
<phase>test</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.basedir}/bin/check-licenses.py</executable>
<arguments>
<argument>${project.parent.basedir}/licenses.yaml</argument>
<argument>${project.basedir}/target/license-reports</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>source-release-assembly-druid</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<finalName>apache-druid-${project.version}-src</finalName>
<tarLongFileMode>posix</tarLongFileMode>
<descriptors>
<descriptor>src/assembly/source-assembly.xml</descriptor>
</descriptors>
<appendAssemblyId>false</appendAssemblyId>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>bundle-contrib-exts</id>
<build>
<plugins>
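<!--
Pull the community-contributed extensions into target/extensions as well. This
run passes the no-default-hadoop flag (no Hadoop coordinates are staged) and
omits the clean flag, so contrib extensions are added alongside whatever the
dist profile already pulled. Enable together with dist, e.g. -Pdist,bundle-contrib-exts.
-->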
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>pull-deps-contrib-exts</id>
<phase>package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.parent.basedir}/examples/bin/run-java</executable>
<arguments>
<argument>-classpath</argument>
<classpath />
<argument>-Ddruid.extensions.loadList=[]</argument>
<argument>-Ddruid.extensions.directory=${project.build.directory}/extensions
</argument>
<argument>
-Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies
</argument>
<argument>org.apache.druid.cli.Main</argument>
<argument>tools</argument>
<argument>pull-deps</argument>
<argument>--defaultVersion</argument>
<argument>${project.parent.version}</argument>
<argument>-l</argument>
<argument>${settings.localRepository}</argument>
<argument>--no-default-hadoop</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:ambari-metrics-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:dropwizard-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-cassandra-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-cloudfiles-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-distinctcount</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:graphite-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-influx-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-influxdb-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:kafka-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-kubernetes-overlord-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:materialized-view-maintenance</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:materialized-view-selection</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-opentsdb-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-redis-cache</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:sqlserver-metadata-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:statsd-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:prometheus-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-thrift-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-time-min-max</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-virtual-columns</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-moving-average-query</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-momentsketch</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-tdigestsketch</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-ddsketch</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:gce-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:aliyun-oss-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:opentelemetry-emitter</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-iceberg-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-deltalake-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-spectator-histogram</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions.contrib:druid-rabbit-indexing-service</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>integration-test</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
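<!--
Same pull-deps flow as the dist profile, but for the integration-test bundle:
it stages hadoop-client plus the core extensions, adds druid-integration-tests
and druid-testing-tools, and packages with integration-test-assembly.xml.
-->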
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>pull-deps</id>
<phase>package</phase>
<goals>
<goal>exec</goal>
</goals>
<configuration>
<executable>${project.parent.basedir}/examples/bin/run-java</executable>
<arguments>
<argument>-classpath</argument>
<classpath />
<argument>-Ddruid.extensions.loadList=[]</argument>
<argument>-Ddruid.extensions.directory=${project.build.directory}/extensions
</argument>
<argument>
-Ddruid.extensions.hadoopDependenciesDir=${project.build.directory}/hadoop-dependencies
</argument>
<argument>org.apache.druid.cli.Main</argument>
<argument>tools</argument>
<argument>pull-deps</argument>
<argument>--clean</argument>
<argument>--defaultVersion</argument>
<argument>${project.parent.version}</argument>
<argument>-l</argument>
<argument>${settings.localRepository}</argument>
<argument>-h</argument>
<argument>org.apache.hadoop:hadoop-client:${hadoop.compile.version}</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-avro-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-azure-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-datasketches</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-hdfs-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-histogram</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kafka-indexing-service</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-kinesis-indexing-service</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-lookups-cached-global</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-multi-stage-query</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-protobuf-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:mysql-metadata-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-orc-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-parquet-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:postgresql-metadata-storage</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-s3-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-ec2-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-google-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:simple-client-sslcontext</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-basic-security</argument>
<argument>-c</argument>
<argument>org.apache.druid:druid-integration-tests</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-testing-tools</argument>
<argument>${druid.distribution.pulldeps.opts}</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>distro-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<finalName>apache-druid-${project.version}-integration-test</finalName>
<tarLongFileMode>posix</tarLongFileMode>
<descriptors>
<descriptor>src/assembly/integration-test-assembly.xml</descriptor>
</descriptors>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>