Remove support for Java 8 (#17466)

All JDK 8 based CI checks have been removed.
Images used in Dockerfile(s) have been updated to Java 17 based images.
Documentation has been updated accordingly.
Author: Akshat Jain, 2024-11-21 15:33:08 +05:30 (committed by GitHub)
Parent: c1d6328249
Commit: 17215cd677
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
55 changed files with 257 additions and 181 deletions

View File

@@ -40,7 +40,7 @@ jobs:
 - uses: actions/setup-java@v4
 with:
 distribution: 'zulu'
-java-version: '8'
+java-version: '17'
 cache: 'maven'
 # Initializes the CodeQL tools for scanning.

View File

@@ -28,7 +28,7 @@ on:
 jobs:
 build:
 if: (github.event_name == 'schedule' && github.repository == 'apache/druid')
-name: build (jdk8)
+name: build (jdk17)
 runs-on: ubuntu-latest
 steps:
 - name: Checkout branch
@@ -37,7 +37,7 @@ jobs:
 - name: setup java
 uses: actions/setup-java@v4
 with:
-java-version: '8'
+java-version: '17'
 distribution: 'zulu'
 - name: Cache Maven m2 repository
@@ -60,8 +60,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 needs: build
 with:
-build_jdk: 8
-runtime_jdk: 11
+build_jdk: 17
+runtime_jdk: 21.0.4
 testing_groups: -Dgroups=${{ matrix.testing_group }}
 use_indexer: middleManager
 group: ${{ matrix.testing_group }}
@@ -74,8 +74,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 needs: build
 with:
-build_jdk: 8
-runtime_jdk: 11
+build_jdk: 17
+runtime_jdk: 21.0.4
 testing_groups: -Dgroups=${{ matrix.testing_group }}
 use_indexer: indexer
 group: ${{ matrix.testing_group }}
@@ -88,8 +88,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 needs: build
 with:
-build_jdk: 8
-runtime_jdk: 11
+build_jdk: 17
+runtime_jdk: 21.0.4
 testing_groups: -Dgroups=${{ matrix.testing_group }}
 use_indexer: middleManager
 override_config_path: ./environment-configs/test-groups/prepopulated-data
@@ -103,8 +103,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 needs: build
 with:
-build_jdk: 8
-runtime_jdk: 11
+build_jdk: 17
+runtime_jdk: 21.0.4
 testing_groups: -DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties
 use_indexer: ${{ matrix.indexer }}
 group: other
@@ -122,7 +122,7 @@ jobs:
 - name: setup java
 uses: actions/setup-java@v4
 with:
-java-version: '8'
+java-version: '17'
 distribution: 'zulu'
 cache: maven

View File

@@ -66,8 +66,8 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-#jdk: [8, 11, 17]
-jdk: [8]
+# jdk: [11, 17]
+jdk: [17]
 it: [HighAvailability, MultiStageQuery, Catalog, BatchIndex, MultiStageQueryWithMM, InputSource, InputFormat, Security, Query]
 #indexer: [indexer, middleManager]
 indexer: [middleManager]
@@ -86,8 +86,8 @@ jobs:
 uses: ./.github/workflows/reusable-revised-its.yml
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 with:
-build_jdk: 8
-runtime_jdk: 11
+build_jdk: 17
+runtime_jdk: 21.0.4
 use_indexer: middleManager
 script: ./it.sh github S3DeepStorage
 it: S3DeepStorage
@@ -103,8 +103,8 @@ jobs:
 uses: ./.github/workflows/reusable-revised-its.yml
 if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' && (needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true') }}
 with:
-build_jdk: 8
-runtime_jdk: 8
+build_jdk: 17
+runtime_jdk: 17
 use_indexer: middleManager
 script: ./it.sh github BackwardCompatibilityMain
 it: BackwardCompatibilityMain

View File

@@ -51,8 +51,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 with:
-build_jdk: 8
-runtime_jdk: 8
+build_jdk: 17
+runtime_jdk: 17
 testing_groups: -Dgroups=${{ matrix.testing_group }}
 override_config_path: ./environment-configs/test-groups/prepopulated-data
 use_indexer: middleManager
@@ -67,8 +67,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 with:
-build_jdk: 8
-runtime_jdk: 8
+build_jdk: 17
+runtime_jdk: 17
 testing_groups: -Dgroups=${{ matrix.testing_group }}
 use_indexer: indexer
 group: ${{ matrix.testing_group }}
@@ -82,8 +82,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 with:
-build_jdk: 8
-runtime_jdk: 8
+build_jdk: 17
+runtime_jdk: 17
 testing_groups: -Dgroups=${{ matrix.testing_group }}
 use_indexer: middleManager
 override_config_path: ./environment-configs/test-groups/prepopulated-data
@@ -94,7 +94,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-jdk: [8, 17, 21]
+jdk: [11, 17, 21]
 uses: ./.github/workflows/reusable-standard-its.yml
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 with:
@@ -115,8 +115,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 with:
-build_jdk: 8
-runtime_jdk: 8
+build_jdk: 17
+runtime_jdk: 17
 testing_groups: -Dgroups=shuffle-deep-store
 use_indexer: ${{ matrix.indexer }}
 override_config_path: ./environment-configs/test-groups/shuffle-deep-store
@@ -127,8 +127,8 @@ jobs:
 uses: ./.github/workflows/reusable-standard-its.yml
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 with:
-build_jdk: 8
-runtime_jdk: 8
+build_jdk: 17
+runtime_jdk: 17
 testing_groups: -Dgroups=custom-coordinator-duties
 use_indexer: middleManager
 override_config_path: ./environment-configs/test-groups/custom-coordinator-duties
@@ -136,7 +136,7 @@ jobs:
 integration-k8s-leadership-tests:
 needs: changes
-name: (Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test
+name: (Compile=openjdk17, Run=openjdk17, Cluster Build On K8s) ITNestedQueryPushDownTest integration test
 if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
 runs-on: ubuntu-22.04
 env:
@@ -154,7 +154,7 @@ jobs:
 - name: setup java
 uses: actions/setup-java@v4
 with:
-java-version: '8'
+java-version: '17'
 distribution: 'zulu'
 # the build step produces SNAPSHOT artifacts into the local maven repository,
@@ -195,8 +195,8 @@ jobs:
 indexer: [middleManager, indexer]
 uses: ./.github/workflows/reusable-standard-its.yml
 with:
-build_jdk: 8
-runtime_jdk: 8
+build_jdk: 17
+runtime_jdk: 17
 testing_groups: -DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties,centralized-datasource-schema,cds-task-schema-publish-disabled,cds-coordinator-metadata-query-disabled
 use_indexer: ${{ matrix.indexer }}
 group: other

View File

@@ -42,7 +42,7 @@ jobs:
 fail-fast: false
 matrix:
 # Use JDK 21.0.4 to work around https://github.com/apache/druid/issues/17429
-java: [ '8', '11', '17', '21.0.4' ]
+java: [ '11', '17', '21.0.4' ]
 runs-on: ubuntu-latest
 steps:
 - name: checkout branch
@@ -64,7 +64,7 @@ jobs:
 - name: script checks
 # who watches the watchers?
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ./check_test_suite_test.py
 - name: (openjdk17) strict compilation
@@ -74,43 +74,43 @@ jobs:
 run: ${MVN} clean -DstrictCompile compile test-compile --fail-at-end ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
 - name: maven install
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: |
 echo 'Running Maven install...' &&
 ${MVN} clean install -q -ff -pl '!distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C &&
 ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
 - name: checkstyle
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ${MVN} checkstyle:checkstyle --fail-at-end
 - name: license checks
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ./.github/scripts/license_checks_script.sh
 - name: analyze dependencies
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: |
 ./.github/scripts/analyze_dependencies_script.sh
 - name: animal sniffer checks
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ${MVN} animal-sniffer:check --fail-at-end
 - name: enforcer checks
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ${MVN} enforcer:enforce --fail-at-end
 - name: forbidden api checks
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end
 - name: pmd checks
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ${MVN} pmd:check --fail-at-end # TODO: consider adding pmd:cpd-check
 - name: spotbugs checks
-if: ${{ matrix.java == '8' }}
+if: ${{ matrix.java == '17' }}
 run: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks'
 openrewrite:
@@ -122,7 +122,7 @@ jobs:
 - uses: actions/setup-java@v4
 with:
 distribution: 'zulu'
-java-version: '8'
+java-version: '17'
 cache: 'maven'
 - name: maven install

View File

@@ -80,7 +80,7 @@ jobs:
 fail-fast: false
 matrix:
 # Use JDK 21.0.4 to work around https://github.com/apache/druid/issues/17429
-jdk: [ '8', '11', '17', '21.0.4' ]
+jdk: [ '11', '17', '21.0.4' ]
 runs-on: ubuntu-latest
 steps:
 - name: Checkout branch
@@ -162,7 +162,7 @@ jobs:
 fail-fast: false
 matrix:
 # Use JDK 21.0.4 to work around https://github.com/apache/druid/issues/17429
-jdk: [ '11', '17', '21.0.4' ]
+jdk: [ '11', '21.0.4' ]
 name: "unit tests (jdk${{ matrix.jdk }}, sql-compat=true)"
 uses: ./.github/workflows/unit-tests.yml
 needs: unit-tests
@@ -176,11 +176,11 @@ jobs:
 fail-fast: false
 matrix:
 sql_compatibility: [ false, true ]
-name: "unit tests (jdk8, sql-compat=${{ matrix.sql_compatibility }})"
+name: "unit tests (jdk17, sql-compat=${{ matrix.sql_compatibility }})"
 uses: ./.github/workflows/unit-tests.yml
 needs: build
 with:
-jdk: 8
+jdk: 17
 sql_compatibility: ${{ matrix.sql_compatibility }}
 standard-its:

View File

@@ -84,7 +84,7 @@
 <resource url="http://maven.apache.org/ASSEMBLY/2.0.0" location="$PROJECT_DIR$/.idea/xml-schemas/assembly-2.0.0.xsd" />
 <resource url="http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" location="$PROJECT_DIR$/.idea/xml-schemas/svg11.dtd" />
 </component>
-<component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="false" project-jdk-name="1.8" project-jdk-type="JavaSDK">
+<component name="ProjectRootManager" version="2" languageLevel="JDK_17" default="false" project-jdk-name="17" project-jdk-type="JavaSDK">
 <output url="file://$PROJECT_DIR$/classes" />
 </component>
 </project>

View File

@@ -106,7 +106,7 @@ Find articles written by community members and a calendar of upcoming events on
 ### Building from source
-Please note that JDK 8 or JDK 11 is required to build Druid.
+Please note that JDK 11 or JDK 17 is required to build Druid.
 See the latest [build guide](https://druid.apache.org/docs/latest/development/build.html) for instructions on building Apache Druid from source.

View File

@@ -64,6 +64,7 @@
 <dependency>
 <groupId>com.google.inject.extensions</groupId>
 <artifactId>guice-multibindings</artifactId>
+<scope>provided</scope>
 </dependency>
 <dependency>
 <groupId>org.apache.druid</groupId>
@@ -239,7 +240,6 @@
 <properties>
 <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 <jmh.version>1.21</jmh.version>
-<javac.target>1.8</javac.target>
 <uberjar.name>benchmarks</uberjar.name>
 </properties>

View File

@@ -22,7 +22,7 @@ import sys
 # this script does some primitive examination of git diff to determine if a test suite needs to be run or not
 # these jobs should always be run, no matter what
-always_run_jobs = ['license checks', '(openjdk8) packaging check', '(openjdk11) packaging check']
+always_run_jobs = ['license checks', '(openjdk17) packaging check']
 # ignore changes to these files completely since they don't impact CI, if the changes are only to these files then all
 # of CI can be skipped. however, jobs which are always run will still be run even if only these files are changed

View File

@@ -29,5 +29,4 @@ This ruleset defines the PMD rules for the Apache Druid project.
 <rule ref="category/java/codestyle.xml/UnnecessaryImport" />
 <rule ref="category/java/codestyle.xml/TooManyStaticImports" />
-<rule ref="category/java/codestyle.xml/UnnecessaryFullyQualifiedName"/>
 </ruleset>

View File

@@ -137,4 +137,15 @@
 <Bug pattern="SWL_SLEEP_WITH_LOCK_HELD"/>
 <Bug pattern="UL_UNRELEASED_LOCK_EXCEPTION_PATH"/>
 <Bug pattern="URF_UNREAD_FIELD"/>
+<!-- The following patterns have been excluded as part of upgrading to Java 17 as there were 100s of occurrences.
+     We should revisit these later. -->
+<Bug pattern="CT_CONSTRUCTOR_THROW"/>
+<Bug pattern="SING_SINGLETON_HAS_NONPRIVATE_CONSTRUCTOR"/>
+<Bug pattern="DCN_NULLPOINTER_EXCEPTION"/>
+<Bug pattern="SING_SINGLETON_INDIRECTLY_IMPLEMENTS_CLONEABLE"/>
+<Bug pattern="MS_EXPOSE_REP"/>
+<Bug pattern="PA_PUBLIC_PRIMITIVE_ATTRIBUTE"/>
+<Bug pattern="EI_EXPOSE_STATIC_REP2"/>
+<Bug pattern="SS_SHOULD_BE_STATIC"/>
+<Bug pattern="SING_SINGLETON_IMPLEMENTS_SERIALIZABLE"/>
 </FindBugsFilter>
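
For context, one of these bulk-excluded patterns, CT_CONSTRUCTOR_THROW, flags any constructor that can throw, which is common in validation-heavy code. A minimal invented example of the kind of code it reports:

```java
public class ServiceEndpoint
{
  private final String host;

  public ServiceEndpoint(String host)
  {
    // SpotBugs CT_CONSTRUCTOR_THROW: throwing here leaves a partially-constructed
    // object observable through finalizer-subclass tricks.
    if (host == null || host.isEmpty()) {
      throw new IllegalArgumentException("host must be non-empty");
    }
    this.host = host;
  }

  public String getHost()
  {
    return host;
  }
}
```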

View File

@@ -23,7 +23,7 @@ ARG JDK_VERSION=17
 # This is because it's not able to build the distribution on arm64 due to dependency problem of web-console. See: https://github.com/apache/druid/issues/13012
 # Since only java jars are shipped in the final image, it's OK to build the distribution on x64.
 # Once the web-console dependency problem is resolved, we can remove the --platform directive.
-FROM --platform=linux/amd64 maven:3.8.6-jdk-11-slim as builder
+FROM --platform=linux/amd64 maven:3.8.4-openjdk-17-slim as builder
 # Rebuild from source in this stage
 # This can be unset if the tarball was already built outside of Docker

View File

@@ -195,9 +195,9 @@ and `druid.tlsPort` properties on each service. Please see `Configuration` secti
 Druid uses Jetty as an embedded web server. To learn more about TLS/SSL, certificates, and related concepts in Jetty, including explanations of the configuration settings below, see "Configuring SSL/TLS KeyStores" in the [Jetty Operations Guide](https://www.eclipse.org/jetty/documentation.php).
-For information about TLS/SSL support in Java in general, see the [Java Secure Socket Extension (JSSE) Reference Guide](http://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html).
+For information about TLS/SSL support in Java in general, see the [Java Secure Socket Extension (JSSE) Reference Guide](https://docs.oracle.com/en/java/javase/11/security/java-secure-socket-extension-jsse-reference-guide.html).
 The [Java Cryptography Architecture
-Standard Algorithm Name Documentation for JDK 8](http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html) lists all possible
+Standard Algorithm Name Documentation for JDK 11](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all possible
 values for the following properties, among others provided by the Java implementation.
 |Property|Description|Default|Required|
@@ -230,7 +230,7 @@ These properties apply to the SSLContext that will be provided to the internal H
 |`druid.client.https.trustStoreAlgorithm`|Algorithm to be used by TrustManager to validate certificate chains|`javax.net.ssl.TrustManagerFactory.getDefaultAlgorithm()`|no|
 |`druid.client.https.trustStorePassword`|The [Password Provider](../operations/password-provider.md) or String password for the Trust Store.|none|yes|
-This [document](http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html) lists all the possible
+This [document](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all the possible
 values for the above mentioned configs among others provided by Java implementation.
 ### Authentication and authorization

View File

@@ -23,9 +23,9 @@ title: "Simple SSLContext Provider Module"
 -->
-This Apache Druid module contains a simple implementation of [SSLContext](http://docs.oracle.com/javase/8/docs/api/javax/net/ssl/SSLContext.html)
+This Apache Druid module contains a simple implementation of [SSLContext](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/javax/net/ssl/SSLContext.html)
 that will be injected to be used with HttpClient that Druid processes use internally to communicate with each other. To learn more about
-Java's SSL support, please refer to [this](http://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html) guide.
+Java's SSL support, please refer to [this](https://docs.oracle.com/en/java/javase/11/security/java-secure-socket-extension-jsse-reference-guide.html) guide.
 |Property|Description|Default|Required|
@@ -48,5 +48,5 @@ The following table contains optional parameters for supporting client certifica
 |`druid.client.https.keyManagerPassword`|The [Password Provider](../../operations/password-provider.md) or String password for the Key Manager.|none|no|
 |`druid.client.https.validateHostnames`|Validate the hostname of the server. This should not be disabled unless you are using [custom TLS certificate checks](../../operations/tls-support.md) and know that standard hostname validation is not needed.|true|no|
-This [document](http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html) lists all the possible
+This [document](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all the possible
 values for the above mentioned configs among others provided by Java implementation.

View File

@@ -184,7 +184,7 @@ Sample specs:
 |uris|JSON array of URIs where S3 objects to be ingested are located.|None|`uris` or `prefixes` or `objects` must be set|
 |prefixes|JSON array of URI prefixes for the locations of S3 objects to be ingested. Empty objects starting with one of the given prefixes will be skipped.|None|`uris` or `prefixes` or `objects` must be set|
 |objects|JSON array of S3 Objects to be ingested.|None|`uris` or `prefixes` or `objects` must be set|
-|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/javase/8/docs/api/java/nio/file/FileSystem.html#getPathMatcher-java.lang.String-).|None|no|
+|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no|
 |systemFields|JSON array of system fields to return as part of input rows. Possible values: `__file_uri` (S3 URI starting with `s3://`), `__file_bucket` (S3 bucket), and `__file_path` (S3 object key).|None|no|
 | endpointConfig |Config for overriding the default S3 endpoint and signing region. This would allow ingesting data from a different S3 store. Please see [s3 config](../development/extensions-core/s3.md#connecting-to-s3-configuration) for more information.|None|No (defaults will be used if not given)
 | clientConfig |S3 client properties for the overridden s3 endpoint. This is used in conjunction with `endPointConfig`. Please see [s3 config](../development/extensions-core/s3.md#connecting-to-s3-configuration) for more information.|None|No (defaults will be used if not given)
@@ -289,7 +289,7 @@ Sample specs:
 |uris|JSON array of URIs where Google Cloud Storage objects to be ingested are located.|None|`uris` or `prefixes` or `objects` must be set|
 |prefixes|JSON array of URI prefixes for the locations of Google Cloud Storage objects to be ingested. Empty objects starting with one of the given prefixes will be skipped.|None|`uris` or `prefixes` or `objects` must be set|
 |objects|JSON array of Google Cloud Storage objects to be ingested.|None|`uris` or `prefixes` or `objects` must be set|
-|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/javase/8/docs/api/java/nio/file/FileSystem.html#getPathMatcher-java.lang.String-).|None|no|
+|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no|
 Note that the Google Cloud Storage input source will skip all empty objects only when `prefixes` is specified.
@@ -377,7 +377,7 @@ Sample specs:
 |uris|JSON array of URIs where the Azure objects to be ingested are located. Use this format: `azureStorage://STORAGE_ACCOUNT/CONTAINER/PATH_TO_FILE`|None|One of the following must be set:`uris`, `prefixes`, or `objects`.|
 |prefixes|JSON array of URI prefixes for the locations of Azure objects to ingest. Use this format`azureStorage://STORAGE_ACCOUNT/CONTAINER/PREFIX`. Empty objects starting with any of the given prefixes are skipped.|None|One of the following must be set:`uris`, `prefixes`, or `objects`.|
 |objects|JSON array of Azure objects to ingest.|None|One of the following must be set:`uris`, `prefixes`, or `objects`.|
-|objectGlob|A glob for the object part of the Azure URI. In the URI `azureStorage://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azureStorage://foo/bar/file.json` because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/javase/8/docs/api/java/nio/file/FileSystem.html#getPathMatcher-java.lang.String-).|None|no|
+|objectGlob|A glob for the object part of the Azure URI. In the URI `azureStorage://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azureStorage://foo/bar/file.json` because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no|
 |systemFields|JSON array of system fields to return as part of input rows. Possible values: `__file_uri` (Azure blob URI starting with `azureStorage://`), `__file_bucket` (Azure bucket), and `__file_path` (Azure object path).|None|no|
 |properties|Properties object for overriding the default Azure configuration. See below for more information.|None|No (defaults will be used if not given)|
@@ -471,7 +471,7 @@ Sample specs:
 |uris|JSON array of URIs where the Azure objects to be ingested are located, in the form `azure://<container>/<path-to-file>`|None|`uris` or `prefixes` or `objects` must be set|
 |prefixes|JSON array of URI prefixes for the locations of Azure objects to ingest, in the form `azure://<container>/<prefix>`. Empty objects starting with one of the given prefixes are skipped.|None|`uris` or `prefixes` or `objects` must be set|
 |objects|JSON array of Azure objects to ingest.|None|`uris` or `prefixes` or `objects` must be set|
-|objectGlob|A glob for the object part of the Azure URI. In the URI `azure://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azure://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/javase/8/docs/api/java/nio/file/FileSystem.html#getPathMatcher-java.lang.String-).|None|no|
+|objectGlob|A glob for the object part of the Azure URI. In the URI `azure://foo/bar/file.json`, the glob is applied to `bar/file.json`.<br /><br />The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azure://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.<br /><br />For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no|
 |systemFields|JSON array of system fields to return as part of input rows. Possible values: `__file_uri` (Azure blob URI starting with `azure://`), `__file_bucket` (Azure bucket), and `__file_path` (Azure object path).|None|no|
 Note that the Azure input source skips all empty objects only when `prefixes` is specified.
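
As an aside, a minimal sketch (with a made-up object path) of the `FileSystem#getPathMatcher` glob behavior the `objectGlob` rows above describe:

```java
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.PathMatcher;

public class ObjectGlobCheck
{
  public static void main(String[] args)
  {
    // The "glob:" prefix selects glob syntax, per FileSystem#getPathMatcher.
    PathMatcher singleLevel = FileSystems.getDefault().getPathMatcher("glob:*.json");
    PathMatcher crossLevel = FileSystems.getDefault().getPathMatcher("glob:**.json");

    // Hypothetical object part of s3://foo/bar/file.json
    Path objectPart = Path.of("bar/file.json");

    System.out.println(singleLevel.matches(objectPart)); // false: '*' does not cross the '/' separator
    System.out.println(crossLevel.matches(objectPart));  // true: '**' matches across path segments
  }
}
```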

View File

@@ -27,7 +27,11 @@ a Java runtime for Druid.
 ## Selecting a Java runtime
-Druid fully supports Java 8u92+, Java 11, and Java 17. The project team recommends Java 17.
+Druid fully supports Java 11 and Java 17. The project team recommends Java 17.
+:::info
+Note: Starting with Apache Druid 32.0.0, support for Java 8 has been removed.
+:::
 The project team recommends using an OpenJDK-based Java distribution. There are many free and actively-supported
 distributions available, including

View File

@@ -37,10 +37,10 @@ Apache Druid uses Jetty as its embedded web server.
 To get familiar with TLS/SSL, along with related concepts like keys and certificates,
 read [Configuring Secure Protocols](https://www.eclipse.org/jetty/documentation/jetty-12/operations-guide/index.html#og-protocols-ssl) in the Jetty documentation.
-To get more in-depth knowledge of TLS/SSL support in Java in general, refer to the [Java Secure Socket Extension (JSSE) Reference Guide](http://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html).
+To get more in-depth knowledge of TLS/SSL support in Java in general, refer to the [Java Secure Socket Extension (JSSE) Reference Guide](https://docs.oracle.com/en/java/javase/11/security/java-secure-socket-extension-jsse-reference-guide.html).
 The [Class SslContextFactory](https://www.eclipse.org/jetty/javadoc/jetty-11/org/eclipse/jetty/util/ssl/SslContextFactory.html)
 reference doc can help in understanding TLS/SSL configurations listed below. Finally, [Java Cryptography Architecture
-Standard Algorithm Name Documentation for JDK 8](http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html) lists all possible
+Standard Algorithm Name Documentation for JDK 11](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all possible
 values for the configs below, among others provided by Java implementation.
 |Property|Description|Default|Required|
@@ -79,7 +79,7 @@ The following table contains non-mandatory advanced configuration options, use c
 ## Internal communication over TLS
 Whenever possible Druid processes will use HTTPS to talk to each other. To enable this communication Druid's HttpClient needs to
-be configured with a proper [SSLContext](http://docs.oracle.com/javase/8/docs/api/javax/net/ssl/SSLContext.html) that is able
+be configured with a proper [SSLContext](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/javax/net/ssl/SSLContext.html) that is able
 to validate the Server Certificates, otherwise communication will fail.
 Since, there are various ways to configure SSLContext, by default, Druid looks for an instance of SSLContext Guice binding
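
For illustration only (generic JSSE usage, not Druid's actual wiring), an SSLContext of the kind such a binding would supply can be built from a trust store; the store path and password below are placeholders:

```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;

public final class TrustStoreContext
{
  public static SSLContext fromTrustStore(Path trustStorePath, char[] password) throws Exception
  {
    // Load the trust store that holds the CA certificates used to validate server certs.
    KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
    try (InputStream in = Files.newInputStream(trustStorePath)) {
      trustStore.load(in, password);
    }

    // Use the JDK's default trust manager algorithm (typically PKIX).
    TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init(trustStore);

    SSLContext context = SSLContext.getInstance("TLS");
    context.init(null, tmf.getTrustManagers(), null);
    return context;
  }
}
```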

View File

@@ -255,7 +255,7 @@ For a regular dimension, it assumes the string is formatted in
 [ISO-8601 date and time format](https://en.wikipedia.org/wiki/ISO_8601).
 * `format` : date time format for the resulting dimension value, in [Joda Time DateTimeFormat](http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html), or null to use the default ISO8601 format.
-* `locale` : locale (language and country) to use, given as a [IETF BCP 47 language tag](http://www.oracle.com/technetwork/java/javase/java8locales-2095355.html#util-text), e.g. `en-US`, `en-GB`, `fr-FR`, `fr-CA`, etc.
+* `locale` : locale (language and country) to use, given as a [IETF BCP 47 language tag](https://www.oracle.com/java/technologies/javase/jdk11-suported-locales.html#util-text), e.g. `en-US`, `en-GB`, `fr-FR`, `fr-CA`, etc.
 * `timeZone` : time zone to use in [IANA tz database format](http://en.wikipedia.org/wiki/List_of_tz_database_time_zones), e.g. `Europe/Berlin` (this can possibly be different than the aggregation time-zone)
 * `granularity` : [granularity](granularities.md) to apply before formatting, or omit to not apply any granularity.
 * `asMillis` : boolean value, set to true to treat input strings as millis rather than ISO8601 strings. Additionally, if `format` is null or not specified, output will be in millis rather than ISO8601.
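
For reference, a small sketch (with an arbitrary timestamp) of how the `format`, `locale`, and `timeZone` options above map onto Joda-Time's DateTimeFormat:

```java
import java.util.Locale;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class TimeFormatSketch
{
  public static void main(String[] args)
  {
    DateTimeFormatter formatter = DateTimeFormat.forPattern("EEEE d MMMM yyyy")
        .withLocale(Locale.forLanguageTag("fr-FR"))      // locale: "fr-FR"
        .withZone(DateTimeZone.forID("Europe/Berlin"));  // timeZone: "Europe/Berlin"

    DateTime timestamp = new DateTime("2024-11-21T10:00:00Z");
    System.out.println(formatter.print(timestamp));      // prints something like "jeudi 21 novembre 2024"
  }
}
```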

View File

@@ -439,7 +439,7 @@ The regular expression filter is similar to the selector filter, but using regul
 | -------- | ----------- | -------- |
 | `type` | Must be "regex".| Yes |
 | `dimension` | Input column or virtual column name to filter on. | Yes |
-| `pattern` | String pattern to match - any standard [Java regular expression](http://docs.oracle.com/javase/6/docs/api/java/util/regex/Pattern.html). | Yes |
+| `pattern` | String pattern to match - any standard [Java regular expression](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/regex/Pattern.html). | Yes |
 | `extractionFn` | [Extraction function](./dimensionspecs.md#extraction-functions) to apply to `dimension` prior to value matching. See [filtering with extraction functions](#filtering-with-extraction-functions) for details. | No |
 Note that it is often more optimal to use a like filter instead of a regex for simple matching of prefixes.
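
The `pattern` column uses standard `java.util.regex` syntax; a quick sketch with invented values:

```java
import java.util.regex.Pattern;

public class RegexPatternCheck
{
  public static void main(String[] args)
  {
    // For example, a pattern selecting values that start with "United".
    Pattern pattern = Pattern.compile("^United.*");

    System.out.println(pattern.matcher("United States").find()); // true
    System.out.println(pattern.matcher("Canada").find());        // false
  }
}
```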

View File

@@ -81,7 +81,7 @@ The following built-in functions are available.
 |name|description|
 |----|-----------|
 |concat|concat(expr, expr...) concatenate a list of strings|
-|format|format(pattern[, args...]) returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/javase/8/docs/api/java/lang/String.html#format-java.lang.String-java.lang.Object...-).|
+|format|format(pattern[, args...]) returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/String.html#format(java.lang.String,java.lang.Object...)).|
 |like|like(expr, pattern[, escape]) is equivalent to SQL `expr LIKE pattern`|
 |lookup|lookup(expr, lookup-name[,replaceMissingValueWith]) looks up expr in a registered,`replaceMissingValueWith` is an optional constant string [query-time lookup](../querying/lookups.md)|
 |parse_long|parse_long(string[, radix]) parses a string as a long with the given radix, or 10 (decimal) if a radix is not provided.|
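
As a quick reminder of the `String.format` semantics the `format` expression above delegates to (arbitrary example values):

```java
public class FormatSketch
{
  public static void main(String[] args)
  {
    // Roughly what format('%s has %,d rows', ...) produces for string and long arguments.
    String formatted = String.format("%s has %,d rows", "wikipedia", 1_234_567L);
    System.out.println(formatted); // wikipedia has 1,234,567 rows (in an en-US default locale)
  }
}
```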

View File

@@ -115,7 +115,7 @@ String functions accept strings and return a type appropriate to the function.
 |`REPLACE(expr, substring, replacement)`|Replaces instances of `substring` in `expr` with `replacement` and returns the result.|
 |`REPEAT(expr, N)`|Repeats `expr` `N` times.|
 |`REVERSE(expr)`|Reverses `expr`.|
-|`STRING_FORMAT(pattern[, args...])`|Returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/javase/8/docs/api/java/lang/String.html#format-java.lang.String-java.lang.Object...-).|
+|`STRING_FORMAT(pattern[, args...])`|Returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/String.html#format(java.lang.String,java.lang.Object...)).|
 |`STRPOS(expr, substring)`|Returns the index of `substring` within `expr`, with indexes starting from 1. If `substring` is not found, returns 0.|
 |`SUBSTRING(expr, index[, length])`|Returns a substring of `expr` starting at a given one-based index. If `length` is omitted, extracts characters to the end of the string, otherwise returns a substring of `length` UTF-16 characters.|
 |`SUBSTR(expr, index[, length])`|Alias for `SUBSTRING`.|

View File

@@ -133,7 +133,7 @@ The [basic cluster tuning guide](../operations/basic-cluster-tuning.md) has info
 We recommend running your favorite Linux distribution. You will also need
-* [Java 8u92+, 11, or 17](../operations/java.md)
+* [Java 11 or 17](../operations/java.md)
 * Python 2 or Python 3
 :::info
@@ -141,8 +141,8 @@ We recommend running your favorite Linux distribution. You will also need
 `DRUID_JAVA_HOME` or `JAVA_HOME`. For more details run the `bin/verify-java` script.
 :::
-For information about installing Java, see the documentation for your OS package manager. If your Ubuntu-based OS does not have a recent enough version of Java, WebUpd8 offers [packages for those
-OSes](http://www.webupd8.org/2012/09/install-oracle-java-8-in-ubuntu-via-ppa.html).
+For information about installing Java, see the documentation for your OS package manager. If your Ubuntu-based OS does not have a recent enough version of Java, Linux Uprising offers [packages for those
+OSes](https://launchpad.net/~linuxuprising/+archive/ubuntu/java).
 ## Download the distribution

View File

@@ -40,7 +40,7 @@ You can follow these steps on a relatively modest machine, such as a workstation
 The software requirements for the installation machine are:
 * Linux, Mac OS X, or other Unix-like OS. (Windows is not supported)
-* [Java 8u92+, 11, or 17](../operations/java.md)
+* [Java 11 or 17](../operations/java.md)
 * Python 3 (preferred) or Python 2
 * Perl 5

View File

@@ -28,14 +28,14 @@ sub fail_check {
 : "No Java runtime was detected on your system.";
 print STDERR <<"EOT";
-Druid requires Java 8, 11, or 17. $current_version_text
+Druid requires Java 11 or 17. $current_version_text
 If you believe this check is in error, or you want to proceed with a potentially
 unsupported Java runtime, you can skip this check using an environment variable:
 export DRUID_SKIP_JAVA_CHECK=1
-Otherwise, install Java 8, 11, or 17 in one of the following locations.
+Otherwise, install Java 11 or 17 in one of the following locations.
 * DRUID_JAVA_HOME
 * JAVA_HOME
@@ -68,6 +68,6 @@ if ($?) {
 }
 # If we know it won't work, die. Otherwise hope for the best.
-if ($java_version =~ /version \"((\d+)\.(\d+).*?)\"/ && !($2 == 1 && $3 == 8) && $2 != 11 && $2 != 17 ) {
+if ($java_version =~ /version \"((\d+)\.(\d+).*?)\"/ && $2 != 11 && $2 != 17) {
 fail_check($1);
 }
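
The same version gate, sketched in Java against sample version strings (the strings are illustrative, not captured `java -version` output):

```java
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class JavaVersionGate
{
  // Mirrors the Perl check above: accept only major versions 11 and 17.
  private static final Pattern VERSION = Pattern.compile("version \"((\\d+)\\.(\\d+).*?)\"");

  public static void main(String[] args)
  {
    List<String> samples = List.of(
        "openjdk version \"17.0.9\" 2023-10-17",
        "openjdk version \"1.8.0_392\""
    );
    for (String line : samples) {
      Matcher m = VERSION.matcher(line);
      if (m.find()) {
        int major = Integer.parseInt(m.group(2));
        System.out.println(m.group(1) + " -> supported=" + (major == 11 || major == 17));
      }
    }
  }
}
```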

View File

@@ -52,11 +52,11 @@ RUN rpm --import http://repos.azulsystems.com/RPM-GPG-KEY-azulsystems && \
 rpm -ivh zulu-repo-${ZULU_REPO_VER}.noarch.rpm && \
 yum -q -y update && \
 yum -q -y upgrade && \
-yum -q -y install zulu8-jdk && \
+yum -q -y install zulu17-jdk && \
 yum clean all && \
 rm -rf /var/cache/yum zulu-repo_${ZULU_REPO_VER}.noarch.rpm
-ENV JAVA_HOME=/usr/lib/jvm/zulu8
+ENV JAVA_HOME=/usr/lib/jvm/zulu17
 ENV PATH $PATH:$JAVA_HOME/bin
 # hadoop
@@ -73,7 +73,7 @@ ENV HADOOP_CONF_DIR /usr/local/hadoop/etc/hadoop
 ENV YARN_CONF_DIR $HADOOP_HOME/etc/hadoop
 # in hadoop 3 the example file is nearly empty so we can just append stuff
-RUN sed -i '$ a export JAVA_HOME=/usr/lib/jvm/zulu8' $HADOOP_HOME/etc/hadoop/hadoop-env.sh
+RUN sed -i '$ a export JAVA_HOME=/usr/lib/jvm/zulu17' $HADOOP_HOME/etc/hadoop/hadoop-env.sh
 RUN sed -i '$ a export HADOOP_HOME=/usr/local/hadoop' $HADOOP_HOME/etc/hadoop/hadoop-env.sh
 RUN sed -i '$ a export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop/' $HADOOP_HOME/etc/hadoop/hadoop-env.sh
 RUN sed -i '$ a export HDFS_NAMENODE_USER=root' $HADOOP_HOME/etc/hadoop/hadoop-env.sh

View File

@ -48,7 +48,6 @@
<dependency> <dependency>
<groupId>com.google.inject</groupId> <groupId>com.google.inject</groupId>
<artifactId>guice</artifactId> <artifactId>guice</artifactId>
<version>${guice.version}</version>
<exclusions> <exclusions>
<exclusion> <exclusion>
<groupId>aopalliance</groupId> <groupId>aopalliance</groupId>
@ -60,7 +59,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<version>${guice.version}</version> <scope>provided</scope>
<!--$NO-MVN-MAN-VER$ --> <!--$NO-MVN-MAN-VER$ -->
</dependency> </dependency>
<dependency> <dependency>

View File

@ -145,7 +145,6 @@
<dependency> <dependency>
<groupId>com.google.inject</groupId> <groupId>com.google.inject</groupId>
<artifactId>guice</artifactId> <artifactId>guice</artifactId>
<version>4.1.0</version>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>

View File

@ -262,6 +262,12 @@
<artifactId>reflections</artifactId> <artifactId>reflections</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>1.2</version>
<scope>provided</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>
<extensions> <extensions>

View File

@ -17,6 +17,6 @@
~ under the License. ~ under the License.
--> -->
This module contains a simple implementation of [SslContext](http://docs.oracle.com/javase/8/docs/api/javax/net/ssl/SSLContext.html) This module contains a simple implementation of [SslContext](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/javax/net/ssl/SSLContext.html)
that will be injected to be used with HttpClient that Druid nodes use internally to communicate with each other. that will be injected to be used with HttpClient that Druid nodes use internally to communicate with each other.
More details [here](https://druid.apache.org/docs/latest/development/extensions-core/simple-client-sslcontext.html). More details [here](https://druid.apache.org/docs/latest/development/extensions-core/simple-client-sslcontext.html).
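As a rough idea of what such an `SSLContext` looks like when built from plain JDK APIs (a sketch using the JVM's default trust store, not this module's actual wiring):

```java
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import java.security.KeyStore;

public class SimpleSslContextSketch
{
  public static void main(String[] args) throws Exception
  {
    // Illustration only: a TLS SSLContext backed by the JRE's default trust store,
    // roughly the kind of object this module injects for Druid's internal HttpClient.
    TrustManagerFactory tmf =
        TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init((KeyStore) null); // null => use the default cacerts trust store

    SSLContext sslContext = SSLContext.getInstance("TLS");
    sslContext.init(null, tmf.getTrustManagers(), null);

    System.out.println("Protocol: " + sslContext.getProtocol());
  }
}
```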

View File

@ -293,7 +293,7 @@ public class IndexGeneratorJob implements Jobby
AggregatorFactory[] aggs, AggregatorFactory[] aggs,
HadoopDruidIndexerConfig config, HadoopDruidIndexerConfig config,
@Nullable Iterable<String> oldDimOrder, @Nullable Iterable<String> oldDimOrder,
Map<String, ColumnFormat> oldCapabilities @Nullable Map<String, ColumnFormat> oldCapabilities
) )
{ {
final HadoopTuningConfig tuningConfig = config.getSchema().getTuningConfig(); final HadoopTuningConfig tuningConfig = config.getSchema().getTuningConfig();

View File

@ -91,6 +91,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>javax.ws.rs</groupId> <groupId>javax.ws.rs</groupId>

View File

@ -1515,7 +1515,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer
boolean shouldRunPendingTasks = false; boolean shouldRunPendingTasks = false;
// must be synchronized while iterating: // must be synchronized while iterating:
// https://docs.oracle.com/javase/8/docs/api/java/util/Collections.html#synchronizedSet-java.util.Set- // https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/Collections.html#synchronizedSet(java.util.Set)
synchronized (blackListedWorkers) { synchronized (blackListedWorkers) {
for (Iterator<ZkWorker> iterator = blackListedWorkers.iterator(); iterator.hasNext(); ) { for (Iterator<ZkWorker> iterator = blackListedWorkers.iterator(); iterator.hasNext(); ) {
ZkWorker zkWorker = iterator.next(); ZkWorker zkWorker = iterator.next();
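The linked javadoc is the reason for the `synchronized` block here: a set returned by `Collections.synchronizedSet` only guards individual operations, so iteration must be wrapped manually. A self-contained illustration (names are hypothetical):

```java
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

public class SynchronizedSetIteration
{
  public static void main(String[] args)
  {
    Set<String> workers = Collections.synchronizedSet(new HashSet<>());
    workers.add("worker-1");
    workers.add("worker-2");

    // Per the Collections.synchronizedSet javadoc, iteration must be manually
    // synchronized on the wrapper itself; otherwise behavior is undefined if
    // another thread mutates the set concurrently.
    synchronized (workers) {
      for (Iterator<String> it = workers.iterator(); it.hasNext(); ) {
        String worker = it.next();
        System.out.println("inspecting " + worker);
        // it.remove() would also be safe here, mirroring the blacklist cleanup above.
      }
    }
  }
}
```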

View File

@ -86,6 +86,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.curator</groupId> <groupId>org.apache.curator</groupId>
@ -478,6 +479,9 @@
<plugin> <plugin>
<artifactId>maven-failsafe-plugin</artifactId> <artifactId>maven-failsafe-plugin</artifactId>
<version>3.0.0-M7</version> <version>3.0.0-M7</version>
<configuration>
<argLine>--add-opens java.base/java.lang=ALL-UNNAMED</argLine>
</configuration>
<dependencies> <dependencies>
<!-- Required to force Failsafe to use JUnit instead of TestNG. <!-- Required to force Failsafe to use JUnit instead of TestNG.
junit47 is required to use test categories. --> junit47 is required to use test categories. -->

View File

@ -99,7 +99,7 @@ public class ITMultiStageQueryWorkerFaultTolerance
+ " regionIsoCode\n" + " regionIsoCode\n"
+ "FROM TABLE(\n" + "FROM TABLE(\n"
+ " EXTERN(\n" + " EXTERN(\n"
+ " '{\"type\":\"local\",\"files\":[\"/resources/data/batch_index/json/wikipedia_index_data1.json\",\"/resources/data/batch_index/json/wikipedia_index_data1.json\"]}',\n" + " '{\"type\":\"local\",\"files\":[\"/resources/data/batch_index/json/wikipedia_index_data1.json\",\"/resources/data/batch_index/json/wikipedia_index_data1.json\",\"/resources/data/batch_index/json/wikipedia_index_data1.json\",\"/resources/data/batch_index/json/wikipedia_index_data1.json\"]}',\n"
+ " '{\"type\":\"json\"}',\n" + " '{\"type\":\"json\"}',\n"
+ " '[{\"type\":\"string\",\"name\":\"timestamp\"},{\"type\":\"string\",\"name\":\"isRobot\"},{\"type\":\"string\",\"name\":\"diffUrl\"},{\"type\":\"long\",\"name\":\"added\"},{\"type\":\"string\",\"name\":\"countryIsoCode\"},{\"type\":\"string\",\"name\":\"regionName\"},{\"type\":\"string\",\"name\":\"channel\"},{\"type\":\"string\",\"name\":\"flags\"},{\"type\":\"long\",\"name\":\"delta\"},{\"type\":\"string\",\"name\":\"isUnpatrolled\"},{\"type\":\"string\",\"name\":\"isNew\"},{\"type\":\"double\",\"name\":\"deltaBucket\"},{\"type\":\"string\",\"name\":\"isMinor\"},{\"type\":\"string\",\"name\":\"isAnonymous\"},{\"type\":\"long\",\"name\":\"deleted\"},{\"type\":\"string\",\"name\":\"cityName\"},{\"type\":\"long\",\"name\":\"metroCode\"},{\"type\":\"string\",\"name\":\"namespace\"},{\"type\":\"string\",\"name\":\"comment\"},{\"type\":\"string\",\"name\":\"page\"},{\"type\":\"long\",\"name\":\"commentLength\"},{\"type\":\"string\",\"name\":\"countryName\"},{\"type\":\"string\",\"name\":\"user\"},{\"type\":\"string\",\"name\":\"regionIsoCode\"}]'\n" + " '[{\"type\":\"string\",\"name\":\"timestamp\"},{\"type\":\"string\",\"name\":\"isRobot\"},{\"type\":\"string\",\"name\":\"diffUrl\"},{\"type\":\"long\",\"name\":\"added\"},{\"type\":\"string\",\"name\":\"countryIsoCode\"},{\"type\":\"string\",\"name\":\"regionName\"},{\"type\":\"string\",\"name\":\"channel\"},{\"type\":\"string\",\"name\":\"flags\"},{\"type\":\"long\",\"name\":\"delta\"},{\"type\":\"string\",\"name\":\"isUnpatrolled\"},{\"type\":\"string\",\"name\":\"isNew\"},{\"type\":\"double\",\"name\":\"deltaBucket\"},{\"type\":\"string\",\"name\":\"isMinor\"},{\"type\":\"string\",\"name\":\"isAnonymous\"},{\"type\":\"long\",\"name\":\"deleted\"},{\"type\":\"string\",\"name\":\"cityName\"},{\"type\":\"long\",\"name\":\"metroCode\"},{\"type\":\"string\",\"name\":\"namespace\"},{\"type\":\"string\",\"name\":\"comment\"},{\"type\":\"string\",\"name\":\"page\"},{\"type\":\"long\",\"name\":\"commentLength\"},{\"type\":\"string\",\"name\":\"countryName\"},{\"type\":\"string\",\"name\":\"user\"},{\"type\":\"string\",\"name\":\"regionIsoCode\"}]'\n"
+ " )\n" + " )\n"
@ -139,7 +139,6 @@ public class ITMultiStageQueryWorkerFaultTolerance
private void killTaskAbruptly(String taskIdToKill) private void killTaskAbruptly(String taskIdToKill)
{ {
String command = "jps -mlv | grep -i peon | grep -i " + taskIdToKill + " |awk '{print $1}'"; String command = "jps -mlv | grep -i peon | grep -i " + taskIdToKill + " |awk '{print $1}'";
ITRetryUtil.retryUntil(() -> { ITRetryUtil.retryUntil(() -> {
@ -158,21 +157,17 @@ public class ITMultiStageQueryWorkerFaultTolerance
} }
String pidToKill = stdOut.lhs.trim(); String pidToKill = stdOut.lhs.trim();
if (pidToKill.length() != 0) { if (pidToKill.length() != 0) {
LOG.info("Found PID to kill %s", pidToKill);
// kill worker after 5 seconds
Thread.sleep(5000);
LOG.info("Killing pid %s", pidToKill); LOG.info("Killing pid %s", pidToKill);
druidClusterAdminClient.runCommandInMiddleManagerContainer( final Pair<String, String> killResult = druidClusterAdminClient.runCommandInMiddleManagerContainer(
"/bin/bash", "/bin/bash",
"-c", "-c",
"kill -9 " + pidToKill "kill -9 " + pidToKill
); );
LOG.info(StringUtils.format("Kill command stdout: %s, stderr: %s", killResult.lhs, killResult.rhs));
return true; return true;
} else { } else {
return false; return false;
} }
}, true, 6000, 50, StringUtils.format("Figuring out PID for task[%s] to kill abruptly", taskIdToKill)); }, true, 2000, 100, StringUtils.format("Figuring out PID for task[%s] to kill abruptly", taskIdToKill));
} }
} }

View File

@ -18,6 +18,38 @@
"deleted": 200, "deleted": 200,
"namespace": "article" "namespace": "article"
}, },
{
"__time": 1377910953000,
"isRobot": null,
"added": 57,
"delta": -143,
"deleted": 200,
"namespace": "article"
},
{
"__time": 1377910953000,
"isRobot": null,
"added": 57,
"delta": -143,
"deleted": 200,
"namespace": "article"
},
{
"__time": 1377919965000,
"isRobot": null,
"added": 459,
"delta": 330,
"deleted": 129,
"namespace": "wikipedia"
},
{
"__time": 1377919965000,
"isRobot": null,
"added": 459,
"delta": 330,
"deleted": 129,
"namespace": "wikipedia"
},
{ {
"__time": 1377919965000, "__time": 1377919965000,
"isRobot": null, "isRobot": null,
@ -42,6 +74,22 @@
"deleted": 12, "deleted": 12,
"namespace": "article" "namespace": "article"
}, },
{
"__time": 1377933081000,
"isRobot": null,
"added": 123,
"delta": 111,
"deleted": 12,
"namespace": "article"
},
{
"__time": 1377933081000,
"isRobot": null,
"added": 123,
"delta": 111,
"deleted": 12,
"namespace": "article"
},
{ {
"__time": 1377933081000, "__time": 1377933081000,
"isRobot": null, "isRobot": null,

View File

@ -28,7 +28,7 @@
# This Dockerfile prefers to use the COPY command over ADD. # This Dockerfile prefers to use the COPY command over ADD.
# See: https://phoenixnap.com/kb/docker-add-vs-copy # See: https://phoenixnap.com/kb/docker-add-vs-copy
ARG JDK_VERSION=8-slim-buster ARG JDK_VERSION=17-slim-buster
# The FROM image provides Java on top of Debian, and # The FROM image provides Java on top of Debian, and
# thus provides bash, apt-get, etc. # thus provides bash, apt-get, etc.

View File

@ -88,8 +88,7 @@ if your test has the annotation: `@Test(groups = TestNGGroup.BATCH_INDEX)` then
* Add `-pl :druid-integration-tests` when running integration tests for the second time or later without changing * Add `-pl :druid-integration-tests` when running integration tests for the second time or later without changing
the code of core modules in between to skip up-to-date checks for the whole module dependency tree. the code of core modules in between to skip up-to-date checks for the whole module dependency tree.
* Integration tests can also be run with either Java 8 or Java 11 by adding `-Djvm.runtime=#` to the `mvn` command, where `#` * Integration tests can also be run with a specific Java version by adding `-Djvm.runtime=#` to the `mvn` command (where `#` can be 11, for example).
can either be 8 or 11.
* Druid's configuration (using Docker) can be overridden by providing `-Doverride.config.path=<PATH_TO_FILE>`. * Druid's configuration (using Docker) can be overridden by providing `-Doverride.config.path=<PATH_TO_FILE>`.
The file must contain one property per line, the key must start with `druid_` and the format should be snake case. The file must contain one property per line, the key must start with `druid_` and the format should be snake case.

View File

@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
ARG JDK_VERSION=8-slim-buster ARG JDK_VERSION=17-slim-buster
FROM openjdk:$JDK_VERSION as druidbase FROM openjdk:$JDK_VERSION as druidbase
# Bundle everything into one script so cleanup can reduce image size. # Bundle everything into one script so cleanup can reduce image size.

View File

@ -281,6 +281,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>com.fasterxml.jackson.core</groupId>
@ -371,7 +372,6 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId> <artifactId>guice-servlet</artifactId>
<version>${guice.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.confluent</groupId> <groupId>io.confluent</groupId>
@ -772,6 +772,7 @@
</property> </property>
</properties> </properties>
<argLine> <argLine>
${jdk.strong.encapsulation.argLine}
-Duser.timezone=UTC -Duser.timezone=UTC
-Dfile.encoding=UTF-8 -Dfile.encoding=UTF-8
-Ddruid.test.config.type=configFile -Ddruid.test.config.type=configFile

View File

@ -26,7 +26,7 @@ then
else else
echo "\$DRUID_INTEGRATION_TEST_JVM_RUNTIME is set with value ${DRUID_INTEGRATION_TEST_JVM_RUNTIME}" echo "\$DRUID_INTEGRATION_TEST_JVM_RUNTIME is set with value ${DRUID_INTEGRATION_TEST_JVM_RUNTIME}"
case "${DRUID_INTEGRATION_TEST_JVM_RUNTIME}" in case "${DRUID_INTEGRATION_TEST_JVM_RUNTIME}" in
8 | 11 | 17 | 21) 11 | 17 | 21)
echo "Build druid-cluster with Java $DRUID_INTEGRATION_TEST_JVM_RUNTIME" echo "Build druid-cluster with Java $DRUID_INTEGRATION_TEST_JVM_RUNTIME"
docker build -t druid/cluster \ docker build -t druid/cluster \
--build-arg JDK_VERSION=$DRUID_INTEGRATION_TEST_JVM_RUNTIME-slim-buster \ --build-arg JDK_VERSION=$DRUID_INTEGRATION_TEST_JVM_RUNTIME-slim-buster \

View File

@ -371,7 +371,7 @@ name: Guice
license_category: binary license_category: binary
module: java-core module: java-core
license_name: Apache License version 2.0 license_name: Apache License version 2.0
version: 4.1.0 version: 4.2.2
libraries: libraries:
- com.google.inject: guice - com.google.inject: guice
- com.google.inject.extensions: guice-multibindings - com.google.inject.extensions: guice-multibindings


pom.xml
View File

@ -70,9 +70,8 @@
</scm> </scm>
<properties> <properties>
<maven.compiler.source>1.8</maven.compiler.source> <java.version>11</java.version>
<maven.compiler.target>1.8</maven.compiler.target> <maven.compiler.release>${java.version}</maven.compiler.release>
<java.version>8</java.version>
<project.build.resourceEncoding>UTF-8</project.build.resourceEncoding> <project.build.resourceEncoding>UTF-8</project.build.resourceEncoding>
<aether.version>0.9.0.M2</aether.version> <aether.version>0.9.0.M2</aether.version>
<apache.curator.version>5.5.0</apache.curator.version> <apache.curator.version>5.5.0</apache.curator.version>
@ -96,7 +95,7 @@
<errorprone.version>2.35.1</errorprone.version> <errorprone.version>2.35.1</errorprone.version>
<fastutil.version>8.5.4</fastutil.version> <fastutil.version>8.5.4</fastutil.version>
<guava.version>32.0.1-jre</guava.version> <guava.version>32.0.1-jre</guava.version>
<guice.version>4.1.0</guice.version> <guice.version>4.2.2</guice.version>
<hamcrest.version>1.3</hamcrest.version> <hamcrest.version>1.3</hamcrest.version>
<jetty.version>9.4.56.v20240826</jetty.version> <jetty.version>9.4.56.v20240826</jetty.version>
<jersey.version>1.19.4</jersey.version> <jersey.version>1.19.4</jersey.version>
@ -131,7 +130,33 @@
<com.google.http.client.apis.version>1.42.3</com.google.http.client.apis.version> <com.google.http.client.apis.version>1.42.3</com.google.http.client.apis.version>
<com.google.apis.compute.version>v1-rev20230606-2.0.0</com.google.apis.compute.version> <com.google.apis.compute.version>v1-rev20230606-2.0.0</com.google.apis.compute.version>
<com.google.cloud.storage.version>2.29.1</com.google.cloud.storage.version> <com.google.cloud.storage.version>2.29.1</com.google.cloud.storage.version>
<jdk.strong.encapsulation.argLine><!-- empty placeholder --></jdk.strong.encapsulation.argLine> <jdk.strong.encapsulation.argLine>
<!-- Strong encapsulation parameters -->
<!-- When updating this list, update all four locations: -->
<!-- 1) ForkingTaskRunner#STRONG_ENCAPSULATION_PROPERTIES -->
<!-- 2) docs/operations/java.md, "Strong encapsulation" section -->
<!-- 3) pom.xml, jdk.strong.encapsulation.argLine (here) -->
<!-- 4) examples/bin/run-java script -->
<!-- required for DataSketches Memory -->
--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED
--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED
--add-opens=java.base/java.nio=ALL-UNNAMED
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
<!-- required for NativeIO#getfd -->
--add-opens=java.base/java.io=ALL-UNNAMED
<!-- required for Guice -->
--add-opens=java.base/java.lang=ALL-UNNAMED
<!-- required for metrics -->
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED
<!-- required for certain EqualsVerifier tests (not required in production) -->
--add-opens=java.base/java.util=ALL-UNNAMED
</jdk.strong.encapsulation.argLine>
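To see why frameworks like Guice need `--add-opens java.base/java.lang=ALL-UNNAMED` under JDK 17's default strong encapsulation, a minimal reflection probe like the following (an illustration, not Druid or Guice code) fails with `InaccessibleObjectException` unless that flag is passed to the JVM:

```java
import java.lang.reflect.Method;

public class AddOpensProbe
{
  public static void main(String[] args)
  {
    try {
      // ClassLoader.defineClass is the kind of non-public member that DI and bytecode
      // frameworks reach for reflectively. On JDK 17 this setAccessible call throws
      // InaccessibleObjectException unless the JVM is started with:
      //   --add-opens java.base/java.lang=ALL-UNNAMED
      Method defineClass = ClassLoader.class.getDeclaredMethod(
          "defineClass", String.class, byte[].class, int.class, int.class);
      defineClass.setAccessible(true);
      System.out.println("java.lang is open to this module");
    }
    catch (Exception e) {
      System.out.println("Strong encapsulation in effect: " + e);
    }
  }
}
```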
<jdk.security.manager.allow.argLine><!-- empty placeholder --></jdk.security.manager.allow.argLine> <jdk.security.manager.allow.argLine><!-- empty placeholder --></jdk.security.manager.allow.argLine>
<repoOrgId>maven.org</repoOrgId> <repoOrgId>maven.org</repoOrgId>
<repoOrgName>Maven Central Repository</repoOrgName> <repoOrgName>Maven Central Repository</repoOrgName>
@ -271,6 +296,16 @@
<enabled>false</enabled> <enabled>false</enabled>
</snapshots> </snapshots>
</repository> </repository>
<!--
maven-dependency-plugin:3.1.2 seems to have updated HTTP repository access behavior.
We get the following error "Blocked mirror for repositories: [twitter (http://maven.twttr.com, default, releases+snapshots)]"
The suggested action step is to add the mirror: https://maven.apache.org/docs/3.8.1/release-notes.html#how-to-fix-when-i-get-a-http-repository-blocked
-->
<repository>
<id>twitter</id>
<url>https://maven.twttr.com</url>
</repository>
</repositories> </repositories>
<pluginRepositories> <pluginRepositories>
@ -1507,13 +1542,13 @@
<plugin> <plugin>
<groupId>com.github.spotbugs</groupId> <groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId> <artifactId>spotbugs-maven-plugin</artifactId>
<version>4.2.0</version> <version>4.8.6.6</version>
<dependencies> <dependencies>
<!-- overwrite dependency on spotbugs if you want to specify the version of spotbugs --> <!-- overwrite dependency on spotbugs if you want to specify the version of spotbugs -->
<dependency> <dependency>
<groupId>com.github.spotbugs</groupId> <groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId> <artifactId>spotbugs</artifactId>
<version>4.2.2</version> <version>4.8.6</version>
</dependency> </dependency>
</dependencies> </dependencies>
<configuration> <configuration>
@ -1524,7 +1559,7 @@
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId> <artifactId>maven-pmd-plugin</artifactId>
<version>3.16.0</version> <version>3.26.0</version>
<configuration> <configuration>
<linkXRef>false</linkXRef> <!-- prevent "Unable to locate Source XRef to link to" warning --> <linkXRef>false</linkXRef> <!-- prevent "Unable to locate Source XRef to link to" warning -->
<printFailingErrors>true</printFailingErrors> <printFailingErrors>true</printFailingErrors>
@ -1534,6 +1569,7 @@
<excludeRoots> <excludeRoots>
<excludeRoot>target/generated-sources/</excludeRoot> <excludeRoot>target/generated-sources/</excludeRoot>
</excludeRoots> </excludeRoots>
<targetJdk>${maven.compiler.release}</targetJdk>
</configuration> </configuration>
<executions> <executions>
<execution> <execution>
@ -1554,7 +1590,7 @@
<bundledSignatures> <bundledSignatures>
<!-- <!--
This will automatically choose the right This will automatically choose the right
signatures based on 'maven.compiler.target': signatures based on 'maven.compiler.release':
--> -->
<bundledSignature>jdk-unsafe</bundledSignature> <bundledSignature>jdk-unsafe</bundledSignature>
</bundledSignatures> </bundledSignatures>
@ -1622,6 +1658,16 @@
<ignore>sun.misc.Unsafe</ignore> <ignore>sun.misc.Unsafe</ignore>
<!-- ignore java reflection polymorphic api signatures --> <!-- ignore java reflection polymorphic api signatures -->
<ignore>java.lang.invoke.MethodHandle</ignore> <ignore>java.lang.invoke.MethodHandle</ignore>
<!--
For the following java.nio.* classes, we get errors like: "Undefined reference: java.nio.ByteBuffer java.nio.ByteBuffer.clear()"
GitHub issue: https://github.com/mojohaus/animal-sniffer/issues/4
-->
<ignore>java.nio.ByteBuffer</ignore>
<ignore>java.nio.IntBuffer</ignore>
<ignore>java.nio.CharBuffer</ignore>
<ignore>java.nio.FloatBuffer</ignore>
<ignore>java.nio.DoubleBuffer</ignore>
<ignore>java.nio.MappedByteBuffer</ignore>
</ignores> </ignores>
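A tiny illustration of the kind of call behind that error message: compiled against JDK 9+, the buffer methods resolve to covariant overrides such as `ByteBuffer.clear()` that did not exist as such on Java 8, which is one common way signature checkers end up reporting an undefined `java.nio.ByteBuffer.clear()` reference (sketch only; the exact root cause is tracked in the linked issue):

```java
import java.nio.ByteBuffer;

public class ByteBufferCovariance
{
  public static void main(String[] args)
  {
    ByteBuffer buf = ByteBuffer.allocate(8);
    // On JDK 9+ this resolves to ByteBuffer.clear() (a covariant override returning
    // ByteBuffer); on Java 8 only Buffer.clear() existed, hence the signature mismatch
    // that motivates the java.nio ignores added above.
    buf.clear();
    System.out.println("capacity=" + buf.capacity() + ", position=" + buf.position());
  }
}
```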
</configuration> </configuration>
</execution> </execution>
@ -1830,7 +1876,12 @@
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId> <artifactId>maven-dependency-plugin</artifactId>
<version>3.1.1</version> <version>3.3.0</version>
<configuration>
<ignoredNonTestScopedDependencies>
<ignoredNonTestScopedDependency>*</ignoredNonTestScopedDependency>
</ignoredNonTestScopedDependencies>
</configuration>
</plugin> </plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
@ -1916,9 +1967,9 @@
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId> <artifactId>maven-compiler-plugin</artifactId>
<version>3.11.0</version> <version>3.11.0</version>
<inherited>true</inherited>
<configuration> <configuration>
<source>${maven.compiler.source}</source> <release>${maven.compiler.release}</release>
<target>${maven.compiler.target}</target>
</configuration> </configuration>
</plugin> </plugin>
<plugin> <plugin>
@ -1956,54 +2007,6 @@
</jdk.security.manager.allow.argLine> </jdk.security.manager.allow.argLine>
</properties> </properties>
</profile> </profile>
<profile>
<id>java-9+</id>
<activation>
<jdk>[9,)</jdk>
</activation>
<properties>
<jdk.strong.encapsulation.argLine>
<!-- Strong encapsulation parameters -->
<!-- When updating this list, update all four locations: -->
<!-- 1) ForkingTaskRunner#STRONG_ENCAPSULATION_PROPERTIES -->
<!-- 2) docs/operations/java.md, "Strong encapsulation" section -->
<!-- 3) pom.xml, jdk.strong.encapsulation.argLine (here) -->
<!-- 4) examples/bin/run-java script -->
<!-- required for DataSketches Memory -->
--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED
--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED
--add-opens=java.base/java.nio=ALL-UNNAMED
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
<!-- required for NativeIO#getfd -->
--add-opens=java.base/java.io=ALL-UNNAMED
<!-- required for Guice -->
--add-opens=java.base/java.lang=ALL-UNNAMED
<!-- required for metrics -->
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED
<!-- required for certain EqualsVerifier tests (not required in production) -->
--add-opens=java.base/java.util=ALL-UNNAMED
</jdk.strong.encapsulation.argLine>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<inherited>true</inherited>
<!-- prefer release instead of source/target in JDK 9 and above -->
<configuration>
<release>${java.version}</release>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile> <profile>
<id>strict</id> <id>strict</id>
<activation> <activation>
@ -2034,6 +2037,7 @@
<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED</arg> <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED</arg>
<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED</arg> <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED</arg>
<arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED</arg> <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED</arg>
<arg>-J--add-exports=java.base/sun.nio.ch=ALL-UNNAMED</arg>
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED</arg> <arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED</arg>
<arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED</arg> <arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED</arg>
</compilerArgs> </compilerArgs>

View File

@ -106,6 +106,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.code.findbugs</groupId> <groupId>com.google.code.findbugs</groupId>

View File

@ -59,7 +59,7 @@ NULL : 'null';
LONG : [0-9]+; LONG : [0-9]+;
EXP: [eE] [-]? LONG; EXP: [eE] [-]? LONG;
// DOUBLE provides partial support for java double format // DOUBLE provides partial support for java double format
// see: https://docs.oracle.com/javase/8/docs/api/java/lang/Double.html#valueOf-java.lang.String- // see: https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Double.html#valueOf(java.lang.String)
DOUBLE : 'NaN' | 'Infinity' | (LONG '.' LONG?) | (LONG EXP) | (LONG '.' LONG? EXP); DOUBLE : 'NaN' | 'Infinity' | (LONG '.' LONG?) | (LONG EXP) | (LONG '.' LONG? EXP);
IDENTIFIER : [_$a-zA-Z][_$a-zA-Z0-9]* | '"' (ESC | ~ [\"\\])* '"'; IDENTIFIER : [_$a-zA-Z][_$a-zA-Z0-9]* | '"' (ESC | ~ [\"\\])* '"';
WS : [ \t\r\n]+ -> skip ; WS : [ \t\r\n]+ -> skip ;
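A quick illustration of the overlap and the "partial" caveat, using plain `Double.valueOf` (the sample strings are arbitrary):

```java
public class DoubleFormats
{
  public static void main(String[] args)
  {
    // Strings matched by the DOUBLE rule above; Double.valueOf accepts them too.
    String[] accepted = {"NaN", "Infinity", "3.", "3.14", "2e10", "6.02e23"};
    for (String s : accepted) {
      System.out.println(s + " -> " + Double.valueOf(s));
    }
    // "Partial" support: Double.valueOf also accepts forms the token rule does not,
    // e.g. a leading sign (presumably handled elsewhere in the grammar) or
    // hexadecimal floating-point literals.
    System.out.println("-1.5 -> " + Double.valueOf("-1.5"));
    System.out.println("0x1.8p1 -> " + Double.valueOf("0x1.8p1"));
  }
}
```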

View File

@ -42,7 +42,7 @@ import java.security.spec.KeySpec;
* using javax.crypto package. * using javax.crypto package.
* *
* To learn about possible algorithms supported and their names, * To learn about possible algorithms supported and their names,
* See https://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html * See https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html
*/ */
public class CryptoService public class CryptoService
{ {
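As an illustration of the standard algorithm names that document covers (the specific choices below, `PBKDF2WithHmacSHA256` and `AES/CBC/PKCS5Padding`, are examples, not necessarily what `CryptoService` is configured with):

```java
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;

public class CryptoNamesSketch
{
  public static void main(String[] args) throws Exception
  {
    // Algorithm names come from the JDK standard-names document linked above.
    byte[] salt = new byte[16];
    byte[] iv = new byte[16];
    SecureRandom random = new SecureRandom();
    random.nextBytes(salt);
    random.nextBytes(iv);

    // Derive an AES key from a passphrase, then encrypt a small payload.
    SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA256");
    PBEKeySpec keySpec = new PBEKeySpec("passphrase".toCharArray(), salt, 65536, 128);
    SecretKey key = new SecretKeySpec(factory.generateSecret(keySpec).getEncoded(), "AES");

    Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
    cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
    byte[] ciphertext = cipher.doFinal("hello druid".getBytes(StandardCharsets.UTF_8));
    System.out.println("ciphertext bytes: " + ciphertext.length);
  }
}
```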

View File

@ -808,7 +808,7 @@ public class ParallelMergeCombiningSequence<T> extends YieldingSequenceBase<T>
/** /**
* {@link ForkJoinPool} friendly {@link BlockingQueue} feeder, adapted from 'QueueTaker' of Java documentation on * {@link ForkJoinPool} friendly {@link BlockingQueue} feeder, adapted from 'QueueTaker' of Java documentation on
* {@link ForkJoinPool.ManagedBlocker}, * {@link ForkJoinPool.ManagedBlocker},
* https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ForkJoinPool.ManagedBlocker.html * https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/concurrent/ForkJoinPool.ManagedBlocker.html
*/ */
static class QueuePusher<E> implements ForkJoinPool.ManagedBlocker static class QueuePusher<E> implements ForkJoinPool.ManagedBlocker
{ {
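The javadoc pattern being referenced looks roughly like this "QueueTaker" sketch; `QueuePusher` adapts the same idea to offering into a queue rather than taking from it (illustration only):

```java
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ForkJoinPool;

public class QueueTakerSketch<E> implements ForkJoinPool.ManagedBlocker
{
  private final BlockingQueue<E> queue;
  private volatile E item = null;

  QueueTakerSketch(BlockingQueue<E> queue)
  {
    this.queue = queue;
  }

  @Override
  public boolean block() throws InterruptedException
  {
    // Called by the pool once it has compensated for this thread blocking.
    if (item == null) {
      item = queue.take();
    }
    return true;
  }

  @Override
  public boolean isReleasable()
  {
    // True when no blocking is necessary, i.e. an element is already available.
    return item != null || (item = queue.poll()) != null;
  }

  E getItem()
  {
    return item;
  }

  public static void main(String[] args) throws InterruptedException
  {
    BlockingQueue<String> queue = new ArrayBlockingQueue<>(4);
    queue.add("segment-1");
    QueueTakerSketch<String> taker = new QueueTakerSketch<>(queue);
    // managedBlock tells the ForkJoinPool a worker is about to block, so it can add a
    // spare thread and keep the pool's parallelism target intact.
    ForkJoinPool.managedBlock(taker);
    System.out.println("took: " + taker.getItem());
  }
}
```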

View File

@ -112,13 +112,9 @@ public class CursorFactoryRowsAndColumns implements CloseableShapeshifter, RowsA
cursor.advance(); cursor.advance();
} }
if (writer == null) {
return new EmptyRowsAndColumns();
} else {
final byte[] bytes = writer.toByteArray(); final byte[] bytes = writer.toByteArray();
return new ColumnBasedFrameRowsAndColumns(Frame.wrap(bytes), rowSignature); return new ColumnBasedFrameRowsAndColumns(Frame.wrap(bytes), rowSignature);
} }
} }
} }
} }
}

View File

@ -110,6 +110,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
@ -216,6 +217,11 @@
<groupId>org.apache.logging.log4j</groupId> <groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId> <artifactId>log4j-core</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.checkerframework</groupId>
<artifactId>checker-qual</artifactId>
<version>${checkerframework.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.fasterxml.jackson.datatype</groupId> <groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-joda</artifactId> <artifactId>jackson-datatype-joda</artifactId>

View File

@ -37,6 +37,7 @@ import org.apache.druid.utils.CloseableUtils;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
/** /**
@ -58,7 +59,7 @@ public class CuratorDruidLeaderSelector implements DruidLeaderSelector
private final AtomicReference<LeaderLatch> leaderLatch = new AtomicReference<>(); private final AtomicReference<LeaderLatch> leaderLatch = new AtomicReference<>();
private volatile boolean leader = false; private volatile boolean leader = false;
private volatile int term = 0; private final AtomicInteger term = new AtomicInteger(0);
public CuratorDruidLeaderSelector(CuratorFramework curator, @Self DruidNode self, String latchPath) public CuratorDruidLeaderSelector(CuratorFramework curator, @Self DruidNode self, String latchPath)
{ {
@ -99,7 +100,7 @@ public class CuratorDruidLeaderSelector implements DruidLeaderSelector
} }
leader = true; leader = true;
term++; term.incrementAndGet();
listener.becomeLeader(); listener.becomeLeader();
} }
catch (Exception ex) { catch (Exception ex) {
@ -161,7 +162,7 @@ public class CuratorDruidLeaderSelector implements DruidLeaderSelector
@Override @Override
public int localTerm() public int localTerm()
{ {
return term; return term.get();
} }
@Override @Override

View File

@ -160,6 +160,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.roaringbitmap</groupId> <groupId>org.roaringbitmap</groupId>

View File

@ -131,6 +131,7 @@
<dependency> <dependency>
<groupId>com.google.inject.extensions</groupId> <groupId>com.google.inject.extensions</groupId>
<artifactId>guice-multibindings</artifactId> <artifactId>guice-multibindings</artifactId>
<scope>provided</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>javax.ws.rs</groupId> <groupId>javax.ws.rs</groupId>

View File

@ -647,7 +647,7 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
</p> </p>
<p> <p>
For more information, refer to the documentation for{' '} For more information, refer to the documentation for{' '}
<ExternalLink href="https://docs.oracle.com/javase/8/docs/api/java/nio/file/FileSystem#getPathMatcher-java.lang.String-"> <ExternalLink href="https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)">
FileSystem#getPathMatcher FileSystem#getPathMatcher
</ExternalLink> </ExternalLink>
. .
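A small example of the glob syntax that `FileSystem#getPathMatcher` accepts for such filter fields (the file names below are made up):

```java
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;

public class PathMatcherSketch
{
  public static void main(String[] args)
  {
    // "glob:" and "regex:" are the two syntaxes documented for FileSystem#getPathMatcher.
    PathMatcher matcher = FileSystems.getDefault().getPathMatcher("glob:wikipedia*.json");

    Path match = Paths.get("wikipedia_index_data1.json");
    Path noMatch = Paths.get("koalas.csv");

    System.out.println(match + " -> " + matcher.matches(match));     // true
    System.out.println(noMatch + " -> " + matcher.matches(noMatch)); // false
  }
}
```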

View File

@ -507,7 +507,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
</p> </p>
<p> <p>
For more information, refer to the documentation for{' '} For more information, refer to the documentation for{' '}
<ExternalLink href="https://docs.oracle.com/javase/8/docs/api/java/nio/file/FileSystem#getPathMatcher-java.lang.String-"> <ExternalLink href="https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)">
FileSystem#getPathMatcher FileSystem#getPathMatcher
</ExternalLink> </ExternalLink>
. .

View File

@ -136,7 +136,7 @@ export const TIMESTAMP_SPEC_FIELDS: Field<TimestampSpec>[] = [
info: ( info: (
<p> <p>
Specify your timestamp format by using the suggestions menu or typing in a{' '} Specify your timestamp format by using the suggestions menu or typing in a{' '}
<ExternalLink href="https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter"> <ExternalLink href="https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/time/format/DateTimeFormatter.html">
format string format string
</ExternalLink> </ExternalLink>
. .
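A small example of the kind of format string that page describes, using plain `java.time` (the pattern and timestamp below are arbitrary):

```java
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class TimestampFormatSketch
{
  public static void main(String[] args)
  {
    // Pattern letters (yyyy, MM, dd, HH, mm, ss, ...) are documented on the
    // DateTimeFormatter page linked above.
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime parsed = LocalDateTime.parse("2013-08-31 01:02:33", formatter);
    System.out.println("parsed: " + parsed);               // 2013-08-31T01:02:33
    System.out.println("formatted: " + parsed.format(formatter));
  }
}
```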