Improved Java 17 support and Java runtime docs. (#12839)

* Improved Java 17 support and Java runtime docs.

1) Add a "Java runtime" doc page with information about supported
   Java versions, garbage collection, and strong encapsulation.

2) Update asm and equalsverifier to versions that support Java 17.

3) Add additional "--add-opens" lines to surefire configuration, so
   tests can pass successfully under Java 17.

4) Switch openjdk15 tests to openjdk17.

5) Update FrameFile to specifically mention Java runtime incompatibility
   as the cause of not being able to use Memory.map.

6) Update SegmentLoadDropHandler to log an error for Errors too, not
   just Exceptions. This is important because an IllegalAccessError is
   encountered when the correct "--add-opens" line is not provided,
   which would otherwise be silently ignored.

7) Update example configs to use druid.indexer.runner.javaOptsArray
   instead of druid.indexer.runner.javaOpts. (The latter is deprecated.)

* Adjustments.

* Use run-java in more places.

* Add run-java.

* Update .gitignore.

* Exclude hadoop-client-api.

Brought in when building on Java 17.

* Swap one more usage of java.

* Fix the run-java script.

* Fix flag.

* Include link to Temurin.

* Spelling.

* Update examples/bin/run-java

Co-authored-by: Xavier Léauté <xl+github@xvrl.net>

Co-authored-by: Xavier Léauté <xl+github@xvrl.net>
This commit is contained in:
Gian Merlino 2022-08-03 23:16:05 -07:00 committed by GitHub
parent 623b075d12
commit ef6811ef88
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
32 changed files with 319 additions and 80 deletions

2
.gitignore vendored
View File

@ -25,4 +25,4 @@ README
.pmdruleset.xml
.java-version
integration-tests/gen-scripts/
bin/
/bin/

View File

@ -188,9 +188,9 @@ jobs:
jdk: openjdk11
- <<: *package
name: "(openjdk15) packaging check"
name: "(openjdk17) packaging check"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_processing_module
name: "(openjdk8) processing module test"
@ -259,9 +259,9 @@ jobs:
jdk: openjdk11
- <<: *test_processing_module
name: "(openjdk15) processing module test"
name: "(openjdk17) processing module test"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_processing_module_sqlcompat
<<: *test_processing_module
@ -276,9 +276,9 @@ jobs:
jdk: openjdk11
- <<: *test_processing_module_sqlcompat
name: "(openjdk15) processing module test (SQL Compatibility)"
name: "(openjdk17) processing module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_indexing_module
<<: *test_processing_module
@ -292,9 +292,9 @@ jobs:
jdk: openjdk11
- <<: *test_indexing_module
name: "(openjdk15) indexing modules test"
name: "(openjdk17) indexing modules test"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_indexing_module_sqlcompat
<<: *test_indexing_module
@ -308,9 +308,9 @@ jobs:
jdk: openjdk11
- <<: *test_indexing_module_sqlcompat
name: "(openjdk15) indexing modules test (SQL Compatibility)"
name: "(openjdk17) indexing modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_server_module
<<: *test_processing_module
@ -324,9 +324,9 @@ jobs:
jdk: openjdk11
- <<: *test_server_module
name: "(openjdk15) server module test"
name: "(openjdk17) server module test"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_server_module_sqlcompat
<<: *test_server_module
@ -339,9 +339,9 @@ jobs:
jdk: openjdk11
- <<: *test_server_module_sqlcompat
name: "(openjdk15) server module test (SQL Compatibility)"
name: "(openjdk17) server module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_other_modules
<<: *test_processing_module
@ -355,9 +355,9 @@ jobs:
jdk: openjdk11
- <<: *test_other_modules
name: "(openjdk15) other modules test"
name: "(openjdk17) other modules test"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- &test_other_modules_sqlcompat
<<: *test_other_modules
@ -370,9 +370,9 @@ jobs:
jdk: openjdk11
- <<: *test_other_modules_sqlcompat
name: "(openjdk15) other modules test (SQL Compatibility)"
name: "(openjdk17) other modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk15
jdk: openjdk17
- name: "web console"
install: skip

View File

@ -19,6 +19,7 @@
package org.apache.druid.utils;
import com.google.common.primitives.Ints;
import com.google.inject.Inject;
import java.io.File;
@ -36,19 +37,37 @@ import java.util.stream.Stream;
public class JvmUtils
{
private static final boolean IS_JAVA9_COMPATIBLE = isJava9Compatible(System.getProperty("java.specification.version"));
public static final int UNKNOWN_VERSION = -1;
private static final int MAJOR_VERSION = computeMajorVersion();
private static boolean isJava9Compatible(String versionString)
private static int computeMajorVersion()
{
final StringTokenizer st = new StringTokenizer(versionString, ".");
int majorVersion = Integer.parseInt(st.nextToken());
final StringTokenizer st = new StringTokenizer(System.getProperty("java.specification.version"), ".");
if (!st.hasMoreTokens()) {
return UNKNOWN_VERSION;
}
return majorVersion >= 9;
final String majorVersionString = st.nextToken();
final Integer majorVersion = Ints.tryParse(majorVersionString);
return majorVersion == null ? UNKNOWN_VERSION : majorVersion;
}
/**
* Returns the major version of the current Java runtime for Java 9 and above. For example: 9, 11, 17, etc.
*
* Returns 1 for Java 8 and earlier.
*
* Returns {@link #UNKNOWN_VERSION} if the major version cannot be determined. This is a negative number and is
* therefore lower than all valid versions.
*/
public static int majorVersion()
{
return MAJOR_VERSION;
}
public static boolean isIsJava9Compatible()
{
return IS_JAVA9_COMPATIBLE;
return MAJOR_VERSION >= 9;
}
@Inject

View File

@ -249,6 +249,7 @@ def build_compatible_license_names():
compatible_licenses['New BSD License'] = 'BSD-3-Clause License'
compatible_licenses['3-Clause BSD License'] = 'BSD-3-Clause License'
compatible_licenses['BSD 3-Clause'] = 'BSD-3-Clause License'
compatible_licenses['BSD-3-Clause'] = 'BSD-3-Clause License'
compatible_licenses['ICU License'] = 'ICU License'

View File

@ -173,7 +173,7 @@
<goal>exec</goal>
</goals>
<configuration>
<executable>java</executable>
<executable>${project.parent.basedir}/examples/bin/run-java</executable>
<arguments>
<argument>-classpath</argument>
<classpath />
@ -350,7 +350,7 @@
<goal>exec</goal>
</goals>
<configuration>
<executable>java</executable>
<executable>${project.parent.basedir}/examples/bin/run-java</executable>
<arguments>
<argument>-classpath</argument>
<classpath />
@ -548,7 +548,7 @@
<goal>exec</goal>
</goals>
<configuration>
<executable>java</executable>
<executable>${project.parent.basedir}/examples/bin/run-java</executable>
<arguments>
<argument>-classpath</argument>
<classpath />
@ -642,7 +642,7 @@
<goal>exec</goal>
</goals>
<configuration>
<executable>java</executable>
<executable>${project.parent.basedir}/examples/bin/run-java</executable>
<arguments>
<argument>-classpath</argument>
<classpath />

View File

@ -1371,7 +1371,7 @@ Middle managers pass their configurations down to their child peons. The MiddleM
|`druid.indexer.runner.classpath`|Java classpath for the peon.|System.getProperty("java.class.path")|
|`druid.indexer.runner.javaCommand`|Command required to execute java.|java|
|`druid.indexer.runner.javaOpts`|*DEPRECATED* A string of -X Java options to pass to the peon's JVM. Quotable parameters or parameters with spaces are encouraged to use javaOptsArray|""|
|`druid.indexer.runner.javaOptsArray`|A JSON array of strings to be passed in as options to the peon's JVM. This is additive to javaOpts and is recommended for properly handling arguments which contain quotes or spaces like `["-XX:OnOutOfMemoryError=kill -9 %p"]`|`[]`|
|`druid.indexer.runner.javaOptsArray`|A JSON array of strings to be passed in as options to the peon's JVM. This is additive to `druid.indexer.runner.javaOpts` and is recommended for properly handling arguments which contain quotes or spaces like `["-XX:OnOutOfMemoryError=kill -9 %p"]`|`[]`|
|`druid.indexer.runner.maxZnodeBytes`|The maximum size Znode in bytes that can be created in ZooKeeper, should be in the range of [10KiB, 2GiB). [Human-readable format](human-readable-byte.md) is supported.|512KiB|
|`druid.indexer.runner.startPort`|Starting port used for peon processes, should be greater than 1023 and less than 65536.|8100|
|`druid.indexer.runner.endPort`|Ending port used for peon processes, should be greater than or equal to `druid.indexer.runner.startPort` and less than 65536.|65535|

View File

@ -104,16 +104,8 @@ WARNING: Use --illegal-access=warn to enable warnings of further illegal reflect
WARNING: All illegal access operations will be denied in a future release
```
These messages do not cause harm, but you can avoid them by adding the following lines to your `jvm.config` files. These
lines are not part of the default JVM configs that ship with Druid, because Java 8 will not recognize these options and
will fail to start up.
```
--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED
--add-exports=java.base/jdk.internal.perf=ALL-UNNAMED
--add-opens=java.base/java.lang=ALL-UNNAMED
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED
```
To avoid these, add the `--add-exports` and `--add-opens` command line parameters described in the documentation section
about [Java strong encapsulation](../operations/java.md#strong-encapsulation).
## My logs are really chatty, can I set them to asynchronously write?

View File

@ -33,10 +33,7 @@ make sure it has `/master/` in the URL.
##### Installing Java and Maven
- JDK 8, 8u92+ or JDK 11
We recommend using an OpenJDK distribution that provides long-term support and open-source licensing,
like [Amazon Corretto](https://aws.amazon.com/corretto/) or [Azul Zulu](https://www.azul.com/downloads/zulu/).
- JDK 8, 8u92+ or JDK 11. See our [Java documentation](../operations/java.md) for information about obtaining a JDK.
- [Maven version 3.x](http://maven.apache.org/download.cgi)
##### Other dependencies

94
docs/operations/java.md Normal file
View File

@ -0,0 +1,94 @@
---
id: java
title: "Java runtime"
---
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
Apache Druid is written in Java and requires a Java runtime. This page provides details about obtaining and configuring
a Java runtime for Druid.
## Selecting a Java runtime
Druid fully supports Java 8 and 11. The project team recommends Java 11. The project team does not recommend running
with Java 17, because certain Druid functionality is not currently compatible with Java 17.
The project team recommends using an OpenJDK-based Java distribution. There are many free and actively-supported
distributions available, including
[Amazon Corretto](https://docs.aws.amazon.com/corretto/latest/corretto-11-ug/what-is-corretto-11.html),
[Azul Zulu](https://www.azul.com/downloads/?version=java-11-lts&package=jdk), and
[Eclipse Temurin](https://adoptium.net/temurin/releases?version=11).
The project team does not recommend any specific distribution over any other.
Druid relies on the environment variables `JAVA_HOME` or `DRUID_JAVA_HOME` to find Java on the machine. You can set
`DRUID_JAVA_HOME` if there is more than one instance of Java. To verify Java requirements for your environment, run the
`bin/verify-java` script.
## Garbage collection
In general, the project team recommends using the G1 collector with default settings. This is the default collector in
Java 11. To enable G1 on Java 8, use `-XX:+UseG1GC`. There is no harm in explicitly specifying this on Java 11 as well.
Garbage collector selection and tuning is a form of sport in the Java community. There may be situations where adjusting
garbage collection configuration improves or worsens performance. The project team's guidance is that most people do
not need to stray away from G1 with default settings.
## Strong encapsulation
Java 9 and beyond (including Java 11) include the capability for
[strong encapsulation](https://dev.java/learn/strong-encapsulation-\(of-jdk-internals\)/) of internal JDK APIs. Druid
uses certain internal JDK APIs for functionality- and performance-related reasons. In Java 11, this leads to log
messages like the following:
```
WARNING: An illegal reflective access operation has occurred
WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
WARNING: All illegal access operations will be denied in a future release
```
These warning messages are harmless, and can be ignored. However, you can avoid them entirely if you wish by adding the
following Java command line parameters. These parameters are not part of the default configurations that ship with
Druid, because Java 8 does not recognize these parameters and fails to start up if they are provided.
To do this, add the following lines to your `jvm.config` files:
```
--add-exports=java.base/jdk.internal.perf=ALL-UNNAMED
--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED
--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED
--add-opens=java.base/java.lang=ALL-UNNAMED
--add-opens=java.base/java.io=ALL-UNNAMED
--add-opens=java.base/java.nio=ALL-UNNAMED
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED
```
Additionally, tasks run by [MiddleManagers](../design/architecture.md) execute in separate JVMs. The command line for
these JVMs is given by `druid.indexer.runner.javaOptsArray` or `druid.indexer.runner.javaOpts` in
`middleManager/runtime.properties`. Java command line parameters for tasks must be specified here. For example, use
a line like the following:
```
druid.indexer.runner.javaOptsArray=["-server","-Xms1g","-Xmx1g","-XX:MaxDirectMemorySize=1g","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager","--add-exports=java.base/jdk.internal.perf=ALL-UNNAMED","--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED","--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED","--add-opens=java.base/java.lang=ALL-UNNAMED","--add-opens=java.base/java.io=ALL-UNNAMED","--add-opens=java.base/java.nio=ALL-UNNAMED","--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED","--add-opens=java.base/sun.nio.ch=ALL-UNNAMED","--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED"]
```
The `Xms`, `Xmx`, and `MaxDirectMemorySize` parameters in the line above are merely an example. You may use different
values in your specific environment.

View File

@ -372,7 +372,7 @@ These metrics are only available if the SysMonitor module is included.
|`sys/net/write/size`|Bytes written to the network.|netName, netAddress, netHwaddr|Varies.|
|`sys/net/read/size`|Bytes read from the network.|netName, netAddress, netHwaddr|Varies.|
|`sys/fs/used`|Filesystem bytes used.|fsDevName, fsDirName, fsTypeName, fsSysTypeName, fsOptions.|< max|
|`sys/fs/max`|Filesystesm bytes max.|fsDevName, fsDirName, fsTypeName, fsSysTypeName, fsOptions.|Varies.|
|`sys/fs/max`|Filesystem bytes max.|fsDevName, fsDirName, fsTypeName, fsSysTypeName, fsOptions.|Varies.|
|`sys/mem/used`|Memory used.||< max|
|`sys/mem/max`|Memory max.||Varies.|
|`sys/storage/used`|Disk space used.|fsDirName.|Varies.|

View File

@ -130,7 +130,7 @@ The [basic cluster tuning guide](../operations/basic-cluster-tuning.md) has info
## Select OS
We recommend running your favorite Linux distribution. You will also need Java 8 or 11.
We recommend running your favorite Linux distribution. You will also need [Java 8 or 11](../operations/java.md).
> If needed, you can specify where to find Java using the environment variables
> `DRUID_JAVA_HOME` or `JAVA_HOME`. For more details run the `bin/verify-java` script.

View File

@ -45,8 +45,9 @@ information on deploying Druid services across clustered machines.
The software requirements for the installation machine are:
* Linux, Mac OS X, or other Unix-like OS (Windows is not supported)
* Java 8, Update 92 or later (8u92+) or Java 11
* Linux, Mac OS X, or other Unix-like OS (Windows is not supported).
* Java 8, Update 92 or later (8u92+) or Java 11. See the [Java runtime](../operations/java.md) page for additional
information about selecting and configuring a Java runtime.
> Druid relies on the environment variables `JAVA_HOME` or `DRUID_JAVA_HOME` to find Java on the machine. You can set
`DRUID_JAVA_HOME` if there is more than one instance of Java. To verify Java requirements for your environment, run the

View File

@ -36,11 +36,6 @@ fi
CONFDIR="$(cd "$CONFDIR" && pwd)"
WHEREAMI="$(cd "$WHEREAMI" && pwd)"
JAVA_BIN="$(source "$WHEREAMI"/java-util && get_java_bin_dir)"
if [ -z "$JAVA_BIN" ]; then
>&2 echo "Could not find java - please run $WHEREAMI/verify-java to confirm it is installed."
exit 1
fi
LOG_DIR="${DRUID_LOG_DIR:=${WHEREAMI}/../log}"
# Remove possible ending slash
@ -53,6 +48,6 @@ if [ ! -d "$LOG_DIR" ]; then mkdir -p $LOG_DIR; fi
echo "Running [$1], logging to [$LOG_DIR/$1.log] if no changes made to log4j2.xml"
cd "$WHEREAMI/.."
exec "$JAVA_BIN"/java -Ddruid.node.type=$1 "-Ddruid.log.path=$LOG_DIR" `cat "$CONFDIR"/"$WHATAMI"/jvm.config | xargs` \
exec "$WHEREAMI"/run-java -Ddruid.node.type=$1 "-Ddruid.log.path=$LOG_DIR" `cat "$CONFDIR"/"$WHATAMI"/jvm.config | xargs` \
-cp "$CONFDIR"/"$WHATAMI":"$CONFDIR"/_common:"$CONFDIR"/_common/hadoop-xml:"$CONFDIR"/../_common:"$CONFDIR"/../_common/hadoop-xml:"$WHEREAMI/../lib/*" \
`cat "$CONFDIR"/$WHATAMI/main.config | xargs`

45
examples/bin/run-java Executable file
View File

@ -0,0 +1,45 @@
#!/bin/bash -eu
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
WHEREAMI="$(dirname "$0")"
JAVA_BIN="$(source "$WHEREAMI"/java-util && get_java_bin_dir)/java"
if [ -z "$JAVA_BIN" ]; then
>&2 echo "Could not find java - please run $WHEREAMI/verify-java to confirm it is installed."
exit 1
fi
JAVA_MAJOR="$("$JAVA_BIN" -version 2>&1 | sed -n -E 's/.* version "([^."]*).*/\1/p')"
if [ "$JAVA_MAJOR" != "" ] && [ "$JAVA_MAJOR" -ge "17" ]
then
# Must disable strong encapsulation for certain packages on Java 17.
exec "$JAVA_BIN" \
--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED \
--add-exports=java.base/jdk.internal.perf=ALL-UNNAMED \
--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED \
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED \
--add-opens=java.base/java.nio=ALL-UNNAMED \
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED \
--add-opens=java.base/java.io=ALL-UNNAMED \
--add-opens=java.base/java.lang=ALL-UNNAMED \
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED \
"$@"
else
exec "$JAVA_BIN" "$@"
fi

View File

@ -35,11 +35,6 @@ fi
CONFDIR="$(cd "$CONFDIR" && pwd)/zk"
WHEREAMI="$(cd "$WHEREAMI" && pwd)"
JAVA_BIN="$(source "$WHEREAMI"/java-util && get_java_bin_dir)"
if [ -z "$JAVA_BIN" ]; then
>&2 echo "Could not find java - please run $WHEREAMI/verify-java to confirm it is installed."
exit 1
fi
LOG_DIR="${DRUID_LOG_DIR:=${WHEREAMI}/../log}"
# Remove possible ending slash
@ -52,7 +47,7 @@ if [ ! -d "$LOG_DIR" ]; then mkdir -p $LOG_DIR; fi
echo "Running [ZooKeeper], logging to [$LOG_DIR/zookeeper.log] if no changes made to log4j2.xml"
cd "$WHEREAMI/.."
exec "$JAVA_BIN"/java "-Ddruid.log.path=$LOG_DIR" `cat "$CONFDIR"/jvm.config | xargs` \
exec "$WHEREAMI"/run-java "-Ddruid.log.path=$LOG_DIR" `cat "$CONFDIR"/jvm.config | xargs` \
-cp "$WHEREAMI/../lib/*:$CONFDIR" \
-Dzookeeper.jmx.log4j.disable=true \
org.apache.zookeeper.server.quorum.QuorumPeerMain \

View File

@ -24,7 +24,8 @@ druid.plaintextPort=8091
druid.worker.capacity=4
# Task launch parameters
druid.indexer.runner.javaOpts=-server -Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g -Duser.timezone=UTC -Dfile.encoding=UTF-8 -XX:+ExitOnOutOfMemoryError -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager
druid.indexer.runner.javaCommand=bin/run-java
druid.indexer.runner.javaOptsArray=["-server","-Xms1g","-Xmx1g","-XX:MaxDirectMemorySize=1g","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid.indexer.task.baseTaskDir=var/druid/task
# HTTP server threads

View File

@ -24,7 +24,8 @@ druid.plaintextPort=8091
druid.worker.capacity=8
# Task launch parameters
druid.indexer.runner.javaOpts=-server -Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g -Duser.timezone=UTC -Dfile.encoding=UTF-8 -XX:+ExitOnOutOfMemoryError -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager
druid.indexer.runner.javaCommand=bin/run-java
druid.indexer.runner.javaOptsArray=["-server","-Xms1g","-Xmx1g","-XX:MaxDirectMemorySize=1g","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid.indexer.task.baseTaskDir=var/druid/task
# HTTP server threads

View File

@ -24,7 +24,8 @@ druid.plaintextPort=8091
druid.worker.capacity=4
# Task launch parameters
druid.indexer.runner.javaOpts=-server -Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g -Duser.timezone=UTC -Dfile.encoding=UTF-8 -XX:+ExitOnOutOfMemoryError -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager
druid.indexer.runner.javaCommand=bin/run-java
druid.indexer.runner.javaOptsArray=["-server","-Xms1g","-Xmx1g","-XX:MaxDirectMemorySize=1g","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid.indexer.task.baseTaskDir=var/druid/task
# HTTP server threads

View File

@ -24,7 +24,8 @@ druid.plaintextPort=8091
druid.worker.capacity=2
# Task launch parameters
druid.indexer.runner.javaOpts=-server -Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g -Duser.timezone=UTC -Dfile.encoding=UTF-8 -XX:+ExitOnOutOfMemoryError -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager
druid.indexer.runner.javaCommand=bin/run-java
druid.indexer.runner.javaOptsArray=["-server","-Xms1g","-Xmx1g","-XX:MaxDirectMemorySize=1g","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid.indexer.task.baseTaskDir=var/druid/task
# HTTP server threads

View File

@ -24,7 +24,8 @@ druid.plaintextPort=8091
druid.worker.capacity=2
# Task launch parameters
druid.indexer.runner.javaOpts=-server -Xms256m -Xmx256m -XX:MaxDirectMemorySize=300m -Duser.timezone=UTC -Dfile.encoding=UTF-8 -XX:+ExitOnOutOfMemoryError -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager
druid.indexer.runner.javaCommand=bin/run-java
druid.indexer.runner.javaOptsArray=["-server","-Xms256m","-Xmx256m","-XX:MaxDirectMemorySize=300m","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid.indexer.task.baseTaskDir=var/druid/task
# HTTP server threads

View File

@ -24,7 +24,8 @@ druid.plaintextPort=8091
druid.worker.capacity=3
# Task launch parameters
druid.indexer.runner.javaOpts=-server -Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g -Duser.timezone=UTC -Dfile.encoding=UTF-8 -XX:+ExitOnOutOfMemoryError -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager
druid.indexer.runner.javaCommand=bin/run-java
druid.indexer.runner.javaOptsArray=["-server","-Xms1g","-Xmx1g","-XX:MaxDirectMemorySize=1g","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid.indexer.task.baseTaskDir=var/druid/task
# HTTP server threads

View File

@ -24,7 +24,8 @@ druid.plaintextPort=8091
druid.worker.capacity=16
# Task launch parameters
druid.indexer.runner.javaOpts=-server -Xms1g -Xmx1g -XX:MaxDirectMemorySize=1g -Duser.timezone=UTC -Dfile.encoding=UTF-8 -XX:+ExitOnOutOfMemoryError -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager
druid.indexer.runner.javaCommand=bin/run-java
druid.indexer.runner.javaOptsArray=["-server","-Xms1g","-Xmx1g","-XX:MaxDirectMemorySize=1g","-Duser.timezone=UTC","-Dfile.encoding=UTF-8","-XX:+ExitOnOutOfMemoryError","-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager"]
druid.indexer.task.baseTaskDir=var/druid/task
# HTTP server threads

View File

@ -138,6 +138,10 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>

View File

@ -4052,7 +4052,7 @@ name: ASM
license_category: binary
module: java-core
license_name: BSD-3-Clause License
version: 7.1
version: 9.3
copyright: INRIA, France Telecom
license_file_path: licenses/bin/asm.BSD3
libraries:

18
pom.xml
View File

@ -815,12 +815,12 @@
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
<version>7.1</version>
<version>9.3</version>
</dependency>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
<version>7.1</version>
<version>9.3</version>
</dependency>
<dependency>
<groupId>org.asynchttpclient</groupId>
@ -1122,7 +1122,7 @@
<dependency>
<groupId>nl.jqno.equalsverifier</groupId>
<artifactId>equalsverifier</artifactId>
<version>3.5.5</version>
<version>3.10.1</version>
<scope>test</scope>
</dependency>
<dependency>
@ -1669,7 +1669,7 @@
</activation>
<properties>
<jdk.surefire.argLine>
<!-- required for JvmMonitor tests on Java 11+ -->
<!-- required for JvmMonitor on Java 11+ -->
--add-exports=java.base/jdk.internal.perf=ALL-UNNAMED
--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED
@ -1679,6 +1679,16 @@
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED
--add-opens=java.base/java.nio=ALL-UNNAMED
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
<!-- required for NativeIO#getfd -->
--add-opens=java.base/java.io=ALL-UNNAMED
<!-- required for Guice -->
--add-opens=java.base/java.lang=ALL-UNNAMED
<!-- required for certain EqualsVerifier tests -->
<!-- (not required in production) -->
--add-opens=java.base/java.util=ALL-UNNAMED
</jdk.surefire.argLine>
</properties>
<build>

View File

@ -19,6 +19,7 @@
package org.apache.druid.frame.file;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.io.Files;
import org.apache.datasketches.memory.MapHandle;
@ -29,11 +30,13 @@ import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.IOE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.ReferenceCountingCloseableObject;
import org.apache.druid.utils.CloseableUtils;
import org.apache.druid.utils.JvmUtils;
import java.io.Closeable;
import java.io.File;
@ -330,7 +333,15 @@ public class FrameFile implements Closeable
*/
private static Pair<Memory, Closeable> mapFileDS(final File file)
{
final MapHandle mapHandle = Memory.map(file, 0, file.length(), ByteOrder.LITTLE_ENDIAN);
final MapHandle mapHandle;
try {
mapHandle = Memory.map(file, 0, file.length(), ByteOrder.LITTLE_ENDIAN);
}
catch (NoClassDefFoundError | ExceptionInInitializerError e) {
throw handleMemoryMapError(e, JvmUtils.majorVersion());
}
return Pair.of(
mapHandle.get(),
() -> {
@ -344,6 +355,22 @@ public class FrameFile implements Closeable
);
}
@VisibleForTesting
static RuntimeException handleMemoryMapError(Throwable e, int javaMajorVersion)
{
// Memory.map does not work on JDK 14+ due to issues with AllocateDirectMap.
if (javaMajorVersion >= 14) {
throw new ISE(
"Cannot read frame files larger than %,d bytes with Java %d. Try using Java 11.",
Integer.MAX_VALUE,
javaMajorVersion
);
} else {
// We don't have a good reason why this happened. Throw the original error.
throw new RE(e, "Could not map frame file");
}
}
private static long getFrameEndPosition(final Memory memory, final int frameNumber, final int numFrames)
{
final long frameEndPointerPosition =

View File

@ -41,12 +41,15 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.apache.druid.timeline.SegmentId;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
@ -372,6 +375,52 @@ public class FrameFileTest extends InitializedNullHandlingTest
frameFile1.newReference();
}
@Test
public void test_handleMemoryMapError_java11()
{
@SuppressWarnings("ThrowableNotThrown")
final RuntimeException e = Assert.assertThrows(
RuntimeException.class,
() -> FrameFile.handleMemoryMapError(new IllegalAccessError("foo"), 11)
);
MatcherAssert.assertThat(
e,
ThrowableMessageMatcher.hasMessage(CoreMatchers.equalTo("Could not map frame file"))
);
// Include the original error, since we don't have a better explanation.
MatcherAssert.assertThat(
e.getCause(),
CoreMatchers.instanceOf(IllegalAccessError.class)
);
}
@Test
public void test_handleMemoryMapError_java17()
{
@SuppressWarnings("ThrowableNotThrown")
final IllegalStateException e = Assert.assertThrows(
IllegalStateException.class,
() -> FrameFile.handleMemoryMapError(new IllegalAccessError("foo"), 17)
);
MatcherAssert.assertThat(
e,
ThrowableMessageMatcher.hasMessage(
CoreMatchers.containsString(
StringUtils.format(
"Cannot read frame files larger than %,d bytes with Java 17.",
Integer.MAX_VALUE
)
)
)
);
// Cause not included; we want to keep logs relatively cleaner and highlight the actual issue.
Assert.assertNull(e.getCause());
}
private int computeExpectedNumFrames()
{
return IntMath.divide(countRows(adapter), maxRowsPerFrame, RoundingMode.CEILING);

View File

@ -47,6 +47,7 @@ import org.apache.druid.segment.data.IndexedInts;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory;
import org.apache.druid.segment.vector.VectorCursor;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.utils.JvmUtils;
import org.junit.Assert;
import javax.annotation.Nullable;
@ -58,7 +59,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.StringTokenizer;
import java.util.function.Supplier;
import java.util.stream.Collectors;
@ -295,8 +295,7 @@ public class FrameTestUtil
*/
public static boolean jdkCanDataSketchesMemoryMap()
{
final StringTokenizer st = new StringTokenizer(System.getProperty("java.specification.version"), ".");
return Integer.parseInt(st.nextToken()) < 14;
return JvmUtils.majorVersion() < 14;
}
private static Supplier<Object> dimensionSelectorReader(final DimensionSelector selector)

View File

@ -351,11 +351,11 @@ public class SegmentLoadDropHandler implements DataSegmentChangeHandler
result = Status.SUCCESS;
}
catch (Exception e) {
catch (Throwable e) {
log.makeAlert(e, "Failed to load segment for dataSource")
.addData("segment", segment)
.emit();
result = Status.failed(e.getMessage());
result = Status.failed(e.toString());
}
finally {
updateRequestStatus(new SegmentChangeRequestLoad(segment), result);

View File

@ -60,7 +60,7 @@ function _build_distribution() {
&& cd distribution/target \
&& tar xzf "apache-druid-$(_get_druid_version)-bin.tar.gz" \
&& cd apache-druid-$(_get_druid_version) \
&& java -classpath "lib/*" org.apache.druid.cli.Main tools pull-deps -c org.apache.druid.extensions:druid-testing-tools \
&& bin/run-java -classpath "lib/*" org.apache.druid.cli.Main tools pull-deps -c org.apache.druid.extensions:druid-testing-tools \
&& echo -e "\n\ndruid.extensions.loadList=[\"druid-hdfs-storage\", \"druid-kafka-indexing-service\", \"druid-datasketches\", \"druid-testing-tools\"]" >> conf/druid/single-server/micro-quickstart/_common/common.runtime.properties \
&& echo -e "\n\ndruid.server.http.allowedHttpMethods=[\"HEAD\"]" >> conf/druid/single-server/micro-quickstart/_common/common.runtime.properties \
)

View File

@ -1429,13 +1429,16 @@ time-iso8601
hadoopStorageDirectory
- ../docs/operations/insert-segment-to-db.md
0.14.x
- ../docs/operations/java.md
G1
Temurin
- ../docs/operations/metrics.md
0.14.x
1s
Bufferpool
EventReceiverFirehose
EventReceiverFirehoseMonitor
Filesystesm
Filesystem
JVMMonitor
QueryCountStatsMonitor
RealtimeMetricsMonitor

View File

@ -143,6 +143,7 @@
"Operations": [
"operations/druid-console",
"operations/getting-started",
"operations/java",
{
"type": "subcategory",
"label": "Security",