Merge branch 'master' into feature/query-refactoring
commit e2da4eb732
@@ -88,7 +88,7 @@ set JAVA_OPTS=%JAVA_OPTS% -Dfile.encoding=UTF-8
 REM Use our provided JNA always versus the system one
 set JAVA_OPTS=%JAVA_OPTS% -Djna.nosys=true
 
-set CORE_CLASSPATH=%ES_HOME%/lib/${project.build.finalName}.jar;%ES_HOME%/lib/*;%ES_HOME%/lib/sigar/*
+set CORE_CLASSPATH=%ES_HOME%/lib/${project.build.finalName}.jar;%ES_HOME%/lib/*
 if "%ES_CLASSPATH%" == "" (
 set ES_CLASSPATH=%CORE_CLASSPATH%
 ) else (
@@ -1,6 +1,6 @@
 #!/bin/sh
 
-CORE_CLASSPATH="$ES_HOME/lib/${project.build.finalName}.jar:$ES_HOME/lib/*:$ES_HOME/lib/sigar/*"
+CORE_CLASSPATH="$ES_HOME/lib/${project.build.finalName}.jar:$ES_HOME/lib/*"
 
 if [ "x$ES_CLASSPATH" = "x" ]; then
 ES_CLASSPATH="$CORE_CLASSPATH"
@@ -1 +0,0 @@
-e91a355d337a0b1991f54181627d63c9973624c3
@@ -1,201 +0,0 @@
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
@@ -1,117 +0,0 @@
-Copyright (c) 2004-2011 VMware, Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-ADDITIONAL LICENSE INFORMATION:
-
-Hyperic SIGAR includes some third-party open source components
-in its distribution. The list below identifies the community or
-organization and links to their appropriate license terms.
-
-The Hyperic team would like to thank all the communities
-of the projects listed below for their contributions.
-
-----------------------------------------------------------
-Components under the Apache License 2.0:
-----------------------------------------------------------
-
-The following components are included without modification:
-
-- log4j -
-Information: http://logging.apache.org/
-License: http://www.apache.org/licenses/LICENSE-2.0
-
-The following components are included with modification:
-
-- cpptasks -
-Information: http://ant-contrib.sourceforge.net/
-License: http://www.apache.org/licenses/LICENSE-2.0
-
-- (portions of) APR -
-Information: http://apr.apache.org/
-License: http://www.apache.org/licenses/LICENSE-2.0
-
-----------------------------------------------------------
-Components under BSD/MIT Style Licenses:
-----------------------------------------------------------
-
-The following components are included with modification:
-
-- solaris get_mib2 -
-Information: ftp://vic.cc.purdue.edu/pub/tools/unix/solaris/get_mib2/
-License: within src/os/solaris/get_mib2.[ch]
-
-Copyright 1995 Purdue Research Foundation, West Lafayette, Indiana
-47907. All rights reserved.
-
-Written by Victor A. Abell <abe@cc.purdue.edu>
-
-This software is not subject to any license of the American Telephone
-and Telegraph Company or the Regents of the University of California.
-
-Permission is granted to anyone to use this software for any purpose on
-any computer system, and to alter it and redistribute it freely, subject
-to the following restrictions:
-
-1. Neither Victor A Abell nor Purdue University are responsible for
-any consequences of the use of this software.
-
-2. The origin of this software must not be misrepresented, either by
-explicit claim or by omission. Credit to Victor A. Abell and Purdue
-University must appear in documentation and sources.
-
-3. Altered versions must be plainly marked as such, and must not be
-misrepresented as being the original software.
-
-4. This notice may not be removed or altered.
-
-- getline by Chris Thewalt -
-Information: http://tinyurl.com/r438r
-License: within src/sigar_getline.c
-
-Copyright (C) 1991, 1992 by Chris Thewalt (thewalt@ce.berkeley.edu)
-
-Permission to use, copy, modify, and distribute this software
-for any purpose and without fee is hereby granted, provided
-that the above copyright notices appear in all copies and that both the
-copyright notice and this permission notice appear in supporting
-documentation. This software is provided "as is" without express or
-implied warranty.
-
-- PrintfFormat.java -
-Information: http://java.sun.com/developer/technicalArticles/Programming/sprintf/PrintfFormat.java
-License: within bindings/java/src/org/hyperic/sigar/util/PrintfFormat.java
-
-(c) 2000 Sun Microsystems, Inc.
-ALL RIGHTS RESERVED
-
-License Grant-
-
-Permission to use, copy, modify, and distribute this Software and its
-documentation for NON-COMMERCIAL or COMMERCIAL purposes and without fee is
-hereby granted.
-
-This Software is provided "AS IS". All express warranties, including any
-implied warranty of merchantability, satisfactory quality, fitness for a
-particular purpose, or non-infringement, are disclaimed, except to the extent
-that such disclaimers are held to be legally invalid.
-
-You acknowledge that Software is not designed, licensed or intended for use in
-the design, construction, operation or maintenance of any nuclear facility
-("High Risk Activities"). Sun disclaims any express or implied warranty of
-fitness for such uses.
-
-Please refer to the file http://www.sun.com/policies/trademarks/ for further
-important trademark information and to
-http://java.sun.com/nav/business/index.html for further important licensing
-information for the Java Technology.
31 core/pom.xml
@@ -214,11 +214,6 @@
 <artifactId>jna</artifactId>
 <optional>true</optional>
 </dependency>
-<dependency>
-<groupId>org.fusesource</groupId>
-<artifactId>sigar</artifactId>
-<optional>true</optional>
-</dependency>
 
 <!-- remove this for java 8 -->
 <dependency>
@@ -708,20 +703,9 @@
 <group>root</group>
 </mapper>
 </data>
-<data>
-<src>${project.basedir}/lib/sigar/</src>
-<includes>sigar-*.jar, libsigar-*-linux.*</includes>
-<type>directory</type>
-<mapper>
-<type>perm</type>
-<prefix>${packaging.elasticsearch.home.dir}/lib/sigar</prefix>
-<user>root</user>
-<group>root</group>
-</mapper>
-</data>
 <data>
 <src>${project.build.directory}/lib</src>
-<excludes>${project.build.finalName}-shaded.jar,${project.build.finalName}-sources.jar,${project.build.finalName}-tests.jar,${project.build.finalName}-test-sources.jar,slf4j-api-*.jar,sigar-*.jar</excludes>
+<excludes>${project.build.finalName}-shaded.jar,${project.build.finalName}-sources.jar,${project.build.finalName}-tests.jar,${project.build.finalName}-test-sources.jar,slf4j-api-*.jar</excludes>
 <type>directory</type>
 <mapper>
 <type>perm</type>
@@ -889,7 +873,6 @@
 <exclude>${project.build.finalName}-tests.jar</exclude>
 <exclude>${project.build.finalName}-test-sources.jar</exclude>
 <exclude>slf4j-api-*.jar</exclude>
-<exclude>sigar-*.jar</exclude>
 </excludes>
 </source>
 <source>
@@ -900,18 +883,6 @@
 </source>
 </sources>
 </mapping>
-<mapping>
-<directory>${packaging.elasticsearch.home.dir}/lib/sigar</directory>
-<sources>
-<source>
-<location>lib/sigar</location>
-<includes>
-<include>sigar*.jar</include>
-<include>libsigar-*-linux.*</include>
-</includes>
-</source>
-</sources>
-</mapping>
 <!-- Add init.d files -->
 <mapping>
 <directory>/etc/init.d</directory>
@@ -77,14 +77,6 @@
 <include>plugin</include>
 </includes>
 </fileSet>
-
-<fileSet>
-<directory>lib/sigar</directory>
-<outputDirectory>lib/sigar</outputDirectory>
-<includes>
-<include>*</include>
-</includes>
-</fileSet>
 </fileSets>
 <files>
 <file>
@@ -246,7 +246,8 @@ public class ElasticsearchException extends RuntimeException implements ToXContent
 
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-if (this instanceof ElasticsearchWrapperException) {
+Throwable ex = ExceptionsHelper.unwrapCause(this);
+if (ex != this) {
 toXContent(builder, params, this);
 } else {
 builder.field("type", getExceptionName());
@@ -44,7 +44,6 @@ import org.elasticsearch.monitor.process.JmxProcessProbe;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.node.NodeBuilder;
 import org.elasticsearch.node.internal.InternalSettingsPreparer;
-import org.hyperic.sigar.Sigar;
 
 import java.io.IOException;
 import java.lang.reflect.Method;
@@ -97,7 +96,7 @@ public class Bootstrap {
 }
 
 /** initialize native resources */
-public static void initializeNatives(boolean mlockAll, boolean ctrlHandler, boolean loadSigar) {
+public static void initializeNatives(boolean mlockAll, boolean ctrlHandler) {
 final ESLogger logger = Loggers.getLogger(Bootstrap.class);
 
 // check if the user is running as root, and bail
@@ -140,18 +139,6 @@ public class Bootstrap {
 // we've already logged this.
 }
 
-if (loadSigar) {
-// initialize sigar explicitly
-try {
-Sigar.load();
-logger.trace("sigar libraries loaded successfully");
-} catch (Throwable t) {
-logger.trace("failed to load sigar libraries", t);
-}
-} else {
-logger.trace("sigar not loaded, disabled via settings");
-}
-
 // init lucene random seed. it will use /dev/urandom where available:
 StringHelper.randomId();
 }
@@ -162,8 +149,7 @@ public class Bootstrap {
 
 private void setup(boolean addShutdownHook, Settings settings, Environment environment) throws Exception {
 initializeNatives(settings.getAsBoolean("bootstrap.mlockall", false),
-settings.getAsBoolean("bootstrap.ctrlhandler", true),
-settings.getAsBoolean("bootstrap.sigar", true));
+settings.getAsBoolean("bootstrap.ctrlhandler", true));
 
 if (addShutdownHook) {
 Runtime.getRuntime().addShutdownHook(new Thread() {
@@ -30,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.RoutingNode;
 import org.elasticsearch.cluster.routing.RoutingService;
 import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
 import org.elasticsearch.cluster.service.InternalClusterService;
@@ -939,14 +940,15 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements
 }
 }
 
-ClusterState.Builder stateBuilder = ClusterState.builder(currentState);
+// we must return a new cluster state instance to force publishing. This is important
+// for the joining node to finalize it's join and set us as a master
+final ClusterState.Builder newState = ClusterState.builder(currentState);
 if (nodeAdded) {
-stateBuilder.nodes(nodesBuilder);
+newState.nodes(nodesBuilder);
 }
-currentState = stateBuilder.build();
-// eagerly run reroute to apply the node addition
-RoutingAllocation.Result result = routingService.getAllocationService().reroute(currentState);
-return ClusterState.builder(currentState).routingResult(result).build();
+return newState.build();
 }
 
 @Override
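The comment in the hunk above relies on how cluster-state update tasks are published: an update is only broadcast when the task returns a ClusterState instance different (by identity) from the one it received. A minimal, self-contained Java sketch of that contract follows; the class and method names here are hypothetical illustrations, not the Elasticsearch API.

import java.util.function.UnaryOperator;

// Hypothetical model of "publish only when a new state instance is returned".
final class PublishOnNewInstanceSketch {

    static final class ClusterStateStub {
        final long version;
        ClusterStateStub(long version) { this.version = version; }
    }

    // The update task hands back either the same instance (no-op) or a new one.
    static void submitUpdate(ClusterStateStub current, UnaryOperator<ClusterStateStub> task) {
        ClusterStateStub updated = task.apply(current);
        if (updated == current) {
            // Identity check: nothing is published, so a joining node would never
            // learn that the master accepted it.
            System.out.println("unchanged instance -> not published");
        } else {
            System.out.println("new instance -> published, version " + updated.version);
        }
    }

    public static void main(String[] args) {
        ClusterStateStub current = new ClusterStateStub(7);
        submitUpdate(current, state -> state);                                   // not published
        submitUpdate(current, state -> new ClusterStateStub(state.version + 1)); // published
    }
}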
@@ -22,10 +22,13 @@ package org.elasticsearch.index.mapper.internal;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.fielddata.FieldDataType;
@@ -34,9 +37,10 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MergeMappingException;
 import org.elasticsearch.index.mapper.MergeResult;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
+import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
+import org.elasticsearch.index.query.QueryParseContext;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -135,6 +139,62 @@ public class IndexFieldMapper extends MetadataFieldMapper {
 return CONTENT_TYPE;
 }
 
+@Override
+public boolean useTermQueryWithQueryString() {
+// As we spoof the presence of an indexed field we have to override
+// the default of returning false which otherwise leads MatchQuery
+// et al to run an analyzer over the query string and then try to
+// hit the search index. We need them to use our termQuery(..)
+// method which checks index names
+return true;
+}
+
+/**
+ * This termQuery impl looks at the context to determine the index that
+ * is being queried and then returns a MATCH_ALL_QUERY or MATCH_NO_QUERY
+ * if the value matches this index. This can be useful if aliases or
+ * wildcards are used but the aim is to restrict the query to specific
+ * indices
+ */
+@Override
+public Query termQuery(Object value, @Nullable QueryParseContext context) {
+if (context == null) {
+return super.termQuery(value, context);
+}
+if (isSameIndex(value, context.index().getName())) {
+return Queries.newMatchAllQuery();
+} else {
+return Queries.newMatchNoDocsQuery();
+}
+}
+
+@Override
+public Query termsQuery(List values, QueryParseContext context) {
+if (context == null) {
+return super.termsQuery(values, context);
+}
+for (Object value : values) {
+if (isSameIndex(value, context.index().getName())) {
+// No need to OR these clauses - we can only logically be
+// running in the context of just one of these index names.
+return Queries.newMatchAllQuery();
+}
+}
+// None of the listed index names are this one
+return Queries.newMatchNoDocsQuery();
+}
+
+private boolean isSameIndex(Object value, String indexName) {
+if (value instanceof BytesRef) {
+BytesRef indexNameRef = new BytesRef(indexName);
+return (indexNameRef.bytesEquals((BytesRef) value));
+} else {
+return indexName.equals(value.toString());
+}
+}
+
 @Override
 public String value(Object value) {
 if (value == null) {
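The javadoc added above describes a per-index rewrite: a term query on the _index field degenerates to match-all when the value names the local index and to match-none otherwise. The following self-contained Java sketch models that decision with hypothetical names and example index names; it is an illustration, not Elasticsearch code.

import java.util.Arrays;
import java.util.List;

// Hypothetical model of the _index term-query rewrite described above.
final class IndexNameRewriteSketch {

    enum Rewrite { MATCH_ALL, MATCH_NONE }

    // Mirrors termQuery(value, context): compare the requested name with the local index.
    static Rewrite termQuery(String requestedIndex, String localIndex) {
        return localIndex.equals(requestedIndex) ? Rewrite.MATCH_ALL : Rewrite.MATCH_NONE;
    }

    // Mirrors termsQuery(values, context): no OR is needed because a shard
    // can only belong to one of the listed indices.
    static Rewrite termsQuery(List<String> requestedIndices, String localIndex) {
        for (String requested : requestedIndices) {
            if (localIndex.equals(requested)) {
                return Rewrite.MATCH_ALL;
            }
        }
        return Rewrite.MATCH_NONE;
    }

    public static void main(String[] args) {
        // A term query for "logs-2015-05" evaluated on two different indices:
        System.out.println(termQuery("logs-2015-05", "logs-2015-05")); // MATCH_ALL
        System.out.println(termQuery("logs-2015-05", "logs-2015-06")); // MATCH_NONE
        System.out.println(termsQuery(Arrays.asList("logs-2015-05", "logs-2015-07"), "logs-2015-06")); // MATCH_NONE
    }
}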
@@ -192,7 +192,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
 }
 
 /**
-* Returns <code>true</code> iff this query builder has at least one should, must or mustNot clause.
+* Returns <code>true</code> iff this query builder has at least one should, must, must not or filter clause.
 * Otherwise <code>false</code>.
 */
 public boolean hasClauses() {
@@ -20,27 +20,21 @@
 package org.elasticsearch.monitor;
 
 import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.monitor.fs.FsProbe;
 import org.elasticsearch.monitor.fs.FsService;
 import org.elasticsearch.monitor.fs.JmxFsProbe;
-import org.elasticsearch.monitor.fs.SigarFsProbe;
 import org.elasticsearch.monitor.jvm.JvmMonitorService;
 import org.elasticsearch.monitor.jvm.JvmService;
 import org.elasticsearch.monitor.network.JmxNetworkProbe;
 import org.elasticsearch.monitor.network.NetworkProbe;
 import org.elasticsearch.monitor.network.NetworkService;
-import org.elasticsearch.monitor.network.SigarNetworkProbe;
 import org.elasticsearch.monitor.os.JmxOsProbe;
 import org.elasticsearch.monitor.os.OsProbe;
 import org.elasticsearch.monitor.os.OsService;
-import org.elasticsearch.monitor.os.SigarOsProbe;
 import org.elasticsearch.monitor.process.JmxProcessProbe;
 import org.elasticsearch.monitor.process.ProcessProbe;
 import org.elasticsearch.monitor.process.ProcessService;
-import org.elasticsearch.monitor.process.SigarProcessProbe;
-import org.elasticsearch.monitor.sigar.SigarService;
 
 /**
 *
@@ -59,29 +53,12 @@ public class MonitorModule extends AbstractModule {
 
 @Override
 protected void configure() {
-boolean sigarLoaded = false;
-try {
-settings.getClassLoader().loadClass("org.hyperic.sigar.Sigar");
-SigarService sigarService = new SigarService(settings);
-if (sigarService.sigarAvailable()) {
-bind(SigarService.class).toInstance(sigarService);
-bind(ProcessProbe.class).to(SigarProcessProbe.class).asEagerSingleton();
-bind(OsProbe.class).to(SigarOsProbe.class).asEagerSingleton();
-bind(NetworkProbe.class).to(SigarNetworkProbe.class).asEagerSingleton();
-bind(FsProbe.class).to(SigarFsProbe.class).asEagerSingleton();
-sigarLoaded = true;
-}
-} catch (Throwable e) {
-// no sigar
-Loggers.getLogger(SigarService.class).trace("failed to load sigar", e);
-}
-if (!sigarLoaded) {
-// bind non sigar implementations
+// bind default implementations
 bind(ProcessProbe.class).to(JmxProcessProbe.class).asEagerSingleton();
 bind(OsProbe.class).to(JmxOsProbe.class).asEagerSingleton();
 bind(NetworkProbe.class).to(JmxNetworkProbe.class).asEagerSingleton();
 bind(FsProbe.class).to(JmxFsProbe.class).asEagerSingleton();
-}
 // bind other services
 bind(ProcessService.class).asEagerSingleton();
 bind(OsService.class).asEagerSingleton();
@@ -1,106 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor.fs;
-
-import com.google.common.collect.Maps;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.env.NodeEnvironment.NodePath;
-import org.elasticsearch.monitor.sigar.SigarService;
-import org.hyperic.sigar.FileSystem;
-import org.hyperic.sigar.FileSystemMap;
-import org.hyperic.sigar.FileSystemUsage;
-import org.hyperic.sigar.Sigar;
-import org.hyperic.sigar.SigarException;
-
-import java.nio.file.Path;
-import java.util.Map;
-
-public class SigarFsProbe extends AbstractComponent implements FsProbe {
-
-private final NodeEnvironment nodeEnv;
-
-private final SigarService sigarService;
-
-private Map<Path, FileSystem> fileSystems = Maps.newHashMap();
-
-@Inject
-public SigarFsProbe(Settings settings, NodeEnvironment nodeEnv, SigarService sigarService) {
-super(settings);
-this.nodeEnv = nodeEnv;
-this.sigarService = sigarService;
-}
-
-@Override
-public synchronized FsStats stats() {
-if (!nodeEnv.hasNodeFile()) {
-return new FsStats(System.currentTimeMillis(), new FsStats.Info[0]);
-}
-NodePath[] nodePaths = nodeEnv.nodePaths();
-FsStats.Info[] infos = new FsStats.Info[nodePaths.length];
-for (int i = 0; i < nodePaths.length; i++) {
-NodePath nodePath = nodePaths[i];
-Path dataLocation = nodePath.path;
-
-FsStats.Info info = new FsStats.Info();
-info.path = dataLocation.toAbsolutePath().toString();
-
-try {
-FileSystem fileSystem = fileSystems.get(dataLocation);
-Sigar sigar = sigarService.sigar();
-if (fileSystem == null) {
-FileSystemMap fileSystemMap = sigar.getFileSystemMap();
-if (fileSystemMap != null) {
-fileSystem = fileSystemMap.getMountPoint(dataLocation.toAbsolutePath().toString());
-fileSystems.put(dataLocation, fileSystem);
-}
-}
-if (fileSystem != null) {
-info.mount = fileSystem.getDirName();
-info.dev = fileSystem.getDevName();
-info.type = fileSystem.getSysTypeName();
-info.spins = nodePath.spins;
-
-FileSystemUsage fileSystemUsage = sigar.getFileSystemUsage(fileSystem.getDirName());
-if (fileSystemUsage != null) {
-// total/free/available seem to be in megabytes?
-info.total = fileSystemUsage.getTotal() * 1024;
-info.free = fileSystemUsage.getFree() * 1024;
-info.available = fileSystemUsage.getAvail() * 1024;
-info.diskReads = fileSystemUsage.getDiskReads();
-info.diskWrites = fileSystemUsage.getDiskWrites();
-info.diskReadBytes = fileSystemUsage.getDiskReadBytes();
-info.diskWriteBytes = fileSystemUsage.getDiskWriteBytes();
-info.diskQueue = fileSystemUsage.getDiskQueue();
-info.diskServiceTime = fileSystemUsage.getDiskServiceTime();
-}
-}
-} catch (SigarException e) {
-// failed...
-}
-
-infos[i] = info;
-}
-
-return new FsStats(System.currentTimeMillis(), infos);
-}
-}
@@ -1,166 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor.network;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.monitor.sigar.SigarService;
-import org.hyperic.sigar.*;
-
-/**
- *
- */
-public class SigarNetworkProbe extends AbstractComponent implements NetworkProbe {
-
-private final SigarService sigarService;
-
-@Inject
-public SigarNetworkProbe(Settings settings, SigarService sigarService) {
-super(settings);
-this.sigarService = sigarService;
-}
-
-@Override
-public NetworkInfo networkInfo() {
-Sigar sigar = sigarService.sigar();
-
-NetworkInfo networkInfo = new NetworkInfo();
-
-try {
-NetInterfaceConfig netInterfaceConfig = sigar.getNetInterfaceConfig(null);
-networkInfo.primary = new NetworkInfo.Interface(netInterfaceConfig.getName(), netInterfaceConfig.getAddress(), netInterfaceConfig.getHwaddr());
-} catch (SigarException e) {
-// ignore
-}
-
-return networkInfo;
-}
-
-@Override
-public synchronized NetworkStats networkStats() {
-Sigar sigar = sigarService.sigar();
-
-NetworkStats stats = new NetworkStats();
-stats.timestamp = System.currentTimeMillis();
-
-try {
-Tcp tcp = sigar.getTcp();
-stats.tcp = new NetworkStats.Tcp();
-stats.tcp.activeOpens = tcp.getActiveOpens();
-stats.tcp.passiveOpens = tcp.getPassiveOpens();
-stats.tcp.attemptFails = tcp.getAttemptFails();
-stats.tcp.estabResets = tcp.getEstabResets();
-stats.tcp.currEstab = tcp.getCurrEstab();
-stats.tcp.inSegs = tcp.getInSegs();
-stats.tcp.outSegs = tcp.getOutSegs();
-stats.tcp.retransSegs = tcp.getRetransSegs();
-stats.tcp.inErrs = tcp.getInErrs();
-stats.tcp.outRsts = tcp.getOutRsts();
-} catch (SigarException e) {
-// ignore
-}
-
-return stats;
-}
-
-@Override
-public String ifconfig() {
-Sigar sigar = sigarService.sigar();
-StringBuilder sb = new StringBuilder();
-try {
-for (String ifname : sigar.getNetInterfaceList()) {
-NetInterfaceConfig ifconfig = null;
-try {
-ifconfig = sigar.getNetInterfaceConfig(ifname);
-} catch (SigarException e) {
-sb.append(ifname + "\t" + "Not Avaialbe [" + e.getMessage() + "]");
-continue;
-}
-long flags = ifconfig.getFlags();
-
-String hwaddr = "";
-if (!NetFlags.NULL_HWADDR.equals(ifconfig.getHwaddr())) {
-hwaddr = " HWaddr " + ifconfig.getHwaddr();
-}
-
-if (!ifconfig.getName().equals(ifconfig.getDescription())) {
-sb.append(ifconfig.getDescription()).append('\n');
-}
-
-sb.append(ifconfig.getName() + "\t" + "Link encap:" + ifconfig.getType() + hwaddr).append('\n');
-
-String ptp = "";
-if ((flags & NetFlags.IFF_POINTOPOINT) > 0) {
-ptp = " P-t-P:" + ifconfig.getDestination();
-}
-
-String bcast = "";
-if ((flags & NetFlags.IFF_BROADCAST) > 0) {
-bcast = " Bcast:" + ifconfig.getBroadcast();
-}
-
-sb.append("\t" +
-"inet addr:" + ifconfig.getAddress() +
-ptp + //unlikely
-bcast +
-" Mask:" + ifconfig.getNetmask()).append('\n');
-
-sb.append("\t" +
-NetFlags.getIfFlagsString(flags) +
-" MTU:" + ifconfig.getMtu() +
-" Metric:" + ifconfig.getMetric()).append('\n');
-try {
-NetInterfaceStat ifstat = sigar.getNetInterfaceStat(ifname);
-
-sb.append("\t" +
-"RX packets:" + ifstat.getRxPackets() +
-" errors:" + ifstat.getRxErrors() +
-" dropped:" + ifstat.getRxDropped() +
-" overruns:" + ifstat.getRxOverruns() +
-" frame:" + ifstat.getRxFrame()).append('\n');
-
-sb.append("\t" +
-"TX packets:" + ifstat.getTxPackets() +
-" errors:" + ifstat.getTxErrors() +
-" dropped:" + ifstat.getTxDropped() +
-" overruns:" + ifstat.getTxOverruns() +
-" carrier:" + ifstat.getTxCarrier()).append('\n');
-sb.append("\t" + "collisions:" +
-ifstat.getTxCollisions()).append('\n');
-
-long rxBytes = ifstat.getRxBytes();
-long txBytes = ifstat.getTxBytes();
-
-sb.append("\t" +
-"RX bytes:" + rxBytes +
-" (" + Sigar.formatSize(rxBytes) + ")" +
-" " +
-"TX bytes:" + txBytes +
-" (" + Sigar.formatSize(txBytes) + ")").append('\n');
-} catch (SigarException e) {
-}
-}
-return sb.toString();
-} catch (SigarException e) {
-return "NA";
-}
-}
-}
@@ -1,133 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor.os;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.monitor.sigar.SigarService;
-import org.hyperic.sigar.*;
-
-/**
- *
- */
-public class SigarOsProbe extends AbstractComponent implements OsProbe {
-
-private final SigarService sigarService;
-
-@Inject
-public SigarOsProbe(Settings settings, SigarService sigarService) {
-super(settings);
-this.sigarService = sigarService;
-}
-
-@Override
-public OsInfo osInfo() {
-Sigar sigar = sigarService.sigar();
-OsInfo info = new OsInfo();
-try {
-CpuInfo[] infos = sigar.getCpuInfoList();
-info.cpu = new OsInfo.Cpu();
-info.cpu.vendor = infos[0].getVendor();
-info.cpu.model = infos[0].getModel();
-info.cpu.mhz = infos[0].getMhz();
-info.cpu.totalCores = infos[0].getTotalCores();
-info.cpu.totalSockets = infos[0].getTotalSockets();
-info.cpu.coresPerSocket = infos[0].getCoresPerSocket();
-if (infos[0].getCacheSize() != Sigar.FIELD_NOTIMPL) {
-info.cpu.cacheSize = infos[0].getCacheSize();
-}
-} catch (SigarException e) {
-// ignore
-}
-
-try {
-Mem mem = sigar.getMem();
-info.mem = new OsInfo.Mem();
-info.mem.total = mem.getTotal();
-} catch (SigarException e) {
-// ignore
-}
-
-try {
-Swap swap = sigar.getSwap();
-info.swap = new OsInfo.Swap();
-info.swap.total = swap.getTotal();
-} catch (SigarException e) {
-// ignore
-}
-
-return info;
-}
-
-@Override
-public OsStats osStats() {
-Sigar sigar = sigarService.sigar();
-OsStats stats = new OsStats();
-stats.timestamp = System.currentTimeMillis();
-try {
-stats.loadAverage = sigar.getLoadAverage();
-} catch (SigarException e) {
-// ignore
-}
-
-try {
-stats.uptime = (long) sigar.getUptime().getUptime();
-} catch (SigarException e) {
-// ignore
-}
-
-try {
-CpuPerc cpuPerc = sigar.getCpuPerc();
-stats.cpu = new OsStats.Cpu();
-stats.cpu.sys = (short) (cpuPerc.getSys() * 100);
-stats.cpu.user = (short) (cpuPerc.getUser() * 100);
-stats.cpu.idle = (short) (cpuPerc.getIdle() * 100);
-stats.cpu.stolen = (short) (cpuPerc.getStolen() * 100);
-} catch (SigarException e) {
-// ignore
-}
-
-try {
-Mem mem = sigar.getMem();
-stats.mem = new OsStats.Mem();
-stats.mem.free = mem.getFree();
-stats.mem.freePercent = (short) mem.getFreePercent();
-stats.mem.used = mem.getUsed();
-stats.mem.usedPercent = (short) mem.getUsedPercent();
-stats.mem.actualFree = mem.getActualFree();
-stats.mem.actualUsed = mem.getActualUsed();
-} catch (SigarException e) {
-// ignore
-}
-
-try {
-Swap swap = sigar.getSwap();
-stats.swap = new OsStats.Swap();
-stats.swap.free = swap.getFree();
-stats.swap.used = swap.getUsed();
-} catch (SigarException e) {
-// ignore
-}
-
-return stats;
-}
-}
@@ -1,81 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor.process;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.monitor.sigar.SigarService;
-import org.hyperic.sigar.ProcCpu;
-import org.hyperic.sigar.ProcMem;
-import org.hyperic.sigar.Sigar;
-import org.hyperic.sigar.SigarException;
-
-/**
- *
- */
-public class SigarProcessProbe extends AbstractComponent implements ProcessProbe {
-
-private final SigarService sigarService;
-
-@Inject
-public SigarProcessProbe(Settings settings, SigarService sigarService) {
-super(settings);
-this.sigarService = sigarService;
-}
-
-@Override
-public synchronized ProcessInfo processInfo() {
-return new ProcessInfo(sigarService.sigar().getPid(), JmxProcessProbe.getMaxFileDescriptorCount());
-}
-
-@Override
-public synchronized ProcessStats processStats() {
-Sigar sigar = sigarService.sigar();
-ProcessStats stats = new ProcessStats();
-stats.timestamp = System.currentTimeMillis();
-stats.openFileDescriptors = JmxProcessProbe.getOpenFileDescriptorCount();
-try {
-if (stats.openFileDescriptors == -1) {
-stats.openFileDescriptors = sigar.getProcFd(sigar.getPid()).getTotal();
-}
-ProcCpu cpu = sigar.getProcCpu(sigar.getPid());
-stats.cpu = new ProcessStats.Cpu();
-stats.cpu.percent = (short) (cpu.getPercent() * 100);
-stats.cpu.sys = cpu.getSys();
-stats.cpu.user = cpu.getUser();
-stats.cpu.total = cpu.getTotal();
-} catch (SigarException e) {
-// ignore
-}
-
-try {
-ProcMem mem = sigar.getProcMem(sigar.getPid());
-stats.mem = new ProcessStats.Mem();
-stats.mem.totalVirtual = mem.getSize();
-stats.mem.resident = mem.getResident();
-stats.mem.share = mem.getShare();
-} catch (SigarException e) {
-// ignore
-}
-
-return stats;
-}
-}
@@ -1,67 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor.sigar;
-
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.hyperic.sigar.Sigar;
-
-/**
- *
- */
-public class SigarService extends AbstractComponent {
-
-    private final Sigar sigar;
-
-    @Inject
-    public SigarService(Settings settings) {
-        super(settings);
-        Sigar sigar = null;
-        if (settings.getAsBoolean("bootstrap.sigar", true)) {
-            try {
-                sigar = new Sigar();
-                // call it to make sure the library was loaded
-                sigar.getPid();
-                logger.trace("sigar loaded successfully");
-            } catch (Throwable t) {
-                logger.trace("failed to load sigar", t);
-                if (sigar != null) {
-                    try {
-                        sigar.close();
-                    } catch (Throwable t1) {
-                        // ignore
-                    } finally {
-                        sigar = null;
-                    }
-                }
-            }
-        }
-        this.sigar = sigar;
-    }
-
-    public boolean sigarAvailable() {
-        return sigar != null;
-    }
-
-    public Sigar sigar() {
-        return this.sigar;
-    }
-}
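For context: the deleted SigarService above is the codebase's guarded-loading pattern for an optional native binding: construct it, call one cheap method so the native library is forced to load immediately, and fall back to null so callers can test availability. The sketch below restates that pattern generically; the OptionalBinding class and its factory/probe parameters are hypothetical names introduced for illustration and are not part of this commit.

[source,java]
--------------------------------------------------
import java.util.function.Consumer;
import java.util.function.Supplier;

// Generic sketch of the guarded-loading pattern used by the deleted SigarService.
// All names here are illustrative; nothing below exists in the repository.
public final class OptionalBinding<T extends AutoCloseable> {

    private final T binding;

    public OptionalBinding(boolean enabled, Supplier<T> factory, Consumer<T> probe) {
        T candidate = null;
        if (enabled) {
            try {
                candidate = factory.get();
                probe.accept(candidate); // e.g. Sigar#getPid() in the original code
            } catch (Throwable t) {
                // loading failed: release anything half-initialized and report "unavailable"
                if (candidate != null) {
                    try {
                        candidate.close();
                    } catch (Throwable ignored) {
                        // ignore
                    }
                    candidate = null;
                }
            }
        }
        this.binding = candidate;
    }

    public boolean available() {
        return binding != null;
    }

    public T get() {
        return binding;
    }
}
--------------------------------------------------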
@@ -32,14 +32,17 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentLocation;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexException;
 import org.elasticsearch.index.query.QueryParsingException;
 import org.elasticsearch.index.query.TestQueryParsingException;
 import org.elasticsearch.indices.IndexMissingException;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.test.ElasticsearchTestCase;
+import org.elasticsearch.test.TestSearchContext;
 import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
 import org.elasticsearch.transport.RemoteTransportException;
@@ -177,6 +180,16 @@ public class ElasticsearchExceptionTests extends ElasticsearchTestCase {
     }

     public void testToXContent() throws IOException {
+        {
+            ElasticsearchException ex = new SearchParseException(new TestSearchContext(), "foo", new XContentLocation(1,0));
+            XContentBuilder builder = XContentFactory.jsonBuilder();
+            builder.startObject();
+            ex.toXContent(builder, ToXContent.EMPTY_PARAMS);
+            builder.endObject();
+
+            String expected = "{\"type\":\"search_parse_exception\",\"reason\":\"foo\",\"line\":1,\"col\":0}";
+            assertEquals(expected, builder.string());
+        }
         {
             ElasticsearchException ex = new ElasticsearchException("foo", new ElasticsearchException("bar", new IllegalArgumentException("index is closed", new RuntimeException("foobar"))));
             XContentBuilder builder = XContentFactory.jsonBuilder();
@@ -226,6 +239,7 @@ public class ElasticsearchExceptionTests extends ElasticsearchTestCase {
             ex.toXContent(otherBuilder, ToXContent.EMPTY_PARAMS);
             otherBuilder.endObject();
             assertEquals(otherBuilder.string(), builder.string());
+            assertEquals("{\"type\":\"file_not_found_exception\",\"reason\":\"foo not found\"}", builder.string());
         }
     }

@@ -26,7 +26,6 @@ import org.elasticsearch.client.Requests;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.store.Store;
-import org.elasticsearch.monitor.sigar.SigarService;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
 import org.hamcrest.Matchers;
@@ -134,7 +133,6 @@ public class ClusterStatsTests extends ElasticsearchIntegrationTest {
     public void testValuesSmokeScreen() throws IOException {
         internalCluster().ensureAtMostNumDataNodes(5);
         internalCluster().ensureAtLeastNumDataNodes(1);
-        SigarService sigarService = internalCluster().getInstance(SigarService.class);
         assertAcked(prepareCreate("test1").setSettings(settingsBuilder().put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0).build()));
         index("test1", "type", "1", "f", "f");
         /*
@@ -150,12 +148,7 @@ public class ClusterStatsTests extends ElasticsearchIntegrationTest {

         assertThat(msg, response.nodesStats.getFs().getTotal().bytes(), Matchers.greaterThan(0l));
         assertThat(msg, response.nodesStats.getJvm().getVersions().size(), Matchers.greaterThan(0));
-        if (sigarService.sigarAvailable()) {
-            // We only get those if we have sigar
-            assertThat(msg, response.nodesStats.getOs().getAvailableProcessors(), Matchers.greaterThan(0));
-            assertThat(msg, response.nodesStats.getOs().getAvailableMemory().bytes(), Matchers.greaterThan(0l));
-            assertThat(msg, response.nodesStats.getOs().getCpus().size(), Matchers.greaterThan(0));
-        }
         assertThat(msg, response.nodesStats.getVersions().size(), Matchers.greaterThan(0));
         assertThat(msg, response.nodesStats.getVersions().contains(Version.CURRENT), Matchers.equalTo(true));
         assertThat(msg, response.nodesStats.getPlugins().size(), Matchers.greaterThanOrEqualTo(0));
@@ -85,7 +85,7 @@ public class ManyMappingsBenchmark {

     public static void main(String[] args) throws Exception {
         System.setProperty("es.logger.prefix", "");
-        Bootstrap.initializeNatives(true, false, false);
+        Bootstrap.initializeNatives(true, false);
         Settings settings = settingsBuilder()
                 .put("")
                 .put(SETTING_NUMBER_OF_SHARDS, 5)
@@ -57,7 +57,7 @@ public class ReplicaRecoveryBenchmark {

     public static void main(String[] args) throws Exception {
         System.setProperty("es.logger.prefix", "");
-        Bootstrap.initializeNatives(true, false, false);
+        Bootstrap.initializeNatives(true, false);

         Settings settings = settingsBuilder()
                 .put("gateway.type", "local")
@@ -66,7 +66,7 @@ public class GlobalOrdinalsBenchmark {

     public static void main(String[] args) throws Exception {
         System.setProperty("es.logger.prefix", "");
-        Bootstrap.initializeNatives(true, false, false);
+        Bootstrap.initializeNatives(true, false);
         Random random = new Random();

         Settings settings = settingsBuilder()
@@ -71,7 +71,7 @@ public class SubAggregationSearchCollectModeBenchmark {
     static Node[] nodes;

     public static void main(String[] args) throws Exception {
-        Bootstrap.initializeNatives(true, false, false);
+        Bootstrap.initializeNatives(true, false);
         Random random = new Random();

         Settings settings = settingsBuilder()
@@ -71,7 +71,7 @@ public class TermsAggregationSearchAndIndexingBenchmark {
     static Node[] nodes;

     public static void main(String[] args) throws Exception {
-        Bootstrap.initializeNatives(true, false, false);
+        Bootstrap.initializeNatives(true, false);
         Settings settings = settingsBuilder()
                 .put("refresh_interval", "-1")
                 .put(SETTING_NUMBER_OF_SHARDS, 1)
@@ -99,7 +99,7 @@ public class TermsAggregationSearchBenchmark {
     }

     public static void main(String[] args) throws Exception {
-        Bootstrap.initializeNatives(true, false, false);
+        Bootstrap.initializeNatives(true, false);
         Random random = new Random();

         Settings settings = settingsBuilder()
@@ -50,7 +50,7 @@ public class BootstrapForTesting {

     static {
         // just like bootstrap, initialize natives, then SM
-        Bootstrap.initializeNatives(true, true, true);
+        Bootstrap.initializeNatives(true, true);

         // check for jar hell
         try {
@@ -85,8 +85,6 @@ public class BootstrapForTesting {
             // target/classes, target/test-classes
             Security.addPath(perms, basedir.resolve("target").resolve("classes"), "read,readlink");
             Security.addPath(perms, basedir.resolve("target").resolve("test-classes"), "read,readlink");
-            // lib/sigar
-            Security.addPath(perms, basedir.resolve("lib").resolve("sigar"), "read,readlink");
             // .m2/repository
             Path m2repoDir = PathUtils.get(Objects.requireNonNull(System.getProperty("m2.repository"),
                     "please set ${m2.repository} in pom.xml"));
@@ -1,42 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.monitor;
-
-import org.elasticsearch.test.ElasticsearchTestCase;
-import org.hyperic.sigar.Sigar;
-
-public class SigarTests extends ElasticsearchTestCase {
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        assumeTrue("we can only ensure sigar is working when running from maven",
-                Boolean.parseBoolean(System.getProperty("tests.maven")));
-    }
-
-    public void testSigarLoads() throws Exception {
-        Sigar.load();
-    }
-
-    public void testSigarWorks() throws Exception {
-        Sigar sigar = new Sigar();
-        assertNotNull(sigar.getCpu());
-    }
-}
@@ -451,14 +451,14 @@ public class MoreLikeThisTests extends ElasticsearchIntegrationTest {
         logger.info("Indexing a single document ...");
         XContentBuilder doc = jsonBuilder().startObject();
         for (int i = 0; i < numFields; i++) {
-            doc.field("field"+i, generateRandomStringArray(5, 10, false));
+            doc.field("field"+i, generateRandomStringArray(5, 10, false)+"a"); // make sure they are not all empty
         }
         doc.endObject();
         indexRandom(true, client().prepareIndex("test", "type1", "0").setSource(doc));

         logger.info("Checking the document matches ...");
         MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery()
-                .like((Item) new Item().doc(doc).index("test").type("type1"))
+                .like((Item) new Item().doc(doc).index("test").type("type1").routing("0")) // routing to ensure we hit the shard with the doc
                 .minTermFreq(0)
                 .minDocFreq(0)
                 .maxQueryTerms(100)
@@ -698,6 +698,58 @@ public class SearchQueryTests extends ElasticsearchIntegrationTest {
         assertSearchHits(searchResponse, "1", "3");
     }

+    @Test
+    public void term_indexQueryTestsIndexed() throws Exception {
+        term_indexQueryTests("not_analyzed");
+    }
+
+    @Test
+    public void term_indexQueryTestsNotIndexed() throws Exception {
+        term_indexQueryTests("no");
+    }
+
+    private void term_indexQueryTests(String index) throws Exception {
+        Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
+        String[] indexNames = { "test1", "test2" };
+        for (String indexName : indexNames) {
+            assertAcked(client()
+                    .admin()
+                    .indices()
+                    .prepareCreate(indexName)
+                    .setSettings(indexSettings)
+                    .addMapping(
+                            "type1",
+                            jsonBuilder().startObject().startObject("type1").startObject("_index").field("index", index).endObject()
+                                    .endObject().endObject()));
+
+            indexRandom(true, client().prepareIndex(indexName, "type1", indexName + "1").setSource("field1", "value1"));
+
+        }
+        for (String indexName : indexNames) {
+            SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termQuery("_index", indexName))).get();
+            SearchResponse searchResponse = assertSearchResponse(request);
+            assertHitCount(searchResponse, 1l);
+            assertSearchHits(searchResponse, indexName + "1");
+        }
+        for (String indexName : indexNames) {
+            SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_index", indexName))).get();
+            SearchResponse searchResponse = assertSearchResponse(request);
+            assertHitCount(searchResponse, 1l);
+            assertSearchHits(searchResponse, indexName + "1");
+        }
+        for (String indexName : indexNames) {
+            SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("_index", indexName))).get();
+            SearchResponse searchResponse = assertSearchResponse(request);
+            assertHitCount(searchResponse, 1l);
+            assertSearchHits(searchResponse, indexName + "1");
+        }
+        {
+            SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_index", indexNames))).get();
+            SearchResponse searchResponse = assertSearchResponse(request);
+            assertHitCount(searchResponse, indexNames.length);
+        }
+    }
+
     @Test
     public void testLimitFilter() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1));
@@ -279,7 +279,6 @@ public final class InternalTestCluster extends TestCluster {
                 builder.put("path.data", dataPath.toString());
             }
         }
-        builder.put("bootstrap.sigar", rarely(random));
         builder.put("path.home", baseDir);
         builder.put("path.repo", baseDir.resolve("repos"));
         builder.put("transport.tcp.port", BASE_PORT + "-" + (BASE_PORT+100));
@@ -20,6 +20,7 @@ package org.elasticsearch.transport.netty;

 import com.carrotsearch.hppc.IntHashSet;
 import com.google.common.base.Charsets;
+
 import org.elasticsearch.Version;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.common.component.Lifecycle;
@@ -31,8 +32,8 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.test.ElasticsearchTestCase;
-import org.elasticsearch.test.junit.rule.RepeatOnExceptionRule;
 import org.elasticsearch.test.cache.recycler.MockBigArrays;
+import org.elasticsearch.test.junit.rule.RepeatOnExceptionRule;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.BindTransportException;
 import org.elasticsearch.transport.TransportService;
@@ -43,6 +44,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
+import java.net.ServerSocket;
 import java.net.Socket;

 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
@@ -182,14 +184,30 @@ public class NettyTransportMultiPortTests extends ElasticsearchTestCase {
     private int[] getRandomPorts(int numberOfPorts) {
         IntHashSet ports = new IntHashSet();

+        int nextPort = randomIntBetween(49152, 65535);
         for (int i = 0; i < numberOfPorts; i++) {
-            int port = randomIntBetween(49152, 65535);
-            while (ports.contains(port)) {
-                port = randomIntBetween(49152, 65535);
-            }
-            ports.add(port);
-        }
+            boolean foundPortInRange = false;
+            while (!foundPortInRange) {
+                if (!ports.contains(nextPort)) {
+                    logger.debug("looking to see if port [{}]is available", nextPort);
+                    try (ServerSocket serverSocket = new ServerSocket()) {
+                        // Set SO_REUSEADDR as we may bind here and not be able
+                        // to reuse the address immediately without it.
+                        serverSocket.setReuseAddress(NetworkUtils.defaultReuseAddress());
+                        serverSocket.bind(new InetSocketAddress(nextPort));
+
+                        // bind was a success
+                        logger.debug("port [{}] available.", nextPort);
+                        foundPortInRange = true;
+                        ports.add(nextPort);
+                    } catch (IOException e) {
+                        // Do nothing
+                        logger.debug("port [{}] not available.", e, nextPort);
+                    }
+                }
+                nextPort = randomIntBetween(49152, 65535);
+            }
+        }
         return ports.toArray();
     }

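The rewritten getRandomPorts above no longer trusts a random number to be free; it proves each candidate by binding a throwaway ServerSocket and only keeps ports where the bind succeeds. A condensed, standalone sketch of that probe follows (plain JDK only; where the test calls NetworkUtils.defaultReuseAddress(), the sketch assumes a hard-coded true, and the class name is made up for illustration):

[source,java]
--------------------------------------------------
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.util.Random;

// Illustrative sketch of the port-probing idea used by the new test helper.
public final class FreePortProbe {

    private static final Random RANDOM = new Random();

    /** Returns a port in the ephemeral range that accepted a bind at the time of the check. */
    public static int findFreePort() {
        while (true) {
            int candidate = 49152 + RANDOM.nextInt(65535 - 49152 + 1);
            try (ServerSocket socket = new ServerSocket()) {
                // SO_REUSEADDR so the port can be re-bound immediately after the probe closes it.
                socket.setReuseAddress(true);
                socket.bind(new InetSocketAddress(candidate));
                return candidate; // bind succeeded, so the port was free at this moment
            } catch (IOException e) {
                // port busy or otherwise not bindable; try another candidate
            }
        }
    }
}
--------------------------------------------------

A probe like this is inherently racy: another process can still claim the port between the probe closing it and the caller binding it for real, so the result is a best-effort hint rather than a guarantee.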
@@ -13,5 +13,5 @@
 <stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value="org.elasticsearch.bootstrap.Elasticsearch"/>
 <stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="elasticsearch"/>
 <stringAttribute key="org.eclipse.jdt.launching.SOURCE_PATH_PROVIDER" value="org.eclipse.m2e.launchconfig.sourcepathProvider"/>
-<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms256m -Xmx1g -Djava.awt.headless=true -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=logs/heapdump.hprof -Delasticsearch -Des.foreground=yes -Djava.library.path=lib/sigar -ea"/>
+<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms256m -Xmx1g -Djava.awt.headless=true -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=logs/heapdump.hprof -Delasticsearch -Des.foreground=yes -ea"/>
 </launchConfiguration>
@@ -29,7 +29,7 @@ my $Issue_URL = "http://github.com/${User_Repo}issues/";

 my @Groups = qw(
     breaking deprecation feature
-    enhancement bug regression doc test
+    enhancement bug regression build doc test
 );
 my %Group_Labels = (
     breaking => 'Breaking changes',
@@ -41,7 +41,7 @@ my %Group_Labels = (
     bug => 'Bug fixes',
     regression => 'Regression',
     test => 'Tests',
-    other => 'Not classified',
+    other => 'NOT CLASSIFIED',
 );

 use JSON();
@@ -79,10 +79,10 @@ def readServerOutput(p, startupEvent, failureEvent):
         startupEvent.set()
         print('ES: **process exit**\n')
         break
-      line = line.decode('utf-8')
+      line = line.decode('utf-8').rstrip()
       if line.endswith('started') and not startupEvent.isSet():
         startupEvent.set()
-      print('ES: %s' % line.rstrip())
+      print('ES: %s' % line)
     except:
       print()
       print('Exception reading Elasticsearch output:')
@@ -109,14 +109,14 @@ if __name__ == '__main__':
     run('tar -xzf core/target/releases/%s -C %s' % (artifact, tmp_dir))
     es_install_dir = os.path.join(tmp_dir, artifact[:-7])
     es_plugin_path = os.path.join(es_install_dir, 'bin/plugin')
-    plugin_names = set()
+    installed_plugin_names = set()
     print('Find plugins:')
     for name in os.listdir('plugins'):
       if name not in ('target', 'pom.xml'):
         url = 'file://%s/plugins/%s/target/releases/elasticsearch-%s-2.0.0-SNAPSHOT.zip' % (os.path.abspath('.'), name, name)
         print(' install plugin %s...' % name)
         run('%s; %s --url %s -install %s' % (JAVA_ENV, es_plugin_path, url, name))
-        plugin_names.add(name)
+        installed_plugin_names.add(name)

     print('Start Elasticsearch')

@@ -153,11 +153,12 @@ if __name__ == '__main__':
         for _, node in nodes.items():
           node_plugins = node['plugins']
           for node_plugin in node_plugins:
-            if not plugin_names.get(node_plugin['name'], False):
-              raise RuntimeError('Unexpeced plugin %s' % node_plugin['name'])
-            del plugin_names[node_plugin['name']]
-          if plugin_names:
-            raise RuntimeError('Plugins not loaded %s' % list(plugin_names.keys()))
+            plugin_name = node_plugin['name']
+            if plugin_name not in installed_plugin_names:
+              raise RuntimeError('Unexpeced plugin %s' % plugin_name)
+            installed_plugin_names.remove(plugin_name)
+          if len(installed_plugin_names) > 0:
+            raise RuntimeError('Plugins not loaded %s' % installed_plugin_names)
       else:
         raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
     finally:
@@ -1,8 +1,9 @@
 [[query-dsl-match-query]]
 === Match Query

-A family of `match` queries that accept text/numerics/dates, analyzes
-it, and constructs a query out of it. For example:
+A family of `match` queries that accepts text/numerics/dates, analyzes
+them, and constructs a query. For example:

 [source,js]
 --------------------------------------------------
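The hunk context above ends at the opening delimiter of the documentation's example block, so the example body itself is not shown in this diff. Purely for orientation (this is not the doc's elided snippet, and the field name and text are placeholders), the same kind of match query expressed through the Java API looks roughly like this:

[source,java]
--------------------------------------------------
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public final class MatchQueryExample {
    public static void main(String[] args) {
        // The field's analyzer runs over the given text and the resulting terms
        // are combined into a single query, as the paragraph above describes.
        MatchQueryBuilder query = QueryBuilders.matchQuery("message", "this is a test");
        System.out.println(query); // query builders typically render as the equivalent JSON DSL
    }
}
--------------------------------------------------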
@@ -207,11 +207,6 @@
       <artifactId>jna</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>org.fusesource</groupId>
-      <artifactId>sigar</artifactId>
-      <scope>provided</scope>
-    </dependency>
     <!-- Required by the REST test framework -->
     <dependency>
       <groupId>org.apache.httpcomponents</groupId>
pom.xml
@@ -430,24 +430,6 @@
       <optional>true</optional>
     </dependency>

-    <dependency>
-      <groupId>org.fusesource</groupId>
-      <artifactId>sigar</artifactId>
-      <version>1.6.4</version>
-    </dependency>
-
-    <!-- We don't use this since the publish pom is then messed up -->
-    <!--
-    <dependency>
-      <groupId>sigar</groupId>
-      <artifactId>sigar</artifactId>
-      <version>1.6.4</version>
-      <scope>system</scope>
-      <systemPath>${basedir}/lib/sigar/sigar-1.6.4.jar</systemPath>
-      <optional>true</optional>
-    </dependency>
-    -->
-
     <!-- For coverage analysis -->
     <dependency>
       <groupId>org.jacoco</groupId>
@@ -624,7 +606,6 @@
                     <param>-Xmx${tests.heap.size}</param>
                     <param>-Xms${tests.heap.size}</param>
                     <param>${java.permGenSpace}</param>
-                    <param>-Djava.library.path=${project.basedir}/lib/sigar</param>
                     <param>-XX:MaxDirectMemorySize=512m</param>
                     <param>-Des.logger.prefix=</param>
                     <param>-XX:+HeapDumpOnOutOfMemoryError</param>