diff --git a/.gitignore b/.gitignore index 3b0aecc1081..a31eff71e7d 100644 --- a/.gitignore +++ b/.gitignore @@ -37,4 +37,4 @@ eclipse-build nb-configuration.xml nbactions.xml -/dependency-reduced-pom.xml +dependency-reduced-pom.xml diff --git a/bin/elasticsearch b/bin/elasticsearch index 23591222fb0..bd324a7b5c4 100755 --- a/bin/elasticsearch +++ b/bin/elasticsearch @@ -57,7 +57,7 @@ # Maven will replace the project.name with elasticsearch below. If that # hasn't been done, we assume that this is not a packaged version and the # user has forgotten to run Maven to create a package. -IS_PACKAGED_VERSION='${project.name}' +IS_PACKAGED_VERSION='${project.artifactId}' if [ "$IS_PACKAGED_VERSION" != "elasticsearch" ]; then cat >&2 << EOF Error: You must build the project with Maven or download a pre-built package diff --git a/config/logging.yml b/config/logging.yml index a76eb175fc4..035106eeec6 100644 --- a/config/logging.yml +++ b/config/logging.yml @@ -4,6 +4,10 @@ rootLogger: ${es.logger.level}, console, file logger: # log action execution errors for easier debugging action: DEBUG + + # deprecation logging, turn to DEBUG to see them + deprecation: INFO, deprecation_log_file + # reduce the logging for aws, too much is logged under the default INFO com.amazonaws: WARN org.apache.http: INFO @@ -24,6 +28,7 @@ logger: additivity: index.search.slowlog: false index.indexing.slowlog: false + deprecation: false appender: console: @@ -51,6 +56,14 @@ appender: #type: pattern #conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" + deprecation_log_file: + type: dailyRollingFile + file: ${path.logs}/${cluster.name}_deprecation.log + datePattern: "'.'yyyy-MM-dd" + layout: + type: pattern + conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" + index_search_slow_log_file: type: dailyRollingFile file: ${path.logs}/${cluster.name}_index_search_slowlog.log diff --git a/dev-tools/build_release.py b/dev-tools/build_release.py index ca3223c0a92..8ae300d98fd 100644 --- 
a/dev-tools/build_release.py +++ b/dev-tools/build_release.py @@ -30,6 +30,7 @@ import socket import urllib.request import subprocess +from functools import partial from http.client import HTTPConnection from http.client import HTTPSConnection @@ -72,6 +73,11 @@ PLUGINS = [('license', 'elasticsearch/license/latest'), LOG = env.get('ES_RELEASE_LOG', '/tmp/elasticsearch_release.log') +# console colors +COLOR_OK = '\033[92m' +COLOR_END = '\033[0m' +COLOR_FAIL = '\033[91m' + def log(msg): log_plain('\n%s' % msg) @@ -137,9 +143,6 @@ def get_tag_hash(tag): def get_current_branch(): return os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read().strip() -verify_java_version('1.7') # we require to build with 1.7 -verify_mvn_java_version('1.7', MVN) - # Utility that returns the name of the release branch for a given version def release_branch(version): return 'release_branch_%s' % version @@ -545,14 +548,6 @@ def print_sonatype_notice(): """) -def check_s3_credentials(): - if not env.get('AWS_ACCESS_KEY_ID', None) or not env.get('AWS_SECRET_ACCESS_KEY', None): - raise RuntimeError('Could not find "AWS_ACCESS_KEY_ID" / "AWS_SECRET_ACCESS_KEY" in the env variables please export in order to upload to S3') - -def check_gpg_credentials(): - if not env.get('GPG_KEY_ID', None) or not env.get('GPG_PASSPHRASE', None): - raise RuntimeError('Could not find "GPG_KEY_ID" / "GPG_PASSPHRASE" in the env variables please export in order to sign the packages (also make sure that GPG_KEYRING is set when not in ~/.gnupg)') - def check_command_exists(name, cmd): try: subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) @@ -562,9 +557,6 @@ def check_command_exists(name, cmd): VERSION_FILE = 'src/main/java/org/elasticsearch/Version.java' POM_FILE = 'pom.xml' -# we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml -print_sonatype_notice() - # finds the highest available bwc version to test against def find_bwc_version(release_version, bwc_dir='backwards'): 
log(' Lookup bwc version in directory [%s]' % bwc_dir) @@ -618,6 +610,60 @@ def check_norelease(path='src'): if pattern.search(line): raise RuntimeError('Found //norelease comment in %s line %s' % (full_path, line_number)) +def run_and_print(text, run_function): + try: + print(text, end='') + run_function() + print(COLOR_OK + 'OK' + COLOR_END) + return True + except RuntimeError: + print(COLOR_FAIL + 'NOT OK' + COLOR_END) + return False + +def check_env_var(text, env_var): + try: + print(text, end='') + env[env_var] + print(COLOR_OK + 'OK' + COLOR_END) + return True + except KeyError: + print(COLOR_FAIL + 'NOT OK' + COLOR_END) + return False + +def check_environment_and_commandline_tools(check_only): + checks = list() + checks.append(check_env_var('Checking for AWS env configuration AWS_SECRET_ACCESS_KEY_ID... ', 'AWS_SECRET_ACCESS_KEY')) + checks.append(check_env_var('Checking for AWS env configuration AWS_ACCESS_KEY_ID... ', 'AWS_ACCESS_KEY_ID')) + checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_USERNAME... ', 'SONATYPE_USERNAME')) + checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_PASSWORD... ', 'SONATYPE_PASSWORD')) + checks.append(check_env_var('Checking for GPG env configuration GPG_KEY_ID... ', 'GPG_KEY_ID')) + checks.append(check_env_var('Checking for GPG env configuration GPG_PASSPHRASE... ', 'GPG_PASSPHRASE')) + checks.append(check_env_var('Checking for S3 repo upload env configuration S3_BUCKET_SYNC_TO... ', 'S3_BUCKET_SYNC_TO')) + checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_NAME... ', 'GIT_AUTHOR_NAME')) + checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_EMAIL... ', 'GIT_AUTHOR_EMAIL')) + + checks.append(run_and_print('Checking command: rpm... ', partial(check_command_exists, 'rpm', 'rpm --version'))) + checks.append(run_and_print('Checking command: dpkg... 
', partial(check_command_exists, 'dpkg', 'dpkg --version'))) + checks.append(run_and_print('Checking command: gpg... ', partial(check_command_exists, 'gpg', 'gpg --version'))) + checks.append(run_and_print('Checking command: expect... ', partial(check_command_exists, 'expect', 'expect -v'))) + checks.append(run_and_print('Checking command: createrepo... ', partial(check_command_exists, 'createrepo', 'createrepo --version'))) + checks.append(run_and_print('Checking command: s3cmd... ', partial(check_command_exists, 's3cmd', 's3cmd --version'))) + checks.append(run_and_print('Checking command: apt-ftparchive... ', partial(check_command_exists, 'apt-ftparchive', 'apt-ftparchive --version'))) + + # boto, check error code being returned + location = os.path.dirname(os.path.realpath(__file__)) + command = 'python %s/upload-s3.py -h' % (location) + checks.append(run_and_print('Testing boto python dependency... ', partial(check_command_exists, 'python-boto', command))) + + checks.append(run_and_print('Checking java version... ', partial(verify_java_version, '1.7'))) + checks.append(run_and_print('Checking java mvn version... 
', partial(verify_mvn_java_version, '1.7', MVN))) + + if check_only: + sys.exit(0) + + if False in checks: + print("Exiting due to failing checks") + sys.exit(0) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release') @@ -636,9 +682,12 @@ if __name__ == '__main__': help='Smoke tests the given release') parser.add_argument('--bwc', '-w', dest='bwc', metavar='backwards', default='backwards', help='Backwards compatibility version path to use to run compatibility tests against') + parser.add_argument('--check-only', dest='check_only', action='store_true', + help='Checks and reports for all requirements and then exits') parser.set_defaults(dryrun=True) parser.set_defaults(smoke=None) + parser.set_defaults(check_only=False) args = parser.parse_args() bwc_path = args.bwc src_branch = args.branch @@ -649,18 +698,19 @@ if __name__ == '__main__': build = not args.smoke smoke_test_version = args.smoke + check_environment_and_commandline_tools(args.check_only) + + # we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml + print_sonatype_notice() + + # we require to build with 1.7 + verify_java_version('1.7') + verify_mvn_java_version('1.7', MVN) + if os.path.exists(LOG): raise RuntimeError('please remove old release log %s first' % LOG) - check_gpg_credentials() - check_command_exists('gpg', 'gpg --version') - check_command_exists('expect', 'expect -v') - if not dry_run: - check_s3_credentials() - check_command_exists('createrepo', 'createrepo --version') - check_command_exists('s3cmd', 's3cmd --version') - check_command_exists('apt-ftparchive', 'apt-ftparchive --version') print('WARNING: dryrun is set to "false" - this will push and publish the release') input('Press Enter to continue...') diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index a4e20793849..20677b404af 100644 --- 
a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -180,11 +180,11 @@ The default value of `alpha` is `0.5`, and the setting accepts any float from 0- [[single_0.2alpha]] -.Single Exponential moving average with window of size 10, alpha = 0.2 +.EWMA with window of size 10, alpha = 0.2 image::images/pipeline_movavg/single_0.2alpha.png[] [[single_0.7alpha]] -.Single Exponential moving average with window of size 10, alpha = 0.7 +.EWMA with window of size 10, alpha = 0.7 image::images/pipeline_movavg/single_0.7alpha.png[] ==== Holt-Linear @@ -223,13 +223,111 @@ to see. Small values emphasize long-term trends (such as a constant linear tren values emphasize short-term trends. This will become more apparently when you are predicting values. [[double_0.2beta]] -.Double Exponential moving average with window of size 100, alpha = 0.5, beta = 0.2 +.Holt-Linear moving average with window of size 100, alpha = 0.5, beta = 0.2 image::images/pipeline_movavg/double_0.2beta.png[] [[double_0.7beta]] -.Double Exponential moving average with window of size 100, alpha = 0.5, beta = 0.7 +.Holt-Linear moving average with window of size 100, alpha = 0.5, beta = 0.7 image::images/pipeline_movavg/double_0.7beta.png[] +==== Holt-Winters + +The `holt_winters` model (aka "triple exponential") incorporates a third exponential term which +tracks the seasonal aspect of your data. This aggregation therefore smooths based on three components: "level", "trend" +and "seasonality". + +The level and trend calculation is identical to `holt`. The seasonal calculation looks at the difference between +the current point, and the point one period earlier. + +Holt-Winters requires a little more handholding than the other moving averages. You need to specify the "periodicity" +of your data: e.g. if your data has cyclic trends every 7 days, you would set `period: 7`. 
Similarly if there was +a monthly trend, you would set it to `30`. There is currently no periodicity detection, although that is planned +for future enhancements. + +There are two varieties of Holt-Winters: additive and multiplicative. + +===== "Cold Start" + +Unfortunately, due to the nature of Holt-Winters, it requires two periods of data to "bootstrap" the algorithm. This +means that your `window` must always be *at least* twice the size of your period. An exception will be thrown if it +isn't. It also means that Holt-Winters will not emit a value for the first `2 * period` buckets; the current algorithm +does not backcast. + +[[holt_winters_cold_start]] +.Holt-Winters showing a "cold" start where no values are emitted +image::images/pipeline_movavg/triple_untruncated.png[] + +Because the "cold start" obscures what the moving average looks like, the rest of the Holt-Winters images are truncated +to not show the "cold start". Just be aware this will always be present at the beginning of your moving averages! + +===== Additive Holt-Winters + +Additive seasonality is the default; it can also be specified by setting `"type": "add"`. This variety is preferred +when the seasonal effect is additive to your data. E.g. you could simply subtract the seasonal effect to "de-seasonalize" +your data into a flat trend. + +The default value of `alpha`, `beta` and `gamma` is `0.5`, and the settings accept any float from 0-1 inclusive. +The default value of `period` is `1`. 
+ +[source,js] +-------------------------------------------------- +{ + "the_movavg":{ + "moving_avg":{ + "buckets_path": "the_sum", + "model" : "holt_winters", + "settings" : { + "type" : "add", + "alpha" : 0.5, + "beta" : 0.5, + "gamma" : 0.5, + "period" : 7 + } + } +} +-------------------------------------------------- + + +[[holt_winters_add]] +.Holt-Winters moving average with window of size 120, alpha = 0.5, beta = 0.7, gamma = 0.3, period = 30 +image::images/pipeline_movavg/triple.png[] + +===== Multiplicative Holt-Winters + +Multiplicative is specified by setting `"type": "mult"`. This variety is preferred when the seasonal effect is +multiplied against your data. E.g. if the seasonal effect is x5 the data, rather than simply adding to it. + +The default value of `alpha`, `beta` and `gamma` is `0.5`, and the settings accept any float from 0-1 inclusive. +The default value of `period` is `1`. + +[WARNING] +====== +Multiplicative Holt-Winters works by dividing each data point by the seasonal value. This is problematic if any of +your data is zero, or if there are gaps in the data (since this results in a divide-by-zero). To combat this, the +`mult` Holt-Winters pads all values by a very small amount (1*10^-10^) so that all values are non-zero. This affects +the result, but only minimally. 
If your data is non-zero, or you prefer to see `NaN` when zeros are encountered, +you can disable this behavior with `pad: false`. +====== + +[source,js] +-------------------------------------------------- +{ + "the_movavg":{ + "moving_avg":{ + "buckets_path": "the_sum", + "model" : "holt_winters", + "settings" : { + "type" : "mult", + "alpha" : 0.5, + "beta" : 0.5, + "gamma" : 0.5, + "period" : 7, + "pad" : true + } + } +} +-------------------------------------------------- + ==== Prediction All the moving average model support a "prediction" mode, which will attempt to extrapolate into the future given the @@ -263,7 +361,7 @@ value, we can extrapolate based on local constant trends (in this case the predi of the series was heading in a downward direction): [[double_prediction_local]] -.Double Exponential moving average with window of size 100, predict = 20, alpha = 0.5, beta = 0.8 +.Holt-Linear moving average with window of size 100, predict = 20, alpha = 0.5, beta = 0.8 image::images/pipeline_movavg/double_prediction_local.png[] In contrast, if we choose a small `beta`, the predictions are based on the global constant trend. 
In this series, the @@ -272,3 +370,10 @@ global trend is slightly positive, so the prediction makes a sharp u-turn and be [[double_prediction_global]] .Double Exponential moving average with window of size 100, predict = 20, alpha = 0.5, beta = 0.1 image::images/pipeline_movavg/double_prediction_global.png[] + +The `holt_winters` model has the potential to deliver the best predictions, since it also incorporates seasonal +fluctuations into the model: + +[[holt_winters_prediction_global]] +.Holt-Winters moving average with window of size 120, predict = 25, alpha = 0.8, beta = 0.2, gamma = 0.7, period = 30 +image::images/pipeline_movavg/triple_prediction.png[] diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index a7cf7136a83..bdc03a0998b 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -5,6 +5,7 @@ An analyzer of type `custom` that allows to combine a `Tokenizer` with zero or more `Token Filters`, and zero or more `Char Filters`. The custom analyzer accepts a logical/registered name of the tokenizer to use, and a list of logical/registered names of token filters. +The name of the custom analyzer must not start with "_". The following are settings that can be set for a `custom` analyzer type: diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 25b9ac4fcea..7dfb3936e35 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -81,6 +81,113 @@ being consumed by a monitoring tool, rather than intended for human consumption. The default for the `human` flag is `false`. +[float] +=== Response Filtering + +All REST APIs accept a `filter_path` parameter that can be used to reduce +the response returned by elasticsearch. 
This parameter takes a comma +separated list of filters expressed with the dot notation: + +[source,sh] +-------------------------------------------------- +curl -XGET 'localhost:9200/_search?pretty&filter_path=took,hits.hits._id,hits.hits._score' +{ + "took" : 3, + "hits" : { + "hits" : [ + { + "_id" : "3640", + "_score" : 1.0 + }, + { + "_id" : "3642", + "_score" : 1.0 + } + ] + } +} +-------------------------------------------------- + +It also supports the `*` wildcard character to match any field or part +of a field's name: + +[source,sh] +-------------------------------------------------- +curl -XGET 'localhost:9200/_nodes/stats?filter_path=nodes.*.ho*' +{ + "nodes" : { + "lvJHed8uQQu4brS-SXKsNA" : { + "host" : "portable" + } + } +} +-------------------------------------------------- + +And the `**` wildcard can be used to include fields without knowing the +exact path of the field. For example, we can return the Lucene version +of every segment with this request: + +[source,sh] +-------------------------------------------------- +curl 'localhost:9200/_segments?pretty&filter_path=indices.**.version' +{ + "indices" : { + "movies" : { + "shards" : { + "0" : [ { + "segments" : { + "_0" : { + "version" : "5.2.0" + } + } + } ], + "2" : [ { + "segments" : { + "_0" : { + "version" : "5.2.0" + } + } + } ] + } + }, + "books" : { + "shards" : { + "0" : [ { + "segments" : { + "_0" : { + "version" : "5.2.0" + } + } + } ] + } + } + } +} +-------------------------------------------------- + +Note that elasticsearch sometimes returns directly the raw value of a field, +like the `_source` field. 
If you want to filter _source fields, you should +consider combining the already existing `_source` parameter (see +<> for more details) with the `filter_path` + parameter like this: + +[source,sh] +-------------------------------------------------- +curl -XGET 'localhost:9200/_search?pretty&filter_path=hits.hits._source&_source=title' +{ + "hits" : { + "hits" : [ { + "_source":{"title":"Book #2"} + }, { + "_source":{"title":"Book #1"} + }, { + "_source":{"title":"Book #3"} + } ] + } +} +-------------------------------------------------- + + [float] === Flat Settings diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc index d857006d2b1..bc29cc92d64 100644 --- a/docs/reference/cat.asciidoc +++ b/docs/reference/cat.asciidoc @@ -66,6 +66,10 @@ only those columns to appear. 192.168.56.30 9300 43.9 Ramsey, Doug -------------------------------------------------- +You can also request multiple columns using simple wildcards like +`/_cat/thread_pool?h=ip,bulk.*` to get all headers (or aliases) starting +with `bulk.`. + [float] [[numeric-formats]] === Numeric formats @@ -120,4 +124,4 @@ include::cat/thread_pool.asciidoc[] include::cat/shards.asciidoc[] -include::cat/segments.asciidoc[] \ No newline at end of file +include::cat/segments.asciidoc[] diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index cf2db23056c..ea1b6176578 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -228,5 +228,7 @@ it's current version is equal to the specified one. This behavior is the same for all version types with the exception of version type `FORCE` which always retrieves the document. -Note that Elasticsearch do not store older versions of documents. Only the current version can be retrieved. - +Internally, Elasticsearch has marked the old document as deleted and added an +entirely new document. The old version of the document doesn’t disappear +immediately, although you won’t be able to access it. 
Elasticsearch cleans up +deleted documents in the background as you continue to index more data. diff --git a/docs/reference/images/pipeline_movavg/triple.png b/docs/reference/images/pipeline_movavg/triple.png new file mode 100644 index 00000000000..8aaf281c1bf Binary files /dev/null and b/docs/reference/images/pipeline_movavg/triple.png differ diff --git a/docs/reference/images/pipeline_movavg/triple_prediction.png b/docs/reference/images/pipeline_movavg/triple_prediction.png new file mode 100644 index 00000000000..fb34881d1e3 Binary files /dev/null and b/docs/reference/images/pipeline_movavg/triple_prediction.png differ diff --git a/docs/reference/images/pipeline_movavg/triple_untruncated.png b/docs/reference/images/pipeline_movavg/triple_untruncated.png new file mode 100644 index 00000000000..4f7528ea5fe Binary files /dev/null and b/docs/reference/images/pipeline_movavg/triple_untruncated.png differ diff --git a/docs/reference/migration/migrate_2_0.asciidoc b/docs/reference/migration/migrate_2_0.asciidoc index 2e7fcfe828e..34b022b95d2 100644 --- a/docs/reference/migration/migrate_2_0.asciidoc +++ b/docs/reference/migration/migrate_2_0.asciidoc @@ -404,6 +404,12 @@ The `count` search type has been deprecated. All benefits from this search type now be achieved by using the `query_then_fetch` search type (which is the default) and setting `size` to `0`. +=== The count api internally uses the search api + +The count api is now a shortcut to the search api with `size` set to 0. As a +result, a total failure will result in an exception being returned rather +than a normal response with `count` set to `0` and shard failures. + === JSONP support JSONP callback support has now been removed. 
CORS should be used to access Elasticsearch diff --git a/docs/reference/modules/plugins.asciidoc b/docs/reference/modules/plugins.asciidoc index 3a9f867a4dd..40c288280cc 100644 --- a/docs/reference/modules/plugins.asciidoc +++ b/docs/reference/modules/plugins.asciidoc @@ -293,6 +293,7 @@ deprecated[1.5.0,Rivers have been deprecated. See https://www.elastic.co/blog/d * https://github.com/karmi/elasticsearch-paramedic[Paramedic Plugin] (by Karel Minařík) * https://github.com/polyfractal/elasticsearch-segmentspy[SegmentSpy Plugin] (by Zachary Tong) * https://github.com/xyu/elasticsearch-whatson[Whatson Plugin] (by Xiao Yu) +* https://github.com/lmenezes/elasticsearch-kopf[Kopf Plugin] (by lmenezes) [float] [[repository-plugins]] diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index eed595e25ba..b822b223ce5 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -337,3 +337,22 @@ the http://logging.apache.org/log4j/1.2/manual.html[log4j documentation]. Additional Appenders and other logging classes provided by http://logging.apache.org/log4j/extras/[log4j-extras] are also available, out of the box. + +[float] +[[deprecation-logging]] +==== Deprecation logging + +In addition to regular logging, Elasticsearch allows you to enable logging +of deprecated actions. For example this allows you to determine early, if +you need to migrate certain functionality in the future. By default, +deprecation logging is disabled. You can enable it in the `config/logging.yml` +file by setting the deprecation log level to `DEBUG`. + +[source,yaml] +-------------------------------------------------- +deprecation: DEBUG, deprecation_log_file +-------------------------------------------------- + +This will create a daily rolling deprecation log file in your log directory. +Check this file regularly, especially when you intend to upgrade to a new +major version. 
diff --git a/docs/reference/setup/repositories.asciidoc b/docs/reference/setup/repositories.asciidoc index 964913be94a..5e1f5eeb671 100644 --- a/docs/reference/setup/repositories.asciidoc +++ b/docs/reference/setup/repositories.asciidoc @@ -51,13 +51,22 @@ Run apt-get update and the repository is ready for use. You can install it with: sudo apt-get update && sudo apt-get install elasticsearch -------------------------------------------------- -Configure Elasticsearch to automatically start during bootup: +Configure Elasticsearch to automatically start during bootup. If your +distribution is using SysV init, then you will need to run: [source,sh] -------------------------------------------------- sudo update-rc.d elasticsearch defaults 95 10 -------------------------------------------------- +Otherwise if your distribution is using systemd: + +[source,sh] +-------------------------------------------------- +sudo /bin/systemctl daemon-reload +sudo /bin/systemctl enable elasticsearch.service +-------------------------------------------------- + [float] === YUM diff --git a/pom.xml b/pom.xml index 8a5b11f1bee..b6df03658ef 100644 --- a/pom.xml +++ b/pom.xml @@ -2,12 +2,12 @@ - elasticsearch 4.0.0 org.elasticsearch elasticsearch 2.0.0-SNAPSHOT jar + Elasticsearch core Elasticsearch - Open Source, Distributed, RESTful Search Engine 2009 @@ -43,6 +43,9 @@ /var/log/elasticsearch ${packaging.elasticsearch.home.dir}/plugins /var/run/elasticsearch + /usr/lib/systemd/system + /usr/lib/sysctl.d + /usr/lib/tmpfiles.d false dpkg-sig @@ -291,7 +294,7 @@ sigar sigar system - ${basedir}/lib/sigar/sigar-1.6.4.jar + ${project.basedir}/lib/sigar/sigar-1.6.4.jar true --> @@ -302,19 +305,19 @@ - src/packaging/common/packaging.properties + ${project.basedir}/src/packaging/common/packaging.properties - ${basedir}/src/main/java + ${project.basedir}/src/main/java **/*.json **/*.yml - ${basedir}/src/main/resources + ${project.basedir}/src/main/resources **/*.* @@ -324,7 +327,7 @@ - 
${basedir}/src/test/java + ${project.basedir}/src/test/java **/*.json **/*.yml @@ -334,19 +337,19 @@ true - ${basedir}/src/test/java + ${project.basedir}/src/test/java **/*.gz - ${basedir}/src/test/resources + ${project.basedir}/src/test/resources **/*.* - ${basedir}/rest-api-spec + ${project.basedir}/rest-api-spec rest-api-spec api/*.json @@ -543,14 +546,14 @@ ${project.build.directory}/bin - ${basedir}/bin + ${project.basedir}/bin true *.exe - ${basedir}/bin + ${project.basedir}/bin false *.exe @@ -569,12 +572,12 @@ ${project.build.directory}/generated-packaging/deb/ - src/packaging/common/packaging.properties - src/packaging/deb/packaging.properties + ${project.basedir}/src/packaging/common/packaging.properties + ${project.basedir}/src/packaging/deb/packaging.properties - src/packaging/common/ + ${project.basedir}/src/packaging/common/ true **/* @@ -584,7 +587,7 @@ - src/packaging/deb/ + ${project.basedir}/src/packaging/deb/ true **/* @@ -615,8 +618,8 @@ ${project.build.directory}/generated-packaging/rpm/ - src/packaging/common/packaging.properties - src/packaging/rpm/packaging.properties + ${project.basedir}/src/packaging/common/packaging.properties + ${project.basedir}/src/packaging/rpm/packaging.properties @@ -660,8 +663,8 @@ false ${project.build.directory}/releases/ - ${basedir}/src/main/assemblies/targz-bin.xml - ${basedir}/src/main/assemblies/zip-bin.xml + ${project.basedir}/src/main/assemblies/targz-bin.xml + ${project.basedir}/src/main/assemblies/zip-bin.xml @@ -800,7 +803,19 @@ ${project.build.directory}/generated-packaging/deb/systemd/elasticsearch.service - /usr/lib/systemd/system/elasticsearch.service + ${packaging.elasticsearch.systemd.dir}/elasticsearch.service + file + + + + ${project.build.directory}/generated-packaging/deb/systemd/sysctl/elasticsearch.conf + ${packaging.elasticsearch.systemd.sysctl.dir}/elasticsearch.conf + file + + + + ${project.build.directory}/generated-packaging/deb/systemd/elasticsearch.conf + 
${packaging.elasticsearch.tmpfilesd.dir}/elasticsearch.conf file @@ -978,8 +993,8 @@ - /usr/lib/systemd/system/ - 755 + ${packaging.elasticsearch.systemd.dir} + false true @@ -990,21 +1005,22 @@ + - /usr/lib/sysctl.d/ - 755 + ${packaging.elasticsearch.systemd.sysctl.dir} true - ${project.build.directory}/generated-packaging/rpm/systemd/sysctl.d + ${project.build.directory}/generated-packaging/rpm/systemd/sysctl elasticsearch.conf + - /usr/lib/tmpfiles.d/ + ${packaging.elasticsearch.tmpfilesd.dir} true diff --git a/rest-api-spec/test/cat.thread_pool/10_basic.yaml b/rest-api-spec/test/cat.thread_pool/10_basic.yaml index edb87ce27b9..283e353b7a3 100755 --- a/rest-api-spec/test/cat.thread_pool/10_basic.yaml +++ b/rest-api-spec/test/cat.thread_pool/10_basic.yaml @@ -29,6 +29,18 @@ / #pid id host ip port ^ (\d+ \s+ \S{4} \s+ \S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ (\d+|-) \s+ \n)+ $/ + + - do: + cat.thread_pool: + h: bulk.m* + + - match: + $body: | + /^ bulk.min \s+ bulk.max \s+ \n + (\s+ \d+ \s+ \d+ \s+ \n)+ $/ + +#(\s+ \d+ \s+ \d+ \n)+ $/ + - do: cat.thread_pool: h: id,ba,fa,gea,ga,ia,maa,ma,oa,pa diff --git a/rest-api-spec/test/nodes.stats/20_response_filtering.yaml b/rest-api-spec/test/nodes.stats/20_response_filtering.yaml new file mode 100644 index 00000000000..4031f405259 --- /dev/null +++ b/rest-api-spec/test/nodes.stats/20_response_filtering.yaml @@ -0,0 +1,154 @@ +--- +"Nodes Stats with response filtering": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + # Nodes Stats with no filtering + - do: + nodes.stats: {} + + - is_true: cluster_name + - is_true: nodes + - is_true: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - 
is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - gte: { nodes.$master.jvm.buffer_pools.direct.used_in_bytes: 0 } + + # Nodes Stats with only "cluster_name" field + - do: + nodes.stats: + filter_path: cluster_name + + - is_true: cluster_name + - is_false: nodes + - is_false: nodes.$master.name + - is_false: nodes.$master.indices + - is_false: nodes.$master.jvm + + # Nodes Stats with "nodes" field and sub-fields + - do: + nodes.stats: + filter_path: nodes.* + + - is_false: cluster_name + - is_true: nodes + - is_true: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - gte: { nodes.$master.jvm.buffer_pools.direct.used_in_bytes: 0 } + + # Nodes Stats with "nodes.*.indices" field and sub-fields + - do: + nodes.stats: + filter_path: nodes.*.indices + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_false: nodes.$master.jvm + + # Nodes Stats with "nodes.*.name" and "nodes.*.indices.docs.count" fields + - do: + nodes.stats: + filter_path: [ "nodes.*.name", "nodes.*.indices.docs.count" ] + + - is_false: cluster_name + - is_true: nodes + - is_true: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_false: nodes.$master.indices.segments + - is_false: 
nodes.$master.jvm + + # Nodes Stats with all "count" fields + - do: + nodes.stats: + filter_path: "nodes.**.count" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_true: nodes.$master.indices + - is_true: nodes.$master.indices.docs + - gte: { nodes.$master.indices.docs.count: 0 } + - is_true: nodes.$master.indices.segments + - gte: { nodes.$master.indices.segments.count: 0 } + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - is_false: nodes.$master.jvm.buffer_pools.direct.used_in_bytes + + # Nodes Stats with all "count" fields in sub-fields of "jvm" field + - do: + nodes.stats: + filter_path: "nodes.**.jvm.**.count" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_false: nodes.$master.indices + - is_false: nodes.$master.indices.docs.count + - is_false: nodes.$master.indices.segments.count + - is_true: nodes.$master.jvm + - is_true: nodes.$master.jvm.threads + - gte: { nodes.$master.jvm.threads.count: 0 } + - is_true: nodes.$master.jvm.buffer_pools.direct + - gte: { nodes.$master.jvm.buffer_pools.direct.count: 0 } + - is_false: nodes.$master.jvm.buffer_pools.direct.used_in_bytes + + # Nodes Stats with "nodes.*.fs.data" fields + - do: + nodes.stats: + filter_path: "nodes.*.fs.data" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_false: nodes.$master.indices + - is_false: nodes.$master.jvm + - is_true: nodes.$master.fs.data + - is_true: nodes.$master.fs.data.0.path + - is_true: nodes.$master.fs.data.0.type + - is_true: nodes.$master.fs.data.0.total_in_bytes + + # Nodes Stats with "nodes.*.fs.data.t*" fields + - do: + nodes.stats: + filter_path: "nodes.*.fs.data.t*" + + - is_false: cluster_name + - is_true: nodes + - is_false: nodes.$master.name + - is_false: 
nodes.$master.indices + - is_false: nodes.$master.jvm + - is_true: nodes.$master.fs.data + - is_false: nodes.$master.fs.data.0.path + - is_true: nodes.$master.fs.data.0.type + - is_true: nodes.$master.fs.data.0.total_in_bytes diff --git a/rest-api-spec/test/search/10_source_filtering.yaml b/rest-api-spec/test/search/10_source_filtering.yaml index 40a67ba7e1f..a78a5a2a28f 100644 --- a/rest-api-spec/test/search/10_source_filtering.yaml +++ b/rest-api-spec/test/search/10_source_filtering.yaml @@ -90,3 +90,9 @@ - match: { hits.hits.0.fields: { include.field2 : [v2] }} - is_true: hits.hits.0._source + + - do: + search: + fielddata_fields: [ "count" ] + - match: { hits.hits.0.fields.count: [1] } + diff --git a/rest-api-spec/test/search/70_response_filtering.yaml b/rest-api-spec/test/search/70_response_filtering.yaml new file mode 100644 index 00000000000..ade3b68e256 --- /dev/null +++ b/rest-api-spec/test/search/70_response_filtering.yaml @@ -0,0 +1,87 @@ +--- +"Search with response filtering": + - do: + indices.create: + index: test + - do: + index: + index: test + type: test + id: 1 + body: { foo: bar } + + - do: + index: + index: test + type: test + id: 2 + body: { foo: bar } + + - do: + indices.refresh: + index: [test] + + - do: + search: + index: test + filter_path: "*" + body: "{ query: { match_all: {} } }" + + - is_true: took + - is_true: _shards.total + - is_true: hits.total + - is_true: hits.hits.0._index + - is_true: hits.hits.0._type + - is_true: hits.hits.0._id + - is_true: hits.hits.1._index + - is_true: hits.hits.1._type + - is_true: hits.hits.1._id + + - do: + search: + index: test + filter_path: "took" + body: "{ query: { match_all: {} } }" + + - is_true: took + - is_false: _shards.total + - is_false: hits.total + - is_false: hits.hits.0._index + - is_false: hits.hits.0._type + - is_false: hits.hits.0._id + - is_false: hits.hits.1._index + - is_false: hits.hits.1._type + - is_false: hits.hits.1._id + + - do: + search: + index: test + filter_path: 
"_shards.*" + body: "{ query: { match_all: {} } }" + + - is_false: took + - is_true: _shards.total + - is_false: hits.total + - is_false: hits.hits.0._index + - is_false: hits.hits.0._type + - is_false: hits.hits.0._id + - is_false: hits.hits.1._index + - is_false: hits.hits.1._type + - is_false: hits.hits.1._id + + - do: + search: + index: test + filter_path: [ "hits.**._i*", "**.total" ] + body: "{ query: { match_all: {} } }" + + - is_false: took + - is_true: _shards.total + - is_true: hits.total + - is_true: hits.hits.0._index + - is_false: hits.hits.0._type + - is_true: hits.hits.0._id + - is_true: hits.hits.1._index + - is_false: hits.hits.1._type + - is_true: hits.hits.1._id + diff --git a/src/main/assemblies/common-bin.xml b/src/main/assemblies/common-bin.xml index f9b0a0ae215..49a00eab4ee 100644 --- a/src/main/assemblies/common-bin.xml +++ b/src/main/assemblies/common-bin.xml @@ -74,15 +74,15 @@ README.textile - / + LICENSE.txt - / + NOTICE.txt - / + diff --git a/src/main/java/org/elasticsearch/action/ActionModule.java b/src/main/java/org/elasticsearch/action/ActionModule.java index 1c273f67b5b..0decb393405 100644 --- a/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/src/main/java/org/elasticsearch/action/ActionModule.java @@ -122,8 +122,6 @@ import org.elasticsearch.action.admin.indices.warmer.put.TransportPutWarmerActio import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; -import org.elasticsearch.action.count.CountAction; -import org.elasticsearch.action.count.TransportCountAction; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.exists.ExistsAction; @@ -273,7 +271,6 @@ public class ActionModule extends AbstractModule { registerAction(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class, 
TransportShardMultiTermsVectorAction.class); registerAction(DeleteAction.INSTANCE, TransportDeleteAction.class); - registerAction(CountAction.INSTANCE, TransportCountAction.class); registerAction(ExistsAction.INSTANCE, TransportExistsAction.class); registerAction(SuggestAction.INSTANCE, TransportSuggestAction.class); registerAction(UpdateAction.INSTANCE, TransportUpdateAction.class); diff --git a/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index e675e8befed..aaf5c9e48fd 100644 --- a/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -20,15 +20,9 @@ package org.elasticsearch.action; import com.google.common.base.Preconditions; - -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.PlainListenableActionFuture; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import org.elasticsearch.threadpool.ThreadPool; /** @@ -87,7 +81,7 @@ public abstract class ActionRequestBuilder listener) { + public void execute(ActionListener listener) { client.execute(action, beforeExecute(request), listener); } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java index f373689566f..aff58a7b327 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.health; import 
org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -37,7 +37,7 @@ import static org.elasticsearch.common.unit.TimeValue.readTimeValue; /** * */ -public class ClusterHealthRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class ClusterHealthRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices; private TimeValue timeout = new TimeValue(30, TimeUnit.SECONDS); diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index d183ddd865c..c4cb95c501b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.health; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.common.Strings; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * */ -public class TransportClusterHealthAction extends 
TransportMasterNodeReadOperationAction { +public class TransportClusterHealthAction extends TransportMasterNodeReadAction { private final ClusterName clusterName; private final GatewayAllocator gatewayAllocator; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java index 9485395057e..635be28a646 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; /** */ -public class NodeHotThreads extends NodeOperationResponse { +public class NodeHotThreads extends BaseNodeResponse { private String hotThreads; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java index fe092d7dc81..f7ab360c59f 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; import org.elasticsearch.Version; -import org.elasticsearch.action.support.nodes.NodesOperationRequest; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -30,7 +30,7 @@ import java.util.concurrent.TimeUnit; /** */ -public class NodesHotThreadsRequest extends NodesOperationRequest { +public class NodesHotThreadsRequest extends BaseNodesRequest { int threads = 3; String type = "cpu"; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java index 2b04435eeb4..22d4795fc95 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; /** */ -public class NodesHotThreadsResponse extends NodesOperationResponse { +public class NodesHotThreadsResponse extends BaseNodesResponse { NodesHotThreadsResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index 8b509ef2d94..a73982e5c0c 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -22,8 +22,8 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; import com.google.common.collect.Lists; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -41,7 +41,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesHotThreadsAction extends TransportNodesOperationAction { +public class TransportNodesHotThreadsAction extends TransportNodesAction { @Inject public TransportNodesHotThreadsAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, @@ -92,7 +92,7 @@ public class TransportNodesHotThreadsAction extends TransportNodesOperationActio return false; } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { NodesHotThreadsRequest request; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 66904933db4..f3c5eb7e1fc 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.node.info; import com.google.common.collect.ImmutableMap; import org.elasticsearch.Build; import org.elasticsearch.Version; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -42,7 +42,7 @@ 
import java.util.Map; /** * Node information (static, does not change over time). */ -public class NodeInfo extends NodeOperationResponse { +public class NodeInfo extends BaseNodeResponse { @Nullable private ImmutableMap serviceAttributes; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java index 589471af36d..d01167ceeca 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.info; -import org.elasticsearch.action.support.nodes.NodesOperationRequest; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; /** * A request to get node (cluster) level information. 
*/ -public class NodesInfoRequest extends NodesOperationRequest { +public class NodesInfoRequest extends BaseNodesRequest { private boolean settings = true; private boolean os = true; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java index b7ce109484d..e872be90142 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.info; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -34,7 +34,7 @@ import java.util.Map; /** * */ -public class NodesInfoResponse extends NodesOperationResponse implements ToXContent { +public class NodesInfoResponse extends BaseNodesResponse implements ToXContent { public NodesInfoResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index 29a904fa551..74221fc79ed 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.admin.cluster.node.info; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import 
org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesInfoAction extends TransportNodesOperationAction { +public class TransportNodesInfoAction extends TransportNodesAction { private final NodeService nodeService; @@ -87,7 +87,7 @@ public class TransportNodesInfoAction extends TransportNodesOperationAction { +public class NodesStatsRequest extends BaseNodesRequest { private CommonStatsFlags indices = new CommonStatsFlags(); private boolean os; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java index 27affe2b898..e57fd552d7c 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.node.stats; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.io.IOException; /** * */ -public class NodesStatsResponse extends NodesOperationResponse implements ToXContent { +public class NodesStatsResponse extends BaseNodesResponse implements ToXContent { NodesStatsResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 1f8dea37aae..d808f859037 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -21,8 +21,8 @@ package org.elasticsearch.action.admin.cluster.node.stats; import com.google.common.collect.Lists; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesStatsAction extends TransportNodesOperationAction { +public class TransportNodesStatsAction extends TransportNodesAction { private final NodeService nodeService; @@ -87,7 +87,7 @@ public class TransportNodesStatsAction extends TransportNodesOperationAction { +public class TransportDeleteRepositoryAction extends TransportMasterNodeAction { private final RepositoriesService repositoriesService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java index 86c999982d1..4f5f99b2072 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequest.java @@ -19,9 +19,8 @@ package org.elasticsearch.action.admin.cluster.repositories.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import 
org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +32,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Get repository request */ -public class GetRepositoriesRequest extends MasterNodeReadOperationRequest { +public class GetRepositoriesRequest extends MasterNodeReadRequest { private String[] repositories = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java index 45026f08786..5c58628dc35 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/TransportGetRepositoriesAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.repositories.get; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -39,7 +39,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for get repositories operation */ -public class TransportGetRepositoriesAction extends TransportMasterNodeReadOperationAction { +public class TransportGetRepositoriesAction extends TransportMasterNodeReadAction { @Inject public 
TransportGetRepositoriesAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java index bde461ccf8a..a395d9b9ef2 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/TransportPutRepositoryAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.repositories.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -36,7 +36,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for register repository operation */ -public class TransportPutRepositoryAction extends TransportMasterNodeOperationAction { +public class TransportPutRepositoryAction extends TransportMasterNodeAction { private final RepositoriesService repositoriesService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java index 978738d9f8c..ed687ae2bfd 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/TransportVerifyRepositoryAction.java @@ -21,7 +21,7 @@ package 
org.elasticsearch.action.admin.cluster.repositories.verify; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for verifying repository operation */ -public class TransportVerifyRepositoryAction extends TransportMasterNodeOperationAction { +public class TransportVerifyRepositoryAction extends TransportMasterNodeAction { private final RepositoriesService repositoriesService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java index 1d4f0a6185e..d9829d1e078 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.reroute; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public class TransportClusterRerouteAction extends TransportMasterNodeOperationAction { +public class TransportClusterRerouteAction extends 
TransportMasterNodeAction { private final AllocationService allocationService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index db872f868fd..31375a8912d 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.settings; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -49,7 +49,7 @@ import static org.elasticsearch.cluster.ClusterState.builder; /** * */ -public class TransportClusterUpdateSettingsAction extends TransportMasterNodeOperationAction { +public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAction { private final AllocationService allocationService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index de8e1fcdfab..21ecf8a4c4f 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.shards; import org.elasticsearch.action.ActionRequestValidationException; 
import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -32,7 +32,7 @@ import java.io.IOException; /** */ -public class ClusterSearchShardsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class ClusterSearchShardsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices; @Nullable private String routing; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 1575f3c61a8..10ef0348e46 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.shards; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -42,7 +42,7 @@ import static com.google.common.collect.Sets.newHashSet; /** */ -public class TransportClusterSearchShardsAction extends TransportMasterNodeReadOperationAction { +public class TransportClusterSearchShardsAction extends TransportMasterNodeReadAction { @Inject public 
TransportClusterSearchShardsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 9e8c87bbbae..1373eed1507 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -60,7 +60,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBo *
  • must not contain invalid file name characters {@link org.elasticsearch.common.Strings#INVALID_FILENAME_CHARS}
  • * */ -public class CreateSnapshotRequest extends MasterNodeOperationRequest implements IndicesRequest.Replaceable { +public class CreateSnapshotRequest extends MasterNodeRequest implements IndicesRequest.Replaceable { private String snapshot; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 533d0778925..e97633932d1 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.create; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for create snapshot operation */ -public class TransportCreateSnapshotAction extends TransportMasterNodeOperationAction { +public class TransportCreateSnapshotAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java index 5d6c5544184..d997786d5fc 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java +++ 
b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -35,7 +35,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * files that are associated with this particular snapshot. All files that are shared with * at least one other existing snapshot are left intact. */ -public class DeleteSnapshotRequest extends MasterNodeOperationRequest { +public class DeleteSnapshotRequest extends MasterNodeRequest { private String repository; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java index 5b5a31bedde..97f89c9f647 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/TransportDeleteSnapshotAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,7 +36,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for 
delete snapshot operation */ -public class TransportDeleteSnapshotAction extends TransportMasterNodeOperationAction { +public class TransportDeleteSnapshotAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java index 6b6927b5e0d..03edadd755b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Get snapshot request */ -public class GetSnapshotsRequest extends MasterNodeOperationRequest { +public class GetSnapshotsRequest extends MasterNodeRequest { public static final String ALL_SNAPSHOTS = "_all"; public static final String CURRENT_SNAPSHOT = "_current"; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 9ead8554943..6f480219857 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -22,7 +22,7 @@ package 
org.elasticsearch.action.admin.cluster.snapshots.get; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -39,7 +39,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport Action for get snapshots operation */ -public class TransportGetSnapshotsAction extends TransportMasterNodeOperationAction { +public class TransportGetSnapshotsAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 63f595747a1..ddb177e273b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -48,7 +48,7 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBo /** * Restore snapshot request */ -public 
class RestoreSnapshotRequest extends MasterNodeOperationRequest { +public class RestoreSnapshotRequest extends MasterNodeRequest { private String snapshot; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 70ab71b6e8f..205a2d1178b 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.restore; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Transport action for restore snapshot operation */ -public class TransportRestoreSnapshotAction extends TransportMasterNodeOperationAction { +public class TransportRestoreSnapshotAction extends TransportMasterNodeAction { private final RestoreService restoreService; @Inject diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java index 878ca704345..1f358915662 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java @@ -19,7 +19,7 @@ package 
org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -32,7 +32,7 @@ import java.io.IOException; /** */ -public class SnapshotIndexShardStatus extends BroadcastShardOperationResponse implements ToXContent { +public class SnapshotIndexShardStatus extends BroadcastShardResponse implements ToXContent { private SnapshotIndexShardStage stage = SnapshotIndexShardStage.INIT; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java index c84ad073bd8..b7b2b631b31 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Get snapshot status request */ -public class SnapshotsStatusRequest extends MasterNodeOperationRequest { +public class SnapshotsStatusRequest extends MasterNodeRequest { private String repository = "_all"; diff --git 
a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index b23010e3ecc..22a1753660e 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -47,7 +47,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * Transport client that collects snapshot shard statuses from data nodes */ -public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction { +public class TransportNodesSnapshotsStatus extends TransportNodesAction { public static final String ACTION_NAME = SnapshotsStatusAction.NAME + "[nodes]"; @@ -128,7 +128,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction return true; } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { private SnapshotId[] snapshotIds; @@ -157,7 +157,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction } } - public static class NodesSnapshotStatus extends NodesOperationResponse { + public static class NodesSnapshotStatus extends BaseNodesResponse { private FailedNodeException[] failures; @@ -194,7 +194,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { private SnapshotId[] snapshotIds; @@ -230,7 +230,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesOperationAction } } - public static class NodeSnapshotStatus extends NodeOperationResponse { + public static class NodeSnapshotStatus extends BaseNodeResponse { private ImmutableMap> status; diff --git 
a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index edfc9d5fd32..a38e894e90f 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -49,7 +49,7 @@ import static com.google.common.collect.Sets.newHashSet; /** */ -public class TransportSnapshotsStatusAction extends TransportMasterNodeOperationAction { +public class TransportSnapshotsStatusAction extends TransportMasterNodeAction { private final SnapshotsService snapshotsService; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java index 574ed0170f3..4edd26812a0 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequest.java @@ -19,11 +19,10 @@ package org.elasticsearch.action.admin.cluster.state; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; 
-import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +32,7 @@ import java.io.IOException; /** * */ -public class ClusterStateRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class ClusterStateRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private boolean routingTable = true; private boolean nodes = true; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index d4d647b99b0..e989b7a5df8 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.cluster.state; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -39,7 +39,7 @@ import org.elasticsearch.transport.TransportService; /** * */ -public class TransportClusterStateAction extends TransportMasterNodeReadOperationAction { +public class TransportClusterStateAction extends TransportMasterNodeReadAction { private final ClusterName clusterName; diff --git 
a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java index 24222da8b5a..d0f91f7e9b9 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodeResponse.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -31,7 +31,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -public class ClusterStatsNodeResponse extends NodeOperationResponse { +public class ClusterStatsNodeResponse extends BaseNodeResponse { private NodeInfo nodeInfo; private NodeStats nodeStats; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java index 3a0c26af10c..d33f9acf296 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.cluster.stats; -import org.elasticsearch.action.support.nodes.NodesOperationRequest; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
@@ -28,7 +28,7 @@ import java.io.IOException; /** * A request to get cluster level stats. */ -public class ClusterStatsRequest extends NodesOperationRequest { +public class ClusterStatsRequest extends BaseNodesRequest { ClusterStatsRequest() { } diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java index 3b84e86ea8e..aebdf6c31c3 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,7 +37,7 @@ import java.util.Map; /** * */ -public class ClusterStatsResponse extends NodesOperationResponse implements ToXContent { +public class ClusterStatsResponse extends BaseNodesResponse implements ToXContent { ClusterStatsNodes nodesStats; ClusterStatsIndices indicesStats; diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index c2254ed1294..40c1c8b9e74 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import 
org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.nodes.NodeOperationRequest; -import org.elasticsearch.action.support.nodes.TransportNodesOperationAction; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -51,7 +51,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportClusterStatsAction extends TransportNodesOperationAction { private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store, @@ -142,7 +142,7 @@ public class TransportClusterStatsAction extends TransportNodesOperationAction { +public class PendingClusterTasksRequest extends MasterNodeReadRequest { @Override public ActionRequestValidationException validate() { diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java index 40f634cef2c..5b7f2a881c3 100644 --- a/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java +++ b/src/main/java/org/elasticsearch/action/admin/cluster/tasks/TransportPendingClusterTasksAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.cluster.tasks; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.block.ClusterBlockException; @@ -33,7 +33,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public class TransportPendingClusterTasksAction extends TransportMasterNodeReadOperationAction { +public class TransportPendingClusterTasksAction extends TransportMasterNodeReadAction { private final ClusterService clusterService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 2ba0c606614..4bedc3e3ee8 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -23,7 +23,7 @@ import com.google.common.collect.Sets; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -42,7 +42,7 @@ import java.util.*; /** * Add/remove aliases action */ -public class TransportIndicesAliasesAction extends TransportMasterNodeOperationAction { +public class TransportIndicesAliasesAction extends TransportMasterNodeAction { private final MetaDataIndexAliasesService indexAliasesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java index a85202a308c..29754edeb6f 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/TransportAliasesExistAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.alias.exists; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -33,7 +33,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public class TransportAliasesExistAction extends TransportMasterNodeReadOperationAction { +public class TransportAliasesExistAction extends TransportMasterNodeReadAction { @Inject public TransportAliasesExistAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java index e22e110c942..182b86fd149 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java @@ -18,11 +18,10 @@ */ package org.elasticsearch.action.admin.indices.alias.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.AliasesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; 
+import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,7 +30,7 @@ import java.io.IOException; /** */ -public class GetAliasesRequest extends MasterNodeReadOperationRequest implements AliasesRequest { +public class GetAliasesRequest extends MasterNodeReadRequest implements AliasesRequest { private String[] indices = Strings.EMPTY_ARRAY; private String[] aliases = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index 9c1475136a8..a1088d4fbcd 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.alias.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,7 +36,7 @@ import java.util.List; /** */ -public class TransportGetAliasesAction extends TransportMasterNodeReadOperationAction { +public class TransportGetAliasesAction extends TransportMasterNodeReadAction { @Inject public TransportGetAliasesAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java 
b/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java index d631f8b8d0a..655defeddaf 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java @@ -18,10 +18,8 @@ */ package org.elasticsearch.action.admin.indices.analyze; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.single.custom.SingleCustomOperationRequest; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -48,8 +46,7 @@ public class AnalyzeRequest extends SingleCustomOperationRequest private String field; - AnalyzeRequest() { - + public AnalyzeRequest() { } /** diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java index 323d8869024..cf471ab0c77 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,7 +28,7 @@ import java.io.IOException; /** * */ -public class ClearIndicesCacheRequest extends BroadcastOperationRequest { +public class ClearIndicesCacheRequest extends BroadcastRequest { private boolean filterCache = false; private boolean fieldDataCache = 
false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java index a9f094892d4..cd3355cae87 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class ClearIndicesCacheResponse extends BroadcastOperationResponse { +public class ClearIndicesCacheResponse extends BroadcastResponse { ClearIndicesCacheResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java index 46044b7862d..8aefde1eb70 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +29,7 @@ import 
java.io.IOException; /** * */ -class ShardClearIndicesCacheRequest extends BroadcastShardOperationRequest { +class ShardClearIndicesCacheRequest extends BroadcastShardRequest { private boolean filterCache = false; private boolean fieldDataCache = false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java index d89bfe088f6..c2931df6003 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ShardClearIndicesCacheResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.cache.clear; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardClearIndicesCacheResponse extends BroadcastShardOperationResponse { +class ShardClearIndicesCacheResponse extends BroadcastShardResponse { ShardClearIndicesCacheResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index 8e356f2fd84..88595c70647 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import 
org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -47,7 +47,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Indices clear cache action. */ -public class TransportClearIndicesCacheAction extends TransportBroadcastOperationAction { +public class TransportClearIndicesCacheAction extends TransportBroadcastAction { private final IndicesService indicesService; private final IndicesQueryCache indicesQueryCache; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java index 43343e83894..26a7fa1ffdf 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.close; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Close index action */ -public class TransportCloseIndexAction extends TransportMasterNodeOperationAction { +public class TransportCloseIndexAction extends TransportMasterNodeAction { private final MetaDataIndexStateService indexStateService; 
private final DestructiveOperations destructiveOperations; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index eca8894e43d..9c671cc9505 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.create; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -37,7 +37,7 @@ import org.elasticsearch.transport.TransportService; /** * Create index action. 
*/ -public class TransportCreateIndexAction extends TransportMasterNodeOperationAction { +public class TransportCreateIndexAction extends TransportMasterNodeAction { private final MetaDataCreateIndexService createIndexService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java index c52fc57e653..3720f5fe78b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -37,7 +37,7 @@ import static org.elasticsearch.common.unit.TimeValue.readTimeValue; /** * A request to delete an index. Best created with {@link org.elasticsearch.client.Requests#deleteIndexRequest(String)}. */ -public class DeleteIndexRequest extends MasterNodeOperationRequest implements IndicesRequest.Replaceable { +public class DeleteIndexRequest extends MasterNodeRequest implements IndicesRequest.Replaceable { private String[] indices; // Delete index should work by default on both open and closed indices. 
diff --git a/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java index 4c0de6d799b..a25b56eef47 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.delete; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Delete index action. 
*/ -public class TransportDeleteIndexAction extends TransportMasterNodeOperationAction { +public class TransportDeleteIndexAction extends TransportMasterNodeAction { private final MetaDataDeleteIndexService deleteIndexService; private final DestructiveOperations destructiveOperations; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java index e104090e962..e822f45e7f6 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsRequest.java @@ -19,11 +19,10 @@ package org.elasticsearch.action.admin.indices.exists.indices; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +31,7 @@ import java.io.IOException; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class IndicesExistsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class IndicesExistsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java 
b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java index 0c360468e98..019e8c2f34b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/TransportIndicesExistsAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.exists.indices; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,7 +36,7 @@ import org.elasticsearch.transport.TransportService; /** * Indices exists action. 
*/ -public class TransportIndicesExistsAction extends TransportMasterNodeReadOperationAction { +public class TransportIndicesExistsAction extends TransportMasterNodeReadAction { @Inject public TransportIndicesExistsAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java index fb617f233d3..ef1dc16b190 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TransportTypesExistsAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.exists.types; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -35,7 +35,7 @@ import org.elasticsearch.transport.TransportService; /** * Types exists transport action. 
*/ -public class TransportTypesExistsAction extends TransportMasterNodeReadOperationAction { +public class TransportTypesExistsAction extends TransportMasterNodeReadAction { @Inject public TransportTypesExistsAction(Settings settings, TransportService transportService, ClusterService clusterService, diff --git a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java index 868f32a7a78..8eeb7422bf3 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsRequest.java @@ -18,11 +18,10 @@ */ package org.elasticsearch.action.admin.indices.exists.types; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +31,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** */ -public class TypesExistsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class TypesExistsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices; private String[] types; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java index e34ac8cfab5..57d9455ff91 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java +++ 
b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java @@ -19,9 +19,8 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -38,7 +37,7 @@ import java.io.IOException; * @see org.elasticsearch.client.IndicesAdminClient#flush(FlushRequest) * @see FlushResponse */ -public class FlushRequest extends BroadcastOperationRequest { +public class FlushRequest extends BroadcastRequest { private boolean force = false; private boolean waitIfOngoing = false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java index c4c52e7071c..a158b02611b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class FlushResponse extends BroadcastOperationResponse { +public class FlushResponse extends BroadcastResponse { FlushResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java index 55153562c15..0e38181fa61 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +29,7 @@ import java.io.IOException; /** * */ -class ShardFlushRequest extends BroadcastShardOperationRequest { +class ShardFlushRequest extends BroadcastShardRequest { private FlushRequest request = new FlushRequest(); ShardFlushRequest() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java index 3ba91768732..6f2cc6a5522 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardFlushResponse extends BroadcastShardOperationResponse { +class ShardFlushResponse extends BroadcastShardResponse { ShardFlushResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index c9f637e4371..e546d6f616b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ 
b/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -45,7 +45,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Flush Action. */ -public class TransportFlushAction extends TransportBroadcastOperationAction { +public class TransportFlushAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java index 9772754d330..1f853b6b397 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.mapping.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -36,7 +36,7 @@ import 
org.elasticsearch.transport.TransportService; /** * Put mapping action. */ -public class TransportPutMappingAction extends TransportMasterNodeOperationAction { +public class TransportPutMappingAction extends TransportMasterNodeAction { private final MetaDataMappingService metaDataMappingService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index 83063492c07..1df33c56463 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.open; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Open index action */ -public class TransportOpenIndexAction extends TransportMasterNodeOperationAction { +public class TransportOpenIndexAction extends TransportMasterNodeAction { private final MetaDataIndexStateService indexStateService; private final DestructiveOperations destructiveOperations; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java index d5b822f58cb..3510a3b7f96 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java +++ 
b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.action.admin.indices.optimize; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,7 +36,7 @@ import java.io.IOException; * @see org.elasticsearch.client.IndicesAdminClient#optimize(OptimizeRequest) * @see OptimizeResponse */ -public class OptimizeRequest extends BroadcastOperationRequest { +public class OptimizeRequest extends BroadcastRequest { public static final class Defaults { public static final int MAX_NUM_SEGMENTS = -1; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java index d4a189eb7f4..88341ef2619 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.optimize; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class OptimizeResponse extends BroadcastOperationResponse { +public class OptimizeResponse extends BroadcastResponse { OptimizeResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java 
index 9113581038b..05aeabe21a6 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeRequest.java @@ -20,8 +20,7 @@ package org.elasticsearch.action.admin.indices.optimize; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -31,7 +30,7 @@ import java.io.IOException; /** * */ -final class ShardOptimizeRequest extends BroadcastShardOperationRequest { +final class ShardOptimizeRequest extends BroadcastShardRequest { private OptimizeRequest request = new OptimizeRequest(); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java index 61adb62f1d6..1c9dc4482d9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/ShardOptimizeResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.optimize; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardOptimizeResponse extends BroadcastShardOperationResponse { +class ShardOptimizeResponse extends BroadcastShardResponse { ShardOptimizeResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java index 
c4f276126fa..17a18bae971 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/TransportOptimizeAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -45,7 +45,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Optimize index/indices action. */ -public class TransportOptimizeAction extends TransportBroadcastOperationAction { +public class TransportOptimizeAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java index f2a97ef5fb5..8878713765b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequest.java @@ -19,17 +19,17 @@ package org.elasticsearch.action.admin.indices.recovery; -import java.io.IOException; - -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import 
java.io.IOException; + /** * Request for recovery information */ -public class RecoveryRequest extends BroadcastOperationRequest { +public class RecoveryRequest extends BroadcastRequest { private boolean detailed = false; // Provides extra details in the response private boolean activeOnly = false; // Only reports on active recoveries diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java index 9fd15cd371e..fea33688c14 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.recovery; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContent; @@ -35,7 +35,7 @@ import java.util.Map; /** * Information regarding the recovery state of indices and their associated shards. 
*/ -public class RecoveryResponse extends BroadcastOperationResponse implements ToXContent { +public class RecoveryResponse extends BroadcastResponse implements ToXContent { private boolean detailed = false; private Map> shardResponses = new HashMap<>(); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java index 2e12de4f39f..a4104fbc449 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/ShardRecoveryResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.recovery; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +33,7 @@ import java.io.IOException; /** * Information regarding the recovery state of a shard. 
*/ -public class ShardRecoveryResponse extends BroadcastShardOperationResponse implements ToXContent { +public class ShardRecoveryResponse extends BroadcastShardResponse implements ToXContent { RecoveryState recoveryState; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java b/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java index 2996247963f..2483efbc498 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -51,7 +51,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; * Transport action for shard recovery operation. This transport action does not actually * perform shard recovery, it only reports on recoveries (both active and complete). 
*/ -public class TransportRecoveryAction extends TransportBroadcastOperationAction { +public class TransportRecoveryAction extends TransportBroadcastAction { private final IndicesService indicesService; @@ -149,7 +149,7 @@ public class TransportRecoveryAction extends TransportBroadcastOperationAction { +public class RefreshRequest extends BroadcastRequest { RefreshRequest() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java index 3130b0713da..28295fdd0a0 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.List; * * */ -public class RefreshResponse extends BroadcastOperationResponse { +public class RefreshResponse extends BroadcastResponse { RefreshResponse() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java index da3c5fea9eb..37ea2cc46de 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshRequest.java @@ -19,17 +19,13 @@ package org.elasticsearch.action.admin.indices.refresh; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.index.shard.ShardId; -import java.io.IOException; - /** * */ -class ShardRefreshRequest extends BroadcastShardOperationRequest { +class ShardRefreshRequest extends BroadcastShardRequest { ShardRefreshRequest() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java index c2ab17890eb..4de0f5877dd 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/ShardRefreshResponse.java @@ -19,13 +19,13 @@ package org.elasticsearch.action.admin.indices.refresh; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.index.shard.ShardId; /** * */ -class ShardRefreshResponse extends BroadcastShardOperationResponse { +class ShardRefreshResponse extends BroadcastShardResponse { ShardRefreshResponse() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index 001793ecd17..e2fe442f951 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import 
org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -45,7 +45,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Refresh action. */ -public class TransportRefreshAction extends TransportBroadcastOperationAction { +public class TransportRefreshAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java index 42cdc51ed32..2e8e3ac0cf8 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/seal/SealIndicesRequest.java @@ -19,14 +19,14 @@ package org.elasticsearch.action.admin.indices.seal; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import java.util.Arrays; /** * A request to seal one or more indices. 
*/ -public class SealIndicesRequest extends BroadcastOperationRequest { +public class SealIndicesRequest extends BroadcastRequest { SealIndicesRequest() { } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java index 983e5350a5d..6b0dc8697f3 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -24,7 +24,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.lucene.util.Accountable; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -36,12 +36,11 @@ import org.elasticsearch.index.engine.Segment; import java.io.IOException; import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; -public class IndicesSegmentResponse extends BroadcastOperationResponse implements ToXContent { +public class IndicesSegmentResponse extends BroadcastResponse implements ToXContent { private ShardSegments[] shards; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java index cefc2ebc3bf..570fa89e026 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequest.java @@ -19,15 +19,14 @@ package 
org.elasticsearch.action.admin.indices.segments; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -public class IndicesSegmentsRequest extends BroadcastOperationRequest { +public class IndicesSegmentsRequest extends BroadcastRequest { protected boolean verbose = false; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java index 09d20271776..d33df00fc8c 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/ShardSegments.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.segments; import com.google.common.collect.ImmutableList; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +33,7 @@ import java.util.List; import static org.elasticsearch.cluster.routing.ImmutableShardRouting.readShardRoutingEntry; -public class ShardSegments extends BroadcastShardOperationResponse implements Iterable { +public class ShardSegments extends BroadcastShardResponse implements Iterable { private ShardRouting shardRouting; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index 
fa9639424b4..f043d8ebdb0 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,8 +36,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -51,7 +51,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * */ -public class TransportIndicesSegmentsAction extends TransportBroadcastOperationAction { +public class TransportIndicesSegmentsAction extends TransportBroadcastAction { private final IndicesService indicesService; @@ -122,7 +122,7 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastOperationA return new ShardSegments(indexShard.routingEntry(), 
indexShard.engine().segments(request.verbose)); } - static class IndexShardSegmentRequest extends BroadcastShardOperationRequest { + static class IndexShardSegmentRequest extends BroadcastShardRequest { boolean verbose; IndexShardSegmentRequest() { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java index 2f01e28f969..daf011ece62 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java @@ -19,12 +19,11 @@ package org.elasticsearch.action.admin.indices.settings.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +32,7 @@ import java.io.IOException; /** */ -public class GetSettingsRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public class GetSettingsRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java index c3a8948d1bb..f48b2c4853f 100644 --- 
a/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/settings/get/TransportGetSettingsAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.settings.get; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -40,7 +40,7 @@ import java.util.Map; /** */ -public class TransportGetSettingsAction extends TransportMasterNodeReadOperationAction { +public class TransportGetSettingsAction extends TransportMasterNodeReadAction { private final SettingsFilter settingsFilter; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index 8185badb5af..1278b97934b 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.settings.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; @@ -37,7 +37,7 @@ import 
org.elasticsearch.transport.TransportService; /** * */ -public class TransportUpdateSettingsAction extends TransportMasterNodeOperationAction { +public class TransportUpdateSettingsAction extends TransportMasterNodeAction { private final MetaDataUpdateSettingsService updateSettingsService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java index b8a94ab4d51..a99c0f52def 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.stats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -34,7 +34,7 @@ import java.io.IOException; *

    All the stats to be returned can be cleared using {@link #clear()}, at which point, specific * stats can be enabled. */ -public class IndicesStatsRequest extends BroadcastOperationRequest { +public class IndicesStatsRequest extends BroadcastRequest { private CommonStatsFlags flags = new CommonStatsFlags(); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index f2f10c48f0a..2d9bf1e78f9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -24,7 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.io.stream.StreamInput; @@ -41,7 +41,7 @@ import java.util.Set; /** */ -public class IndicesStatsResponse extends BroadcastOperationResponse implements ToXContent { +public class IndicesStatsResponse extends BroadcastResponse implements ToXContent { private ShardStats[] shards; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index 951c4b95223..fbba68cbee9 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; 
+import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -36,7 +36,7 @@ import static org.elasticsearch.cluster.routing.ImmutableShardRouting.readShardR /** */ -public class ShardStats extends BroadcastShardOperationResponse implements ToXContent { +public class ShardStats extends BroadcastShardResponse implements ToXContent { private ShardRouting shardRouting; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 75191bb903f..b4b0d6a4435 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -52,7 +52,7 @@ import static com.google.common.collect.Lists.newArrayList; /** */ -public class TransportIndicesStatsAction extends TransportBroadcastOperationAction { +public class TransportIndicesStatsAction extends TransportBroadcastAction { 
private final IndicesService indicesService; @@ -190,7 +190,7 @@ public class TransportIndicesStatsAction extends TransportBroadcastOperationActi return new ShardStats(indexShard, indexShard.routingEntry(), flags); } - static class IndexShardStatsRequest extends BroadcastShardOperationRequest { + static class IndexShardStatsRequest extends BroadcastShardRequest { // TODO if there are many indices, the request might hold a large indices array..., we don't really need to serialize it IndicesStatsRequest request; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java index 08801434868..f33d35b66a5 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.template.delete; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,11 +30,11 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to delete an index template. 
*/ -public class DeleteIndexTemplateRequest extends MasterNodeOperationRequest { +public class DeleteIndexTemplateRequest extends MasterNodeRequest { private String name; - DeleteIndexTemplateRequest() { + public DeleteIndexTemplateRequest() { } /** @@ -44,6 +44,14 @@ public class DeleteIndexTemplateRequest extends MasterNodeOperationRequest { +public class TransportDeleteIndexTemplateAction extends TransportMasterNodeAction { private final MetaDataIndexTemplateService indexTemplateService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java index 949944fa61e..aeefc63bfa0 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java @@ -18,9 +18,8 @@ */ package org.elasticsearch.action.admin.indices.template.get; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +31,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * Request that allows to retrieve index templates */ -public class GetIndexTemplatesRequest extends MasterNodeReadOperationRequest { +public class GetIndexTemplatesRequest extends MasterNodeReadRequest { private String[] names; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java b/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java index 
c2600112542..039ca1a726f 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetIndexTemplatesAction.java @@ -22,7 +22,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.Lists; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -39,7 +39,7 @@ import java.util.List; /** * */ -public class TransportGetIndexTemplatesAction extends TransportMasterNodeReadOperationAction { +public class TransportGetIndexTemplatesAction extends TransportMasterNodeReadAction { @Inject public TransportGetIndexTemplatesAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 6e21ba4753f..a4b10cb7783 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; 
+import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -50,7 +50,7 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; /** * A request to create an index template. */ -public class PutIndexTemplateRequest extends MasterNodeOperationRequest implements IndicesRequest { +public class PutIndexTemplateRequest extends MasterNodeRequest implements IndicesRequest { private String name; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java index 5c4979e6253..492dbf352c7 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.admin.indices.template.put; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -34,7 +34,7 @@ import org.elasticsearch.transport.TransportService; /** * Put index template action. 
*/ -public class TransportPutIndexTemplateAction extends TransportMasterNodeOperationAction { +public class TransportPutIndexTemplateAction extends TransportMasterNodeAction { private final MetaDataIndexTemplateService indexTemplateService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java index 7b771f32091..648ab21afd6 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.validate.query; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -32,7 +32,7 @@ import java.io.IOException; /** * Internal validate request executed directly against a specific index shard. 
*/ -class ShardValidateQueryRequest extends BroadcastShardOperationRequest { +class ShardValidateQueryRequest extends BroadcastShardRequest { private BytesReference source; private String[] types = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java index d4e75578f1f..43d3ad82305 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.validate.query; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -31,7 +31,7 @@ import java.io.IOException; * * */ -class ShardValidateQueryResponse extends BroadcastShardOperationResponse { +class ShardValidateQueryResponse extends BroadcastShardResponse { private boolean valid; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index baa4949d29d..5d8e98beac1 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import 
org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -63,7 +63,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * */ -public class TransportValidateQueryAction extends TransportBroadcastOperationAction { +public class TransportValidateQueryAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index d5612235995..3499852c515 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -44,7 +44,7 @@ import java.util.Map; *

    The request requires the query source to be set either using {@link #source(QuerySourceBuilder)}, * or {@link #source(byte[])}. */ -public class ValidateQueryRequest extends BroadcastOperationRequest { +public class ValidateQueryRequest extends BroadcastRequest { private BytesReference source; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java index 6c0a065d3d2..3d1ef78d2bf 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.validate.query; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -36,7 +36,7 @@ import static org.elasticsearch.action.admin.indices.validate.query.QueryExplana * * */ -public class ValidateQueryResponse extends BroadcastOperationResponse { +public class ValidateQueryResponse extends BroadcastResponse { private boolean valid; diff --git a/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java b/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java index bcf3bad07fc..9e135e9a715 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/warmer/delete/TransportDeleteWarmerAction.java @@ -21,7 +21,7 @@ package 
org.elasticsearch.action.admin.indices.warmer.delete; import com.google.common.collect.Lists; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -47,7 +47,7 @@ import java.util.List; * * Note: this is an internal API and should not be used / called by any client code. */ -public class TransportDeleteWarmerAction extends TransportMasterNodeOperationAction { +public class TransportDeleteWarmerAction extends TransportMasterNodeAction { @Inject public TransportDeleteWarmerAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters) { diff --git a/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java b/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java index e92eb3195d7..0b11e0bcf5d 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/warmer/put/TransportPutWarmerAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ 
-51,7 +51,7 @@ import java.util.List; * * Note: this is an internal API and should not be used / called by any client code. */ -public class TransportPutWarmerAction extends TransportMasterNodeOperationAction { +public class TransportPutWarmerAction extends TransportMasterNodeAction { private final TransportSearchAction searchAction; diff --git a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 617c3fc32bd..715c1d716d9 100644 --- a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -246,6 +246,7 @@ public class BulkRequest extends ActionRequest implements Composite public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable Object payload, boolean allowExplicitIndex) throws Exception { XContent xContent = XContentFactory.xContent(data); + int line = 0; int from = 0; int length = data.length(); byte marker = xContent.streamSeparator(); @@ -254,8 +255,9 @@ public class BulkRequest extends ActionRequest implements Composite if (nextMarker == -1) { break; } - // now parse the action + line++; + // now parse the action try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) { // move pointers from = nextMarker + 1; @@ -285,43 +287,53 @@ public class BulkRequest extends ActionRequest implements Composite // at this stage, next token can either be END_OBJECT (and use default index and type, with auto generated id) // or START_OBJECT which will have another set of parameters + token = parser.nextToken(); - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("_index".equals(currentFieldName)) { - if 
(!allowExplicitIndex) { - throw new IllegalArgumentException("explicit index in bulk is not allowed"); - } - index = parser.text(); - } else if ("_type".equals(currentFieldName)) { - type = parser.text(); - } else if ("_id".equals(currentFieldName)) { - id = parser.text(); - } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { - routing = parser.text(); - } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { - parent = parser.text(); - } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) { - timestamp = parser.text(); - } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { - if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - ttl = TimeValue.parseTimeValue(parser.text(), null).millis(); + if (token == XContentParser.Token.START_OBJECT) { + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("_index".equals(currentFieldName)) { + if (!allowExplicitIndex) { + throw new IllegalArgumentException("explicit index in bulk is not allowed"); + } + index = parser.text(); + } else if ("_type".equals(currentFieldName)) { + type = parser.text(); + } else if ("_id".equals(currentFieldName)) { + id = parser.text(); + } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { + routing = parser.text(); + } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { + parent = parser.text(); + } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) { + timestamp = parser.text(); + } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { + if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + ttl = 
TimeValue.parseTimeValue(parser.text(), null).millis(); + } else { + ttl = parser.longValue(); + } + } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { + opType = parser.text(); + } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { + version = parser.longValue(); + } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { + versionType = VersionType.fromString(parser.text()); + } else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) { + retryOnConflict = parser.intValue(); } else { - ttl = parser.longValue(); + throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]"); } - } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { - opType = parser.text(); - } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { - version = parser.longValue(); - } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { - versionType = VersionType.fromString(parser.text()); - } else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) { - retryOnConflict = parser.intValue(); + } else { + throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]"); } } + } else if (token != XContentParser.Token.END_OBJECT) { + throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected " + XContentParser.Token.START_OBJECT + + " or " + XContentParser.Token.END_OBJECT + " but found [" + token + "]"); } if 
("delete".equals(action)) { @@ -331,6 +343,8 @@ public class BulkRequest extends ActionRequest implements Composite if (nextMarker == -1) { break; } + line++; + // order is important, we set parent after routing, so routing will be set to parent if not set explicitly // we use internalAdd so we don't fork here, this allows us not to copy over the big byte array to small chunks // of index request. diff --git a/src/main/java/org/elasticsearch/action/count/CountAction.java b/src/main/java/org/elasticsearch/action/count/CountAction.java index 4c7c8a2fcc2..4cc6210b60c 100644 --- a/src/main/java/org/elasticsearch/action/count/CountAction.java +++ b/src/main/java/org/elasticsearch/action/count/CountAction.java @@ -23,6 +23,8 @@ import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; /** + * Action that shortcuts to the search api with size set to 0. It doesn't have a corresponding + * transport action, it just runs the search api internally. */ public class CountAction extends Action { @@ -35,7 +37,7 @@ public class CountAction extends Action { +public class CountRequest extends BroadcastRequest { public static final float DEFAULT_MIN_SCORE = -1f; @@ -67,12 +68,8 @@ public class CountRequest extends BroadcastOperationRequest { private String[] types = Strings.EMPTY_ARRAY; - long nowInMillis; private int terminateAfter = DEFAULT_TERMINATE_AFTER; - CountRequest() { - } - /** * Constructs a new count request against the provided indices. No indices provided means it will * run against all indices. @@ -81,12 +78,6 @@ public class CountRequest extends BroadcastOperationRequest { super(indices); } - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = super.validate(); - return validationException; - } - /** * The minimum score of the documents to include in the count. 
*/ @@ -121,6 +112,7 @@ public class CountRequest extends BroadcastOperationRequest { /** * The source to execute in the form of a map. */ + @SuppressWarnings("unchecked") public CountRequest source(Map querySource) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -228,24 +220,12 @@ public class CountRequest extends BroadcastOperationRequest { @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - minScore = in.readFloat(); - routing = in.readOptionalString(); - preference = in.readOptionalString(); - source = in.readBytesReference(); - types = in.readStringArray(); - terminateAfter = in.readVInt(); + throw new UnsupportedOperationException("CountRequest doesn't support being sent over the wire, just a shortcut to the search api"); } @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeFloat(minScore); - out.writeOptionalString(routing); - out.writeOptionalString(preference); - out.writeBytesReference(source); - out.writeStringArray(types); - out.writeVInt(terminateAfter); + throw new UnsupportedOperationException("CountRequest doesn't support being sent over the wire, just a shortcut to the search api"); } @Override @@ -258,4 +238,23 @@ public class CountRequest extends BroadcastOperationRequest { } return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", source[" + sSource + "]"; } + + public SearchRequest toSearchRequest() { + SearchRequest searchRequest = new SearchRequest(indices()); + searchRequest.indicesOptions(indicesOptions()); + searchRequest.types(types()); + searchRequest.routing(routing()); + searchRequest.preference(preference()); + searchRequest.source(source()); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(0); + if (minScore() != DEFAULT_MIN_SCORE) { + searchSourceBuilder.minScore(minScore()); + } + if (terminateAfter() != DEFAULT_TERMINATE_AFTER) { + 
searchSourceBuilder.terminateAfter(terminateAfter()); + } + searchRequest.extraSource(searchSourceBuilder); + return searchRequest; + } } diff --git a/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java b/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java index fd30a2d3fc7..3716bf21a68 100644 --- a/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/count/CountRequestBuilder.java @@ -20,6 +20,10 @@ package org.elasticsearch.action.count; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.DelegatingActionListener; import org.elasticsearch.action.support.QuerySourceBuilder; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; @@ -145,6 +149,17 @@ public class CountRequestBuilder extends BroadcastOperationRequestBuilder listener) { + CountRequest countRequest = beforeExecute(request); + client.execute(SearchAction.INSTANCE, countRequest.toSearchRequest(), new DelegatingActionListener(listener) { + @Override + protected CountResponse getDelegatedFromInstigator(SearchResponse response) { + return new CountResponse(response); + } + }); + } + @Override public String toString() { if (sourceBuilder != null) { diff --git a/src/main/java/org/elasticsearch/action/count/CountResponse.java b/src/main/java/org/elasticsearch/action/count/CountResponse.java index 394d266eedc..916c4ef9373 100644 --- a/src/main/java/org/elasticsearch/action/count/CountResponse.java +++ b/src/main/java/org/elasticsearch/action/count/CountResponse.java @@ -19,30 +19,27 @@ package org.elasticsearch.action.count; -import java.io.IOException; -import java.util.List; -import org.elasticsearch.action.ShardOperationFailedException; -import 
org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; +import java.io.IOException; +import java.util.Arrays; + /** * The response of the count action. */ -public class CountResponse extends BroadcastOperationResponse { +public class CountResponse extends BroadcastResponse { - private boolean terminatedEarly; - private long count; + private final boolean terminatedEarly; + private final long count; - CountResponse() { - - } - - CountResponse(long count, boolean hasTerminatedEarly, int totalShards, int successfulShards, int failedShards, List shardFailures) { - super(totalShards, successfulShards, failedShards, shardFailures); - this.count = count; - this.terminatedEarly = hasTerminatedEarly; + public CountResponse(SearchResponse searchResponse) { + super(searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(), Arrays.asList(searchResponse.getShardFailures())); + this.count = searchResponse.getHits().totalHits(); + this.terminatedEarly = searchResponse.isTerminatedEarly() != null && searchResponse.isTerminatedEarly(); } /** @@ -65,15 +62,11 @@ public class CountResponse extends BroadcastOperationResponse { @Override public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - count = in.readVLong(); - terminatedEarly = in.readBoolean(); + throw new UnsupportedOperationException("CountResponse doesn't support being sent over the wire, just a shortcut to the search api"); } @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVLong(count); - out.writeBoolean(terminatedEarly); + throw new UnsupportedOperationException("CountResponse doesn't support being sent over 
the wire, just a shortcut to the search api"); } } diff --git a/src/main/java/org/elasticsearch/action/count/ShardCountRequest.java b/src/main/java/org/elasticsearch/action/count/ShardCountRequest.java deleted file mode 100644 index 14a8b0026f9..00000000000 --- a/src/main/java/org/elasticsearch/action/count/ShardCountRequest.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.count; - -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -import static org.elasticsearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER; - -/** - * Internal count request executed directly against a specific index shard. 
- */ -class ShardCountRequest extends BroadcastShardOperationRequest { - - private float minScore; - private int terminateAfter; - - private BytesReference querySource; - - private String[] types = Strings.EMPTY_ARRAY; - - private long nowInMillis; - - @Nullable - private String[] filteringAliases; - - ShardCountRequest() { - - } - - ShardCountRequest(ShardId shardId, @Nullable String[] filteringAliases, CountRequest request) { - super(shardId, request); - this.minScore = request.minScore(); - this.querySource = request.source(); - this.types = request.types(); - this.filteringAliases = filteringAliases; - this.nowInMillis = request.nowInMillis; - this.terminateAfter = request.terminateAfter(); - } - - public float minScore() { - return minScore; - } - - public BytesReference querySource() { - return querySource; - } - - public String[] types() { - return this.types; - } - - public String[] filteringAliases() { - return filteringAliases; - } - - public long nowInMillis() { - return this.nowInMillis; - } - - public int terminateAfter() { - return this.terminateAfter; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - minScore = in.readFloat(); - - querySource = in.readBytesReference(); - - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new String[typesSize]; - for (int i = 0; i < typesSize; i++) { - types[i] = in.readString(); - } - } - int aliasesSize = in.readVInt(); - if (aliasesSize > 0) { - filteringAliases = new String[aliasesSize]; - for (int i = 0; i < aliasesSize; i++) { - filteringAliases[i] = in.readString(); - } - } - nowInMillis = in.readVLong(); - terminateAfter = in.readVInt(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeFloat(minScore); - - out.writeBytesReference(querySource); - - out.writeVInt(types.length); - for (String type : types) { - out.writeString(type); - } - if (filteringAliases != null) { - 
out.writeVInt(filteringAliases.length); - for (String alias : filteringAliases) { - out.writeString(alias); - } - } else { - out.writeVInt(0); - } - out.writeVLong(nowInMillis); - out.writeVInt(terminateAfter); - } -} diff --git a/src/main/java/org/elasticsearch/action/count/ShardCountResponse.java b/src/main/java/org/elasticsearch/action/count/ShardCountResponse.java deleted file mode 100644 index 1847ba0cb02..00000000000 --- a/src/main/java/org/elasticsearch/action/count/ShardCountResponse.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.count; - -import org.elasticsearch.Version; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -/** - * Internal count response of a shard count request executed directly against a specific shard. 
- * - * - */ -class ShardCountResponse extends BroadcastShardOperationResponse { - - private long count; - private boolean terminatedEarly; - - ShardCountResponse() { - - } - - ShardCountResponse(ShardId shardId, long count, boolean terminatedEarly) { - super(shardId); - this.count = count; - this.terminatedEarly = terminatedEarly; - } - - public long getCount() { - return this.count; - } - - public boolean terminatedEarly() { - return this.terminatedEarly; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - count = in.readVLong(); - terminatedEarly = in.readBoolean(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVLong(count); - out.writeBoolean(terminatedEarly); - } -} diff --git a/src/main/java/org/elasticsearch/action/count/TransportCountAction.java b/src/main/java/org/elasticsearch/action/count/TransportCountAction.java deleted file mode 100644 index 93e0a378ed6..00000000000 --- a/src/main/java/org/elasticsearch/action/count/TransportCountAction.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.action.count; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; -import org.elasticsearch.cache.recycler.PageCacheRecycler; -import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.routing.GroupShardsIterator; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.DefaultSearchContext; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; -import org.elasticsearch.search.query.QueryPhaseExecutionException; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; - -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReferenceArray; - -import static com.google.common.collect.Lists.newArrayList; -import static 
org.elasticsearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER; - -/** - * - */ -public class TransportCountAction extends TransportBroadcastOperationAction { - - private final IndicesService indicesService; - private final ScriptService scriptService; - private final PageCacheRecycler pageCacheRecycler; - private final BigArrays bigArrays; - - @Inject - public TransportCountAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, - IndicesService indicesService, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, - BigArrays bigArrays, ActionFilters actionFilters) { - super(settings, CountAction.NAME, threadPool, clusterService, transportService, actionFilters, - CountRequest.class, ShardCountRequest.class, ThreadPool.Names.SEARCH); - this.indicesService = indicesService; - this.scriptService = scriptService; - this.pageCacheRecycler = pageCacheRecycler; - this.bigArrays = bigArrays; - } - - @Override - protected void doExecute(CountRequest request, ActionListener listener) { - request.nowInMillis = System.currentTimeMillis(); - super.doExecute(request, listener); - } - - @Override - protected ShardCountRequest newShardRequest(int numShards, ShardRouting shard, CountRequest request) { - String[] filteringAliases = clusterService.state().metaData().filteringAliases(shard.index(), request.indices()); - return new ShardCountRequest(shard.shardId(), filteringAliases, request); - } - - @Override - protected ShardCountResponse newShardResponse() { - return new ShardCountResponse(); - } - - @Override - protected GroupShardsIterator shards(ClusterState clusterState, CountRequest request, String[] concreteIndices) { - Map> routingMap = clusterState.metaData().resolveSearchRouting(request.routing(), request.indices()); - return clusterService.operationRouting().searchShards(clusterState, request.indices(), concreteIndices, routingMap, request.preference()); - } - - @Override - protected 
ClusterBlockException checkGlobalBlock(ClusterState state, CountRequest request) { - return state.blocks().globalBlockedException(ClusterBlockLevel.READ); - } - - @Override - protected ClusterBlockException checkRequestBlock(ClusterState state, CountRequest countRequest, String[] concreteIndices) { - return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices); - } - - @Override - protected CountResponse newResponse(CountRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) { - int successfulShards = 0; - int failedShards = 0; - long count = 0; - boolean terminatedEarly = false; - List shardFailures = null; - for (int i = 0; i < shardsResponses.length(); i++) { - Object shardResponse = shardsResponses.get(i); - if (shardResponse == null) { - // simply ignore non active shards - } else if (shardResponse instanceof BroadcastShardOperationFailedException) { - failedShards++; - if (shardFailures == null) { - shardFailures = newArrayList(); - } - shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse)); - } else { - count += ((ShardCountResponse) shardResponse).getCount(); - if (((ShardCountResponse) shardResponse).terminatedEarly()) { - terminatedEarly = true; - } - successfulShards++; - } - } - return new CountResponse(count, terminatedEarly, shardsResponses.length(), successfulShards, failedShards, shardFailures); - } - - @Override - protected ShardCountResponse shardOperation(ShardCountRequest request) { - IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); - IndexShard indexShard = indexService.shardSafe(request.shardId().id()); - - SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id()); - SearchContext context = new DefaultSearchContext(0, - new ShardSearchLocalRequest(request.types(), request.nowInMillis(), 
request.filteringAliases()), - shardTarget, indexShard.acquireSearcher("count"), indexService, indexShard, - scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter()); - SearchContext.setCurrent(context); - - try { - // TODO: min score should move to be "null" as a value that is not initialized... - if (request.minScore() != -1) { - context.minimumScore(request.minScore()); - } - BytesReference source = request.querySource(); - if (source != null && source.length() > 0) { - try { - QueryParseContext.setTypes(request.types()); - context.parsedQuery(indexService.queryParserService().parseQuery(source)); - } finally { - QueryParseContext.removeTypes(); - } - } - final boolean hasTerminateAfterCount = request.terminateAfter() != DEFAULT_TERMINATE_AFTER; - boolean terminatedEarly = false; - context.preProcess(); - try { - long count; - if (hasTerminateAfterCount) { - final Lucene.EarlyTerminatingCollector countCollector = - Lucene.createCountBasedEarlyTerminatingCollector(request.terminateAfter()); - terminatedEarly = Lucene.countWithEarlyTermination(context.searcher(), context.query(), countCollector); - count = countCollector.count(); - } else { - count = Lucene.count(context.searcher(), context.query()); - } - return new ShardCountResponse(request.shardId(), count, terminatedEarly); - } catch (Exception e) { - throw new QueryPhaseExecutionException(context, "failed to execute count", e); - } - } finally { - // this will also release the index searcher - context.close(); - SearchContext.removeCurrent(); - } - } -} diff --git a/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java b/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java index 84c5d32aaf5..32ff0b1e014 100644 --- a/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java +++ b/src/main/java/org/elasticsearch/action/exists/ExistsRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.exists; import 
org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -38,7 +38,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Map; -public class ExistsRequest extends BroadcastOperationRequest { +public class ExistsRequest extends BroadcastRequest { public static final float DEFAULT_MIN_SCORE = -1f; private float minScore = DEFAULT_MIN_SCORE; diff --git a/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java b/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java index 6b1c58990dd..f271dc65f7f 100644 --- a/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java +++ b/src/main/java/org/elasticsearch/action/exists/ExistsResponse.java @@ -20,14 +20,14 @@ package org.elasticsearch.action.exists; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.List; -public class ExistsResponse extends BroadcastOperationResponse { +public class ExistsResponse extends BroadcastResponse { private boolean exists = false; diff --git a/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java b/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java index a8f8bff91fa..276e6ea117e 100644 --- a/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java +++ 
b/src/main/java/org/elasticsearch/action/exists/ShardExistsRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.exists; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -29,7 +29,7 @@ import org.elasticsearch.index.shard.ShardId; import java.io.IOException; -class ShardExistsRequest extends BroadcastShardOperationRequest { +class ShardExistsRequest extends BroadcastShardRequest { private float minScore; diff --git a/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java b/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java index e94330c042f..25f813ee424 100644 --- a/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java +++ b/src/main/java/org/elasticsearch/action/exists/ShardExistsResponse.java @@ -19,14 +19,14 @@ package org.elasticsearch.action.exists; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; import java.io.IOException; -class ShardExistsResponse extends BroadcastShardOperationResponse { +class ShardExistsResponse extends BroadcastShardResponse { private boolean exists; diff --git a/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java b/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java index cf4e41ce965..f9118e8c05a 100644 --- a/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java +++ b/src/main/java/org/elasticsearch/action/exists/TransportExistsAction.java @@ -25,7 +25,7 @@ import 
org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -39,8 +39,8 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.script.ScriptService; @@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; import static com.google.common.collect.Lists.newArrayList; import static org.elasticsearch.action.exists.ExistsRequest.DEFAULT_MIN_SCORE; -public class TransportExistsAction extends TransportBroadcastOperationAction { +public class TransportExistsAction extends TransportBroadcastAction { private final IndicesService indicesService; private final ScriptService scriptService; diff --git a/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java b/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java index f1b7b571833..8a8eaee36cf 100644 --- a/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java +++ b/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.explain; import 
org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -35,7 +35,7 @@ import java.io.IOException; /** * Explain request encapsulating the explain query and document identifier to get an explanation for. */ -public class ExplainRequest extends SingleShardOperationRequest { +public class ExplainRequest extends SingleShardRequest { private String type = "_all"; private String id; diff --git a/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index c2ca4c7558a..8cfba3dd1ce 100644 --- a/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -33,13 +33,13 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.IndexService; import 
org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.DefaultSearchContext; @@ -56,7 +56,7 @@ import java.io.IOException; * Explain transport action. Computes the explain on the targeted shard. */ // TODO: AggregatedDfs. Currently the idf can be different then when executing a normal search with explain. -public class TransportExplainAction extends TransportShardSingleOperationAction { +public class TransportExplainAction extends TransportSingleShardAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java index ff61fe88ee9..e157865ecdf 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.fieldstats; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,7 +29,7 @@ import java.io.IOException; /** */ -public class FieldStatsRequest extends BroadcastOperationRequest { +public class FieldStatsRequest extends BroadcastRequest { public final static String DEFAULT_LEVEL = "cluster"; 
diff --git a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java index e6f69e9791a..a8f66ca56ea 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.fieldstats; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +32,7 @@ import java.util.Map; /** */ -public class FieldStatsResponse extends BroadcastOperationResponse { +public class FieldStatsResponse extends BroadcastResponse { private Map> indicesMergedFieldStats; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java index fb46ff66d3b..0ce83d99296 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.fieldstats; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -28,7 +28,7 @@ import java.io.IOException; /** */ -public class FieldStatsShardRequest extends BroadcastShardOperationRequest { +public class FieldStatsShardRequest extends BroadcastShardRequest { private String[] fields; diff --git 
a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java index ada4552e94c..c1094ce4d3e 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsShardResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.fieldstats; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +30,7 @@ import java.util.Map; /** */ -public class FieldStatsShardResponse extends BroadcastShardOperationResponse { +public class FieldStatsShardResponse extends BroadcastShardResponse { private Map fieldStats; diff --git a/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java b/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java index 12a6f41e13d..43e78ec5b87 100644 --- a/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java +++ b/src/main/java/org/elasticsearch/action/fieldstats/TransportFieldStatsTransportAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.block.ClusterBlockException; @@ -47,10 +47,13 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicReferenceArray; -public class TransportFieldStatsTransportAction extends TransportBroadcastOperationAction { +public class TransportFieldStatsTransportAction extends TransportBroadcastAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/get/GetRequest.java b/src/main/java/org/elasticsearch/action/get/GetRequest.java index 6a8497ace71..1d2769017bc 100644 --- a/src/main/java/org/elasticsearch/action/get/GetRequest.java +++ b/src/main/java/org/elasticsearch/action/get/GetRequest.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.get; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -43,7 +43,7 @@ import java.io.IOException; * @see org.elasticsearch.client.Requests#getRequest(String) * @see org.elasticsearch.client.Client#get(GetRequest) */ -public class GetRequest extends SingleShardOperationRequest { +public class GetRequest extends SingleShardRequest { private String type; private String id; diff --git a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java index eb3e25c1330..8a6d552807b 100644 --- 
a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java +++ b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java @@ -20,19 +20,15 @@ package org.elasticsearch.action.get; import com.carrotsearch.hppc.IntArrayList; -import com.carrotsearch.hppc.LongArrayList; -import org.elasticsearch.Version; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.VersionType; -import org.elasticsearch.search.fetch.source.FetchSourceContext; import java.io.IOException; import java.util.ArrayList; import java.util.List; -public class MultiGetShardRequest extends SingleShardOperationRequest { +public class MultiGetShardRequest extends SingleShardRequest { private int shardId; private String preference; diff --git a/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 2324835b9dc..08774a2b421 100644 --- a/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.get; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -40,7 +40,7 @@ import org.elasticsearch.transport.TransportService; /** * Performs the get operation. 
*/ -public class TransportGetAction extends TransportShardSingleOperationAction { +public class TransportGetAction extends TransportSingleShardAction { private final IndicesService indicesService; private final boolean realtime; diff --git a/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index fb6bac8cdc8..fb1b751a9eb 100644 --- a/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -23,21 +23,21 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class TransportShardMultiGetAction extends TransportShardSingleOperationAction { +public class TransportShardMultiGetAction extends TransportSingleShardAction { private static final String ACTION_NAME = MultiGetAction.NAME + "[shard]"; diff --git 
a/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java b/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java index f68745e0adf..9c9a3859585 100644 --- a/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java +++ b/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -43,7 +43,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to execute a percolate operation. 
*/ -public class PercolateRequest extends BroadcastOperationRequest implements CompositeIndicesRequest { +public class PercolateRequest extends BroadcastRequest implements CompositeIndicesRequest { private String documentType; private String routing; @@ -55,7 +55,7 @@ public class PercolateRequest extends BroadcastOperationRequest, ToXContent { +public class PercolateResponse extends BroadcastResponse implements Iterable, ToXContent { public static final Match[] EMPTY = new Match[0]; diff --git a/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java b/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java index 8364eb1610d..f0b1a96e1d5 100644 --- a/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java +++ b/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java @@ -19,9 +19,8 @@ package org.elasticsearch.action.percolate; -import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,7 +30,7 @@ import java.io.IOException; /** */ -public class PercolateShardRequest extends BroadcastShardOperationRequest { +public class PercolateShardRequest extends BroadcastShardRequest { private String documentType; private BytesReference source; diff --git a/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java b/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java index b731208def7..c626cda581e 100644 --- a/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java +++ b/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java @@ -19,9 +19,8 @@ package 
org.elasticsearch.action.percolate; import com.google.common.collect.ImmutableList; - import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -42,7 +41,7 @@ import java.util.Map; /** */ -public class PercolateShardResponse extends BroadcastShardOperationResponse { +public class PercolateShardResponse extends BroadcastShardResponse { private static final BytesRef[] EMPTY_MATCHES = new BytesRef[0]; private static final float[] EMPTY_SCORES = new float[0]; diff --git a/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java index d1ee7be3b19..622b541a0ea 100644 --- a/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java @@ -26,7 +26,7 @@ import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -52,7 +52,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * */ -public class TransportPercolateAction extends TransportBroadcastOperationAction { +public class TransportPercolateAction extends 
TransportBroadcastAction { private final PercolatorService percolatorService; private final TransportGetAction getAction; diff --git a/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java index ce38859174f..adca1883470 100644 --- a/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java +++ b/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; @@ -49,7 +49,7 @@ import java.util.List; /** */ -public class TransportShardMultiPercolateAction extends TransportShardSingleOperationAction { +public class TransportShardMultiPercolateAction extends TransportSingleShardAction { private final PercolatorService percolatorService; @@ -108,7 +108,7 @@ public class TransportShardMultiPercolateAction extends TransportShardSingleOper } - public static class Request extends SingleShardOperationRequest implements IndicesRequest { + public static class Request extends SingleShardRequest implements IndicesRequest { private int shardId; private String preference; diff --git a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java 
b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java index d4c48e5034f..794dd9badf7 100644 --- a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java +++ b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.suggest; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,7 +30,7 @@ import java.io.IOException; /** * Internal suggest request executed directly against a specific index shard. */ -final class ShardSuggestRequest extends BroadcastShardOperationRequest { +final class ShardSuggestRequest extends BroadcastShardRequest { private BytesReference suggestSource; diff --git a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java index f6495c60163..bca29800bd1 100644 --- a/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java +++ b/src/main/java/org/elasticsearch/action/suggest/ShardSuggestResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.suggest; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +30,7 @@ import java.io.IOException; /** * Internal suggest response of a shard suggest request executed directly against a specific shard. 
*/ -class ShardSuggestResponse extends BroadcastShardOperationResponse { +class ShardSuggestResponse extends BroadcastShardResponse { private final Suggest suggest; diff --git a/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java b/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java index fdb5d7a0117..c75e262bac4 100644 --- a/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java +++ b/src/main/java/org/elasticsearch/action/suggest/SuggestRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.suggest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -48,7 +48,7 @@ import java.util.Arrays; * @see org.elasticsearch.client.Requests#suggestRequest(String...) 
* @see org.elasticsearch.search.suggest.SuggestBuilders */ -public final class SuggestRequest extends BroadcastOperationRequest { +public final class SuggestRequest extends BroadcastRequest { @Nullable private String routing; diff --git a/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java b/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java index 24a8922c5f9..445e804b5b5 100644 --- a/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java +++ b/src/main/java/org/elasticsearch/action/suggest/SuggestResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.suggest; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,7 +35,7 @@ import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; /** * The response of the suggest action. 
*/ -public final class SuggestResponse extends BroadcastOperationResponse { +public final class SuggestResponse extends BroadcastResponse { private final Suggest suggest; diff --git a/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 9874783ae79..ac046e6552d 100644 --- a/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -36,8 +36,8 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.suggest.stats.ShardSuggestService; import org.elasticsearch.indices.IndicesService; @@ -58,7 +58,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Defines the transport of a suggestion request across the cluster */ -public class TransportSuggestAction extends TransportBroadcastOperationAction { +public class TransportSuggestAction extends TransportBroadcastAction { 
private final IndicesService indicesService; private final SuggestPhase suggestPhase; diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java index 50fb7b097f2..6170d967002 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.client.ElasticsearchClient; /** */ -public abstract class BroadcastOperationRequestBuilder, Response extends BroadcastOperationResponse, RequestBuilder extends BroadcastOperationRequestBuilder> +public abstract class BroadcastOperationRequestBuilder, Response extends BroadcastResponse, RequestBuilder extends BroadcastOperationRequestBuilder> extends ActionRequestBuilder { protected BroadcastOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequest.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java similarity index 88% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java index 80eeacedbba..19adbdce01a 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java @@ -31,20 +31,20 @@ import java.io.IOException; /** * */ -public abstract class BroadcastOperationRequest extends ActionRequest implements IndicesRequest.Replaceable { +public abstract class BroadcastRequest extends ActionRequest implements IndicesRequest.Replaceable { protected String[] indices; private IndicesOptions indicesOptions 
= IndicesOptions.strictExpandOpenAndForbidClosed(); - protected BroadcastOperationRequest() { + protected BroadcastRequest() { } - protected BroadcastOperationRequest(ActionRequest originalRequest) { + protected BroadcastRequest(ActionRequest originalRequest) { super(originalRequest); } - protected BroadcastOperationRequest(String[] indices) { + protected BroadcastRequest(String[] indices) { this.indices = indices; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastResponse.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastResponse.java index 2fd4f97ca29..560c7ec9869 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastResponse.java @@ -19,30 +19,30 @@ package org.elasticsearch.action.support.broadcast; -import static org.elasticsearch.action.support.DefaultShardOperationFailedException.readShardOperationFailed; - -import java.io.IOException; -import java.util.List; - import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.action.support.DefaultShardOperationFailedException.readShardOperationFailed; + /** * Base class for all broadcast operation based responses. 
*/ -public abstract class BroadcastOperationResponse extends ActionResponse { +public abstract class BroadcastResponse extends ActionResponse { private static final ShardOperationFailedException[] EMPTY = new ShardOperationFailedException[0]; private int totalShards; private int successfulShards; private int failedShards; private ShardOperationFailedException[] shardFailures = EMPTY; - protected BroadcastOperationResponse() { + protected BroadcastResponse() { } - protected BroadcastOperationResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { + protected BroadcastResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { this.totalShards = totalShards; this.successfulShards = successfulShards; this.failedShards = failedShards; @@ -73,7 +73,7 @@ public abstract class BroadcastOperationResponse extends ActionResponse { /** * The list of shard failures exception. */ - public ShardOperationFailedException[] getShardFailures() { + public ShardOperationFailedException[] getShardFailures() { return shardFailures; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationRequest.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java similarity index 86% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java index f6c917fbf9b..e416cd517e1 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java @@ -32,22 +32,22 @@ import java.io.IOException; /** * */ -public abstract class BroadcastShardOperationRequest extends TransportRequest implements IndicesRequest { +public abstract class BroadcastShardRequest extends TransportRequest implements IndicesRequest { private 
ShardId shardId; protected OriginalIndices originalIndices; - protected BroadcastShardOperationRequest() { + protected BroadcastShardRequest() { } - protected BroadcastShardOperationRequest(ShardId shardId, BroadcastOperationRequest request) { + protected BroadcastShardRequest(ShardId shardId, BroadcastRequest request) { super(request); this.shardId = shardId; this.originalIndices = new OriginalIndices(request); } - protected BroadcastShardOperationRequest(ShardId shardId, OriginalIndices originalIndices) { + protected BroadcastShardRequest(ShardId shardId, OriginalIndices originalIndices) { this.shardId = shardId; this.originalIndices = originalIndices; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationResponse.java b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java similarity index 89% rename from src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java index cc0c64b532d..bf7d271bb6f 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java @@ -29,15 +29,15 @@ import java.io.IOException; /** * */ -public abstract class BroadcastShardOperationResponse extends TransportResponse { +public abstract class BroadcastShardResponse extends TransportResponse { ShardId shardId; - protected BroadcastShardOperationResponse() { + protected BroadcastShardResponse() { } - protected BroadcastShardOperationResponse(ShardId shardId) { + protected BroadcastShardResponse(ShardId shardId) { this.shardId = shardId; } diff --git a/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java b/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java similarity index 95% rename 
from src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java rename to src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 6ff55467f7d..c77f3ec766b 100644 --- a/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java +++ b/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -43,7 +43,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public abstract class TransportBroadcastOperationAction +public abstract class TransportBroadcastAction extends HandledTransportAction { protected final ThreadPool threadPool; @@ -52,8 +52,8 @@ public abstract class TransportBroadcastOperationAction request, Class shardRequest, String shardExecutor) { + protected TransportBroadcastAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, + Class request, Class shardRequest, String shardExecutor) { super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; diff --git a/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index ea3a1e43bff..870b1077456 100644 --- a/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -33,7 +33,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; * Abstract class that allows to mark action requests that support acknowledgements. * Facilitates consistency across different api. 
*/ -public abstract class AcknowledgedRequest extends MasterNodeOperationRequest implements AckedRequest { +public abstract class AcknowledgedRequest extends MasterNodeRequest implements AckedRequest { public static final TimeValue DEFAULT_ACK_TIMEOUT = timeValueSeconds(30); diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java index 5d7af81927b..0b3b5af36d2 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java @@ -22,15 +22,13 @@ package org.elasticsearch.action.support.master; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.common.unit.TimeValue; /** * Base request builder for master node operations */ -public abstract class MasterNodeOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeOperationRequestBuilder> +public abstract class MasterNodeOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeOperationRequestBuilder> extends ActionRequestBuilder { protected MasterNodeOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java index 02c83298c25..7955abfbe96 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java +++ 
b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java @@ -21,14 +21,12 @@ package org.elasticsearch.action.support.master; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.ClusterAdminClient; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.client.IndicesAdminClient; /** * Base request builder for master node read operations that can be executed on the local node as well */ -public abstract class MasterNodeReadOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeReadOperationRequestBuilder> +public abstract class MasterNodeReadOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeReadOperationRequestBuilder> extends MasterNodeOperationRequestBuilder { protected MasterNodeReadOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequest.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java index df47cae3f1a..b190a6e93c4 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.support.master; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +27,7 @@ import java.io.IOException; /** * Base request for master based read operations that allows to read the cluster state from the local node if needed */ 
-public abstract class MasterNodeReadOperationRequest extends MasterNodeOperationRequest { +public abstract class MasterNodeReadRequest extends MasterNodeRequest { protected boolean local = false; diff --git a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequest.java b/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index e1e90e932ff..d3621cac58c 100644 --- a/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -29,17 +29,17 @@ import java.io.IOException; /** * A based request for master based operation. */ -public abstract class MasterNodeOperationRequest extends ActionRequest { +public abstract class MasterNodeRequest extends ActionRequest { public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); protected TimeValue masterNodeTimeout = DEFAULT_MASTER_NODE_TIMEOUT; - protected MasterNodeOperationRequest() { + protected MasterNodeRequest() { } - protected MasterNodeOperationRequest(ActionRequest request) { + protected MasterNodeRequest(ActionRequest request) { super(request); } diff --git a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java similarity index 94% rename from src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java rename to src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 9e1c662cf60..f8c0c07da58 100644 --- a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java +++ 
b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -36,20 +36,23 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.*; +import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportService; /** * A base class for operations that needs to be performed on the master node. */ -public abstract class TransportMasterNodeOperationAction extends HandledTransportAction { +public abstract class TransportMasterNodeAction extends HandledTransportAction { protected final TransportService transportService; protected final ClusterService clusterService; final String executor; - protected TransportMasterNodeOperationAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - Class request) { + protected TransportMasterNodeAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, + Class request) { super(settings, actionName, threadPool, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; diff --git a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadOperationAction.java b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java similarity index 80% rename from src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadOperationAction.java rename to 
src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java index 383de7ceb53..c33b9fde774 100644 --- a/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadOperationAction.java +++ b/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java @@ -30,13 +30,13 @@ import org.elasticsearch.transport.TransportService; * A base class for read operations that needs to be performed on the master node. * Can also be executed on the local node if needed. */ -public abstract class TransportMasterNodeReadOperationAction extends TransportMasterNodeOperationAction { +public abstract class TransportMasterNodeReadAction extends TransportMasterNodeAction { public static final String FORCE_LOCAL_SETTING = "action.master.force_local"; private Boolean forceLocal; - protected TransportMasterNodeReadOperationAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Class request) { + protected TransportMasterNodeReadAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Class request) { super(settings, actionName, transportService, clusterService, threadPool, actionFilters,request); this.forceLocal = settings.getAsBoolean(FORCE_LOCAL_SETTING, null); } diff --git a/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java b/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java index 5f0d34675b4..fbd095bb874 100644 --- a/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java +++ b/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.support.master.info; import org.elasticsearch.action.IndicesRequest; import 
org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,7 +30,7 @@ import java.io.IOException; /** */ -public abstract class ClusterInfoRequest extends MasterNodeReadOperationRequest implements IndicesRequest.Replaceable { +public abstract class ClusterInfoRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { private String[] indices = Strings.EMPTY_ARRAY; private String[] types = Strings.EMPTY_ARRAY; diff --git a/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java b/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java index fdf6352ef18..d1bdb86e1bb 100644 --- a/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java +++ b/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.support.master.info; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; @@ -30,7 +30,7 @@ import org.elasticsearch.transport.TransportService; /** */ -public abstract class TransportClusterInfoAction extends TransportMasterNodeReadOperationAction { +public abstract class TransportClusterInfoAction extends TransportMasterNodeReadAction { public 
TransportClusterInfoAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Class request) { super(settings, actionName, transportService, clusterService, threadPool, actionFilters, request); diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationRequest.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java similarity index 88% rename from src/main/java/org/elasticsearch/action/support/nodes/NodeOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java index 4d8a42619ad..e25577e2f70 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java @@ -28,15 +28,15 @@ import java.io.IOException; /** * */ -public abstract class NodeOperationRequest extends TransportRequest { +public abstract class BaseNodeRequest extends TransportRequest { private String nodeId; - protected NodeOperationRequest() { + protected BaseNodeRequest() { } - protected NodeOperationRequest(NodesOperationRequest request, String nodeId) { + protected BaseNodeRequest(BaseNodesRequest request, String nodeId) { super(request); this.nodeId = nodeId; } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeResponse.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodeResponse.java index 3415a07c8a2..33e4596e9b2 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeResponse.java @@ -29,14 +29,14 @@ import java.io.IOException; /** * A base class for node 
level operations. */ -public abstract class NodeOperationResponse extends TransportResponse { +public abstract class BaseNodeResponse extends TransportResponse { private DiscoveryNode node; - protected NodeOperationResponse() { + protected BaseNodeResponse() { } - protected NodeOperationResponse(DiscoveryNode node) { + protected BaseNodeResponse(DiscoveryNode node) { assert node != null; this.node = node; } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequest.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java similarity index 90% rename from src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java index 113e03f1f3c..462c873c633 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java @@ -31,7 +31,7 @@ import java.io.IOException; /** * */ -public abstract class NodesOperationRequest extends ActionRequest { +public abstract class BaseNodesRequest extends ActionRequest { public static String[] ALL_NODES = Strings.EMPTY_ARRAY; @@ -39,16 +39,16 @@ public abstract class NodesOperationRequest ext private TimeValue timeout; - protected NodesOperationRequest() { + protected BaseNodesRequest() { } - protected NodesOperationRequest(ActionRequest request, String... nodesIds) { + protected BaseNodesRequest(ActionRequest request, String... nodesIds) { super(request); this.nodesIds = nodesIds; } - protected NodesOperationRequest(String... nodesIds) { + protected BaseNodesRequest(String... 
nodesIds) { this.nodesIds = nodesIds; } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationResponse.java b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java similarity index 79% rename from src/main/java/org/elasticsearch/action/support/nodes/NodesOperationResponse.java rename to src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java index b2a699529bb..db0a2a89aa7 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationResponse.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesResponse.java @@ -34,16 +34,16 @@ import java.util.Map; /** * */ -public abstract class NodesOperationResponse extends ActionResponse implements Iterable { +public abstract class BaseNodesResponse extends ActionResponse implements Iterable { private ClusterName clusterName; - protected NodeResponse[] nodes; - private Map nodesMap; + protected TNodeResponse[] nodes; + private Map nodesMap; - protected NodesOperationResponse() { + protected BaseNodesResponse() { } - protected NodesOperationResponse(ClusterName clusterName, NodeResponse[] nodes) { + protected BaseNodesResponse(ClusterName clusterName, TNodeResponse[] nodes) { this.clusterName = clusterName; this.nodes = nodes; } @@ -64,23 +64,23 @@ public abstract class NodesOperationResponse iterator() { + public Iterator iterator() { return getNodesMap().values().iterator(); } - public Map getNodesMap() { + public Map getNodesMap() { if (nodesMap == null) { nodesMap = Maps.newHashMap(); - for (NodeResponse nodeResponse : nodes) { + for (TNodeResponse nodeResponse : nodes) { nodesMap.put(nodeResponse.getNode().id(), nodeResponse); } } diff --git a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java b/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java index 303671db595..cf8190f2c32 100644 --- 
a/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.unit.TimeValue; /** */ -public abstract class NodesOperationRequestBuilder, Response extends NodesOperationResponse, RequestBuilder extends NodesOperationRequestBuilder> +public abstract class NodesOperationRequestBuilder, Response extends BaseNodesResponse, RequestBuilder extends NodesOperationRequestBuilder> extends ActionRequestBuilder { protected NodesOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java b/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java similarity index 87% rename from src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java rename to src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 78503930357..065f4ad744a 100644 --- a/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java +++ b/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -39,7 +39,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public abstract class TransportNodesOperationAction extends HandledTransportAction { +public abstract class TransportNodesAction extends HandledTransportAction { protected final ClusterName clusterName; protected final ClusterService clusterService; @@ -47,9 +47,9 @@ public abstract class TransportNodesOperationAction request, Class nodeRequest, String nodeExecutor) { + protected TransportNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, + ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, + Class request, Class nodeRequest, String 
nodeExecutor) { super(settings, actionName, threadPool, transportService, actionFilters, request); this.clusterName = clusterName; this.clusterService = clusterService; @@ -61,7 +61,7 @@ public abstract class TransportNodesOperationAction listener) { + protected void doExecute(NodesRequest request, ActionListener listener) { new AsyncAction(request, listener).start(); } @@ -69,9 +69,9 @@ public abstract class TransportNodesOperationAction listener; + private final ActionListener listener; private final ClusterState clusterState; private final AtomicReferenceArray responses; private final AtomicInteger counter = new AtomicInteger(); - private AsyncAction(Request request, ActionListener listener) { + private AsyncAction(NodesRequest request, ActionListener listener) { this.request = request; this.listener = listener; clusterState = clusterService.state(); @@ -179,7 +179,7 @@ public abstract class TransportNodesOperationAction, Response extends ActionResponse, RequestBuilder extends SingleShardOperationRequestBuilder> +public abstract class SingleShardOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends SingleShardOperationRequestBuilder> extends ActionRequestBuilder { protected SingleShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { diff --git a/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequest.java b/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java similarity index 89% rename from src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequest.java rename to src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java index 74db0435709..180ea877618 100644 --- a/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequest.java +++ b/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java @@ -33,7 +33,7 @@ import 
java.io.IOException; /** * */ -public abstract class SingleShardOperationRequest extends ActionRequest implements IndicesRequest { +public abstract class SingleShardRequest extends ActionRequest implements IndicesRequest { ShardId internalShardId; @@ -41,18 +41,18 @@ public abstract class SingleShardOperationRequest extends TransportAction { +public abstract class TransportSingleShardAction extends TransportAction { protected final ClusterService clusterService; @@ -53,8 +53,8 @@ public abstract class TransportShardSingleOperationAction request, String executor) { + protected TransportSingleShardAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, + Class request, String executor) { super(settings, actionName, threadPool, actionFilters); this.clusterService = clusterService; this.transportService = transportService; diff --git a/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java b/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java index 96fdd91c84f..c33e32eabb4 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.termvectors; import com.carrotsearch.hppc.IntArrayList; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +28,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -public class MultiTermVectorsShardRequest extends SingleShardOperationRequest { +public class MultiTermVectorsShardRequest extends SingleShardRequest { private int shardId; private 
String preference; diff --git a/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index 9198013d307..bc1332e4da3 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocumentRequest; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.get.MultiGetRequest; -import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -50,7 +50,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; * Note, the {@link #index()}, {@link #type(String)} and {@link #id(String)} are * required. 
*/ -public class TermVectorsRequest extends SingleShardOperationRequest implements DocumentRequest { +public class TermVectorsRequest extends SingleShardRequest implements DocumentRequest { private String type; diff --git a/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java index f2e9a16d989..9aad9cb479b 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java +++ b/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; @@ -36,7 +36,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class TransportShardMultiTermsVectorAction extends TransportShardSingleOperationAction { +public class TransportShardMultiTermsVectorAction extends TransportSingleShardAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index e03abfee230..623ced5d5ef 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ 
-22,15 +22,15 @@ package org.elasticsearch.action.termvectors; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -38,7 +38,7 @@ import org.elasticsearch.transport.TransportService; /** * Performs the get operation. 
*/ -public class TransportTermVectorsAction extends TransportShardSingleOperationAction { +public class TransportTermVectorsAction extends TransportSingleShardAction { private final IndicesService indicesService; diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java index db4624d7464..0171a90ec95 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyRequest.java @@ -24,7 +24,7 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentHelper; @@ -38,7 +38,7 @@ import java.util.Set; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -public class DfsOnlyRequest extends BroadcastOperationRequest { +public class DfsOnlyRequest extends BroadcastRequest { private SearchRequest searchRequest = new SearchRequest(); diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java index 150e7e2289b..db1cddff046 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/DfsOnlyResponse.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.termvectors.dfs; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; @@ -32,7 +32,7 @@ import java.util.List; /** * A response of a dfs only request. */ -public class DfsOnlyResponse extends BroadcastOperationResponse { +public class DfsOnlyResponse extends BroadcastResponse { private AggregatedDfs dfs; private long tookInMillis; diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java index c18892aed67..687910c5922 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyRequest.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.termvectors.dfs; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest; +import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -29,7 +29,7 @@ import org.elasticsearch.search.internal.ShardSearchTransportRequest; import java.io.IOException; -class ShardDfsOnlyRequest extends BroadcastShardOperationRequest { +class ShardDfsOnlyRequest extends BroadcastShardRequest { private ShardSearchTransportRequest shardSearchRequest = new ShardSearchTransportRequest(); diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java index 8f414467972..688a475ea64 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/ShardDfsOnlyResponse.java @@ -19,7 +19,7 @@ package 
org.elasticsearch.action.termvectors.dfs; -import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -30,7 +30,7 @@ import java.io.IOException; /** * */ -class ShardDfsOnlyResponse extends BroadcastShardOperationResponse { +class ShardDfsOnlyResponse extends BroadcastShardResponse { private DfsSearchResult dfsSearchResult = new DfsSearchResult(); diff --git a/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java b/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java index c1d698725e9..682ff47377c 100644 --- a/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java +++ b/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction; +import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -51,7 +51,7 @@ import static com.google.common.collect.Lists.newArrayList; /** * Get the dfs only with no fetch phase. This is for internal use only. 
*/ -public class TransportDfsOnlyAction extends TransportBroadcastOperationAction { +public class TransportDfsOnlyAction extends TransportBroadcastAction { public static final String NAME = "internal:index/termvectors/dfs"; diff --git a/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index cc1b7e059ad..15851b9dc18 100644 --- a/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -28,8 +28,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.inject.spi.Message; -import org.elasticsearch.common.jna.Kernel32Library; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -48,7 +46,6 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import static com.google.common.collect.Sets.newHashSet; -import static org.elasticsearch.common.jna.Kernel32Library.ConsoleCtrlHandler; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; /** @@ -122,7 +119,7 @@ public class Bootstrap { // force remainder of JNA to be loaded (if available). try { - Kernel32Library.getInstance(); + JNAKernel32Library.getInstance(); } catch (Throwable ignored) { // we've already logged this. 
} @@ -143,6 +140,10 @@ public class Bootstrap { StringHelper.randomId(); } + public static boolean isMemoryLocked() { + return Natives.isMemoryLocked(); + } + private void setup(boolean addShutdownHook, Settings settings, Environment environment) throws Exception { initializeNatives(settings.getAsBoolean("bootstrap.mlockall", false), settings.getAsBoolean("bootstrap.ctrlhandler", true), diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParser.java b/src/main/java/org/elasticsearch/bootstrap/ConsoleCtrlHandler.java similarity index 68% rename from src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParser.java rename to src/main/java/org/elasticsearch/bootstrap/ConsoleCtrlHandler.java index 43721e37309..6433b336d9d 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParser.java +++ b/src/main/java/org/elasticsearch/bootstrap/ConsoleCtrlHandler.java @@ -17,18 +17,17 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movavg.models; +package org.elasticsearch.bootstrap; +public interface ConsoleCtrlHandler { -import org.elasticsearch.common.Nullable; + int CTRL_CLOSE_EVENT = 2; -import java.util.Map; - -/** - * Common interface for parsers used by the various Moving Average models - */ -public interface MovAvgModelParser { - public MovAvgModel parse(@Nullable Map settings); - - public String getName(); + /** + * Handles the Ctrl event. + * + * @param code the code corresponding to the Ctrl sent. + * @return true if the handler processed the event, false otherwise. If false, the next handler will be called. 
+ */ + boolean handle(int code); } diff --git a/src/main/java/org/elasticsearch/common/jna/CLibrary.java b/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java similarity index 77% rename from src/main/java/org/elasticsearch/common/jna/CLibrary.java rename to src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java index d3e2c19188d..97bf98e60f6 100644 --- a/src/main/java/org/elasticsearch/common/jna/CLibrary.java +++ b/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import com.sun.jna.Native; import org.elasticsearch.common.logging.ESLogger; @@ -27,9 +27,9 @@ import org.elasticsearch.common.logging.Loggers; /** * */ -public class CLibrary { +class JNACLibrary { - private static ESLogger logger = Loggers.getLogger(CLibrary.class); + private static final ESLogger logger = Loggers.getLogger(JNACLibrary.class); public static final int MCL_CURRENT = 1; public static final int MCL_FUTURE = 2; @@ -39,17 +39,15 @@ public class CLibrary { static { try { Native.register("c"); - } catch (NoClassDefFoundError e) { - logger.warn("JNA not found. native methods (mlockall) will be disabled."); } catch (UnsatisfiedLinkError e) { logger.warn("unable to link C library. 
native methods (mlockall) will be disabled."); } } - public static native int mlockall(int flags); + static native int mlockall(int flags); - public static native int geteuid(); + static native int geteuid(); - private CLibrary() { + private JNACLibrary() { } } diff --git a/src/main/java/org/elasticsearch/common/jna/Kernel32Library.java b/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java similarity index 84% rename from src/main/java/org/elasticsearch/common/jna/Kernel32Library.java rename to src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java index d2b634ae768..57af6b145df 100644 --- a/src/main/java/org/elasticsearch/common/jna/Kernel32Library.java +++ b/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import com.google.common.collect.ImmutableList; import com.sun.jna.*; @@ -35,9 +35,9 @@ import java.util.List; /** * Library for Windows/Kernel32 */ -public class Kernel32Library { +class JNAKernel32Library { - private static ESLogger logger = Loggers.getLogger(Kernel32Library.class); + private static final ESLogger logger = Loggers.getLogger(JNAKernel32Library.class); // Callbacks must be kept around in order to be able to be called later, // when the Windows ConsoleCtrlHandler sends an event. @@ -45,10 +45,10 @@ public class Kernel32Library { // Native library instance must be kept around for the same reason. 
private final static class Holder { - private final static Kernel32Library instance = new Kernel32Library(); + private final static JNAKernel32Library instance = new JNAKernel32Library(); } - private Kernel32Library() { + private JNAKernel32Library() { if (Constants.WINDOWS) { try { Native.register("kernel32"); @@ -61,7 +61,7 @@ public class Kernel32Library { } } - public static Kernel32Library getInstance() { + static JNAKernel32Library getInstance() { return Holder.instance; } @@ -73,7 +73,7 @@ public class Kernel32Library { * @throws java.lang.UnsatisfiedLinkError if the Kernel32 library is not loaded or if the native function is not found * @throws java.lang.NoClassDefFoundError if the library for native calls is missing */ - public boolean addConsoleCtrlHandler(ConsoleCtrlHandler handler) { + boolean addConsoleCtrlHandler(ConsoleCtrlHandler handler) { boolean result = false; if (handler != null) { NativeHandlerCallback callback = new NativeHandlerCallback(handler); @@ -85,7 +85,7 @@ public class Kernel32Library { return result; } - public ImmutableList getCallbacks() { + ImmutableList getCallbacks() { return ImmutableList.builder().addAll(callbacks).build(); } @@ -98,7 +98,7 @@ public class Kernel32Library { * @throws java.lang.UnsatisfiedLinkError if the Kernel32 library is not loaded or if the native function is not found * @throws java.lang.NoClassDefFoundError if the library for native calls is missing */ - public native boolean SetConsoleCtrlHandler(StdCallLibrary.StdCallCallback handler, boolean add); + native boolean SetConsoleCtrlHandler(StdCallLibrary.StdCallCallback handler, boolean add); /** * Handles consoles event with WIN API @@ -123,20 +123,6 @@ public class Kernel32Library { } } - public interface ConsoleCtrlHandler { - - public static final int CTRL_CLOSE_EVENT = 2; - - /** - * Handles the Ctrl event. - * - * @param code the code corresponding to the Ctrl sent. - * @return true if the handler processed the event, false otherwise. 
If false, the next handler will be called. - */ - boolean handle(int code); - } - - /** * Memory protection constraints * @@ -167,6 +153,18 @@ public class Kernel32Library { } } + public static class SizeT extends IntegerType { + + public SizeT() { + this(0); + } + + public SizeT(long value) { + super(Native.SIZE_T_SIZE, value); + } + + } + /** * Locks the specified region of the process's virtual address space into physical * memory, ensuring that subsequent access to the region will not incur a page fault. @@ -177,7 +175,7 @@ public class Kernel32Library { * @param size The size of the region to be locked, in bytes. * @return true if the function succeeds */ - public native boolean VirtualLock(Pointer address, SizeT size); + native boolean VirtualLock(Pointer address, SizeT size); /** * Retrieves information about a range of pages within the virtual address space of a specified process. @@ -190,7 +188,7 @@ public class Kernel32Library { * @param length The size of the buffer pointed to by the memoryInfo parameter, in bytes. * @return the actual number of bytes returned in the information buffer. */ - public native int VirtualQueryEx(Pointer handle, Pointer address, MemoryBasicInformation memoryInfo, int length); + native int VirtualQueryEx(Pointer handle, Pointer address, MemoryBasicInformation memoryInfo, int length); /** * Sets the minimum and maximum working set sizes for the specified process. @@ -202,7 +200,7 @@ public class Kernel32Library { * @param maxSize The maximum working set size for the process, in bytes. * @return true if the function succeeds. */ - public native boolean SetProcessWorkingSetSize(Pointer handle, SizeT minSize, SizeT maxSize); + native boolean SetProcessWorkingSetSize(Pointer handle, SizeT minSize, SizeT maxSize); /** * Retrieves a pseudo handle for the current process. @@ -211,7 +209,7 @@ public class Kernel32Library { * * @return a pseudo handle to the current process. 
*/ - public native Pointer GetCurrentProcess(); + native Pointer GetCurrentProcess(); /** * Closes an open object handle. @@ -221,5 +219,5 @@ public class Kernel32Library { * @param handle A valid handle to an open object. * @return true if the function succeeds. */ - public native boolean CloseHandle(Pointer handle); + native boolean CloseHandle(Pointer handle); } diff --git a/src/main/java/org/elasticsearch/common/jna/Natives.java b/src/main/java/org/elasticsearch/bootstrap/JNANatives.java similarity index 76% rename from src/main/java/org/elasticsearch/common/jna/Natives.java rename to src/main/java/org/elasticsearch/bootstrap/JNANatives.java index fa8e074713a..eb29df85cdb 100644 --- a/src/main/java/org/elasticsearch/common/jna/Natives.java +++ b/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -17,32 +17,34 @@ * under the License. */ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import com.sun.jna.Native; -import com.sun.jna.NativeLong; import com.sun.jna.Pointer; import org.apache.lucene.util.Constants; -import org.elasticsearch.common.jna.Kernel32Library.ConsoleCtrlHandler; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.monitor.jvm.JvmInfo; import java.util.Locale; -/** - * - */ -public class Natives { +import static org.elasticsearch.bootstrap.JNAKernel32Library.SizeT; + +/** + * This class performs the actual work with JNA and library bindings to call native methods. 
It should only be used after + * we are sure that the JNA classes are available to the JVM + */ +class JNANatives { + + private static final ESLogger logger = Loggers.getLogger(JNANatives.class); - private static ESLogger logger = Loggers.getLogger(Natives.class); // Set to true, in case native mlockall call was successful public static boolean LOCAL_MLOCKALL = false; - public static void tryMlockall() { + static void tryMlockall() { int errno = Integer.MIN_VALUE; try { - int result = CLibrary.mlockall(CLibrary.MCL_CURRENT); + int result = JNACLibrary.mlockall(JNACLibrary.MCL_CURRENT); if (result != 0) { errno = Native.getLastError(); } else { @@ -54,7 +56,7 @@ public class Natives { } if (errno != Integer.MIN_VALUE) { - if (errno == CLibrary.ENOMEM && System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux")) { + if (errno == JNACLibrary.ENOMEM && System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux")) { logger.warn("Unable to lock JVM memory (ENOMEM)." + " This can result in part of the JVM being swapped out." + " Increase RLIMIT_MEMLOCK (ulimit)."); @@ -66,21 +68,20 @@ public class Natives { } /** Returns true if user is root, false if not, or if we don't know */ - public static boolean definitelyRunningAsRoot() { + static boolean definitelyRunningAsRoot() { if (Constants.WINDOWS) { return false; // don't know } try { - return CLibrary.geteuid() == 0; + return JNACLibrary.geteuid() == 0; } catch (UnsatisfiedLinkError e) { // this will have already been logged by Kernel32Library, no need to repeat it return false; } } - public static void tryVirtualLock() - { - Kernel32Library kernel = Kernel32Library.getInstance(); + static void tryVirtualLock() { + JNAKernel32Library kernel = JNAKernel32Library.getInstance(); Pointer process = null; try { process = kernel.GetCurrentProcess(); @@ -91,12 +92,12 @@ public class Natives { if (!kernel.SetProcessWorkingSetSize(process, size, size)) { logger.warn("Unable to lock JVM memory. 
Failed to set working set size. Error code " + Native.getLastError()); } else { - Kernel32Library.MemoryBasicInformation memInfo = new Kernel32Library.MemoryBasicInformation(); + JNAKernel32Library.MemoryBasicInformation memInfo = new JNAKernel32Library.MemoryBasicInformation(); long address = 0; while (kernel.VirtualQueryEx(process, new Pointer(address), memInfo, memInfo.size()) != 0) { - boolean lockable = memInfo.State.longValue() == Kernel32Library.MEM_COMMIT - && (memInfo.Protect.longValue() & Kernel32Library.PAGE_NOACCESS) != Kernel32Library.PAGE_NOACCESS - && (memInfo.Protect.longValue() & Kernel32Library.PAGE_GUARD) != Kernel32Library.PAGE_GUARD; + boolean lockable = memInfo.State.longValue() == JNAKernel32Library.MEM_COMMIT + && (memInfo.Protect.longValue() & JNAKernel32Library.PAGE_NOACCESS) != JNAKernel32Library.PAGE_NOACCESS + && (memInfo.Protect.longValue() & JNAKernel32Library.PAGE_GUARD) != JNAKernel32Library.PAGE_GUARD; if (lockable) { kernel.VirtualLock(memInfo.BaseAddress, new SizeT(memInfo.RegionSize.longValue())); } @@ -114,18 +115,16 @@ public class Natives { } } - public static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { + static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { // The console Ctrl handler is necessary on Windows platforms only. 
if (Constants.WINDOWS) { try { - boolean result = Kernel32Library.getInstance().addConsoleCtrlHandler(handler); + boolean result = JNAKernel32Library.getInstance().addConsoleCtrlHandler(handler); if (result) { logger.debug("console ctrl handler correctly set"); } else { logger.warn("unknown error " + Native.getLastError() + " when adding console ctrl handler:"); } - } catch (NoClassDefFoundError e) { - logger.warn("JNA not found: native methods and handlers will be disabled."); } catch (UnsatisfiedLinkError e) { // this will have already been logged by Kernel32Library, no need to repeat it } diff --git a/src/main/java/org/elasticsearch/bootstrap/Natives.java b/src/main/java/org/elasticsearch/bootstrap/Natives.java new file mode 100644 index 00000000000..2048895f59e --- /dev/null +++ b/src/main/java/org/elasticsearch/bootstrap/Natives.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; + +/** + * The Natives class is a wrapper class that checks if the classes necessary for calling native methods are available on + * startup. 
If they are not available, this class will avoid calling code that loads these classes. + */ +class Natives { + private static final ESLogger logger = Loggers.getLogger(Natives.class); + + // marker to determine if the JNA class files are available to the JVM + private static boolean jnaAvailable = false; + + static { + try { + // load one of the main JNA classes to see if the classes are available. this does not ensure that all native + // libraries are available, only the ones necessary by JNA to function + Class.forName("com.sun.jna.Native"); + jnaAvailable = true; + } catch (ClassNotFoundException e) { + logger.warn("JNA not found. native methods will be disabled.", e); + } catch (UnsatisfiedLinkError e) { + logger.warn("unable to load JNA native support library, native methods will be disabled.", e); + } + } + + static void tryMlockall() { + if (!jnaAvailable) { + logger.warn("cannot mlockall because JNA is not available"); + return; + } + JNANatives.tryMlockall(); + } + + static boolean definitelyRunningAsRoot() { + if (!jnaAvailable) { + logger.warn("cannot check if running as root because JNA is not available"); + return false; + } + return JNANatives.definitelyRunningAsRoot(); + } + + static void tryVirtualLock() { + if (!jnaAvailable) { + logger.warn("cannot mlockall because JNA is not available"); + return; + } + JNANatives.tryVirtualLock(); + } + + static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) { + if (!jnaAvailable) { + logger.warn("cannot register console handler because JNA is not available"); + return; + } + JNANatives.addConsoleCtrlHandler(handler); + } + + static boolean isMemoryLocked() { + if (!jnaAvailable) { + return false; + } + return JNANatives.LOCAL_MLOCKALL; + } +} diff --git a/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 18b0a377ac0..75e8ada560b 100644 --- a/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ 
b/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -259,6 +259,8 @@ import org.elasticsearch.action.suggest.SuggestAction; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; +import org.elasticsearch.action.support.AdapterActionFuture; +import org.elasticsearch.action.support.DelegatingActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.termvectors.*; @@ -335,7 +337,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client protected abstract > void doExecute(final Action action, final Request request, ActionListener listener); - @Override public ActionFuture index(final IndexRequest request) { return execute(IndexAction.INSTANCE, request); @@ -594,12 +595,24 @@ public abstract class AbstractClient extends AbstractComponent implements Client @Override public ActionFuture count(final CountRequest request) { - return execute(CountAction.INSTANCE, request); + AdapterActionFuture actionFuture = new AdapterActionFuture() { + @Override + protected CountResponse convert(SearchResponse listenerResponse) { + return new CountResponse(listenerResponse); + } + }; + execute(SearchAction.INSTANCE, request.toSearchRequest(), actionFuture); + return actionFuture; } @Override public void count(final CountRequest request, final ActionListener listener) { - execute(CountAction.INSTANCE, request, listener); + execute(SearchAction.INSTANCE, request.toSearchRequest(), new DelegatingActionListener(listener) { + @Override + protected CountResponse getDelegatedFromInstigator(SearchResponse response) { + return new CountResponse(response); + } + }); } @Override diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java 
b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java index 5b66fafec12..3d5d938bde4 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask; @@ -223,7 +223,7 @@ public class MetaDataDeleteIndexService extends AbstractComponent { final String index; TimeValue timeout = TimeValue.timeValueSeconds(10); - TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; public Request(String index) { this.index = index; diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 3e945b161eb..840532e8637 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -23,7 +23,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.support.master.MasterNodeOperationRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask; @@ -231,7 +231,7 @@ public class MetaDataIndexTemplateService extends 
AbstractComponent { List aliases = Lists.newArrayList(); Map customs = Maps.newHashMap(); - TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; public PutRequest(String cause, String name) { this.cause = cause; @@ -304,7 +304,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent { public static class RemoveRequest { final String name; - TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; public RemoveRequest(String name) { this.name = name; diff --git a/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/src/main/java/org/elasticsearch/common/component/AbstractComponent.java index 353d27747cd..a31bf119402 100644 --- a/src/main/java/org/elasticsearch/common/component/AbstractComponent.java +++ b/src/main/java/org/elasticsearch/common/component/AbstractComponent.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.component; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -29,16 +30,18 @@ import org.elasticsearch.common.settings.Settings; public abstract class AbstractComponent { protected final ESLogger logger; - + protected final DeprecationLogger deprecationLogger; protected final Settings settings; public AbstractComponent(Settings settings) { this.logger = Loggers.getLogger(getClass(), settings); + this.deprecationLogger = new DeprecationLogger(logger); this.settings = settings; } public AbstractComponent(Settings settings, Class customClass) { this.logger = Loggers.getLogger(customClass, settings); + this.deprecationLogger = new DeprecationLogger(logger); this.settings = settings; } diff --git 
a/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java new file mode 100644 index 00000000000..870b5f61466 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.logging; + +/** + * A logger that logs deprecation notices. + */ +public class DeprecationLogger { + + private final ESLogger logger; + + /** + * Creates a new deprecation logger based on the parent logger. Automatically + * prefixes the logger name with "deprecation", if it starts with "org.elasticsearch.", + * it replaces "org.elasticsearch" with "org.elasticsearch.deprecation" to maintain + * the "org.elasticsearch" namespace. + */ + public DeprecationLogger(ESLogger parentLogger) { + String name = parentLogger.getName(); + if (name.startsWith("org.elasticsearch")) { + name = name.replace("org.elasticsearch.", "org.elasticsearch.deprecation."); + } else { + name = "deprecation." + name; + } + this.logger = ESLoggerFactory.getLogger(parentLogger.getPrefix(), name); + } + + /** + * Logs a deprecated message. 
+ */ + public void deprecated(String msg, Object... params) { + logger.debug(msg, params); + } +} diff --git a/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 67434293b6e..a0645c6a4d3 100644 --- a/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -66,6 +66,14 @@ public abstract class ESLoggerFactory { return defaultFactory.newInstance(name.intern()); } + public static DeprecationLogger getDeprecationLogger(String name) { + return new DeprecationLogger(getLogger(name)); + } + + public static DeprecationLogger getDeprecationLogger(String prefix, String name) { + return new DeprecationLogger(getLogger(prefix, name)); + } + public static ESLogger getRootLogger() { return defaultFactory.rootLogger(); } diff --git a/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java b/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java index 1bfb2d83d6a..b95e0987c90 100644 --- a/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java +++ b/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.logging.log4j; -import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; diff --git a/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/src/main/java/org/elasticsearch/common/lucene/Lucene.java index e38af307a8a..385607d89ba 100644 --- a/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -159,8 +159,11 @@ public class Lucene { /** * Reads the segments infos from the given commit, failing if it fails to load */ - public static SegmentInfos 
readSegmentInfos(IndexCommit commit, Directory directory) throws IOException { - return SegmentInfos.readCommit(directory, commit.getSegmentsFileName()); + public static SegmentInfos readSegmentInfos(IndexCommit commit) throws IOException { + // Using commit.getSegmentsFileName() does NOT work here, have to + // manually create the segment filename + String filename = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", commit.getGeneration()); + return SegmentInfos.readCommit(commit.getDirectory(), filename); } /** diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContent.java b/src/main/java/org/elasticsearch/common/xcontent/XContent.java index d9cf704725c..101098d67a4 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContent.java @@ -40,6 +40,11 @@ public interface XContent { */ XContentGenerator createGenerator(OutputStream os) throws IOException; + /** + * Creates a new generator using the provided output stream and some filters. + */ + XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException; + /** * Creates a new generator using the provided writer. 
*/ diff --git a/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index b2cf8738fe0..fb31bd89a95 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -77,6 +77,10 @@ public final class XContentBuilder implements BytesStream, Releasable { return new XContentBuilder(xContent, new BytesStreamOutput()); } + public static XContentBuilder builder(XContent xContent, String[] filters) throws IOException { + return new XContentBuilder(xContent, new BytesStreamOutput(), filters); + } + private XContentGenerator generator; private final OutputStream bos; @@ -92,8 +96,17 @@ public final class XContentBuilder implements BytesStream, Releasable { * to call {@link #close()} when the builder is done with. */ public XContentBuilder(XContent xContent, OutputStream bos) throws IOException { + this(xContent, bos, null); + } + + /** + * Constructs a new builder using the provided xcontent, an OutputStream and some filters. The + * filters are used to filter fields that won't be written to the OutputStream. Make sure + * to call {@link #close()} when the builder is done with. 
+ */ + public XContentBuilder(XContent xContent, OutputStream bos, String[] filters) throws IOException { this.bos = bos; - this.generator = xContent.createGenerator(bos); + this.generator = xContent.createGenerator(bos, filters); } public XContentBuilder fieldCaseConversion(FieldCaseConversion fieldCaseConversion) { diff --git a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java index 06f8605ec24..5210a82527e 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContent.java @@ -20,11 +20,15 @@ package org.elasticsearch.common.xcontent.cbor; import com.fasterxml.jackson.core.JsonEncoding; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -59,14 +63,27 @@ public class CborXContent implements XContent { throw new ElasticsearchParseException("cbor does not support stream parsing..."); } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new CborXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new CborXContentGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8)); + return newXContentGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws 
IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator cborGenerator = new FilteringJsonGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new CborXContentGenerator(cborGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new CborXContentGenerator(cborFactory.createGenerator(writer)); + return newXContentGenerator(cborFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java index c410d777b0d..70b92b0708c 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/cbor/CborXContentGenerator.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.xcontent.cbor; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.cbor.CBORParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; import org.elasticsearch.common.xcontent.json.JsonXContentGenerator; import java.io.IOException; @@ -34,7 +34,7 @@ import java.io.OutputStream; */ public class CborXContentGenerator extends JsonXContentGenerator { - public CborXContentGenerator(JsonGenerator generator) { + public CborXContentGenerator(BaseJsonGenerator generator) { super(generator); } diff --git a/src/main/java/org/elasticsearch/common/xcontent/json/BaseJsonGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/json/BaseJsonGenerator.java new file mode 100644 index 00000000000..0b485508c32 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/xcontent/json/BaseJsonGenerator.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + 
* license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent.json; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.base.GeneratorBase; +import com.fasterxml.jackson.core.util.JsonGeneratorDelegate; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.Streams; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +public class BaseJsonGenerator extends JsonGeneratorDelegate { + + protected final GeneratorBase base; + + public BaseJsonGenerator(JsonGenerator generator, JsonGenerator base) { + super(generator, true); + if (base instanceof GeneratorBase) { + this.base = (GeneratorBase) base; + } else { + this.base = null; + } + } + + public BaseJsonGenerator(JsonGenerator generator) { + this(generator, generator); + } + + protected void writeStartRaw(String fieldName) throws IOException { + writeFieldName(fieldName); + writeRaw(':'); + } + + public void writeEndRaw() { + assert base != null : "JsonGenerator should be of instance GeneratorBase but was: " + delegate.getClass(); + if (base != null) { + base.getOutputContext().writeValue(); + } + } + + protected void writeRawValue(byte[] content, OutputStream bos) throws IOException { + flush(); + 
bos.write(content); + } + + protected void writeRawValue(byte[] content, int offset, int length, OutputStream bos) throws IOException { + flush(); + bos.write(content, offset, length); + } + + protected void writeRawValue(InputStream content, OutputStream bos) throws IOException { + flush(); + Streams.copy(content, bos); + } + + protected void writeRawValue(BytesReference content, OutputStream bos) throws IOException { + flush(); + content.writeTo(bos); + } +} diff --git a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java index 5b8fd1b0e59..47da7934939 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContent.java @@ -25,7 +25,9 @@ import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -63,14 +65,27 @@ public class JsonXContent implements XContent { return '\n'; } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new JsonXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new JsonXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8)); + return newXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator jsonGenerator = new 
FilteringJsonGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new JsonXContentGenerator(jsonGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new JsonXContentGenerator(jsonFactory.createGenerator(writer)); + return newXContentGenerator(jsonFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java index 71b41919b55..a7946218e21 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java @@ -19,11 +19,8 @@ package org.elasticsearch.common.xcontent.json; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.base.GeneratorBase; import com.fasterxml.jackson.core.io.SerializedString; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.xcontent.*; import java.io.IOException; @@ -35,18 +32,11 @@ import java.io.OutputStream; */ public class JsonXContentGenerator implements XContentGenerator { - protected final JsonGenerator generator; + protected final BaseJsonGenerator generator; private boolean writeLineFeedAtEnd; - private final GeneratorBase base; - public JsonXContentGenerator(JsonGenerator generator) { + public JsonXContentGenerator(BaseJsonGenerator generator) { this.generator = generator; - if (generator instanceof GeneratorBase) { - base = (GeneratorBase) generator; - } else { - base = null; - } - } @Override @@ -261,29 +251,23 @@ public class JsonXContentGenerator implements XContentGenerator { @Override public void writeRawField(String fieldName, byte[] content, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - 
bos.write(content); - finishWriteRaw(); + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, bos); + generator.writeEndRaw(); } @Override public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - bos.write(content, offset, length); - finishWriteRaw(); + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, offset, length, bos); + generator.writeEndRaw(); } @Override public void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - Streams.copy(content, bos); - finishWriteRaw(); + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, bos); + generator.writeEndRaw(); } @Override @@ -308,18 +292,9 @@ public class JsonXContentGenerator implements XContentGenerator { } protected void writeObjectRaw(String fieldName, BytesReference content, OutputStream bos) throws IOException { - generator.writeFieldName(fieldName); - generator.writeRaw(':'); - flush(); - content.writeTo(bos); - finishWriteRaw(); - } - - private void finishWriteRaw() { - assert base != null : "JsonGenerator should be of instance GeneratorBase but was: " + generator.getClass(); - if (base != null) { - base.getOutputContext().writeValue(); - } + generator.writeStartRaw(fieldName); + generator.writeRawValue(content, bos); + generator.writeEndRaw(); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java index 50fd0442905..8a21ce1d93a 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java @@ -20,13 +20,15 @@ package org.elasticsearch.common.xcontent.smile; import 
com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; -import org.elasticsearch.common.xcontent.json.JsonXContentParser; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -62,14 +64,27 @@ public class SmileXContent implements XContent { return (byte) 0xFF; } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new SmileXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new SmileXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8)); + return newXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator smileGenerator = new FilteringJsonGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new SmileXContentGenerator(smileGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new SmileXContentGenerator(smileFactory.createGenerator(writer)); + return newXContentGenerator(smileFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java 
b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java index fb0b7d5e4b8..b8c1b3dad65 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContentGenerator.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.xcontent.smile; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.smile.SmileParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; import org.elasticsearch.common.xcontent.json.JsonXContentGenerator; import java.io.IOException; @@ -34,7 +34,7 @@ import java.io.OutputStream; */ public class SmileXContentGenerator extends JsonXContentGenerator { - public SmileXContentGenerator(JsonGenerator generator) { + public SmileXContentGenerator(BaseJsonGenerator generator) { super(generator); } diff --git a/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilterContext.java b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilterContext.java new file mode 100644 index 00000000000..215af370b31 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilterContext.java @@ -0,0 +1,225 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import com.fasterxml.jackson.core.JsonGenerator; +import org.elasticsearch.common.regex.Regex; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * A FilterContext contains the description of a field about to be written by a JsonGenerator. + */ +public class FilterContext { + + /** + * The field/property name to be write + */ + private String property; + + /** + * List of XContentFilter matched by the current filtering context + */ + private List matchings; + + /** + * Flag to indicate if the field/property must be written + */ + private Boolean write = null; + + /** + * Flag to indicate if the field/property match a filter + */ + private boolean match = false; + + /** + * Points to the parent context + */ + private FilterContext parent; + + /** + * Type of the field/property + */ + private Type type = Type.VALUE; + + protected enum Type { + VALUE, + OBJECT, + ARRAY, + ARRAY_OF_OBJECT + } + + public FilterContext(String property, FilterContext parent) { + this.property = property; + this.parent = parent; + } + + public void reset(String property) { + this.property = property; + this.write = null; + if (matchings != null) { + matchings.clear(); + } + this.match = false; + this.type = Type.VALUE; + } + + public void reset(String property, FilterContext parent) { + reset(property); + this.parent = parent; + if (parent.isMatch()) { + match = true; + } + } + + public FilterContext parent() { + return parent; + } + + public List 
matchings() { + return matchings; + } + + public void addMatching(String[] matching) { + if (matchings == null) { + matchings = new ArrayList<>(); + } + matchings.add(matching); + } + + public boolean isRoot() { + return parent == null; + } + + public boolean isArray() { + return Type.ARRAY.equals(type); + } + + public void initArray() { + this.type = Type.ARRAY; + } + + public boolean isObject() { + return Type.OBJECT.equals(type); + } + + public void initObject() { + this.type = Type.OBJECT; + } + + public boolean isArrayOfObject() { + return Type.ARRAY_OF_OBJECT.equals(type); + } + + public void initArrayOfObject() { + this.type = Type.ARRAY_OF_OBJECT; + } + + public boolean isMatch() { + return match; + } + + /** + * This method contains the logic to check if a field/property must be included + * or not. + */ + public boolean include() { + if (write == null) { + if (parent != null) { + // the parent context matches the end of a filter list: + // by default we include all the sub properties so we + // don't need to check if the sub properties also match + if (parent.isMatch()) { + write = true; + match = true; + return write; + } + + if (parent.matchings() != null) { + + // Iterates over the filters matched by the parent context + // and checks if the current context also match + for (String[] matcher : parent.matchings()) { + if (matcher.length > 0) { + String field = matcher[0]; + + if ("**".equals(field)) { + addMatching(matcher); + } + + if ((field != null) && (Regex.simpleMatch(field, property))) { + int remaining = matcher.length - 1; + + // the current context matches the end of a filter list: + // it must be written and it is flagged as a direct match + if (remaining == 0) { + write = true; + match = true; + return write; + } else { + String[] submatching = new String[remaining]; + System.arraycopy(matcher, 1, submatching, 0, remaining); + addMatching(submatching); + } + } + } + } + } + } else { + // Root object is always written + write = true; + } + + 
if (write == null) { + write = false; + } + } + return write; + } + + /** + * Ensure that the full path to the current field is write by the JsonGenerator + * + * @param generator + * @throws IOException + */ + public void writePath(JsonGenerator generator) throws IOException { + if (parent != null) { + parent.writePath(generator); + } + + if ((write == null) || (!write)) { + write = true; + + if (property == null) { + generator.writeStartObject(); + } else { + generator.writeFieldName(property); + if (isArray()) { + generator.writeStartArray(); + } else if (isObject() || isArrayOfObject()) { + generator.writeStartObject(); + } + } + } + } +} diff --git a/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGenerator.java new file mode 100644 index 00000000000..2748b4b5097 --- /dev/null +++ b/src/main/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGenerator.java @@ -0,0 +1,423 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import com.fasterxml.jackson.core.Base64Variant; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.SerializableString; +import com.google.common.collect.ImmutableList; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.ArrayDeque; +import java.util.List; +import java.util.Queue; + +/** + * A FilteringJsonGenerator uses antpath-like filters to include/exclude fields when writing XContent streams. + * + * When writing a XContent stream, this class instantiates (or reuses) a FilterContext instance for each + * field (or property) that must be generated. This filter context is used to check if the field/property must be + * written according to the current list of XContentFilter filters. 
+ */ +public class FilteringJsonGenerator extends BaseJsonGenerator { + + /** + * List of previous contexts + * (MAX_CONTEXTS contexts are kept around in order to be reused) + */ + private Queue contexts = new ArrayDeque<>(); + private static final int MAX_CONTEXTS = 10; + + /** + * Current filter context + */ + private FilterContext context; + + public FilteringJsonGenerator(JsonGenerator generator, String[] filters) { + super(generator); + + ImmutableList.Builder builder = ImmutableList.builder(); + if (filters != null) { + for (String filter : filters) { + String[] matcher = Strings.delimitedListToStringArray(filter, "."); + if (matcher != null) { + builder.add(matcher); + } + } + } + + // Creates a root context that matches all filtering rules + this.context = get(null, null, builder.build()); + } + + /** + * Get a new context instance (and reset it if needed) + */ + private FilterContext get(String property, FilterContext parent) { + FilterContext ctx = contexts.poll(); + if (ctx == null) { + ctx = new FilterContext(property, parent); + } else { + ctx.reset(property, parent); + } + return ctx; + } + + /** + * Get a new context instance (and reset it if needed) + */ + private FilterContext get(String property, FilterContext context, List matchings) { + FilterContext ctx = get(property, context); + if (matchings != null) { + for (String[] matching : matchings) { + ctx.addMatching(matching); + } + } + return ctx; + } + + /** + * Adds a context instance to the pool in order to reuse it if needed + */ + private void put(FilterContext ctx) { + if (contexts.size() <= MAX_CONTEXTS) { + contexts.offer(ctx); + } + } + + @Override + public void writeStartArray() throws IOException { + context.initArray(); + if (context.include()) { + super.writeStartArray(); + } + } + + @Override + public void writeStartArray(int size) throws IOException { + context.initArray(); + if (context.include()) { + super.writeStartArray(size); + } + } + + @Override + public void writeEndArray() 
throws IOException { + // Case of array of objects + if (context.isArrayOfObject()) { + // Release current context and go one level up + FilterContext parent = context.parent(); + put(context); + context = parent; + } + + if (context.include()) { + super.writeEndArray(); + } + } + + @Override + public void writeStartObject() throws IOException { + // Case of array of objects + if (context.isArray()) { + // Get a context for the anonymous object + context = get(null, context, context.matchings()); + context.initArrayOfObject(); + } + + if (!context.isArrayOfObject()) { + context.initObject(); + } + + if (context.include()) { + super.writeStartObject(); + } + + context = get(null, context); + } + + @Override + public void writeEndObject() throws IOException { + if (!context.isRoot()) { + // Release current context and go one level up + FilterContext parent = context.parent(); + put(context); + context = parent; + } + + if (context.include()) { + super.writeEndObject(); + } + } + + @Override + public void writeFieldName(String name) throws IOException { + context.reset(name); + + if (context.include()) { + // Ensure that the full path to the field is written + context.writePath(delegate); + super.writeFieldName(name); + } + } + + @Override + public void writeFieldName(SerializableString name) throws IOException { + context.reset(name.getValue()); + + if (context.include()) { + // Ensure that the full path to the field is written + context.writePath(delegate); + super.writeFieldName(name); + } + } + + @Override + public void writeString(String text) throws IOException { + if (context.include()) { + super.writeString(text); + } + } + + @Override + public void writeString(char[] text, int offset, int len) throws IOException { + if (context.include()) { + super.writeString(text, offset, len); + } + } + + @Override + public void writeString(SerializableString text) throws IOException { + if (context.include()) { + super.writeString(text); + } + } + + @Override + public 
void writeRawUTF8String(byte[] text, int offset, int length) throws IOException { + if (context.include()) { + super.writeRawUTF8String(text, offset, length); + } + } + + @Override + public void writeUTF8String(byte[] text, int offset, int length) throws IOException { + if (context.include()) { + super.writeUTF8String(text, offset, length); + } + } + + @Override + public void writeRaw(String text) throws IOException { + if (context.include()) { + super.writeRaw(text); + } + } + + @Override + public void writeRaw(String text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRaw(text, offset, len); + } + } + + @Override + public void writeRaw(SerializableString raw) throws IOException { + if (context.include()) { + super.writeRaw(raw); + } + } + + @Override + public void writeRaw(char[] text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRaw(text, offset, len); + } + } + + @Override + public void writeRaw(char c) throws IOException { + if (context.include()) { + super.writeRaw(c); + } + } + + @Override + public void writeRawValue(String text) throws IOException { + if (context.include()) { + super.writeRawValue(text); + } + } + + @Override + public void writeRawValue(String text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRawValue(text, offset, len); + } + } + + @Override + public void writeRawValue(char[] text, int offset, int len) throws IOException { + if (context.include()) { + super.writeRawValue(text, offset, len); + } + } + + @Override + public void writeBinary(Base64Variant b64variant, byte[] data, int offset, int len) throws IOException { + if (context.include()) { + super.writeBinary(b64variant, data, offset, len); + } + } + + @Override + public int writeBinary(Base64Variant b64variant, InputStream data, int dataLength) throws IOException { + if (context.include()) { + return super.writeBinary(b64variant, data, dataLength); + } + return 0; + } + + 
@Override + public void writeNumber(short v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(int v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(long v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(BigInteger v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(double v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(float v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(BigDecimal v) throws IOException { + if (context.include()) { + super.writeNumber(v); + } + } + + @Override + public void writeNumber(String encodedValue) throws IOException, UnsupportedOperationException { + if (context.include()) { + super.writeNumber(encodedValue); + } + } + + @Override + public void writeBoolean(boolean state) throws IOException { + if (context.include()) { + super.writeBoolean(state); + } + } + + @Override + public void writeNull() throws IOException { + if (context.include()) { + super.writeNull(); + } + } + + @Override + public void copyCurrentEvent(JsonParser jp) throws IOException { + if (context.include()) { + super.copyCurrentEvent(jp); + } + } + + @Override + public void copyCurrentStructure(JsonParser jp) throws IOException { + if (context.include()) { + super.copyCurrentStructure(jp); + } + } + + @Override + protected void writeRawValue(byte[] content, OutputStream bos) throws IOException { + if (context.include()) { + super.writeRawValue(content, bos); + } + } + + @Override + protected void writeRawValue(byte[] content, int offset, int length, OutputStream bos) throws IOException { + if (context.include()) { + 
super.writeRawValue(content, offset, length, bos); + } + } + + @Override + protected void writeRawValue(InputStream content, OutputStream bos) throws IOException { + if (context.include()) { + super.writeRawValue(content, bos); + } + } + + @Override + protected void writeRawValue(BytesReference content, OutputStream bos) throws IOException { + if (context.include()) { + super.writeRawValue(content, bos); + } + } + + @Override + public void close() throws IOException { + contexts.clear(); + super.close(); + } +} diff --git a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java index e7111849f7e..388cd992e2b 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java +++ b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContent.java @@ -20,11 +20,15 @@ package org.elasticsearch.common.xcontent.yaml; import com.fasterxml.jackson.core.JsonEncoding; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; +import org.elasticsearch.common.xcontent.support.filtering.FilteringJsonGenerator; import java.io.*; @@ -58,14 +62,27 @@ public class YamlXContent implements XContent { throw new ElasticsearchParseException("yaml does not support stream parsing..."); } + private XContentGenerator newXContentGenerator(JsonGenerator jsonGenerator) { + return new YamlXContentGenerator(new BaseJsonGenerator(jsonGenerator)); + } + @Override public XContentGenerator createGenerator(OutputStream os) throws IOException { - return new YamlXContentGenerator(yamlFactory.createGenerator(os, 
JsonEncoding.UTF8)); + return newXContentGenerator(yamlFactory.createGenerator(os, JsonEncoding.UTF8)); + } + + @Override + public XContentGenerator createGenerator(OutputStream os, String[] filters) throws IOException { + if (CollectionUtils.isEmpty(filters)) { + return createGenerator(os); + } + FilteringJsonGenerator yamlGenerator = new FilteringJsonGenerator(yamlFactory.createGenerator(os, JsonEncoding.UTF8), filters); + return new YamlXContentGenerator(yamlGenerator); } @Override public XContentGenerator createGenerator(Writer writer) throws IOException { - return new YamlXContentGenerator(yamlFactory.createGenerator(writer)); + return newXContentGenerator(yamlFactory.createGenerator(writer)); } @Override diff --git a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java index 350554ac6f1..62967247a82 100644 --- a/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java +++ b/src/main/java/org/elasticsearch/common/xcontent/yaml/YamlXContentGenerator.java @@ -19,10 +19,10 @@ package org.elasticsearch.common.xcontent.yaml; -import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.dataformat.yaml.YAMLParser; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.BaseJsonGenerator; import org.elasticsearch.common.xcontent.json.JsonXContentGenerator; import java.io.IOException; @@ -34,7 +34,7 @@ import java.io.OutputStream; */ public class YamlXContentGenerator extends JsonXContentGenerator { - public YamlXContentGenerator(JsonGenerator generator) { + public YamlXContentGenerator(BaseJsonGenerator generator) { super(generator); } diff --git a/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java index 3b7c765d7d1..4573e2dac12 100644 --- 
a/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java +++ b/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java @@ -24,8 +24,8 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -48,29 +48,29 @@ import java.util.*; * and once the results are back, it makes sure to schedule a reroute to make sure those results will * be taken into account. */ -public abstract class AsyncShardFetch implements Releasable { +public abstract class AsyncShardFetch implements Releasable { /** * An action that lists the relevant shard data that needs to be fetched. 
*/ - public interface List, NodeResponse extends NodeOperationResponse> { + public interface List, NodeResponse extends BaseNodeResponse> { void list(ShardId shardId, IndexMetaData indexMetaData, String[] nodesIds, ActionListener listener); } protected final ESLogger logger; protected final String type; private final ShardId shardId; - private final List, T> action; + private final List, T> action; private final Map> cache = new HashMap<>(); private final Set nodesToIgnore = new HashSet<>(); private boolean closed; @SuppressWarnings("unchecked") - protected AsyncShardFetch(ESLogger logger, String type, ShardId shardId, List, T> action) { + protected AsyncShardFetch(ESLogger logger, String type, ShardId shardId, List, T> action) { this.logger = logger; this.type = type; this.shardId = shardId; - this.action = (List, T>) action; + this.action = (List, T>) action; } public synchronized void close() { @@ -253,9 +253,9 @@ public abstract class AsyncShardFetch implement // visible for testing void asyncFetch(final ShardId shardId, final String[] nodesIds, final MetaData metaData) { IndexMetaData indexMetaData = metaData.index(shardId.getIndex()); - action.list(shardId, indexMetaData, nodesIds, new ActionListener>() { + action.list(shardId, indexMetaData, nodesIds, new ActionListener>() { @Override - public void onResponse(NodesOperationResponse response) { + public void onResponse(BaseNodesResponse response) { processAsyncFetch(shardId, response.getNodes(), response.failures()); } @@ -274,7 +274,7 @@ public abstract class AsyncShardFetch implement * The result of a fetch operation. Make sure to first check {@link #hasData()} before * fetching the actual data. 
*/ - public static class FetchResult { + public static class FetchResult { private final ShardId shardId; private final Map data; diff --git a/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index a7647f73e50..f4385947dc8 100644 --- a/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -25,8 +25,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; -import org.elasticsearch.action.support.nodes.NodesOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -513,12 +513,12 @@ public class GatewayAllocator extends AbstractComponent { return changed; } - static class InternalAsyncFetch extends AsyncShardFetch { + static class InternalAsyncFetch extends AsyncShardFetch { private final ClusterService clusterService; private final AllocationService allocationService; - public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List, T> action, + public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List, T> action, ClusterService clusterService, AllocationService allocationService) { super(logger, type, shardId, action); this.clusterService = clusterService; diff --git a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 900a2e7ffc7..6fa20433283 100644 --- a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ 
b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -45,7 +45,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesListGatewayMetaState extends TransportNodesOperationAction { +public class TransportNodesListGatewayMetaState extends TransportNodesAction { public static final String ACTION_NAME = "internal:gateway/local/meta_state"; @@ -113,7 +113,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA return true; } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { public Request() { } @@ -133,7 +133,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA } } - public static class NodesGatewayMetaState extends NodesOperationResponse { + public static class NodesGatewayMetaState extends BaseNodesResponse { private FailedNodeException[] failures; @@ -170,7 +170,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { NodeRequest() { } @@ -190,7 +190,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesOperationA } } - public static class NodeGatewayMetaState extends NodeOperationResponse { + public static class NodeGatewayMetaState extends BaseNodeResponse { private MetaData metaData; diff --git a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 649e0a1c31a..d5692b3a5aa 100644 --- a/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -50,7 +50,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; * We use this to find out which node holds the latest shard 
version and which of them used to be a primary in order to allocate * shards after node or cluster restarts. */ -public class TransportNodesListGatewayStartedShards extends TransportNodesOperationAction +public class TransportNodesListGatewayStartedShards extends TransportNodesAction implements AsyncShardFetch.List { public static final String ACTION_NAME = "internal:gateway/local/started_shards"; @@ -148,7 +148,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat return true; } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { private ShardId shardId; private String indexUUID; @@ -186,7 +186,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat } } - public static class NodesGatewayStartedShards extends NodesOperationResponse { + public static class NodesGatewayStartedShards extends BaseNodesResponse { private FailedNodeException[] failures; @@ -221,7 +221,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { private ShardId shardId; private String indexUUID; @@ -258,7 +258,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesOperat } } - public static class NodeGatewayStartedShards extends NodeOperationResponse { + public static class NodeGatewayStartedShards extends BaseNodeResponse { private long version = -1; diff --git a/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java b/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java index 6b2cd15a18d..3cfecc4cf07 100644 --- a/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java +++ b/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java @@ -19,6 +19,7 @@ package org.elasticsearch.index; +import org.elasticsearch.common.logging.DeprecationLogger; import 
org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -30,9 +31,8 @@ import org.elasticsearch.index.settings.IndexSettings; public abstract class AbstractIndexComponent implements IndexComponent { protected final ESLogger logger; - + protected final DeprecationLogger deprecationLogger; protected final Index index; - protected final Settings indexSettings; /** @@ -45,6 +45,7 @@ public abstract class AbstractIndexComponent implements IndexComponent { this.index = index; this.indexSettings = indexSettings; this.logger = Loggers.getLogger(getClass(), indexSettings, index); + this.deprecationLogger = new DeprecationLogger(logger); } @Override diff --git a/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java index e1c23ceb2f8..1cc37b8cda8 100644 --- a/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java +++ b/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java @@ -251,6 +251,11 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable defaultSearchAnalyzer = analyzers.containsKey("default_search") ? analyzers.get("default_search") : analyzers.get("default"); defaultSearchQuoteAnalyzer = analyzers.containsKey("default_search_quote") ? analyzers.get("default_search_quote") : defaultSearchAnalyzer; + for (Map.Entry analyzer : analyzers.entrySet()) { + if (analyzer.getKey().startsWith("_")) { + throw new IllegalArgumentException("analyzer name must not start with '_'. 
got \"" + analyzer.getKey() + "\""); + } + } this.analyzers = ImmutableMap.copyOf(analyzers); } diff --git a/src/main/java/org/elasticsearch/index/engine/Engine.java b/src/main/java/org/elasticsearch/index/engine/Engine.java index 3a2f1eba93d..e2811bb0698 100644 --- a/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -21,15 +21,7 @@ package org.elasticsearch.index.engine; import com.google.common.base.Preconditions; -import org.apache.lucene.index.FilterLeafReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SegmentCommitInfo; -import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.index.SegmentReader; -import org.apache.lucene.index.Term; +import org.apache.lucene.index.*; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherManager; @@ -324,7 +316,18 @@ public abstract class Engine implements Closeable { return new CommitStats(getLastCommittedSegmentInfos()); } - + /** + * Read the last segments info from the commit pointed to by the searcher manager + */ + protected static SegmentInfos readLastCommittedSegmentInfos(SearcherManager sm) throws IOException { + IndexSearcher searcher = sm.acquire(); + try { + IndexCommit latestCommit = ((DirectoryReader) searcher.getIndexReader()).getIndexCommit(); + return Lucene.readSegmentInfos(latestCommit); + } finally { + sm.release(searcher); + } + } /** * Global stats on segments. 
diff --git a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 151298acd07..393dff33907 100644 --- a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -275,7 +275,7 @@ public class InternalEngine extends Engine { try { final DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter, true), shardId); searcherManager = new SearcherManager(directoryReader, searcherFactory); - lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); + lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); success = true; return searcherManager; } catch (IOException e) { diff --git a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java index e09acaa0e28..301f6176a00 100644 --- a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java @@ -79,7 +79,7 @@ public class ShadowEngine extends Engine { if (Lucene.waitForIndex(store.directory(), nonexistentRetryTime)) { reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(store.directory()), shardId); this.searcherManager = new SearcherManager(reader, searcherFactory); - this.lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); + this.lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); success = true; } else { throw new IndexShardException(shardId, "failed to open a shadow engine after" + @@ -148,7 +148,7 @@ public class ShadowEngine extends Engine { store.incRef(); try (ReleasableLock lock = readLock.acquire()) { // reread the last committed segment infos - lastCommittedSegmentInfos = store.readLastCommittedSegmentsInfo(); + lastCommittedSegmentInfos = readLastCommittedSegmentInfos(searcherManager); } 
catch (Throwable e) { if (isClosed.get() == false) { logger.warn("failed to read latest segment infos on flush", e); diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index c5387cf3459..e4f61db2df1 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.analysis.FieldNameAnalyzer; import java.util.Collection; import java.util.Iterator; -import java.util.List; import java.util.Map; /** @@ -98,11 +97,11 @@ public final class DocumentFieldMappers implements Iterable { return fieldMappers.get(field); } - List simpleMatchToIndexNames(String pattern) { + Collection simpleMatchToIndexNames(String pattern) { return fieldMappers.simpleMatchToIndexNames(pattern); } - public List simpleMatchToFullName(String pattern) { + public Collection simpleMatchToFullName(String pattern) { return fieldMappers.simpleMatchToFullName(pattern); } diff --git a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java index 18f71d142db..d751c95910e 100644 --- a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java +++ b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.mapper; -import com.google.common.collect.Lists; +import com.google.common.collect.Sets; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.CopyOnWriteHashMap; import org.elasticsearch.common.regex.Regex; @@ -27,7 +27,7 @@ import org.elasticsearch.common.regex.Regex; import java.util.Collection; import java.util.Collections; import java.util.Iterator; -import java.util.List; +import java.util.Set; /** * A class that holds a map of field mappers from name, index name, and full 
name. @@ -114,8 +114,8 @@ class FieldMappersLookup implements Iterable { /** * Returns a list of the index names of a simple match regex like pattern against full name and index name. */ - public List simpleMatchToIndexNames(String pattern) { - List fields = Lists.newArrayList(); + public Collection simpleMatchToIndexNames(String pattern) { + Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { fields.add(fieldMapper.names().indexName()); @@ -129,8 +129,8 @@ class FieldMappersLookup implements Iterable { /** * Returns a list of the full names of a simple match regex like pattern against full name and index name. */ - public List simpleMatchToFullName(String pattern) { - List fields = Lists.newArrayList(); + public Collection simpleMatchToFullName(String pattern) { + Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { fields.add(fieldMapper.names().fullName()); diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 8a6e5264f8f..85a3aae5bd4 100755 --- a/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -481,14 +481,14 @@ public class MapperService extends AbstractIndexComponent { * Returns all the fields that match the given pattern. If the pattern is prefixed with a type * then the fields will be returned with a type prefix. */ - public List simpleMatchToIndexNames(String pattern) { + public Collection simpleMatchToIndexNames(String pattern) { return simpleMatchToIndexNames(pattern, null); } /** * Returns all the fields that match the given pattern, with an optional narrowing * based on a list of types. 
*/ - public List simpleMatchToIndexNames(String pattern, @Nullable String[] types) { + public Collection simpleMatchToIndexNames(String pattern, @Nullable String[] types) { if (Regex.isSimpleMatchPattern(pattern) == false) { // no wildcards return ImmutableList.of(pattern); diff --git a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java index 3da9a66b843..31ee3bdc37e 100644 --- a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java @@ -19,11 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.*; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; @@ -32,7 +28,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; import java.io.IOException; -import java.util.List; +import java.util.Collection; /** * @@ -87,7 +83,7 @@ public class ExistsQueryParser extends BaseQueryParserTemp { fieldPattern = fieldPattern + ".*"; } - List fields = parseContext.simpleMatchToIndexNames(fieldPattern); + Collection fields = parseContext.simpleMatchToIndexNames(fieldPattern); if (fields.isEmpty()) { // no fields exists, so we should not match anything return Queries.newMatchNoDocsQuery(); diff --git a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java index e0a25f6b806..ccedef6da87 100644 --- a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java +++ 
b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java @@ -19,11 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.*; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; @@ -32,7 +28,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; import java.io.IOException; -import java.util.List; +import java.util.Collection; /** * @@ -99,7 +95,7 @@ public class MissingQueryParser extends BaseQueryParserTemp { fieldPattern = fieldPattern + ".*"; } - List fields = parseContext.simpleMatchToIndexNames(fieldPattern); + Collection fields = parseContext.simpleMatchToIndexNames(fieldPattern); if (fields.isEmpty()) { if (existence) { // if we ask for existence of fields, and we found none, then we should match on all diff --git a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index c010d6eda08..2b87c4cd1a6 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.query; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; @@ -38,11 +37,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.analysis.AnalysisService; 
import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperBuilders; -import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.similarity.SimilarityService; @@ -52,12 +47,7 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** * @@ -298,7 +288,7 @@ public class QueryParseContext { } } - public List simpleMatchToIndexNames(String pattern) { + public Collection simpleMatchToIndexNames(String pattern) { return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern, getTypes()); } diff --git a/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java b/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java index 9f9bdd5bde2..bb19dd49080 100644 --- a/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java +++ b/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.shard; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -30,15 +31,15 @@ import org.elasticsearch.index.settings.IndexSettings; public abstract class AbstractIndexShardComponent implements IndexShardComponent { protected final ESLogger logger; - + protected final 
DeprecationLogger deprecationLogger; protected final ShardId shardId; - protected final Settings indexSettings; protected AbstractIndexShardComponent(ShardId shardId, @IndexSettings Settings indexSettings) { this.shardId = shardId; this.indexSettings = indexSettings; this.logger = Loggers.getLogger(getClass(), indexSettings, shardId); + this.deprecationLogger = new DeprecationLogger(logger); } @Override diff --git a/src/main/java/org/elasticsearch/index/store/Store.java b/src/main/java/org/elasticsearch/index/store/Store.java index 722c00c506d..c889dd16c20 100644 --- a/src/main/java/org/elasticsearch/index/store/Store.java +++ b/src/main/java/org/elasticsearch/index/store/Store.java @@ -151,8 +151,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * @throws IOException if the index is corrupted or the segments file is not present */ private static SegmentInfos readSegmentsInfo(IndexCommit commit, Directory directory) throws IOException { + assert commit == null || commit.getDirectory() == directory; try { - return commit == null ? Lucene.readSegmentInfos(directory) : Lucene.readSegmentInfos(commit, directory); + return commit == null ? 
Lucene.readSegmentInfos(directory) : Lucene.readSegmentInfos(commit); } catch (EOFException eof) { // TODO this should be caught by lucene - EOF is almost certainly an index corruption throw new CorruptIndexException("Read past EOF while reading segment infos", "commit(" + commit + ")", eof); diff --git a/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 04cfa6bda3d..3490f6d2d8c 100644 --- a/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -58,7 +58,7 @@ import java.util.concurrent.atomic.AtomicReferenceArray; /** * */ -public class TransportNodesListShardStoreMetaData extends TransportNodesOperationAction +public class TransportNodesListShardStoreMetaData extends TransportNodesAction implements AsyncShardFetch.List { public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store"; @@ -241,7 +241,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } - static class Request extends NodesOperationRequest { + static class Request extends BaseNodesRequest { private ShardId shardId; @@ -277,7 +277,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } } - public static class NodesStoreFilesMetaData extends NodesOperationResponse { + public static class NodesStoreFilesMetaData extends BaseNodesResponse { private FailedNodeException[] failures; @@ -314,7 +314,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } - static class NodeRequest extends NodeOperationRequest { + static class NodeRequest extends BaseNodeRequest { private ShardId shardId; @@ -344,7 +344,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesOperatio } } - public static class 
NodeStoreFilesMetaData extends NodeOperationResponse { + public static class NodeStoreFilesMetaData extends BaseNodeResponse { private StoreFilesMetaData storeFilesMetaData; diff --git a/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java b/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java index c99c92faccb..49d140e203b 100644 --- a/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java +++ b/src/main/java/org/elasticsearch/monitor/process/ProcessInfo.java @@ -19,10 +19,10 @@ package org.elasticsearch.monitor.process; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; @@ -50,7 +50,7 @@ public class ProcessInfo implements Streamable, Serializable, ToXContent { public ProcessInfo(long id, long maxFileDescriptors) { this.id = id; this.maxFileDescriptors = maxFileDescriptors; - this.mlockall = Natives.LOCAL_MLOCKALL; + this.mlockall = Bootstrap.isMemoryLocked(); } public long refreshInterval() { diff --git a/src/main/java/org/elasticsearch/node/NodeBuilder.java b/src/main/java/org/elasticsearch/node/NodeBuilder.java index edd89efbee4..9107cf0833e 100644 --- a/src/main/java/org/elasticsearch/node/NodeBuilder.java +++ b/src/main/java/org/elasticsearch/node/NodeBuilder.java @@ -45,7 +45,7 @@ import org.elasticsearch.common.settings.Settings; *

    *

      * Node node = NodeBuilder.nodeBuilder()
    - *                      .settings(ImmutableSettings.settingsBuilder().put("node.data", false)
    + *                      .settings(Settings.settingsBuilder().put("node.data", false)
      *                      .node();
      * 
    *

    diff --git a/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index 693188307e3..02820158a6e 100644 --- a/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -115,7 +115,7 @@ public class BytesRestResponse extends RestResponse { } private static XContentBuilder convert(RestChannel channel, RestStatus status, Throwable t) throws IOException { - XContentBuilder builder = channel.newBuilder().startObject(); + XContentBuilder builder = channel.newErrorBuilder().startObject(); if (t == null) { builder.field("error", "unknown"); } else if (channel.detailedErrorsEnabled()) { diff --git a/src/main/java/org/elasticsearch/rest/RestChannel.java b/src/main/java/org/elasticsearch/rest/RestChannel.java index 773d939493f..cd185b07958 100644 --- a/src/main/java/org/elasticsearch/rest/RestChannel.java +++ b/src/main/java/org/elasticsearch/rest/RestChannel.java @@ -44,10 +44,15 @@ public abstract class RestChannel { } public XContentBuilder newBuilder() throws IOException { - return newBuilder(request.hasContent() ? request.content() : null); + return newBuilder(request.hasContent() ? request.content() : null, request.hasParam("filter_path")); } - public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource) throws IOException { + public XContentBuilder newErrorBuilder() throws IOException { + // Disable filtering when building error responses + return newBuilder(request.hasContent() ? 
request.content() : null, false); + } + + public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource, boolean useFiltering) throws IOException { XContentType contentType = XContentType.fromRestContentType(request.param("format", request.header("Content-Type"))); if (contentType == null) { // try and guess it from the auto detect source @@ -59,7 +64,9 @@ public abstract class RestChannel { // default to JSON contentType = XContentType.JSON; } - XContentBuilder builder = new XContentBuilder(XContentFactory.xContent(contentType), bytesOutput()); + + String[] filters = useFiltering ? request.paramAsStringArrayOrEmptyIfAll("filter_path") : null; + XContentBuilder builder = new XContentBuilder(XContentFactory.xContent(contentType), bytesOutput(), filters); if (request.paramAsBoolean("pretty", false)) { builder.prettyPrint().lfAtEnd(); } diff --git a/src/main/java/org/elasticsearch/rest/RestController.java b/src/main/java/org/elasticsearch/rest/RestController.java index f2e36d2b22c..3e3360337de 100644 --- a/src/main/java/org/elasticsearch/rest/RestController.java +++ b/src/main/java/org/elasticsearch/rest/RestController.java @@ -187,7 +187,7 @@ public class RestController extends AbstractLifecycleComponent { // error_trace cannot be used when we disable detailed errors if (channel.detailedErrorsEnabled() == false && request.paramAsBoolean("error_trace", false)) { try { - XContentBuilder builder = channel.newBuilder(); + XContentBuilder builder = channel.newErrorBuilder(); builder.startObject().field("error","error traces in responses are disabled.").endObject().string(); RestResponse response = new BytesRestResponse(BAD_REQUEST, builder); response.addHeader("Content-Type", "application/json"); diff --git a/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java index db3954ec5e8..1fe07156d99 100644 --- 
a/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java +++ b/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java @@ -73,7 +73,7 @@ public class RestGetSourceAction extends BaseRestHandler { client.get(getRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(GetResponse response) throws Exception { - XContentBuilder builder = channel.newBuilder(response.getSourceInternal()); + XContentBuilder builder = channel.newBuilder(response.getSourceInternal(), false); if (!response.isExists()) { return new BytesRestResponse(NOT_FOUND, builder); } else { diff --git a/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index a0d5b279e71..3d3ecdfa880 100644 --- a/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -88,7 +88,7 @@ public class RestIndexAction extends BaseRestHandler { indexRequest.opType(IndexRequest.OpType.fromString(sOpType)); } catch (IllegalArgumentException eia){ try { - XContentBuilder builder = channel.newBuilder(); + XContentBuilder builder = channel.newErrorBuilder(); channel.sendResponse(new BytesRestResponse(BAD_REQUEST, builder.startObject().field("error", eia.getMessage()).endObject())); } catch (IOException e1) { logger.warn("Failed to send response", e1); diff --git a/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java b/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java index 35e3f2cc473..33145be6a47 100644 --- a/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java +++ b/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java @@ -85,7 +85,7 @@ public class RestPutIndexedScriptAction extends BaseRestHandler { putRequest.opType(IndexRequest.OpType.fromString(sOpType)); } 
catch (IllegalArgumentException eia){ try { - XContentBuilder builder = channel.newBuilder(); + XContentBuilder builder = channel.newErrorBuilder(); channel.sendResponse(new BytesRestResponse(BAD_REQUEST, builder.startObject().field("error", eia.getMessage()).endObject())); return; } catch (IOException e1) { diff --git a/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 0d06031a92e..550b0f63978 100644 --- a/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -195,6 +195,20 @@ public class RestSearchAction extends BaseRestHandler { } } } + String sFieldDataFields = request.param("fielddata_fields"); + if (sFieldDataFields != null) { + if (searchSourceBuilder == null) { + searchSourceBuilder = new SearchSourceBuilder(); + } + if (Strings.hasText(sFieldDataFields)) { + String[] sFields = Strings.splitStringByCommaToArray(sFieldDataFields); + if (sFields != null) { + for (String field : sFields) { + searchSourceBuilder.fieldDataField(field); + } + } + } + } FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request); if (fetchSourceContext != null) { if (searchSourceBuilder == null) { diff --git a/src/main/java/org/elasticsearch/rest/action/support/RestActions.java b/src/main/java/org/elasticsearch/rest/action/support/RestActions.java index 67ceb83c33b..bd17c1d5944 100644 --- a/src/main/java/org/elasticsearch/rest/action/support/RestActions.java +++ b/src/main/java/org/elasticsearch/rest/action/support/RestActions.java @@ -22,7 +22,7 @@ package org.elasticsearch.rest.action.support; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.QuerySourceBuilder; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; @@ -62,7 +62,7 @@ public class RestActions { static final XContentBuilderString FAILURES = new XContentBuilderString("failures"); } - public static void buildBroadcastShardsHeader(XContentBuilder builder, ToXContent.Params params, BroadcastOperationResponse response) throws IOException { + public static void buildBroadcastShardsHeader(XContentBuilder builder, ToXContent.Params params, BroadcastResponse response) throws IOException { buildBroadcastShardsHeader(builder, params, response.getTotalShards(), response.getSuccessfulShards(), response.getFailedShards(), response.getShardFailures()); } diff --git a/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index 27141aa08df..3e6eb713529 100644 --- a/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.io.UTF8StreamWriter; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -32,8 +33,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.*; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; +import java.util.*; /** */ @@ -96,11 +96,12 @@ public class RestTable { return new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, bytesOut.bytes()); } - private static List buildDisplayHeaders(Table table, RestRequest request) { - 
String pHeaders = request.param("h"); + static List buildDisplayHeaders(Table table, RestRequest request) { List display = new ArrayList<>(); - if (pHeaders != null) { - for (String possibility : Strings.splitStringByCommaToArray(pHeaders)) { + if (request.hasParam("h")) { + Set headers = expandHeadersFromRequest(table, request); + + for (String possibility : headers) { DisplayHeader dispHeader = null; if (table.getAsMap().containsKey(possibility)) { @@ -147,6 +148,40 @@ public class RestTable { return display; } + /** + * Extracts all the required fields from the RestRequest 'h' parameter. In order to support wildcards like + * 'bulk.*' this needs potentially parse all the configured headers and its aliases and needs to ensure + * that everything is only added once to the returned headers, even if 'h=bulk.*.bulk.*' is specified + * or some headers are contained twice due to matching aliases + */ + private static Set expandHeadersFromRequest(Table table, RestRequest request) { + Set headers = new LinkedHashSet<>(table.getHeaders().size()); + + // check headers and aliases + for (String header : Strings.splitStringByCommaToArray(request.param("h"))) { + if (Regex.isSimpleMatchPattern(header)) { + for (Table.Cell tableHeaderCell : table.getHeaders()) { + String configuredHeader = tableHeaderCell.value.toString(); + if (Regex.simpleMatch(header, configuredHeader)) { + headers.add(configuredHeader); + } else if (tableHeaderCell.attr.containsKey("alias")) { + String[] aliases = Strings.splitStringByCommaToArray(tableHeaderCell.attr.get("alias")); + for (String alias : aliases) { + if (Regex.simpleMatch(header, alias)) { + headers.add(configuredHeader); + break; + } + } + } + } + } else { + headers.add(header); + } + } + + return headers; + } + public static int[] buildHelpWidths(Table table, RestRequest request) { int[] width = new int[3]; for (Table.Cell cell : table.getHeaders()) { diff --git 
a/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java index eacc7471743..303d6c784e3 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java +++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -63,6 +64,18 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio return aggregations; } + /** + * Create a new copy of this {@link Aggregation} with the same settings as + * this {@link Aggregation} and contains the provided sub-aggregations. + * + * @param subAggregations + * the buckets to use in the new {@link Aggregation} + * @return the new {@link Aggregation} + */ + public InternalSingleBucketAggregation create(InternalAggregations subAggregations) { + return newAggregation(getName(), getDocCount(), subAggregations); + } + /** * Create a new empty sub aggregation. This must be a new instance on each call. 
*/ diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java index d68660d4c7d..cb14b0df4c2 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import java.util.ArrayList; @@ -45,20 +46,34 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator { @SuppressWarnings("unchecked") @Override public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) { - @SuppressWarnings("rawtypes") - InternalMultiBucketAggregation multiBucketsAgg = (InternalMultiBucketAggregation) aggregation; - List buckets = multiBucketsAgg.getBuckets(); - List newBuckets = new ArrayList<>(); - for (int i = 0; i < buckets.size(); i++) { - InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i); - InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext); - List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); - aggs.add(aggToAdd); - InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs), bucket); - newBuckets.add(newBucket); - } + if (aggregation instanceof 
InternalMultiBucketAggregation) { + @SuppressWarnings("rawtypes") + InternalMultiBucketAggregation multiBucketsAgg = (InternalMultiBucketAggregation) aggregation; + List buckets = multiBucketsAgg.getBuckets(); + List newBuckets = new ArrayList<>(); + for (int i = 0; i < buckets.size(); i++) { + InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i); + InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext); + List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), + AGGREGATION_TRANFORM_FUNCTION)); + aggs.add(aggToAdd); + InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs), + bucket); + newBuckets.add(newBucket); + } - return multiBucketsAgg.create(newBuckets); + return multiBucketsAgg.create(newBuckets); + } else if (aggregation instanceof InternalSingleBucketAggregation) { + InternalSingleBucketAggregation singleBucketAgg = (InternalSingleBucketAggregation) aggregation; + InternalAggregation aggToAdd = doReduce(singleBucketAgg.getAggregations(), reduceContext); + List aggs = new ArrayList<>(Lists.transform(singleBucketAgg.getAggregations().asList(), + AGGREGATION_TRANFORM_FUNCTION)); + aggs.add(aggToAdd); + return singleBucketAgg.create(new InternalAggregations(aggs)); + } else { + throw new IllegalStateException("Aggregation [" + aggregation.getName() + "] must be a bucket aggregation [" + + aggregation.type().name() + "]"); + } } public abstract InternalAggregation doReduce(Aggregations aggregations, ReduceContext context); diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java index b4f54fd4957..261f811a751 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java +++ 
b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java @@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelParser; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelParserMapper; import org.elasticsearch.search.aggregations.support.format.ValueFormat; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; @@ -140,12 +139,12 @@ public class MovAvgParser implements PipelineAggregator.Parser { formatter = ValueFormat.Patternable.Number.format(format).formatter(); } - MovAvgModelParser modelParser = movAvgModelParserMapper.get(model); + MovAvgModel.AbstractModelParser modelParser = movAvgModelParserMapper.get(model); if (modelParser == null) { throw new SearchParseException(context, "Unknown model [" + model + "] specified. 
Valid options are:" + movAvgModelParserMapper.getAllNames().toString(), parser.getTokenLocation()); } - MovAvgModel movAvgModel = modelParser.parse(settings); + MovAvgModel movAvgModel = modelParser.parse(settings, pipelineAggregatorName, context, window); return new MovAvgPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, formatter, gapPolicy, window, predict, movAvgModel); diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index c6e3d943bb8..af2db718846 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -111,34 +111,36 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { EvictingQueue values = EvictingQueue.create(this.window); long lastKey = 0; - long interval = Long.MAX_VALUE; Object currentKey; for (InternalHistogram.Bucket bucket : buckets) { Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy); currentKey = bucket.getKey(); + // Default is to reuse existing bucket. Simplifies the rest of the logic, + // since we only change newBucket if we can add to it + InternalHistogram.Bucket newBucket = bucket; + if (!(thisBucketValue == null || thisBucketValue.equals(Double.NaN))) { values.offer(thisBucketValue); - double movavg = model.next(values); + // Some models (e.g. 
HoltWinters) have certain preconditions that must be met + if (model.hasValue(values.size())) { + double movavg = model.next(values); - List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), FUNCTION)); - aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList(), metaData())); - InternalHistogram.Bucket newBucket = factory.createBucket(currentKey, bucket.getDocCount(), new InternalAggregations( - aggs), bucket.getKeyed(), bucket.getFormatter()); - newBuckets.add(newBucket); - - } else { - newBuckets.add(bucket); + List aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION)); + aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList(), metaData())); + newBucket = factory.createBucket(currentKey, bucket.getDocCount(), new InternalAggregations( + aggs), bucket.getKeyed(), bucket.getFormatter()); + } } + newBuckets.add(newBucket); + if (predict > 0) { if (currentKey instanceof Number) { - interval = Math.min(interval, ((Number) bucket.getKey()).longValue() - lastKey); lastKey = ((Number) bucket.getKey()).longValue(); } else if (currentKey instanceof DateTime) { - interval = Math.min(interval, ((DateTime) bucket.getKey()).getMillis() - lastKey); lastKey = ((DateTime) bucket.getKey()).getMillis(); } else { throw new AggregationExecutionException("Expected key of type Number or DateTime but got [" + currentKey + "]"); @@ -147,7 +149,6 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { } - if (buckets.size() > 0 && predict > 0) { boolean keyed; @@ -159,9 +160,11 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { for (int i = 0; i < predictions.length; i++) { List aggs = new ArrayList<>(); aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList(), metaData())); - InternalHistogram.Bucket newBucket = factory.createBucket(lastKey + (interval * (i + 1)), 0, new InternalAggregations( + long newKey = 
histo.getRounding().nextRoundingValue(lastKey); + InternalHistogram.Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations( aggs), keyed, formatter); newBuckets.add(newBucket); + lastKey = newKey; } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java index b6ee7f8ddb6..2f33855d50e 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collection; @@ -83,7 +84,7 @@ public class EwmaModel extends MovAvgModel { out.writeDouble(alpha); } - public static class SingleExpModelParser implements MovAvgModelParser { + public static class SingleExpModelParser extends AbstractModelParser { @Override public String getName() { @@ -91,15 +92,13 @@ public class EwmaModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { - Double alpha; - if (settings == null || (alpha = (Double)settings.get("alpha")) == null) { - alpha = 0.5; - } + double alpha = parseDoubleParam(context, settings, "alpha", 0.5); return new EwmaModel(alpha); } + } public static class EWMAModelBuilder implements MovAvgModelBuilder { diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java 
b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java index a78a5486460..3a7fd963c43 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.*; @@ -142,7 +143,7 @@ public class HoltLinearModel extends MovAvgModel { out.writeDouble(beta); } - public static class DoubleExpModelParser implements MovAvgModelParser { + public static class DoubleExpModelParser extends AbstractModelParser { @Override public String getName() { @@ -150,19 +151,10 @@ public class HoltLinearModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { - - Double alpha; - Double beta; - - if (settings == null || (alpha = (Double)settings.get("alpha")) == null) { - alpha = 0.5; - } - - if (settings == null || (beta = (Double)settings.get("beta")) == null) { - beta = 0.5; - } + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { + double alpha = parseDoubleParam(context, settings, "alpha", 0.5); + double beta = parseDoubleParam(context, settings, "beta", 0.5); return new HoltLinearModel(alpha, beta); } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java new file mode 100644 index 00000000000..ef3c7354500 --- /dev/null +++ 
b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java @@ -0,0 +1,422 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.pipeline.movavg.models; + + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.SearchParseException; +import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.*; + +/** + * Calculate a triple exponential weighted moving average + */ +public class HoltWintersModel extends MovAvgModel { + + protected static final ParseField NAME_FIELD = new ParseField("holt_winters"); + + public enum SeasonalityType { + ADDITIVE((byte) 0, "add"), MULTIPLICATIVE((byte) 1, "mult"); + + /** + * Parse a string SeasonalityType into the byte enum + * 
+ * @param text SeasonalityType in string format (e.g. "add") + * @return SeasonalityType enum + */ + @Nullable + public static SeasonalityType parse(String text) { + if (text == null) { + return null; + } + SeasonalityType result = null; + for (SeasonalityType policy : values()) { + if (policy.parseField.match(text)) { + if (result == null) { + result = policy; + } else { + throw new IllegalStateException("Text can be parsed to 2 different seasonality types: text=[" + text + + "], " + "policies=" + Arrays.asList(result, policy)); + } + } + } + if (result == null) { + final List validNames = new ArrayList<>(); + for (SeasonalityType policy : values()) { + validNames.add(policy.getName()); + } + throw new ElasticsearchParseException("Invalid seasonality type: [" + text + "], accepted values: " + validNames); + } + return result; + } + + private final byte id; + private final ParseField parseField; + + SeasonalityType(byte id, String name) { + this.id = id; + this.parseField = new ParseField(name); + } + + /** + * Serialize the SeasonalityType to the output stream + * + * @param out + * @throws IOException + */ + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(id); + } + + /** + * Deserialize the SeasonalityType from the input stream + * + * @param in + * @return SeasonalityType Enum + * @throws IOException + */ + public static SeasonalityType readFrom(StreamInput in) throws IOException { + byte id = in.readByte(); + for (SeasonalityType seasonalityType : values()) { + if (id == seasonalityType.id) { + return seasonalityType; + } + } + throw new IllegalStateException("Unknown Seasonality Type with id [" + id + "]"); + } + + /** + * Return the english-formatted name of the SeasonalityType + * + * @return English representation of SeasonalityType + */ + public String getName() { + return parseField.getPreferredName(); + } + } + + + /** + * Controls smoothing of data. Alpha = 1 retains no memory of past values + * (e.g. 
random walk), while alpha = 0 retains infinite memory of past values (e.g. + * mean of the series). Useful values are somewhere in between + */ + private double alpha; + + /** + * Equivalent to alpha, but controls the smoothing of the trend instead of the data + */ + private double beta; + + private double gamma; + + private int period; + + private SeasonalityType seasonalityType; + + private boolean pad; + private double padding; + + public HoltWintersModel(double alpha, double beta, double gamma, int period, SeasonalityType seasonalityType, boolean pad) { + this.alpha = alpha; + this.beta = beta; + this.gamma = gamma; + this.period = period; + this.seasonalityType = seasonalityType; + this.pad = pad; + + // Only pad if we are multiplicative and padding is enabled + // The padding amount is not currently user-configurable...i dont see a reason to expose it? + this.padding = seasonalityType.equals(SeasonalityType.MULTIPLICATIVE) && pad ? 0.0000000001 : 0; + } + + + @Override + public boolean hasValue(int windowLength) { + // We need at least (period * 2) data-points (e.g. two "seasons") + return windowLength >= period * 2; + } + + /** + * Predicts the next `n` values in the series, using the smoothing model to generate new values. + * Unlike the other moving averages, HoltWinters has forecasting/prediction built into the algorithm. + * Prediction is more than simply adding the next prediction to the window and repeating. HoltWinters + * will extrapolate into the future by applying the trend and seasonal information to the smoothed data. 
+ * + * @param values Collection of numerics to movingAvg, usually windowed + * @param numPredictions Number of newly generated predictions to return + * @param Type of numeric + * @return Returns an array of doubles, since most smoothing methods operate on floating points + */ + @Override + public double[] predict(Collection values, int numPredictions) { + return next(values, numPredictions); + } + + @Override + public double next(Collection values) { + return next(values, 1)[0]; + } + + /** + * Calculate a doubly exponential weighted moving average + * + * @param values Collection of values to calculate avg for + * @param numForecasts number of forecasts into the future to return + * + * @param Type T extending Number + * @return Returns a Double containing the moving avg for the window + */ + public double[] next(Collection values, int numForecasts) { + + if (values.size() < period * 2) { + // We need at least two full "seasons" to use HW + // This should have been caught earlier, we can't do anything now...bail + throw new AggregationExecutionException("Holt-Winters aggregation requires at least (2 * period == 2 * " + + period + " == "+(2 * period)+") data-points to function. 
Only [" + values.size() + "] were provided."); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[values.size()]; + + int counter = 0; + double[] vs = new double[values.size()]; + for (T v : values) { + vs[counter] = v.doubleValue() + padding; + counter += 1; + } + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + // TODO if perf is a problem, we can specialize a subclass to avoid conditionals on each iteration + if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } else { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } + + b = beta * (s - last_s) + (1 - beta) * last_b; + + if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } else { + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } + + last_s = s; + last_b = b; + } + + double[] forecastValues = new double[numForecasts]; + int seasonCounter = (values.size() - 1) - period; + + for (int i = 0; i < numForecasts; i++) { + + // TODO perhaps pad out seasonal to a power of 2 and use a mask instead of modulo? 
+ if (seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)) { + forecastValues[i] = s + (i * b) * seasonal[seasonCounter % values.size()]; + } else { + forecastValues[i] = s + (i * b) + seasonal[seasonCounter % values.size()]; + } + + seasonCounter += 1; + } + + return forecastValues; + } + + public static final MovAvgModelStreams.Stream STREAM = new MovAvgModelStreams.Stream() { + @Override + public MovAvgModel readResult(StreamInput in) throws IOException { + double alpha = in.readDouble(); + double beta = in.readDouble(); + double gamma = in.readDouble(); + int period = in.readVInt(); + SeasonalityType type = SeasonalityType.readFrom(in); + boolean pad = in.readBoolean(); + + return new HoltWintersModel(alpha, beta, gamma, period, type, pad); + } + + @Override + public String getName() { + return NAME_FIELD.getPreferredName(); + } + }; + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(STREAM.getName()); + out.writeDouble(alpha); + out.writeDouble(beta); + out.writeDouble(gamma); + out.writeVInt(period); + seasonalityType.writeTo(out); + out.writeBoolean(pad); + } + + public static class HoltWintersModelParser extends AbstractModelParser { + + @Override + public String getName() { + return NAME_FIELD.getPreferredName(); + } + + @Override + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { + + double alpha = parseDoubleParam(context, settings, "alpha", 0.5); + double beta = parseDoubleParam(context, settings, "beta", 0.5); + double gamma = parseDoubleParam(context, settings, "gamma", 0.5); + int period = parseIntegerParam(context, settings, "period", 1); + + if (windowSize < 2 * period) { + throw new SearchParseException(context, "Field [window] must be at least twice as large as the period when " + + "using Holt-Winters. 
Value provided was [" + windowSize + "], which is less than (2*period) == " + + (2 * period), null); + } + + SeasonalityType seasonalityType = SeasonalityType.ADDITIVE; + + if (settings != null) { + Object value = settings.get("type"); + if (value != null) { + if (value instanceof String) { + seasonalityType = SeasonalityType.parse((String)value); + } else { + throw new SearchParseException(context, "Parameter [type] must be a String, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + } + } + + boolean pad = parseBoolParam(context, settings, "pad", seasonalityType.equals(SeasonalityType.MULTIPLICATIVE)); + + return new HoltWintersModel(alpha, beta, gamma, period, seasonalityType, pad); + } + } + + public static class HoltWintersModelBuilder implements MovAvgModelBuilder { + + private double alpha = 0.5; + private double beta = 0.5; + private double gamma = 0.5; + private int period = 1; + private SeasonalityType seasonalityType = SeasonalityType.ADDITIVE; + private boolean pad = true; + + /** + * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values + * (e.g. a random walk), while alpha = 0 retains infinite memory of past values (e.g. + * the series mean). Useful values are somewhere in between. Defaults to 0.5. 
+ * + * @param alpha A double between 0-1 inclusive, controls data smoothing + * + * @return The builder to continue chaining + */ + public HoltWintersModelBuilder alpha(double alpha) { + this.alpha = alpha; + return this; + } + + /** + * Equivalent to alpha, but controls the smoothing of the trend instead of the data + * + * @param beta a double between 0-1 inclusive, controls trend smoothing + * + * @return The builder to continue chaining + */ + public HoltWintersModelBuilder beta(double beta) { + this.beta = beta; + return this; + } + + public HoltWintersModelBuilder gamma(double gamma) { + this.gamma = gamma; + return this; + } + + public HoltWintersModelBuilder period(int period) { + this.period = period; + return this; + } + + public HoltWintersModelBuilder seasonalityType(SeasonalityType type) { + this.seasonalityType = type; + return this; + } + + public HoltWintersModelBuilder pad(boolean pad) { + this.pad = pad; + return this; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName()); + builder.startObject(MovAvgParser.SETTINGS.getPreferredName()); + builder.field("alpha", alpha); + builder.field("beta", beta); + builder.field("gamma", gamma); + builder.field("period", period); + builder.field("type", seasonalityType.getName()); + builder.field("pad", pad); + builder.endObject(); + return builder; + } + } +} + diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java index 24780a345c7..c894f776ed4 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collection; @@ -70,7 +71,7 @@ public class LinearModel extends MovAvgModel { out.writeString(STREAM.getName()); } - public static class LinearModelParser implements MovAvgModelParser { + public static class LinearModelParser extends AbstractModelParser { @Override public String getName() { @@ -78,7 +79,7 @@ public class LinearModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { return new LinearModel(); } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java index 1fa30811f9c..5f41b24531b 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java @@ -21,14 +21,31 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models; import com.google.common.collect.EvictingQueue; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchParseException; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Map; public abstract class MovAvgModel { + /** + * Checks to see this model can produce a new value, without actually running the algo. 
+ * This can be used for models that have certain preconditions that need to be met in order + * to short-circuit execution + * + * @param windowLength Length of current window + * @return Returns `true` if calling next() will produce a value, `false` otherwise + */ + public boolean hasValue(int windowLength) { + // Default implementation can always provide a next() value + return true; + } + /** * Returns the next value in the series, according to the underlying smoothing model * @@ -90,6 +107,122 @@ public abstract class MovAvgModel { * @throws IOException */ public abstract void writeTo(StreamOutput out) throws IOException; + + /** + * Abstract class which also provides some concrete parsing functionality. + */ + public abstract static class AbstractModelParser { + + /** + * Returns the name of the model + * + * @return The model's name + */ + public abstract String getName(); + + /** + * Parse a settings hash that is specific to this model + * + * @param settings Map of settings, extracted from the request + * @param pipelineName Name of the parent pipeline agg + * @param context The parser context that we are in + * @param windowSize Size of the window for this moving avg + * @return A fully built moving average model + */ + public abstract MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize); + + + /** + * Extracts a 0-1 inclusive double from the settings map, otherwise throws an exception + * + * @param context Search query context + * @param settings Map of settings provided to this model + * @param name Name of parameter we are attempting to extract + * @param defaultValue Default value to be used if value does not exist in map + * + * @throws SearchParseException + * + * @return Double value extracted from settings map + */ + protected double parseDoubleParam(SearchContext context, @Nullable Map settings, String name, double defaultValue) { + if (settings == null) { + return defaultValue; + } + + Object 
value = settings.get(name); + if (value == null) { + return defaultValue; + } else if (value instanceof Double) { + double v = (Double)value; + if (v >= 0 && v <= 1) { + return v; + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be between 0-1 inclusive. Provided" + + "value was [" + v + "]", null); + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be a double, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + + /** + * Extracts an integer from the settings map, otherwise throws an exception + * + * @param context Search query context + * @param settings Map of settings provided to this model + * @param name Name of parameter we are attempting to extract + * @param defaultValue Default value to be used if value does not exist in map + * + * @throws SearchParseException + * + * @return Integer value extracted from settings map + */ + protected int parseIntegerParam(SearchContext context, @Nullable Map settings, String name, int defaultValue) { + if (settings == null) { + return defaultValue; + } + + Object value = settings.get(name); + if (value == null) { + return defaultValue; + } else if (value instanceof Integer) { + return (Integer)value; + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be an integer, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + + /** + * Extracts a boolean from the settings map, otherwise throws an exception + * + * @param context Search query context + * @param settings Map of settings provided to this model + * @param name Name of parameter we are attempting to extract + * @param defaultValue Default value to be used if value does not exist in map + * + * @throws SearchParseException + * + * @return Boolean value extracted from settings map + */ + protected boolean parseBoolParam(SearchContext context, @Nullable Map settings, String name, boolean defaultValue) { + if (settings == null) 
{ + return defaultValue; + } + + Object value = settings.get(name); + if (value == null) { + return defaultValue; + } else if (value instanceof Boolean) { + return (Boolean)value; + } + + throw new SearchParseException(context, "Parameter [" + name + "] must be a boolean, type `" + + value.getClass().getSimpleName() + "` provided instead", null); + } + } + } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java index 12a61d42d2a..6233270edf7 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelModule.java @@ -31,23 +31,24 @@ import java.util.List; */ public class MovAvgModelModule extends AbstractModule { - private List> parsers = Lists.newArrayList(); + private List> parsers = Lists.newArrayList(); public MovAvgModelModule() { registerParser(SimpleModel.SimpleModelParser.class); registerParser(LinearModel.LinearModelParser.class); registerParser(EwmaModel.SingleExpModelParser.class); registerParser(HoltLinearModel.DoubleExpModelParser.class); + registerParser(HoltWintersModel.HoltWintersModelParser.class); } - public void registerParser(Class parser) { + public void registerParser(Class parser) { parsers.add(parser); } @Override protected void configure() { - Multibinder parserMapBinder = Multibinder.newSetBinder(binder(), MovAvgModelParser.class); - for (Class clazz : parsers) { + Multibinder parserMapBinder = Multibinder.newSetBinder(binder(), MovAvgModel.AbstractModelParser.class); + for (Class clazz : parsers) { parserMapBinder.addBinding().to(clazz); } bind(MovAvgModelParserMapper.class); diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java 
b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java index 2115f7e047b..bfd0c15c1c0 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelParserMapper.java @@ -32,19 +32,19 @@ import java.util.Set; */ public class MovAvgModelParserMapper { - protected ImmutableMap movAvgParsers; + protected ImmutableMap movAvgParsers; @Inject - public MovAvgModelParserMapper(Set parsers) { - MapBuilder builder = MapBuilder.newMapBuilder(); - for (MovAvgModelParser parser : parsers) { + public MovAvgModelParserMapper(Set parsers) { + MapBuilder builder = MapBuilder.newMapBuilder(); + for (MovAvgModel.AbstractModelParser parser : parsers) { builder.put(parser.getName(), parser); } movAvgParsers = builder.immutableMap(); } public @Nullable - MovAvgModelParser get(String parserName) { + MovAvgModel.AbstractModelParser get(String parserName) { return movAvgParsers.get(parserName); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java index 68ffc3dd9aa..78055b063eb 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser; +import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; import java.util.Collection; @@ -63,7 +64,7 @@ public class SimpleModel extends MovAvgModel { out.writeString(STREAM.getName()); } - 
public static class SimpleModelParser implements MovAvgModelParser { + public static class SimpleModelParser extends AbstractModelParser { @Override public String getName() { @@ -71,7 +72,7 @@ public class SimpleModel extends MovAvgModel { } @Override - public MovAvgModel parse(@Nullable Map settings) { + public MovAvgModel parse(@Nullable Map settings, String pipelineName, SearchContext context, int windowSize) { return new SimpleModel(); } } diff --git a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java index 41f90b94d84..7f5dd14005c 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java +++ b/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/TransportMovAvgModelModule.java @@ -36,6 +36,7 @@ public class TransportMovAvgModelModule extends AbstractModule { registerStream(LinearModel.STREAM); registerStream(EwmaModel.STREAM); registerStream(HoltLinearModel.STREAM); + registerStream(HoltWintersModel.STREAM); } public void registerStream(MovAvgModelStreams.Stream stream) { diff --git a/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 57dfe4a362b..55e74d9303d 100644 --- a/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -47,10 +47,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; /** * A search source builder allowing to easily build search source. 
Simple @@ -188,6 +185,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Constructs a new search source builder with a query from a map. */ + @SuppressWarnings("unchecked") public SearchSourceBuilder query(Map query) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -251,6 +249,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Constructs a new search source builder with a query from a map. */ + @SuppressWarnings("unchecked") public SearchSourceBuilder postFilter(Map postFilter) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -415,9 +414,6 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Set the rescore window size for rescores that don't specify their window. - * - * @param defaultRescoreWindowSize - * @return */ public SearchSourceBuilder defaultRescoreWindowSize(int defaultRescoreWindowSize) { this.defaultRescoreWindowSize = defaultRescoreWindowSize; @@ -427,6 +423,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Sets a raw (xcontent / json) addAggregation. 
*/ + @SuppressWarnings("unchecked") public SearchSourceBuilder aggregations(Map aggregations) { try { XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); @@ -482,9 +479,6 @@ public class SearchSourceBuilder extends ToXContentToBytes { /** * Indicates whether the response should contain the stored _source for * every hit - * - * @param fetch - * @return */ public SearchSourceBuilder fetchSource(boolean fetch) { if (this.fetchSourceContext == null) { @@ -563,9 +557,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { if (fieldNames == null) { fieldNames = new ArrayList<>(); } - for (String field : fields) { - fieldNames.add(field); - } + Collections.addAll(fieldNames, fields); return this; } @@ -777,7 +769,7 @@ public class SearchSourceBuilder extends ToXContentToBytes { } if (trackScores) { - builder.field("track_scores", trackScores); + builder.field("track_scores", true); } if (indexBoost != null) { diff --git a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java index a61fabb0034..c997624ff60 100644 --- a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java @@ -63,7 +63,7 @@ public class FastVectorHighlighter implements Highlighter { FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; FieldMapper mapper = highlighterContext.mapper; - if (!(mapper.fieldType().storeTermVectors() && mapper.fieldType().storeTermVectorOffsets() && mapper.fieldType().storeTermVectorPositions())) { + if (canHighlight(mapper) == false) { throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with term vector with position offsets to be used with fast vector highlighter"); } @@ -79,13 +79,13 @@ public class FastVectorHighlighter implements Highlighter { if 
(field.fieldOptions().requireFieldMatch()) { if (cache.fieldMatchFieldQuery == null) { // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...) - cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query.originalQuery(), hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); + cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.fieldMatchFieldQuery; } else { if (cache.noFieldMatchFieldQuery == null) { // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...) - cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query.originalQuery(), hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); + cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch()); } fieldQuery = cache.noFieldMatchFieldQuery; } @@ -177,6 +177,11 @@ public class FastVectorHighlighter implements Highlighter { } } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() && fieldMapper.fieldType().storeTermVectorPositions(); + } + private class MapperHighlightEntry { public FragListBuilder fragListBuilder; public FragmentsBuilder fragmentsBuilder; diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index a730b6612d0..5b9ab72641a 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.highlight; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.search.Query; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; @@ -34,7 +34,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.SearchContext; -import java.util.List; +import java.util.Collection; import java.util.Map; import static com.google.common.collect.Maps.newHashMap; @@ -44,6 +44,8 @@ import static com.google.common.collect.Maps.newHashMap; */ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { + private static final ImmutableList STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = ImmutableList.of("fvh", "postings", "plain"); + private final Highlighters highlighters; @Inject @@ -75,7 +77,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { public void hitExecute(SearchContext context, HitContext hitContext) { Map highlightFields = newHashMap(); for (SearchContextHighlight.Field field : context.highlight().fields()) { - List fieldNamesToHighlight; + Collection fieldNamesToHighlight; if (Regex.isSimpleMatchPattern(field.field())) { DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().type()); fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field()); @@ -90,6 +92,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { } } + boolean fieldNameContainsWildcards = field.field().contains("*"); for (String fieldName : fieldNamesToHighlight) { FieldMapper fieldMapper = getMapperForField(fieldName, context, hitContext); if (fieldMapper == null) { @@ -98,35 +101,32 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { String 
highlighterType = field.fieldOptions().highlighterType(); if (highlighterType == null) { - boolean useFastVectorHighlighter = fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() && fieldMapper.fieldType().storeTermVectorPositions(); - if (useFastVectorHighlighter) { - highlighterType = "fvh"; - } else if (fieldMapper.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { - highlighterType = "postings"; - } else { - highlighterType = "plain"; + for(String highlighterCandidate : STANDARD_HIGHLIGHTERS_BY_PRECEDENCE) { + if (highlighters.get(highlighterCandidate).canHighlight(fieldMapper)) { + highlighterType = highlighterCandidate; + break; + } } + assert highlighterType != null; } - Highlighter highlighter = highlighters.get(highlighterType); if (highlighter == null) { throw new IllegalArgumentException("unknown highlighter type [" + highlighterType + "] for the field [" + fieldName + "]"); } - HighlighterContext.HighlightQuery highlightQuery; - if (field.fieldOptions().highlightQuery() == null) { - highlightQuery = new HighlighterContext.HighlightQuery(context.parsedQuery().query(), context.query(), context.queryRewritten()); - } else { - highlightQuery = new HighlighterContext.HighlightQuery(field.fieldOptions().highlightQuery(), field.fieldOptions().highlightQuery(), false); - } + Query highlightQuery = field.fieldOptions().highlightQuery() == null ? 
context.parsedQuery().query() : field.fieldOptions().highlightQuery(); HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, hitContext, highlightQuery); + + if ((highlighter.canHighlight(fieldMapper) == false) && fieldNameContainsWildcards) { + // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight + continue; + } HighlightField highlightField = highlighter.highlight(highlighterContext); if (highlightField != null) { highlightFields.put(highlightField.name(), highlightField); } } } - hitContext.hit().highlightFields(highlightFields); } diff --git a/src/main/java/org/elasticsearch/search/highlight/Highlighter.java b/src/main/java/org/elasticsearch/search/highlight/Highlighter.java index 407cdc7b1ae..26c3dc0bf21 100644 --- a/src/main/java/org/elasticsearch/search/highlight/Highlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/Highlighter.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.search.highlight; +import org.elasticsearch.index.mapper.FieldMapper; + /** * */ @@ -26,4 +28,6 @@ public interface Highlighter { String[] names(); HighlightField highlight(HighlighterContext highlighterContext); + + boolean canHighlight(FieldMapper fieldMapper); } diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java b/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java index f3dd9ff1ba8..e791aad4310 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlighterContext.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.highlight; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.Query; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -34,10 +33,10 @@ public class HighlighterContext { public final FieldMapper mapper; public final 
SearchContext context; public final FetchSubPhase.HitContext hitContext; - public final HighlightQuery query; + public final Query query; public HighlighterContext(String fieldName, SearchContextHighlight.Field field, FieldMapper mapper, SearchContext context, - FetchSubPhase.HitContext hitContext, HighlightQuery query) { + FetchSubPhase.HitContext hitContext, Query query) { this.fieldName = fieldName; this.field = field; this.mapper = mapper; @@ -45,28 +44,4 @@ public class HighlighterContext { this.hitContext = hitContext; this.query = query; } - - public static class HighlightQuery { - private final Query originalQuery; - private final Query query; - private final boolean queryRewritten; - - protected HighlightQuery(Query originalQuery, Query query, boolean queryRewritten) { - this.originalQuery = originalQuery; - this.query = query; - this.queryRewritten = queryRewritten; - } - - public boolean queryRewritten() { - return queryRewritten; - } - - public Query originalQuery() { - return originalQuery; - } - - public Query query() { - return query; - } - } } diff --git a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index 3e0eca6c468..460b2df05cd 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -23,8 +23,8 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; -import org.apache.lucene.search.Query; import org.apache.lucene.search.highlight.*; +import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; @@ -69,8 +69,7 @@ public class PlainHighlighter implements 
Highlighter { org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper); if (entry == null) { - Query query = highlighterContext.query.originalQuery(); - QueryScorer queryScorer = new CustomQueryScorer(query, field.fieldOptions().requireFieldMatch() ? mapper.names().indexName() : null); + QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.names().indexName() : null); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; if (field.fieldOptions().numberOfFragments() == 0) { @@ -119,7 +118,14 @@ public class PlainHighlighter implements Highlighter { } } } catch (Exception e) { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + if (e instanceof BytesRefHash.MaxBytesLengthExceededException) { + // this can happen if for example a field is not_analyzed and ignore_above option is set. + // the field will be ignored when indexing but the huge term is still in the source and + // the plain highlighter will parse the source and try to analyze it. 
+ return null; + } else { + throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + } } if (field.fieldOptions().scoreOrdered()) { CollectionUtil.introSort(fragsList, new Comparator() { @@ -166,6 +172,11 @@ public class PlainHighlighter implements Highlighter { return null; } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return true; + } + private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, TokenStream tokenStream) throws IOException { try { if (!tokenStream.hasAttribute(OffsetAttribute.class)) { diff --git a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java index 1614f294ff2..dcbb810d4dd 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java @@ -50,7 +50,7 @@ public class PostingsHighlighter implements Highlighter { FieldMapper fieldMapper = highlighterContext.mapper; SearchContextHighlight.Field field = highlighterContext.field; - if (fieldMapper.fieldType().indexOptions() != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { + if (canHighlight(fieldMapper) == false) { throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with positions and offsets in the postings list to be used with postings highlighter"); } @@ -91,7 +91,7 @@ public class PostingsHighlighter implements Highlighter { } IndexSearcher searcher = new IndexSearcher(hitContext.reader()); - Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.names().indexName(), highlighterContext.query.originalQuery(), searcher, hitContext.docId(), numberOfFragments); + Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); for 
(Snippet fieldSnippet : fieldSnippets) { if (Strings.hasText(fieldSnippet.getText())) { snippets.add(fieldSnippet); @@ -126,6 +126,11 @@ public class PostingsHighlighter implements Highlighter { return null; } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return fieldMapper.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS; + } + private static String mergeFieldValues(List fieldValues, char valuesSeparator) { //postings highlighter accepts all values in a single string, as offsets etc. need to match with content //loaded from stored fields, we merge all values using a proper separator diff --git a/src/main/java/org/elasticsearch/tribe/TribeService.java b/src/main/java/org/elasticsearch/tribe/TribeService.java index 540d3254e5c..546977c3aca 100644 --- a/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.support.master.TransportMasterNodeReadOperationAction; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -99,7 +99,7 @@ public class TribeService extends AbstractLifecycleComponent { if (sb.get("cluster.name") == null) { sb.put("cluster.name", "tribe_" + Strings.randomBase64UUID()); // make sure it won't join other tribe nodes in the same JVM } - sb.put(TransportMasterNodeReadOperationAction.FORCE_LOCAL_SETTING, true); + sb.put(TransportMasterNodeReadAction.FORCE_LOCAL_SETTING, true); return sb.build(); } diff --git a/src/packaging/common/systemd/elasticsearch.conf b/src/packaging/common/systemd/elasticsearch.conf new file mode 100644 index 
00000000000..98dd5e61c25 --- /dev/null +++ b/src/packaging/common/systemd/elasticsearch.conf @@ -0,0 +1 @@ +d ${packaging.elasticsearch.pid.dir} 0755 ${packaging.elasticsearch.user} ${packaging.elasticsearch.group} - - diff --git a/src/packaging/common/systemd/elasticsearch.service b/src/packaging/common/systemd/elasticsearch.service new file mode 100644 index 00000000000..a4c269973bf --- /dev/null +++ b/src/packaging/common/systemd/elasticsearch.service @@ -0,0 +1,50 @@ +[Unit] +Description=Elasticsearch +Documentation=http://www.elastic.co +Wants=network-online.target +After=network-online.target + +[Service] +Environment=ES_HOME=${packaging.elasticsearch.home.dir} +Environment=CONF_DIR=${packaging.elasticsearch.conf.dir} +Environment=CONF_FILE=${packaging.elasticsearch.conf.dir}/elasticsearch.yml +Environment=DATA_DIR=${packaging.elasticsearch.data.dir} +Environment=LOG_DIR=${packaging.elasticsearch.log.dir} +Environment=PID_DIR=${packaging.elasticsearch.pid.dir} +EnvironmentFile=-${packaging.env.file} + +User=${packaging.elasticsearch.user} +Group=${packaging.elasticsearch.group} + +ExecStart=${packaging.elasticsearch.bin.dir}/elasticsearch \ + -Des.pidfile=$PID_DIR/elasticsearch.pid \ + -Des.default.path.home=$ES_HOME \ + -Des.default.path.logs=$LOG_DIR \ + -Des.default.path.data=$DATA_DIR \ + -Des.default.config=$CONF_FILE \ + -Des.default.path.conf=$CONF_DIR + +# Connects standard output to /dev/null +StandardOutput=null + +# Connects standard error to journal +StandardError=journal + +# When a JVM receives a SIGTERM signal it exits with code 143 +SuccessExitStatus=143 + +# Specifies the maximum file descriptor number that can be opened by this process +LimitNOFILE=${packaging.os.max.open.files} + +# Specifies the maximum number of bytes of memory that may be locked into RAM +# Set to "infinity" if you use the 'bootstrap.mlockall: true' option +# in elasticsearch.yml and 'MAX_LOCKED_MEMORY=unlimited' in ${packaging.env.file} +#LimitMEMLOCK=infinity + +# 
Shutdown delay in seconds, before process is tried to be killed with KILL (if configured) +TimeoutStopSec=20 + +[Install] +WantedBy=multi-user.target + +# Built for ${project.name}-${project.version} (${packaging.type}) diff --git a/src/packaging/common/systemd/sysctl/elasticsearch.conf b/src/packaging/common/systemd/sysctl/elasticsearch.conf new file mode 100644 index 00000000000..052cd89cf0b --- /dev/null +++ b/src/packaging/common/systemd/sysctl/elasticsearch.conf @@ -0,0 +1 @@ +vm.max_map_count=${packaging.os.max.map.count} diff --git a/src/packaging/deb/init.d/elasticsearch b/src/packaging/deb/init.d/elasticsearch index 336030310cc..ad192157231 100755 --- a/src/packaging/deb/init.d/elasticsearch +++ b/src/packaging/deb/init.d/elasticsearch @@ -94,6 +94,9 @@ MAX_MAP_COUNT=262144 # Path to the GC log file #ES_GC_LOG_FILE=/var/log/elasticsearch/gc.log +# Elasticsearch PID file directory +PID_DIR="${packaging.elasticsearch.pid.dir}" + # End of variables that can be overwritten in $DEFAULT # overwrite settings from default file @@ -102,7 +105,7 @@ if [ -f "$DEFAULT" ]; then fi # Define other required variables -PID_FILE=/var/run/$NAME.pid +PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch DAEMON_OPTS="-d -p $PID_FILE --default.config=$CONF_FILE --default.path.home=$ES_HOME --default.path.logs=$LOG_DIR --default.path.data=$DATA_DIR --default.path.conf=$CONF_DIR" diff --git a/src/packaging/deb/systemd/elasticsearch.service b/src/packaging/deb/systemd/elasticsearch.service deleted file mode 100644 index 1945f9fb12a..00000000000 --- a/src/packaging/deb/systemd/elasticsearch.service +++ /dev/null @@ -1,30 +0,0 @@ -[Unit] -Description=Starts and stops a single elasticsearch instance on this system -Documentation=http://www.elasticsearch.org -Wants=network-online.target -After=network-online.target - -[Service] -Environment=CONF_FILE=${packaging.elasticsearch.conf.dir}/elasticsearch.yml -Environment=ES_HOME=${packaging.elasticsearch.home.dir} 
-Environment=LOG_DIR=${packaging.elasticsearch.log.dir} -Environment=DATA_DIR=${packaging.elasticsearch.data.dir} -Environment=CONF_DIR=${packaging.elasticsearch.conf.dir} -EnvironmentFile=-${packaging.env.file} -User=elasticsearch -Group=elasticsearch -ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ - -Des.default.config=$CONF_FILE \ - -Des.default.path.home=$ES_HOME \ - -Des.default.path.logs=$LOG_DIR \ - -Des.default.path.data=$DATA_DIR \ - -Des.default.path.conf=$CONF_DIR -# See MAX_OPEN_FILES in sysconfig -LimitNOFILE=65535 -# See MAX_LOCKED_MEMORY in sysconfig, use "infinity" when MAX_LOCKED_MEMORY=unlimited and using bootstrap.mlockall: true -#LimitMEMLOCK=infinity -# Shutdown delay in seconds, before process is tried to be killed with KILL (if configured) -TimeoutStopSec=20 - -[Install] -WantedBy=multi-user.target diff --git a/src/packaging/rpm/init.d/elasticsearch b/src/packaging/rpm/init.d/elasticsearch index 1b666c64578..5fe9d748952 100644 --- a/src/packaging/rpm/init.d/elasticsearch +++ b/src/packaging/rpm/init.d/elasticsearch @@ -41,6 +41,7 @@ LOG_DIR="${packaging.elasticsearch.log.dir}" DATA_DIR="${packaging.elasticsearch.data.dir}" CONF_DIR="${packaging.elasticsearch.conf.dir}" CONF_FILE="${packaging.elasticsearch.conf.dir}/elasticsearch.yml" +PID_DIR="${packaging.elasticsearch.pid.dir}" # Source the default env file ES_ENV_FILE="${packaging.env.file}" @@ -50,7 +51,7 @@ fi exec="$ES_HOME/bin/elasticsearch" prog="elasticsearch" -pidfile=/var/run/elasticsearch/${prog}.pid +pidfile="$PID_DIR/${prog}.pid" export ES_HEAP_SIZE export ES_HEAP_NEWSIZE diff --git a/src/packaging/rpm/systemd/elasticsearch.conf b/src/packaging/rpm/systemd/elasticsearch.conf deleted file mode 100644 index 9db225e74a7..00000000000 --- a/src/packaging/rpm/systemd/elasticsearch.conf +++ /dev/null @@ -1 +0,0 @@ -d /run/elasticsearch 0755 elasticsearch elasticsearch - - diff --git a/src/packaging/rpm/systemd/elasticsearch.service 
b/src/packaging/rpm/systemd/elasticsearch.service deleted file mode 100644 index f5bb286b8fa..00000000000 --- a/src/packaging/rpm/systemd/elasticsearch.service +++ /dev/null @@ -1,25 +0,0 @@ -[Unit] -Description=Starts and stops a single elasticsearch instance on this system -Documentation=http://www.elasticsearch.org - -[Service] -Type=forking -Environment=CONF_FILE=${packaging.elasticsearch.conf.dir}/elasticsearch.yml -Environment=ES_HOME=${packaging.elasticsearch.home.dir} -Environment=LOG_DIR=${packaging.elasticsearch.log.dir} -Environment=DATA_DIR=${packaging.elasticsearch.data.dir} -Environment=CONF_DIR=${packaging.elasticsearch.conf.dir} -EnvironmentFile=-${packaging.env.file} -User=elasticsearch -Group=elasticsearch -PIDFile=/var/run/elasticsearch/elasticsearch.pid -ExecStart=/usr/share/elasticsearch/bin/elasticsearch -d -p /var/run/elasticsearch/elasticsearch.pid -Des.default.config=$CONF_FILE -Des.default.path.home=$ES_HOME -Des.default.path.logs=$LOG_DIR -Des.default.path.data=$DATA_DIR -Des.default.path.conf=$CONF_DIR -# See MAX_OPEN_FILES in sysconfig -LimitNOFILE=65535 -# See MAX_LOCKED_MEMORY in sysconfig, use "infinity" when MAX_LOCKED_MEMORY=unlimited and using bootstrap.mlockall: true -#LimitMEMLOCK=infinity -# Shutdown delay in seconds, before process is tried to be killed with KILL (if configured) -TimeoutStopSec=20 - -[Install] -WantedBy=multi-user.target diff --git a/src/packaging/rpm/systemd/sysctl.d/elasticsearch.conf b/src/packaging/rpm/systemd/sysctl.d/elasticsearch.conf deleted file mode 100644 index 62ea54d8697..00000000000 --- a/src/packaging/rpm/systemd/sysctl.d/elasticsearch.conf +++ /dev/null @@ -1 +0,0 @@ -vm.max_map_count=262144 diff --git a/src/test/java/org/elasticsearch/action/IndicesRequestTests.java b/src/test/java/org/elasticsearch/action/IndicesRequestTests.java index 62df5e8fd54..7825d62fe81 100644 --- a/src/test/java/org/elasticsearch/action/IndicesRequestTests.java +++ 
b/src/test/java/org/elasticsearch/action/IndicesRequestTests.java @@ -57,8 +57,6 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.count.CountAction; -import org.elasticsearch.action.count.CountRequest; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.exists.ExistsAction; @@ -356,18 +354,6 @@ public class IndicesRequestTests extends ElasticsearchIntegrationTest { assertIndicesSubset(indices, multiGetShardAction); } - @Test - public void testCount() { - String countShardAction = CountAction.NAME + "[s]"; - interceptTransportActions(countShardAction); - - CountRequest countRequest = new CountRequest(randomIndicesOrAliases()); - internalCluster().clientNodeClient().count(countRequest).actionGet(); - - clearInterceptedActions(); - assertSameIndices(countRequest, countShardAction); - } - @Test public void testExists() { String existsShardAction = ExistsAction.NAME + "[s]"; diff --git a/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java b/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java index 2c27101c5d2..4b3ad487495 100644 --- a/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java +++ b/src/test/java/org/elasticsearch/action/admin/HotThreadsTest.java @@ -32,14 +32,9 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.index.query.QueryBuilders.andQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.notQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import 
static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.*; import static org.hamcrest.Matchers.lessThan; /** @@ -90,7 +85,7 @@ public class HotThreadsTest extends ElasticsearchIntegrationTest { boolean success = false; try { assertThat(nodeHotThreads, notNullValue()); - Map nodesMap = nodeHotThreads.getNodesMap(); + Map nodesMap = nodeHotThreads.getNodesMap(); assertThat(nodesMap.size(), equalTo(cluster().size())); for (NodeHotThreads ht : nodeHotThreads) { assertNotNull(ht.getHotThreads()); diff --git a/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 3e1e762b45c..040bb81ef6b 100644 --- a/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -117,4 +117,56 @@ public class BulkRequestTests extends ElasticsearchTestCase { assertThat(bulkRequest.requests().get(1), instanceOf(UpdateRequest.class)); assertThat(bulkRequest.requests().get(2), instanceOf(DeleteRequest.class)); } + + @Test + public void testSimpleBulk6() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the wrong format of line 1"); + } catch (IllegalArgumentException e) { + assertThat("message contains error about the wrong format of line 1: " + e.getMessage(), + e.getMessage().contains("Malformed action/metadata line [1], expected a simple value for field [_source] but found [START_OBJECT]"), 
equalTo(true)); + } + } + + @Test + public void testSimpleBulk7() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the wrong format of line 5"); + } catch (IllegalArgumentException e) { + assertThat("message contains error about the wrong format of line 5: " + e.getMessage(), + e.getMessage().contains("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]"), equalTo(true)); + } + } + + @Test + public void testSimpleBulk8() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the unknown paramater _foo"); + } catch (IllegalArgumentException e) { + assertThat("message contains error about the unknown paramater _foo: " + e.getMessage(), + e.getMessage().contains("Action/metadata line [3] contains an unknown parameter [_foo]"), equalTo(true)); + } + } + + @Test + public void testSimpleBulk9() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json"); + BulkRequest bulkRequest = new BulkRequest(); + try { + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + fail("should have thrown an exception about the wrong format of line 3"); + } catch (IllegalArgumentException e) { + assertThat("message contains error about the wrong format of line 3: " + e.getMessage(), + e.getMessage().contains("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]"), equalTo(true)); + } + } 
} diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk6.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk6.json new file mode 100644 index 00000000000..e9c97965595 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk6.json @@ -0,0 +1,6 @@ +{"index": {"_index": "test", "_type": "doc", "_source": {"hello": "world"}, "_id": 0}} +{"field1": "value0"} +{"index": {"_index": "test", "_type": "doc", "_id": 1}} +{"field1": "value1"} +{"index": {"_index": "test", "_type": "doc", "_id": 2}} +{"field1": "value2"} diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk7.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk7.json new file mode 100644 index 00000000000..a642d9ce4fe --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk7.json @@ -0,0 +1,6 @@ +{"index": {"_index": "test", "_type": "doc", "_id": 0}} +{"field1": "value0"} +{"index": {"_index": "test", "_type": "doc", "_id": 1}} +{"field1": "value1"} +{"index": {"_index": "test", "_type": "doc", "_id": 2, "_unkown": ["foo", "bar"]}} +{"field1": "value2"} diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk8.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk8.json new file mode 100644 index 00000000000..c1a94b1d159 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk8.json @@ -0,0 +1,6 @@ +{"index": {"_index": "test", "_type": "doc", "_id": 0}} +{"field1": "value0"} +{"index": {"_index": "test", "_type": "doc", "_id": 1, "_foo": "bar"}} +{"field1": "value1"} +{"index": {"_index": "test", "_type": "doc", "_id": 2}} +{"field1": "value2"} diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk9.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk9.json new file mode 100644 index 00000000000..ebdbf750116 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk9.json @@ -0,0 +1,4 @@ +{"index": {}} +{"field1": "value0"} +{"index": ["bar"] } 
+{"field1": "value1"} diff --git a/src/test/java/org/elasticsearch/action/count/CountRequestTests.java b/src/test/java/org/elasticsearch/action/count/CountRequestTests.java new file mode 100644 index 00000000000..a972ff56d12 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/count/CountRequestTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.count; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.QuerySourceBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; + +public class CountRequestTests extends ElasticsearchTestCase { + + @Test + public void testToSearchRequest() { + CountRequest countRequest; + if (randomBoolean()) { + countRequest = new CountRequest(randomStringArray()); + } else { + countRequest = new CountRequest(); + } + if (randomBoolean()) { + countRequest.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); + } + if (randomBoolean()) { + countRequest.types(randomStringArray()); + } + if (randomBoolean()) { + countRequest.routing(randomStringArray()); + } + if (randomBoolean()) { + countRequest.preference(randomAsciiOfLengthBetween(1, 10)); + } + if (randomBoolean()) { + countRequest.source(new QuerySourceBuilder().setQuery(QueryBuilders.termQuery("field", "value"))); + } + if (randomBoolean()) { + countRequest.minScore(randomFloat()); + } + if (randomBoolean()) { + countRequest.terminateAfter(randomIntBetween(1, 1000)); + } + + SearchRequest searchRequest = countRequest.toSearchRequest(); + assertThat(searchRequest.indices(), equalTo(countRequest.indices())); + assertThat(searchRequest.indicesOptions(), equalTo(countRequest.indicesOptions())); + assertThat(searchRequest.types(), equalTo(countRequest.types())); + assertThat(searchRequest.routing(), equalTo(countRequest.routing())); + 
assertThat(searchRequest.preference(), equalTo(countRequest.preference())); + + if (countRequest.source() == null) { + assertThat(searchRequest.source(), nullValue()); + } else { + Map sourceMap = XContentHelper.convertToMap(searchRequest.source(), false).v2(); + assertThat(sourceMap.size(), equalTo(1)); + assertThat(sourceMap.get("query"), notNullValue()); + } + + Map extraSourceMap = XContentHelper.convertToMap(searchRequest.extraSource(), false).v2(); + int count = 1; + assertThat((Integer)extraSourceMap.get("size"), equalTo(0)); + if (countRequest.minScore() == CountRequest.DEFAULT_MIN_SCORE) { + assertThat(extraSourceMap.get("min_score"), nullValue()); + } else { + assertThat(((Number)extraSourceMap.get("min_score")).floatValue(), equalTo(countRequest.minScore())); + count++; + } + if (countRequest.terminateAfter() == SearchContext.DEFAULT_TERMINATE_AFTER) { + assertThat(extraSourceMap.get("terminate_after"), nullValue()); + } else { + assertThat((Integer)extraSourceMap.get("terminate_after"), equalTo(countRequest.terminateAfter())); + count++; + } + assertThat(extraSourceMap.size(), equalTo(count)); + } + + private static String[] randomStringArray() { + int count = randomIntBetween(1, 5); + String[] indices = new String[count]; + for (int i = 0; i < count; i++) { + indices[i] = randomAsciiOfLengthBetween(1, 10); + } + return indices; + } +} diff --git a/src/test/java/org/elasticsearch/action/count/CountResponseTests.java b/src/test/java/org/elasticsearch/action/count/CountResponseTests.java new file mode 100644 index 00000000000..bbe6c64edf3 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/count/CountResponseTests.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.count; + +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.search.internal.InternalSearchHits; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class CountResponseTests extends ElasticsearchTestCase { + + @Test + public void testFromSearchResponse() { + InternalSearchResponse internalSearchResponse = new InternalSearchResponse(new InternalSearchHits(null, randomLong(), randomFloat()), null, null, randomBoolean(), randomBoolean()); + ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[randomIntBetween(0, 5)]; + for (int i = 0; i < shardSearchFailures.length; i++) { + shardSearchFailures[i] = new ShardSearchFailure(new IllegalArgumentException()); + } + SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, randomIntBetween(0, 100), randomIntBetween(0, 100), randomIntBetween(0, 100), shardSearchFailures); + + CountResponse countResponse = new CountResponse(searchResponse); + assertThat(countResponse.getTotalShards(), equalTo(searchResponse.getTotalShards())); + 
assertThat(countResponse.getSuccessfulShards(), equalTo(searchResponse.getSuccessfulShards())); + assertThat(countResponse.getFailedShards(), equalTo(searchResponse.getFailedShards())); + assertThat(countResponse.getShardFailures(), equalTo((ShardOperationFailedException[])searchResponse.getShardFailures())); + assertThat(countResponse.getCount(), equalTo(searchResponse.getHits().totalHits())); + assertThat(countResponse.terminatedEarly(), equalTo(searchResponse.isTerminatedEarly())); + } +} diff --git a/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java b/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java index 236c8821b3c..b40d29a948c 100644 --- a/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/mapping/ManyMappingsBenchmark.java @@ -21,8 +21,8 @@ package org.elasticsearch.benchmark.mapping; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -85,8 +85,9 @@ public class ManyMappingsBenchmark { public static void main(String[] args) throws Exception { System.setProperty("es.logger.prefix", ""); - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Settings settings = settingsBuilder() + .put("") .put(SETTING_NUMBER_OF_SHARDS, 5) .put(SETTING_NUMBER_OF_REPLICAS, 0) .build(); diff --git a/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java b/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java index 66c4e62bdbc..1ac7c433038 100644 --- a/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java 
+++ b/src/test/java/org/elasticsearch/benchmark/recovery/ReplicaRecoveryBenchmark.java @@ -20,10 +20,10 @@ package org.elasticsearch.benchmark.recovery; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; @@ -57,7 +57,7 @@ public class ReplicaRecoveryBenchmark { public static void main(String[] args) throws Exception { System.setProperty("es.logger.prefix", ""); - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Settings settings = settingsBuilder() .put("gateway.type", "local") diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java index 69eadffb52d..c415dbf9b2b 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/GlobalOrdinalsBenchmark.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.benchmark.search.aggregations.TermsAggregationSearchBenchmark.StatsResult; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -66,7 +66,7 @@ public class GlobalOrdinalsBenchmark { public static void main(String[] args) throws Exception { System.setProperty("es.logger.prefix", ""); - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Random random = new Random(); Settings settings = settingsBuilder() diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java index 96c80b5051c..bf13b774edc 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/SubAggregationSearchCollectModeBenchmark.java @@ -27,10 +27,10 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -71,7 +71,7 @@ public class SubAggregationSearchCollectModeBenchmark { static Node[] nodes; public static void main(String[] args) throws Exception { - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Random random = new Random(); Settings settings = settingsBuilder() diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java 
b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java index f57c9848886..45f7dbf9562 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchAndIndexingBenchmark.java @@ -26,9 +26,9 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -71,7 +71,7 @@ public class TermsAggregationSearchAndIndexingBenchmark { static Node[] nodes; public static void main(String[] args) throws Exception { - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Settings settings = settingsBuilder() .put("refresh_interval", "-1") .put(SETTING_NUMBER_OF_SHARDS, 1) diff --git a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java index 55d3db53dd0..e92a24c1c99 100644 --- a/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java +++ b/src/test/java/org/elasticsearch/benchmark/search/aggregations/TermsAggregationSearchBenchmark.java @@ -28,10 +28,10 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import 
org.elasticsearch.bootstrap.Bootstrap; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.common.StopWatch; -import org.elasticsearch.common.jna.Natives; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.SizeValue; @@ -99,7 +99,7 @@ public class TermsAggregationSearchBenchmark { } public static void main(String[] args) throws Exception { - Natives.tryMlockall(); + Bootstrap.initializeNatives(true, false, false); Random random = new Random(); Settings settings = settingsBuilder() diff --git a/src/test/java/org/elasticsearch/common/jna/NativesTests.java b/src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java similarity index 73% rename from src/test/java/org/elasticsearch/common/jna/NativesTests.java rename to src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java index 014a2ce5b27..ead01b38cd8 100644 --- a/src/test/java/org/elasticsearch/common/jna/NativesTests.java +++ b/src/test/java/org/elasticsearch/bootstrap/JNANativesTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.common.jna; +package org.elasticsearch.bootstrap; import org.apache.lucene.util.Constants; import org.elasticsearch.test.ElasticsearchTestCase; @@ -25,23 +25,23 @@ import org.junit.Test; import static org.hamcrest.Matchers.equalTo; -public class NativesTests extends ElasticsearchTestCase { +public class JNANativesTests extends ElasticsearchTestCase { @Test public void testMlockall() { if (Constants.MAC_OS_X) { - assertFalse("Memory locking is not available on OS X platforms", Natives.LOCAL_MLOCKALL); + assertFalse("Memory locking is not available on OS X platforms", JNANatives.LOCAL_MLOCKALL); } } @Test public void testConsoleCtrlHandler() { if (Constants.WINDOWS) { - assertNotNull(Kernel32Library.getInstance()); - assertThat(Kernel32Library.getInstance().getCallbacks().size(), equalTo(1)); + assertNotNull(JNAKernel32Library.getInstance()); + assertThat(JNAKernel32Library.getInstance().getCallbacks().size(), equalTo(1)); } else { - assertNotNull(Kernel32Library.getInstance()); - assertThat(Kernel32Library.getInstance().getCallbacks().size(), equalTo(0)); + assertNotNull(JNAKernel32Library.getInstance()); + assertThat(JNAKernel32Library.getInstance().getCallbacks().size(), equalTo(0)); } } } diff --git a/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java b/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java index 09b4b601ee3..baaa5045be7 100644 --- a/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java +++ b/src/test/java/org/elasticsearch/broadcast/BroadcastActionsTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.broadcast; import com.google.common.base.Charsets; -import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.count.CountResponse; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.test.ElasticsearchIntegrationTest; @@ -33,7 +33,6 @@ import static org.elasticsearch.client.Requests.countRequest; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class BroadcastActionsTests extends ElasticsearchIntegrationTest { @@ -72,14 +71,10 @@ public class BroadcastActionsTests extends ElasticsearchIntegrationTest { for (int i = 0; i < 5; i++) { // test failed (simply query that can't be parsed) - CountResponse countResponse = client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet(); - - assertThat(countResponse.getCount(), equalTo(0l)); - assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); - assertThat(countResponse.getSuccessfulShards(), equalTo(0)); - assertThat(countResponse.getFailedShards(), equalTo(numShards.numPrimaries)); - for (ShardOperationFailedException exp : countResponse.getShardFailures()) { - assertThat(exp.reason(), containsString("QueryParsingException")); + try { + client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet(); + } catch(SearchPhaseExecutionException e) { + assertThat(e.shardFailures().length, equalTo(numShards.numPrimaries)); } } } diff --git a/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java b/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java index 2588912467b..d236ad5ecf8 100644 --- a/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java +++ b/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java @@ -91,7 +91,6 @@ public class JDKESLoggerTests extends ElasticsearchTestCase { assertThat(record.getMessage(), equalTo("This 
is a trace")); assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName())); assertThat(record.getSourceMethodName(), equalTo("locationInfoTest")); - } private static class TestHandler extends Handler { diff --git a/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java b/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java index d854b2bd33d..f0d0c076eba 100644 --- a/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java +++ b/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java @@ -24,6 +24,7 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.spi.LocationInfo; import org.apache.log4j.spi.LoggingEvent; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ElasticsearchTestCase; @@ -42,6 +43,8 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { private ESLogger esTestLogger; private TestAppender testAppender; private String testLevel; + private DeprecationLogger deprecationLogger; + private TestAppender deprecationAppender; @Override public void setUp() throws Exception { @@ -61,6 +64,13 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { assertThat(testLogger.getLevel(), equalTo(Level.TRACE)); testAppender = new TestAppender(); testLogger.addAppender(testAppender); + + // deprecation setup, needs to be set to debug to log + deprecationLogger = Log4jESLoggerFactory.getDeprecationLogger("test"); + deprecationAppender = new TestAppender(); + ESLogger logger = Log4jESLoggerFactory.getLogger("deprecation.test"); + logger.setLevel("DEBUG"); + (((Log4jESLogger) logger).logger()).addAppender(deprecationAppender); } @Override @@ -70,6 +80,8 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { esTestLogger.setLevel(testLevel); Logger 
testLogger = ((Log4jESLogger) esTestLogger).logger(); testLogger.removeAppender(testAppender); + Logger deprecationLogger = ((Log4jESLogger) Log4jESLoggerFactory.getLogger("deprecation.test")).logger(); + deprecationLogger.removeAppender(deprecationAppender); } @Test @@ -122,7 +134,16 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase { assertThat(locationInfo, notNullValue()); assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName())); assertThat(locationInfo.getMethodName(), equalTo("locationInfoTest")); - + } + + @Test + public void testDeprecationLogger() { + deprecationLogger.deprecated("This is a deprecation message"); + List deprecationEvents = deprecationAppender.getEvents(); + LoggingEvent event = deprecationEvents.get(0); + assertThat(event, notNullValue()); + assertThat(event.getLevel(), equalTo(Level.DEBUG)); + assertThat(event.getRenderedMessage(), equalTo("This is a deprecation message")); } private static class TestAppender extends AppenderSkeleton { diff --git a/src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java b/src/test/java/org/elasticsearch/common/settings/SettingsTests.java similarity index 99% rename from src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java rename to src/test/java/org/elasticsearch/common/settings/SettingsTests.java index f8a542a9cc6..c8334ba0691 100644 --- a/src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java +++ b/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.*; /** */ -public class ImmutableSettingsTests extends ElasticsearchTestCase { +public class SettingsTests extends ElasticsearchTestCase { @Test public void testCamelCaseSupport() { diff --git a/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java b/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java index d4d5d14a86d..8c7b0c1f255 100644 --- 
a/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java +++ b/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java @@ -19,6 +19,6 @@ package org.elasticsearch.common.settings.bar; -//used in ImmutableSettingsTest +//used in SettingsTest public class BarTestClass { } diff --git a/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java b/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java index 36f152778b4..6d8ca4a7986 100644 --- a/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java +++ b/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java @@ -19,6 +19,6 @@ package org.elasticsearch.common.settings.foo; -// used in ImmutableSettingsTest +// used in SettingsTest public class FooTestClass { } diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTests.java new file mode 100644 index 00000000000..d07bf442887 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTests.java @@ -0,0 +1,524 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.IOException; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public abstract class AbstractFilteringJsonGeneratorTests extends ElasticsearchTestCase { + + protected abstract XContentType getXContentType(); + + protected abstract void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder); + + protected void assertString(XContentBuilder expected, XContentBuilder builder) { + assertNotNull(builder); + assertNotNull(expected); + + // Verify that the result is equal to the expected string + assertThat(builder.bytes().toUtf8(), is(expected.bytes().toUtf8())); + } + + protected void assertBinary(XContentBuilder expected, XContentBuilder builder) { + assertNotNull(builder); + assertNotNull(expected); + + try { + XContent xContent = XContentFactory.xContent(builder.contentType()); + XContentParser jsonParser = xContent.createParser(expected.bytes()); + XContentParser testParser = xContent.createParser(builder.bytes()); + + while (true) { + XContentParser.Token token1 = jsonParser.nextToken(); + XContentParser.Token token2 = testParser.nextToken(); + if (token1 == null) { + assertThat(token2, nullValue()); + return; + } + assertThat(token1, equalTo(token2)); + switch (token1) { + case FIELD_NAME: + assertThat(jsonParser.currentName(), equalTo(testParser.currentName())); + break; + case VALUE_STRING: + assertThat(jsonParser.text(), equalTo(testParser.text())); + break; + case VALUE_NUMBER: + assertThat(jsonParser.numberType(), 
equalTo(testParser.numberType())); + assertThat(jsonParser.numberValue(), equalTo(testParser.numberValue())); + break; + } + } + } catch (Exception e) { + fail("Fail to verify the result of the XContentBuilder: " + e.getMessage()); + } + } + + private XContentBuilder newXContentBuilder(String... filters) throws IOException { + return XContentBuilder.builder(getXContentType().xContent(), filters); + } + + /** + * Build a sample using a given XContentBuilder + */ + private XContentBuilder sample(XContentBuilder builder) throws IOException { + assertNotNull(builder); + builder.startObject() + .field("title", "My awesome book") + .field("pages", 456) + .field("price", 27.99) + .field("timestamp", 1428582942867L) + .nullField("default") + .startArray("tags") + .value("elasticsearch") + .value("java") + .endArray() + .startArray("authors") + .startObject() + .field("name", "John Doe") + .field("lastname", "John") + .field("firstname", "Doe") + .endObject() + .startObject() + .field("name", "William Smith") + .field("lastname", "William") + .field("firstname", "Smith") + .endObject() + .endArray() + .startObject("properties") + .field("weight", 0.8d) + .startObject("language") + .startObject("en") + .field("lang", "English") + .field("available", true) + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .field("street", "Hampton St") + .field("city", "London") + .endObject() + .startObject() + .field("name", "address #2") + .field("street", "Queen St") + .field("city", "Stornoway") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .field("lang", "French") + .field("available", false) + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + .field("name", "address #1") + 
.field("street", "Rue Mouffetard") + .field("city", "Paris") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + return builder; + } + + /** + * Instanciates a new XContentBuilder with the given filters and builds a sample with it. + */ + private XContentBuilder sample(String... filters) throws IOException { + return sample(newXContentBuilder(filters)); + } + + @Test + public void testNoFiltering() throws Exception { + XContentBuilder expected = sample(); + + assertXContentBuilder(expected, sample()); + assertXContentBuilder(expected, sample("*")); + assertXContentBuilder(expected, sample("**")); + } + + @Test + public void testNoMatch() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject().endObject(); + + assertXContentBuilder(expected, sample("xyz")); + } + + @Test + public void testSimpleField() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .field("title", "My awesome book") + .endObject(); + + assertXContentBuilder(expected, sample("title")); + } + + @Test + public void testSimpleFieldWithWildcard() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .field("price", 27.99) + .startObject("properties") + .field("weight", 0.8d) + .startObject("language") + .startObject("en") + .field("lang", "English") + .field("available", true) + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .field("street", "Hampton St") + .field("city", "London") + .endObject() + .startObject() + .field("name", "address #2") + .field("street", "Queen St") + .field("city", "Stornoway") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + 
.field("lang", "French") + .field("available", false) + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .field("street", "Rue Mouffetard") + .field("city", "Paris") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("pr*")); + } + + @Test + public void testMultipleFields() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .field("title", "My awesome book") + .field("pages", 456) + .endObject(); + + assertXContentBuilder(expected, sample("title", "pages")); + } + + @Test + public void testSimpleArray() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("tags") + .value("elasticsearch") + .value("java") + .endArray() + .endObject(); + + assertXContentBuilder(expected, sample("tags")); + } + + @Test + public void testSimpleArrayOfObjects() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("authors") + .startObject() + .field("name", "John Doe") + .field("lastname", "John") + .field("firstname", "Doe") + .endObject() + .startObject() + .field("name", "William Smith") + .field("lastname", "William") + .field("firstname", "Smith") + .endObject() + .endArray() + .endObject(); + + assertXContentBuilder(expected, sample("authors")); + assertXContentBuilder(expected, sample("authors.*")); + assertXContentBuilder(expected, sample("authors.*name")); + } + + @Test + public void testSimpleArrayOfObjectsProperty() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("authors") + .startObject() + .field("lastname", "John") + .endObject() + .startObject() + .field("lastname", "William") + .endObject() + .endArray() + .endObject(); + + 
assertXContentBuilder(expected, sample("authors.lastname")); + assertXContentBuilder(expected, sample("authors.l*")); + } + + @Test + public void testRecurseField1() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startArray("authors") + .startObject() + .field("name", "John Doe") + .endObject() + .startObject() + .field("name", "William Smith") + . endObject() + .endArray() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .startObject() + .field("name", "address #2") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("**.name")); + } + + @Test + public void testRecurseField2() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .startObject() + .field("name", "address #2") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .startArray("addresses") + .startObject() + 
.field("name", "address #1") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("properties.**.name")); + } + + @Test + public void testRecurseField3() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .startArray("addresses") + .startObject() + .field("name", "address #1") + .endObject() + .startObject() + .field("name", "address #2") + .endObject() + .endArray() + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("properties.*.en.**.name")); + } + + @Test + public void testRecurseField4() throws Exception { + XContentBuilder expected = newXContentBuilder().startObject() + .startObject("properties") + .startObject("language") + .startObject("en") + .startArray("distributors") + .startObject() + .field("name", "The Book Shop") + .endObject() + .startObject() + .field("name", "Sussex Books House") + .endObject() + .endArray() + .endObject() + .startObject("fr") + .startArray("distributors") + .startObject() + .field("name", "La Maison du Livre") + .endObject() + .startObject() + .field("name", "Thetra") + .endObject() + .endArray() + .endObject() + .endObject() + .endObject() + .endObject(); + + assertXContentBuilder(expected, sample("properties.**.distributors.name")); + } + + @Test + public void testRawField() throws Exception { + + XContentBuilder expectedRawField = newXContentBuilder().startObject().field("foo", 0).startObject("raw").field("content", "hello world!").endObject().endObject(); + XContentBuilder expectedRawFieldFiltered = 
newXContentBuilder().startObject().field("foo", 0).endObject(); + XContentBuilder expectedRawFieldNotFiltered =newXContentBuilder().startObject().startObject("raw").field("content", "hello world!").endObject().endObject(); + + BytesReference raw = newXContentBuilder().startObject().field("content", "hello world!").endObject().bytes(); + + // Test method: rawField(String fieldName, BytesReference content) + assertXContentBuilder(expectedRawField, newXContentBuilder().startObject().field("foo", 0).rawField("raw", raw).endObject()); + assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*").startObject().field("foo", 0).rawField("raw", raw).endObject()); + assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*").startObject().field("foo", 0).rawField("raw", raw).endObject()); + + // Test method: rawField(String fieldName, byte[] content) + assertXContentBuilder(expectedRawField, newXContentBuilder().startObject().field("foo", 0).rawField("raw", raw.toBytes()).endObject()); + assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*").startObject().field("foo", 0).rawField("raw", raw.toBytes()).endObject()); + assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*").startObject().field("foo", 0).rawField("raw", raw.toBytes()).endObject()); + + // Test method: rawField(String fieldName, InputStream content) + assertXContentBuilder(expectedRawField, newXContentBuilder().startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*").startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*").startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + } + + @Test + public void testArrays() throws Exception { + // Test: Array of values 
(no filtering) + XContentBuilder expected = newXContentBuilder().startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject(); + assertXContentBuilder(expected, newXContentBuilder("t*").startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject()); + assertXContentBuilder(expected, newXContentBuilder("tags").startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject()); + + // Test: Array of values (with filtering) + assertXContentBuilder(newXContentBuilder().startObject().endObject(), newXContentBuilder("foo").startObject().startArray("tags").value("lorem").value("ipsum").value("dolor").endArray().endObject()); + + // Test: Array of objects (no filtering) + expected = newXContentBuilder().startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject(); + assertXContentBuilder(expected, newXContentBuilder("t*").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + assertXContentBuilder(expected, newXContentBuilder("tags").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + + // Test: Array of objects (with filtering) + assertXContentBuilder(newXContentBuilder().startObject().endObject(), newXContentBuilder("foo").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + + // Test: Array of objects (with partial filtering) + expected = newXContentBuilder().startObject().startArray("tags").startObject().field("firstname", "ipsum").endObject().endArray().endObject(); + assertXContentBuilder(expected, 
newXContentBuilder("t*.firstname").startObject().startArray("tags").startObject().field("lastname", "lorem").endObject().startObject().field("firstname", "ipsum").endObject().endArray().endObject()); + + } +} diff --git a/src/main/java/org/elasticsearch/common/jna/SizeT.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborFilteringGeneratorTests.java similarity index 61% rename from src/main/java/org/elasticsearch/common/jna/SizeT.java rename to src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborFilteringGeneratorTests.java index ab2fcd70552..fab77a26be7 100644 --- a/src/main/java/org/elasticsearch/common/jna/SizeT.java +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/CborFilteringGeneratorTests.java @@ -17,19 +17,20 @@ * under the License. */ -package org.elasticsearch.common.jna; +package org.elasticsearch.common.xcontent.support.filtering; -import com.sun.jna.IntegerType; -import com.sun.jna.Native; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; -public class SizeT extends IntegerType { +public class CborFilteringGeneratorTests extends JsonFilteringGeneratorTests { - public SizeT() { - this(0); + @Override + protected XContentType getXContentType() { + return XContentType.CBOR; } - public SizeT(long value) { - super(Native.SIZE_T_SIZE, value); + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertBinary(expected, builder); } - } diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGeneratorBenchmark.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGeneratorBenchmark.java new file mode 100644 index 00000000000..97ce4fcb838 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilteringJsonGeneratorBenchmark.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under 
one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Locale; + +/** + * Benchmark class to compare filtered and unfiltered XContent generators. 
+ */ +public class FilteringJsonGeneratorBenchmark { + + public static void main(String[] args) throws IOException { + final XContent XCONTENT = JsonXContent.jsonXContent; + + System.out.println("Executing " + FilteringJsonGeneratorBenchmark.class + "..."); + + System.out.println("Warming up..."); + run(XCONTENT, 500_000, 100, 0.5); + System.out.println("Warmed up."); + + System.out.println("nb documents | nb fields | nb fields written | % fields written | time (millis) | rate (docs/sec) | avg size"); + + for (int nbFields : Arrays.asList(10, 25, 50, 100, 250)) { + for (int nbDocs : Arrays.asList(100, 1000, 10_000, 100_000, 500_000)) { + for (double ratio : Arrays.asList(0.0, 1.0, 0.99, 0.95, 0.9, 0.75, 0.5, 0.25, 0.1, 0.05, 0.01)) { + run(XCONTENT, nbDocs, nbFields, ratio); + } + } + } + System.out.println("Done."); + } + + private static void run(XContent xContent, long nbIterations, int nbFields, double ratio) throws IOException { + String[] fields = fields(nbFields); + String[] filters = fields((int) (nbFields * ratio)); + + long size = 0; + BytesStreamOutput os = new BytesStreamOutput(); + + long start = System.nanoTime(); + for (int i = 0; i < nbIterations; i++) { + XContentBuilder builder = new XContentBuilder(xContent, os, filters); + builder.startObject(); + + for (String field : fields) { + builder.field(field, System.nanoTime()); + } + builder.endObject(); + + size += builder.bytes().length(); + os.reset(); + } + double milliseconds = (System.nanoTime() - start) / 1_000_000d; + + System.out.printf(Locale.ROOT, "%12d | %9d | %17d | %14.2f %% | %10.3f ms | %15.2f | %8.0f %n", + nbIterations, nbFields, + (int) (nbFields * ratio), + (ratio * 100d), + milliseconds, + ((double) nbIterations) / (milliseconds / 1000d), + size / ((double) nbIterations)); + } + + /** + * Returns a String array of field names starting from "field_0" with a length of n. 
+ * If n=3, the array is ["field_0","field_1","field_2"] + */ + private static String[] fields(int n) { + String[] fields = new String[n]; + for (int i = 0; i < n; i++) { + fields[i] = "field_" + i; + } + return fields; + } +} diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonFilteringGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonFilteringGeneratorTests.java new file mode 100644 index 00000000000..9468746fac6 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/JsonFilteringGeneratorTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +public class JsonFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTests { + + @Override + protected XContentType getXContentType() { + return XContentType.JSON; + } + + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertString(expected, builder); + } +} diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java new file mode 100644 index 00000000000..a12e12be172 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/SmileFilteringGeneratorTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +public class SmileFilteringGeneratorTests extends JsonFilteringGeneratorTests { + + @Override + protected XContentType getXContentType() { + return XContentType.SMILE; + } + + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertBinary(expected, builder); + } +} diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java new file mode 100644 index 00000000000..d7e3a934ec4 --- /dev/null +++ b/src/test/java/org/elasticsearch/common/xcontent/support/filtering/YamlFilteringGeneratorTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.xcontent.support.filtering; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +public class YamlFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTests { + + @Override + protected XContentType getXContentType() { + return XContentType.YAML; + } + + @Override + protected void assertXContentBuilder(XContentBuilder expected, XContentBuilder builder) { + assertString(expected, builder); + } +} diff --git a/src/test/java/org/elasticsearch/count/query/CountQueryTests.java b/src/test/java/org/elasticsearch/count/query/CountQueryTests.java deleted file mode 100644 index e693be7c8bb..00000000000 --- a/src/test/java/org/elasticsearch/count/query/CountQueryTests.java +++ /dev/null @@ -1,843 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.count.query; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.count.CountResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.CommonTermsQueryBuilder.Operator; -import org.elasticsearch.index.query.MatchQueryBuilder; -import org.elasticsearch.index.query.MatchQueryBuilder.Type; -import org.elasticsearch.index.query.MultiMatchQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.WrapperQueryBuilder; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.ElasticsearchIntegrationTest; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.ISODateTimeFormat; -import org.junit.Test; - -import java.io.IOException; - -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.existsQuery; -import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; -import static org.elasticsearch.index.query.QueryBuilders.idsQuery; -import static org.elasticsearch.index.query.QueryBuilders.limitQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.missingQuery; 
-import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; -import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery; -import static org.elasticsearch.index.query.QueryBuilders.termsQuery; -import static org.elasticsearch.index.query.QueryBuilders.typeQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; - -public class CountQueryTests extends ElasticsearchIntegrationTest { - - @Test - public void passQueryAsStringTest() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").setRefresh(true).get(); - - CountResponse countResponse = client().prepareCount().setSource(new BytesArray("{ \"query\" : { \"term\" : { \"field1\" : \"value1_1\" }}}").array()).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testIndexOptions() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,index_options=docs")); - - client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get(); - 
assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)).get(); - assertHitCount(countResponse, 0l); - assertThat(countResponse.getFailedShards(), anyOf(equalTo(1), equalTo(2))); - assertThat(countResponse.getFailedShards(), equalTo(countResponse.getShardFailures().length)); - for (ShardOperationFailedException shardFailure : countResponse.getShardFailures()) { - assertThat(shardFailure.status(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat(shardFailure.reason(), containsString("[field \"field1\" was indexed without position data; cannot run PhraseQuery")); - } - } - - @Test - public void testCommonTermsQuery() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,analyzer=whitespace") - .setSettings(SETTING_NUMBER_OF_SHARDS, 1)); - - indexRandom(true, - client().prepareIndex("test", "type1", "3").setSource("field1", "quick lazy huge brown pidgin", "field2", "the quick lazy huge brown fox jumps over the tree"), - client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox"), - client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") ); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.OR)).get(); - assertHitCount(countResponse, 3l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.AND)).get(); - assertHitCount(countResponse, 2l); - - // Default - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3)).get(); - assertHitCount(countResponse, 3l); - - countResponse = 
client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the huge fox").lowFreqMinimumShouldMatch("2")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("3")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("4")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setSource(new BytesArray("{ \"query\" : { \"common\" : { \"field1\" : { \"query\" : \"the lazy fox brown\", \"cutoff_frequency\" : 1, \"minimum_should_match\" : { \"high_freq\" : 4 } } } } }").array()).get(); - assertHitCount(countResponse, 1l); - - // Default - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).analyzer("standard")).get(); - assertHitCount(countResponse, 3l); - // standard drops "the" since its a stopword - - // try the same with match query - countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.OR)).get(); - assertHitCount(countResponse, 3l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND).analyzer("stop")).get(); - assertHitCount(countResponse, 
3l); - // standard drops "the" since its a stopword - - // try the same with multi match query - countResponse = client().prepareCount().setQuery(QueryBuilders.multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(MatchQueryBuilder.Operator.AND)).get(); - assertHitCount(countResponse, 3l); - } - - @Test - public void queryStringAnalyzedWildcard() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("value*").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("*ue*").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("*ue_1").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("val*e_1").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("v?l*e?1").analyzeWildcard(true)).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testLowercaseExpandedTerms() { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value_1", "field2", "value_2").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("VALUE_3~1").lowercaseExpandedTerms(true)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount().setQuery(queryStringQuery("VALUE_3~1").lowercaseExpandedTerms(false)).get(); - assertHitCount(countResponse, 0l); - countResponse = client().prepareCount().setQuery(queryStringQuery("ValUE_*").lowercaseExpandedTerms(true)).get(); - assertHitCount(countResponse, 1l); - countResponse = 
client().prepareCount().setQuery(queryStringQuery("vAl*E_1")).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]")).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]").lowercaseExpandedTerms(false)).get(); - assertHitCount(countResponse, 0l); - } - - @Test //https://github.com/elasticsearch/elasticsearch/issues/3540 - public void testDateRangeInQueryString() { - //the mapping needs to be provided upfront otherwise we are not sure how many failures we get back - //as with dynamic mappings some shards might be lacking behind and parse a different query - assertAcked(prepareCreate("test").addMapping( - "type", "past", "type=date", "future", "type=date" - )); - ensureGreen(); - - NumShards test = getNumShards("test"); - - String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1)); - String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1)); - - client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("future:[now/d TO now+2M/d]").lowercaseExpandedTerms(false)).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount("test").setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get(); - //D is an unsupported unit in date math - assertThat(countResponse.getSuccessfulShards(), equalTo(0)); - assertThat(countResponse.getFailedShards(), equalTo(test.numPrimaries)); - assertThat(countResponse.getShardFailures().length, equalTo(test.numPrimaries)); - for 
(ShardOperationFailedException shardFailure : countResponse.getShardFailures()) { - assertThat(shardFailure.status(), equalTo(RestStatus.BAD_REQUEST)); - assertThat(shardFailure.reason(), allOf(containsString("Failed to parse"), containsString("unit [D] not supported for date math"))); - } - } - - @Test - public void typeFilterTypeIndexedTests() throws Exception { - typeFilterTests("not_analyzed"); - } - - @Test - public void typeFilterTypeNotIndexedTests() throws Exception { - typeFilterTests("no"); - } - - private void typeFilterTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_type").field("index", index).endObject() - .endObject().endObject()) - .addMapping("type2", jsonBuilder().startObject().startObject("type2") - .startObject("_type").field("index", index).endObject() - .endObject().endObject())); - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type2", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value1"), - client().prepareIndex("test", "type2", "2").setSource("field1", "value1"), - client().prepareIndex("test", "type2", "3").setSource("field1", "value1")); - - assertHitCount(client().prepareCount().setQuery(filteredQuery(matchAllQuery(), typeQuery("type1"))).get(), 2l); - assertHitCount(client().prepareCount().setQuery(filteredQuery(matchAllQuery(), typeQuery("type2"))).get(), 3l); - - assertHitCount(client().prepareCount().setTypes("type1").setQuery(matchAllQuery()).get(), 2l); - assertHitCount(client().prepareCount().setTypes("type2").setQuery(matchAllQuery()).get(), 3l); - - assertHitCount(client().prepareCount().setTypes("type1", 
"type2").setQuery(matchAllQuery()).get(), 5l); - } - - @Test - public void idsQueryTestsIdIndexed() throws Exception { - idsQueryTests("not_analyzed"); - } - - @Test - public void idsQueryTestsIdNotIndexed() throws Exception { - idsQueryTests("no"); - } - - private void idsQueryTests(String index) throws Exception { - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); - assertAcked(prepareCreate("test").setSettings(indexSettings) - .addMapping("type1", jsonBuilder().startObject().startObject("type1") - .startObject("_id").field("index", index).endObject() - .endObject().endObject())); - - indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), - client().prepareIndex("test", "type1", "3").setSource("field1", "value3")); - - CountResponse countResponse = client().prepareCount().setQuery(constantScoreQuery(idsQuery("type1").ids("1", "3"))).get(); - assertHitCount(countResponse, 2l); - - // no type - countResponse = client().prepareCount().setQuery(constantScoreQuery(idsQuery().ids("1", "3"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(idsQuery("type1").ids("1", "3")).get(); - assertHitCount(countResponse, 2l); - - // no type - countResponse = client().prepareCount().setQuery(idsQuery().ids("1", "3")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(idsQuery("type1").ids("7", "10")).get(); - assertHitCount(countResponse, 0l); - - // repeat..., with terms - countResponse = client().prepareCount().setTypes("type1").setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testLimitFilter() throws Exception { - assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 1)); - - indexRandom(true, 
client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1"), - client().prepareIndex("test", "type1", "2").setSource("field1", "value1_2"), - client().prepareIndex("test", "type1", "3").setSource("field2", "value2_3"), - client().prepareIndex("test", "type1", "4").setSource("field3", "value3_4")); - - CountResponse countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), limitQuery(2))).get(); - assertHitCount(countResponse, 4l); // limit is a no-op - } - - @Test - public void filterExistsMissingTests() throws Exception { - createIndex("test"); - - indexRandom(true, - client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x1", "x_1").field("field1", "value1_1").field("field2", "value2_1").endObject()), - client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x2", "x_2").field("field1", "value1_2").endObject()), - client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().startObject("obj2").field("obj2_val", "1").endObject().field("y1", "y_1").field("field2", "value2_3").endObject()), - client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject().startObject("obj2").field("obj2_val", "1").endObject().field("y2", "y_2").field("field3", "value3_4").endObject())); - - CountResponse countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(constantScoreQuery(existsQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("_exists_:field1")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("field2"))).get(); - 
assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("field3"))).get(); - assertHitCount(countResponse, 1l); - - // wildcard check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("x*"))).get(); - assertHitCount(countResponse, 2l); - - // object check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), existsQuery("obj1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(constantScoreQuery(missingQuery("field1"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("_missing_:field1")).get(); - assertHitCount(countResponse, 2l); - - // wildcard check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("x*"))).get(); - assertHitCount(countResponse, 2l); - - // object check - countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), missingQuery("obj1"))).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void passQueryAsJSONStringTest() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").setRefresh(true).get(); - - WrapperQueryBuilder wrapper = new WrapperQueryBuilder("{ \"term\" : { \"field1\" : \"value1_1\" } }"); - assertHitCount(client().prepareCount().setQuery(wrapper).get(), 1l); - - BoolQueryBuilder bool = boolQuery().must(wrapper).must(new TermQueryBuilder("field2", "value2_1")); - 
assertHitCount(client().prepareCount().setQuery(bool).get(), 1l); - } - - @Test - public void testMatchQueryNumeric() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("long", 1l, "double", 1.0d).get(); - client().prepareIndex("test", "type1", "2").setSource("long", 2l, "double", 2.0d).get(); - client().prepareIndex("test", "type1", "3").setSource("long", 3l, "double", 3.0d).get(); - refresh(); - CountResponse countResponse = client().prepareCount().setQuery(matchQuery("long", "1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(matchQuery("double", "2")).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testMultiMatchQuery() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value4", "field3", "value3").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2", "field2", "value5", "field3", "value2").get(); - client().prepareIndex("test", "type1", "3").setSource("field1", "value3", "field2", "value6", "field3", "value1").get(); - refresh(); - - MultiMatchQueryBuilder builder = QueryBuilders.multiMatchQuery("value1 value2 value4", "field1", "field2"); - CountResponse countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 2l); - - refresh(); - builder = QueryBuilders.multiMatchQuery("value1", "field1", "field2") - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. - countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 1l); - - refresh(); - builder = QueryBuilders.multiMatchQuery("value1", "field1", "field3^1.5") - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. 
- countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 2l); - - refresh(); - builder = QueryBuilders.multiMatchQuery("value1").field("field1").field("field3", 1.5f) - .operator(MatchQueryBuilder.Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together. - countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 2l); - - // Test lenient - client().prepareIndex("test", "type1", "3").setSource("field1", "value7", "field2", "value8", "field4", 5).get(); - refresh(); - - builder = QueryBuilders.multiMatchQuery("value1", "field1", "field2", "field4"); - builder.lenient(true); - countResponse = client().prepareCount().setQuery(builder).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testMatchQueryZeroTermsQuery() { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,analyzer=classic", "field2", "type=string,analyzer=classic")); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value2").get(); - refresh(); - - BoolQueryBuilder boolQuery = boolQuery() - .must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)) - .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)); - CountResponse countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 0l); - - boolQuery = boolQuery() - .must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)) - .must(matchQuery("field1", "value1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 1l); - - boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = 
client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testMultiMatchQueryZeroTermsQuery() { - assertAcked(prepareCreate("test") - .addMapping("type1", "field1", "type=string,analyzer=classic", "field2", "type=string,analyzer=classic")); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); - client().prepareIndex("test", "type1", "2").setSource("field1", "value3", "field2", "value4").get(); - refresh(); - - BoolQueryBuilder boolQuery = boolQuery() - .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)) - .must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.NONE)); // Fields are ORed together - CountResponse countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 0l); - - boolQuery = boolQuery() - .must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)) - .must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 1l); - - boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQueryBuilder.ZeroTermsQuery.ALL)); - countResponse = client().prepareCount().setQuery(boolQuery).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testMultiMatchQueryMinShouldMatch() { - createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("field1", new String[]{"value1", "value2", "value3"}).get(); - client().prepareIndex("test", "type1", "2").setSource("field2", "value1").get(); - refresh(); - - MultiMatchQueryBuilder multiMatchQuery = multiMatchQuery("value1 value2 foo", "field1", "field2"); - - multiMatchQuery.useDisMax(true); - multiMatchQuery.minimumShouldMatch("70%"); - CountResponse 
countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 1l); - - multiMatchQuery.minimumShouldMatch("30%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 2l); - - multiMatchQuery.useDisMax(false); - multiMatchQuery.minimumShouldMatch("70%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 1l); - - multiMatchQuery.minimumShouldMatch("30%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 2l); - - multiMatchQuery = multiMatchQuery("value1 value2 bar", "field1"); - multiMatchQuery.minimumShouldMatch("100%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 0l); - - multiMatchQuery.minimumShouldMatch("70%"); - countResponse = client().prepareCount().setQuery(multiMatchQuery).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testFuzzyQueryString() { - createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "kimchy", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", "shay", "date", "2012-02-05", "num", 20).get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("str:kimcy~1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:11~1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("date:2012-02-02~1d")).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testSpecialRangeSyntaxInQueryString() { - createIndex("test"); - client().prepareIndex("test", "type1", "1").setSource("str", "kimchy", "date", "2012-02-01", "num", 12).get(); - client().prepareIndex("test", "type1", "2").setSource("str", 
"shay", "date", "2012-02-05", "num", 20).get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(queryStringQuery("num:>19")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:>20")).get(); - assertHitCount(countResponse, 0l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:>=20")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:>11")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:<20")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("num:<=20")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(queryStringQuery("+num:>11 +num:<20")).get(); - assertHitCount(countResponse, 1l); - } - - @Test - public void testEmptyTermsFilter() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "terms", "type=string")); - ensureGreen(); - client().prepareIndex("test", "type", "1").setSource("term", "1").get(); - client().prepareIndex("test", "type", "2").setSource("term", "2").get(); - client().prepareIndex("test", "type", "3").setSource("term", "3").get(); - client().prepareIndex("test", "type", "4").setSource("term", "4").get(); - refresh(); - CountResponse countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsQuery("term", new String[0]))).get(); - assertHitCount(countResponse, 0l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), idsQuery())).get(); - assertHitCount(countResponse, 0l); - } - - @Test - public void testTermsLookupFilter() throws Exception { - assertAcked(prepareCreate("lookup").addMapping("type", "terms", "type=string", "other", "type=string")); - 
assertAcked(prepareCreate("lookup2").addMapping("type", - jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("arr").startObject("properties").startObject("term").field("type", "string") - .endObject().endObject().endObject().endObject().endObject().endObject())); - assertAcked(prepareCreate("test").addMapping("type", "term", "type=string")); - ensureGreen(); - - indexRandom(true, client().prepareIndex("lookup", "type", "1").setSource("terms", new String[]{"1", "3"}), - client().prepareIndex("lookup", "type", "2").setSource("terms", new String[]{"2"}), - client().prepareIndex("lookup", "type", "3").setSource("terms", new String[]{"2", "4"}), - client().prepareIndex("lookup", "type", "4").setSource("other", "value"), - client().prepareIndex("lookup2", "type", "1").setSource(XContentFactory.jsonBuilder().startObject() - .startArray("arr") - .startObject().field("term", "1").endObject() - .startObject().field("term", "3").endObject() - .endArray() - .endObject()), - client().prepareIndex("lookup2", "type", "2").setSource(XContentFactory.jsonBuilder().startObject() - .startArray("arr") - .startObject().field("term", "2").endObject() - .endArray() - .endObject()), - client().prepareIndex("lookup2", "type", "3").setSource(XContentFactory.jsonBuilder().startObject() - .startArray("arr") - .startObject().field("term", "2").endObject() - .startObject().field("term", "4").endObject() - .endArray() - .endObject()), - client().prepareIndex("test", "type", "1").setSource("term", "1"), - client().prepareIndex("test", "type", "2").setSource("term", "2"), - client().prepareIndex("test", "type", "3").setSource("term", "3"), - client().prepareIndex("test", "type", "4").setSource("term", "4")); - - CountResponse countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms"))).get(); - assertHitCount(countResponse, 2l); - - 
// same as above, just on the _id... - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("_id").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms"))).get(); - assertHitCount(countResponse, 2l); - - // another search with same parameters... - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("1").lookupPath("terms"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("2").lookupPath("terms"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("3").lookupPath("terms")) - ).get(); - assertNoFailures(countResponse); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup").lookupType("type").lookupId("4").lookupPath("terms"))).get(); - assertHitCount(countResponse, 0l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("1").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("2").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), 
termsLookupQuery("term").lookupIndex("lookup2").lookupType("type").lookupId("3").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount("test") - .setQuery(filteredQuery(matchAllQuery(), termsLookupQuery("not_exists").lookupIndex("lookup2").lookupType("type").lookupId("3").lookupPath("arr.term"))).get(); - assertHitCount(countResponse, 0l); - } - - @Test - public void testBasicFilterById() throws Exception { - createIndex("test"); - ensureGreen(); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type2", "2").setSource("field1", "value2").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery("type1", "type2").ids("1", "2"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().ids("1", "2"))).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery("type1").ids("1", "2"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().ids("1"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery(null).ids("1"))).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery("type1", "type2", "type3").ids("1", "2", "3", "4"))).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testBasicQueryById() throws Exception { - createIndex("test"); - ensureGreen(); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - 
client().prepareIndex("test", "type2", "2").setSource("field1", "value2").get(); - refresh(); - - CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery("type1", "type2").ids("1", "2")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery().ids("1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery().ids("1", "2")).get(); - assertHitCount(countResponse, 2l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery("type1").ids("1", "2")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery().ids("1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery(null).ids("1")).get(); - assertHitCount(countResponse, 1l); - - countResponse = client().prepareCount().setQuery(QueryBuilders.idsQuery("type1", "type2", "type3").ids("1", "2", "3", "4")).get(); - assertHitCount(countResponse, 2l); - } - - @Test - public void testNumericTermsAndRanges() throws Exception { - assertAcked(prepareCreate("test") - .addMapping("type1", - "num_byte", "type=byte", "num_short", "type=short", - "num_integer", "type=integer", "num_long", "type=long", - "num_float", "type=float", "num_double", "type=double")); - ensureGreen(); - - client().prepareIndex("test", "type1", "1").setSource("num_byte", 1, "num_short", 1, "num_integer", 1, - "num_long", 1, "num_float", 1, "num_double", 1).get(); - - client().prepareIndex("test", "type1", "2").setSource("num_byte", 2, "num_short", 2, "num_integer", 2, - "num_long", 2, "num_float", 2, "num_double", 2).get(); - - client().prepareIndex("test", "type1", "17").setSource("num_byte", 17, "num_short", 17, "num_integer", 17, - "num_long", 17, "num_float", 17, "num_double", 17).get(); - refresh(); - - CountResponse countResponse; - 
logger.info("--> term query on 1"); - countResponse = client().prepareCount("test").setQuery(termQuery("num_byte", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_short", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_integer", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_long", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_float", 1)).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termQuery("num_double", 1)).get(); - assertHitCount(countResponse, 1l); - - logger.info("--> terms query on 1"); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_byte", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_short", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_integer", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_long", new int[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_float", new double[]{1})).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(termsQuery("num_double", new double[]{1})).get(); - assertHitCount(countResponse, 1l); - - logger.info("--> term filter on 1"); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_byte", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), 
termQuery("num_short", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_integer", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_long", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_float", 1))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termQuery("num_double", 1))).get(); - assertHitCount(countResponse, 1l); - - logger.info("--> terms filter on 1"); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_byte", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_short", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_integer", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_long", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_float", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - countResponse = client().prepareCount("test").setQuery(filteredQuery(matchAllQuery(), termsQuery("num_double", new int[]{1}))).get(); - assertHitCount(countResponse, 1l); - } - - @Test // see #2994 - public void testSimpleSpan() throws IOException { - createIndex("test"); - ensureGreen(); - - client().prepareIndex("test", "test", "1").setSource("description", "foo other anything 
bar").get(); - client().prepareIndex("test", "test", "2").setSource("description", "foo other anything").get(); - client().prepareIndex("test", "test", "3").setSource("description", "foo other").get(); - client().prepareIndex("test", "test", "4").setSource("description", "foo").get(); - refresh(); - - CountResponse response = client().prepareCount("test") - .setQuery(QueryBuilders.spanOrQuery().clause(QueryBuilders.spanTermQuery("description", "bar"))).get(); - assertHitCount(response, 1l); - - response = client().prepareCount("test").setQuery( - QueryBuilders.spanNearQuery() - .clause(QueryBuilders.spanTermQuery("description", "foo")) - .clause(QueryBuilders.spanTermQuery("description", "other")) - .slop(3)).get(); - assertHitCount(response, 3l); - } -} diff --git a/src/test/java/org/elasticsearch/document/DocumentActionsTests.java b/src/test/java/org/elasticsearch/document/DocumentActionsTests.java index 8673a93dc3e..0b869d72e36 100644 --- a/src/test/java/org/elasticsearch/document/DocumentActionsTests.java +++ b/src/test/java/org/elasticsearch/document/DocumentActionsTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.document; -import com.google.common.base.Charsets; import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; @@ -30,6 +29,7 @@ import org.elasticsearch.action.count.CountResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ElasticsearchIntegrationTest; @@ -40,7 +40,8 @@ import java.io.IOException; import static org.elasticsearch.client.Requests.*; import 
static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; /** * @@ -163,11 +164,11 @@ public class DocumentActionsTests extends ElasticsearchIntegrationTest { assertThat(countResponse.getFailedShards(), equalTo(0)); // test failed (simply query that can't be parsed) - countResponse = client().count(countRequest("test").source("{ term : { _type : \"type1 } }".getBytes(Charsets.UTF_8))).actionGet(); - - assertThat(countResponse.getCount(), equalTo(0l)); - assertThat(countResponse.getSuccessfulShards(), equalTo(0)); - assertThat(countResponse.getFailedShards(), equalTo(numShards.numPrimaries)); + try { + client().count(countRequest("test").source("{ term : { _type : \"type1 } }")).actionGet(); + } catch(SearchPhaseExecutionException e) { + assertThat(e.shardFailures().length, equalTo(numShards.numPrimaries)); + } // count with no query is a match all one countResponse = client().prepareCount("test").execute().actionGet(); diff --git a/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index b8e97efe31d..ece5757355b 100644 --- a/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -22,7 +22,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.ImmutableSet; import org.elasticsearch.Version; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.support.nodes.NodeOperationResponse; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -314,7 +314,7 @@ 
public class AsyncShardFetchTests extends ElasticsearchTestCase { } - static class Response extends NodeOperationResponse { + static class Response extends BaseNodeResponse { public Response(DiscoveryNode node) { super(node); diff --git a/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index a1db68ab039..c1d260392ff 100644 --- a/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; +import org.elasticsearch.common.inject.ProvisionException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; @@ -94,7 +95,7 @@ public class AnalysisModuleTests extends ElasticsearchTestCase { Settings settings = loadFromClasspath("org/elasticsearch/index/analysis/test1.yml"); testSimpleConfiguration(settings); } - + @Test public void testDefaultFactoryTokenFilters() throws IOException { assertTokenFilter("keyword_repeat", KeywordRepeatFilter.class); @@ -238,4 +239,36 @@ public class AnalysisModuleTests extends ElasticsearchTestCase { return wordListFile; } + @Test + public void testUnderscoreInAnalyzerName() { + Settings settings = Settings.builder() + .put("index.analysis.analyzer._invalid_name.tokenizer", "keyword") + .put("path.home", createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, "1") + .build(); + try { + getAnalysisService(settings); + fail("This should fail with IllegalArgumentException because the analyzers name starts with _"); + } catch (ProvisionException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + 
assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")); + } + } + + @Test + public void testUnderscoreInAnalyzerNameAlias() { + Settings settings = Settings.builder() + .put("index.analysis.analyzer.valid_name.tokenizer", "keyword") + .put("index.analysis.analyzer.valid_name.alias", "_invalid_name") + .put("path.home", createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, "1") + .build(); + try { + getAnalysisService(settings); + fail("This should fail with IllegalArgumentException because the analyzers alias starts with _"); + } catch (ProvisionException e) { + assertTrue(e.getCause() instanceof IllegalArgumentException); + assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")); + } + } } diff --git a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java index 1ec854c4c65..75c8e18fc91 100644 --- a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.test.ElasticsearchTestCase; import java.io.IOException; +import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -39,7 +40,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { FieldMappersLookup lookup = new FieldMappersLookup(); assertNull(lookup.fullName("foo")); assertNull(lookup.indexName("foo")); - List names = lookup.simpleMatchToFullName("foo"); + Collection names = lookup.simpleMatchToFullName("foo"); assertNotNull(names); assertTrue(names.isEmpty()); names = lookup.simpleMatchToFullName("foo"); @@ -105,7 +106,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); 
FieldMappersLookup lookup = new FieldMappersLookup(); lookup = lookup.copyAndAddAll(newList(f1, f2)); - List names = lookup.simpleMatchToIndexNames("b*"); + Collection names = lookup.simpleMatchToIndexNames("b*"); assertTrue(names.contains("baz")); assertTrue(names.contains("boo")); } @@ -115,7 +116,7 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo"); FieldMappersLookup lookup = new FieldMappersLookup(); lookup = lookup.copyAndAddAll(newList(f1, f2)); - List names = lookup.simpleMatchToFullName("b*"); + Collection names = lookup.simpleMatchToFullName("b*"); assertTrue(names.contains("foo")); assertTrue(names.contains("bar")); } diff --git a/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java b/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java index dc537468c28..087bc306321 100644 --- a/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java +++ b/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java @@ -168,8 +168,13 @@ public class RestFilterChainTests extends ElasticsearchTestCase { } @Override - public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource) throws IOException { - return super.newBuilder(autoDetectSource); + public XContentBuilder newErrorBuilder() throws IOException { + return super.newErrorBuilder(); + } + + @Override + public XContentBuilder newBuilder(@Nullable BytesReference autoDetectSource, boolean useFiltering) throws IOException { + return super.newBuilder(autoDetectSource, useFiltering); } @Override diff --git a/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java b/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java new file mode 100644 index 00000000000..3faa0ea3673 --- /dev/null +++ b/src/test/java/org/elasticsearch/rest/action/support/RestTableTest.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.support; + +import org.elasticsearch.common.Table; +import org.elasticsearch.test.ElasticsearchTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.junit.Before; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.rest.action.support.RestTable.buildDisplayHeaders; +import static org.hamcrest.Matchers.*; + +public class RestTableTest extends ElasticsearchTestCase { + + private Table table = new Table(); + private FakeRestRequest restRequest = new FakeRestRequest(); + + @Before + public void setup() { + table.startHeaders(); + table.addCell("bulk.foo", "alias:f;desc:foo"); + table.addCell("bulk.bar", "alias:b;desc:bar"); + // should be matched as well due to the aliases + table.addCell("aliasedBulk", "alias:bulkWhatever;desc:bar"); + table.addCell("aliasedSecondBulk", "alias:foobar,bulkolicious,bulkotastic;desc:bar"); + // no match + table.addCell("unmatched", "alias:un.matched;desc:bar"); + // invalid alias + table.addCell("invalidAliasesBulk", "alias:,,,;desc:bar"); + table.endHeaders(); + } + + @Test + public void testThatDisplayHeadersSupportWildcards() throws Exception { + restRequest.params().put("h", "bulk*"); + List headers = 
buildDisplayHeaders(table, restRequest); + + List headerNames = getHeaderNames(headers); + assertThat(headerNames, contains("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); + assertThat(headerNames, not(hasItem("unmatched"))); + } + + @Test + public void testThatDisplayHeadersAreNotAddedTwice() throws Exception { + restRequest.params().put("h", "nonexistent,bulk*,bul*"); + List headers = buildDisplayHeaders(table, restRequest); + + List headerNames = getHeaderNames(headers); + assertThat(headerNames, contains("bulk.foo", "bulk.bar", "aliasedBulk", "aliasedSecondBulk")); + assertThat(headerNames, not(hasItem("unmatched"))); + } + + private List getHeaderNames(List headers) { + List headerNames = new ArrayList<>(); + for (RestTable.DisplayHeader header : headers) { + headerNames.add(header.name); + } + + return headerNames; + } +} diff --git a/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java b/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java index 177f3bca51f..3f12b81325e 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -34,11 +35,13 @@ import org.junit.Test; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import 
static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; @@ -282,6 +285,55 @@ public class MaxBucketTests extends ElasticsearchIntegrationTest { } } + @Test + public void testMetric_asSubAggOfSingleBucketAgg() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + filter("filter") + .filter(termQuery("tag", "tag0")) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + + assertSearchResponse(response); + + Filter filter = response.getAggregations().get("filter"); + assertThat(filter, notNullValue()); + assertThat(filter.getName(), equalTo("filter")); + Histogram histo = filter.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + + List maxKeys = new ArrayList<>(); + double maxValue = Double.NEGATIVE_INFINITY; + for (int j = 0; j < numValueBuckets; ++j) { + Histogram.Bucket bucket = buckets.get(j); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); + if 
(bucket.getDocCount() != 0) { + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + if (sum.value() > maxValue) { + maxValue = sum.value(); + maxKeys = new ArrayList<>(); + maxKeys.add(bucket.getKeyAsString()); + } else if (sum.value() == maxValue) { + maxKeys.add(bucket.getKeyAsString()); + } + } + } + + InternalBucketMetricValue maxBucketValue = filter.getAggregations().get("max_bucket"); + assertThat(maxBucketValue, notNullValue()); + assertThat(maxBucketValue.getName(), equalTo("max_bucket")); + assertThat(maxBucketValue.value(), equalTo(maxValue)); + assertThat(maxBucketValue.keys(), equalTo(maxKeys.toArray(new String[maxKeys.size()]))); + } + @Test public void testMetric_asSubAggWithInsertZeros() throws Exception { SearchResponse response = client() diff --git a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java index 38da141ad5c..3db9531a537 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.moving.avg; import com.google.common.collect.EvictingQueue; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; @@ -32,26 +31,17 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket; import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.Avg; 
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.movavg.models.*; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.hamcrest.Matchers; import org.junit.Test; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -59,8 +49,12 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogra import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.min; import static org.elasticsearch.search.aggregations.AggregationBuilders.range; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; 
+import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.core.IsNull.notNullValue; import static org.hamcrest.core.IsNull.nullValue; @@ -76,6 +70,9 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { static int windowSize; static double alpha; static double beta; + static double gamma; + static int period; + static HoltWintersModel.SeasonalityType seasonalityType; static BucketHelpers.GapPolicy gapPolicy; static ValuesSourceMetricsAggregationBuilder metric; static List mockHisto; @@ -84,7 +81,7 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { enum MovAvgType { - SIMPLE ("simple"), LINEAR("linear"), EWMA("ewma"), HOLT("holt"); + SIMPLE ("simple"), LINEAR("linear"), EWMA("ewma"), HOLT("holt"), HOLT_WINTERS("holt_winters"); private final String name; @@ -121,9 +118,13 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { interval = 5; numBuckets = randomIntBetween(6, 80); - windowSize = randomIntBetween(3, 10); + period = randomIntBetween(1, 5); + windowSize = randomIntBetween(period * 2, 10); // start must be 2*period to play nice with HW alpha = randomDouble(); beta = randomDouble(); + gamma = randomDouble(); + seasonalityType = randomBoolean() ? HoltWintersModel.SeasonalityType.ADDITIVE : HoltWintersModel.SeasonalityType.MULTIPLICATIVE; + gapPolicy = randomBoolean() ? 
BucketHelpers.GapPolicy.SKIP : BucketHelpers.GapPolicy.INSERT_ZEROS; metric = randomMetric("the_metric", VALUE_FIELD); @@ -154,6 +155,11 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { .field(INTERVAL_FIELD, 49) .field(GAP_FIELD, 1).endObject())); + for (int i = -10; i < 10; i++) { + builders.add(client().prepareIndex("neg_idx", "type").setSource( + jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject())); + } + indexRandom(true, builders); ensureSearchable(); } @@ -204,6 +210,15 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { break; case HOLT: values.add(holt(window)); + break; + case HOLT_WINTERS: + // HW needs at least 2 periods of data to start + if (window.size() >= period * 2) { + values.add(holtWinters(window)); + } else { + values.add(null); + } + break; } @@ -300,7 +315,79 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { return s + (0 * b) ; } + /** + * Holt winters (triple exponential) moving avg + * @param window Window of values to compute movavg for + * @return + */ + private double holtWinters(Collection window) { + // Smoothed value + double s = 0; + double last_s = 0; + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[window.size()]; + + double padding = seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE) ? 
0.0000000001 : 0; + + int counter = 0; + double[] vs = new double[window.size()]; + for (double v : window) { + vs[counter] = v + padding; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } else { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + } + + b = beta * (s - last_s) + (1 - beta) * last_b; + + if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } else { + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + } + + last_s = s; + last_b = b; + } + + int seasonCounter = (window.size() - 1) - period; + if (seasonalityType.equals(HoltWintersModel.SeasonalityType.MULTIPLICATIVE)) { + return s + (0 * b) * seasonal[seasonCounter % window.size()]; + } else { + return s + (0 * b) + seasonal[seasonCounter % window.size()]; + } + } /** @@ -514,6 +601,111 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } } + @Test + public void HoltWintersValuedField() { + + SearchResponse response = client() + .prepareSearch("idx").setTypes("type") + .addAggregation( + histogram("histo").field(INTERVAL_FIELD).interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + 
.subAggregation(metric) + .subAggregation(movingAvg("movavg_counts") + .window(windowSize) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("_count")) + .subAggregation(movingAvg("movavg_values") + .window(windowSize) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("the_metric")) + ).execute().actionGet(); + + assertSearchResponse(response); + + InternalHistogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size())); + + List expectedCounts = testValues.get(MovAvgType.HOLT_WINTERS.toString() + "_" + MetricTarget.COUNT.toString()); + List expectedValues = testValues.get(MovAvgType.HOLT_WINTERS.toString() + "_" + MetricTarget.VALUE.toString()); + + Iterator actualIter = buckets.iterator(); + Iterator expectedBucketIter = mockHisto.iterator(); + Iterator expectedCountsIter = expectedCounts.iterator(); + Iterator expectedValuesIter = expectedValues.iterator(); + + while (actualIter.hasNext()) { + assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter); + + Histogram.Bucket actual = actualIter.next(); + PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next(); + Double expectedCount = expectedCountsIter.next(); + Double expectedValue = expectedValuesIter.next(); + + assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key)); + assertThat("doc counts do not match", actual.getDocCount(), equalTo((long)expected.count)); + + assertBucketContents(actual, expectedCount, expectedValue); + } + } + 
+ @Test + public void testPredictNegativeKeysAtStart() { + + SearchResponse response = client() + .prepareSearch("neg_idx") + .setTypes("type") + .addAggregation( + histogram("histo") + .field(INTERVAL_FIELD) + .interval(1) + .subAggregation(avg("avg").field(VALUE_FIELD)) + .subAggregation( + movingAvg("movavg_values").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy).predict(5).setBucketsPaths("avg"))).execute().actionGet(); + + assertSearchResponse(response); + + InternalHistogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(25)); + + for (int i = 0; i < 20; i++) { + Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat((long) bucket.getKey(), equalTo((long) i - 10)); + assertThat(bucket.getDocCount(), equalTo(1l)); + Avg avgAgg = bucket.getAggregations().get("avg"); + assertThat(avgAgg, notNullValue()); + assertThat(avgAgg.value(), equalTo(10d)); + SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values"); + assertThat(movAvgAgg, notNullValue()); + assertThat(movAvgAgg.value(), equalTo(10d)); + } + + for (int i = 20; i < 25; i++) { + System.out.println(i); + Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat((long) bucket.getKey(), equalTo((long) i - 10)); + assertThat(bucket.getDocCount(), equalTo(0l)); + Avg avgAgg = bucket.getAggregations().get("avg"); + assertThat(avgAgg, nullValue()); + SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values"); + assertThat(movAvgAgg, notNullValue()); + assertThat(movAvgAgg.value(), equalTo(10d)); + } + } + + @Test public void testSizeZeroWindow() { try { @@ -1012,6 +1204,55 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } } + @Test + public void testHoltWintersNotEnoughData() 
{ + try { + SearchResponse response = client() + .prepareSearch("idx").setTypes("type") + .addAggregation( + histogram("histo").field(INTERVAL_FIELD).interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .subAggregation(metric) + .subAggregation(movingAvg("movavg_counts") + .window(10) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("_count")) + .subAggregation(movingAvg("movavg_values") + .window(windowSize) + .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() + .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) + .gapPolicy(gapPolicy) + .setBucketsPaths("the_metric")) + ).execute().actionGet(); + } catch (SearchPhaseExecutionException e) { + // All good + } + + } + + @Test + public void testBadModelParams() { + try { + SearchResponse response = client() + .prepareSearch("idx").setTypes("type") + .addAggregation( + histogram("histo").field(INTERVAL_FIELD).interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .subAggregation(metric) + .subAggregation(movingAvg("movavg_counts") + .window(10) + .modelBuilder(randomModelBuilder(100)) + .gapPolicy(gapPolicy) + .setBucketsPaths("_count")) + ).execute().actionGet(); + } catch (SearchPhaseExecutionException e) { + // All good + } + + } + private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { if (!expectedBucketIter.hasNext()) { @@ -1030,6 +1271,8 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { SimpleValue countMovAvg = actual.getAggregations().get("movavg_counts"); if (expectedCount == null) { assertThat("[_count] movavg is not null", countMovAvg, nullValue()); + } else if (Double.isNaN(expectedCount)) { + assertThat("[_count] movavg should be NaN, but is ["+countMovAvg.value()+"] instead", 
countMovAvg.value(), equalTo(Double.NaN)); } else { assertThat("[_count] movavg is null", countMovAvg, notNullValue()); assertThat("[_count] movavg does not match expected ["+countMovAvg.value()+" vs "+expectedCount+"]", @@ -1040,6 +1283,8 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { SimpleValue valuesMovAvg = actual.getAggregations().get("movavg_values"); if (expectedValue == null) { assertThat("[value] movavg is not null", valuesMovAvg, Matchers.nullValue()); + } else if (Double.isNaN(expectedValue)) { + assertThat("[value] movavg should be NaN, but is ["+valuesMovAvg.value()+"] instead", valuesMovAvg.value(), equalTo(Double.NaN)); } else { assertThat("[value] movavg is null", valuesMovAvg, notNullValue()); assertThat("[value] movavg does not match expected ["+valuesMovAvg.value()+" vs "+expectedValue+"]", @@ -1048,17 +1293,24 @@ public class MovAvgTests extends ElasticsearchIntegrationTest { } private MovAvgModelBuilder randomModelBuilder() { + return randomModelBuilder(0); + } + + private MovAvgModelBuilder randomModelBuilder(double padding) { int rand = randomIntBetween(0,3); + // HoltWinters is excluded from random generation, because it's "cold start" behavior makes + // randomized testing too tricky. 
Should probably add dedicated, randomized tests just for HoltWinters, + // which can compensate for the idiosyncrasies switch (rand) { case 0: return new SimpleModel.SimpleModelBuilder(); case 1: return new LinearModel.LinearModelBuilder(); case 2: - return new EwmaModel.EWMAModelBuilder().alpha(alpha); + return new EwmaModel.EWMAModelBuilder().alpha(alpha + padding); case 3: - return new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta); + return new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha + padding).beta(beta + padding); default: return new SimpleModel.SimpleModelBuilder(); } diff --git a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java index a25f84d1902..0bd9711c7ef 100644 --- a/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java +++ b/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java @@ -28,6 +28,8 @@ import static org.hamcrest.Matchers.equalTo; import org.junit.Test; +import java.util.Arrays; + public class MovAvgUnitTests extends ElasticsearchTestCase { @Test @@ -259,7 +261,7 @@ public class MovAvgUnitTests extends ElasticsearchTestCase { MovAvgModel model = new HoltLinearModel(alpha, beta); int windowSize = randomIntBetween(1, 50); - int numPredictions = randomIntBetween(1,50); + int numPredictions = randomIntBetween(1, 50); EvictingQueue window = EvictingQueue.create(windowSize); for (int i = 0; i < windowSize; i++) { @@ -297,4 +299,288 @@ public class MovAvgUnitTests extends ElasticsearchTestCase { assertThat(Double.compare(expected[i], actual[i]), equalTo(0)); } } + + @Test + public void testHoltWintersMultiplicativePadModel() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + MovAvgModel model = new 
HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.MULTIPLICATIVE, true); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v + 0.0000000001; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + // Calculate first seasonal + if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) { + Arrays.fill(seasonal, 0.0); + } else { + for (int i = 0; i < period; i++) { + seasonal[i] = vs[i] / s; + } + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + double expected = s + (0 * b) * seasonal[seasonCounter % windowSize];; + double actual = model.next(window); + assertThat(Double.compare(expected, actual), equalTo(0)); + } + + @Test + public void testHoltWintersMultiplicativePadPredictionModel() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + 
MovAvgModel model = new HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.MULTIPLICATIVE, true); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + int numPredictions = randomIntBetween(1, 50); + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + double actual[] = model.predict(window, numPredictions); + double expected[] = new double[numPredictions]; + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v + 0.0000000001; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + for (int i = 0; i < period; i++) { + // Calculate first seasonal + seasonal[i] = vs[i] / s; + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] / seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] / (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + + for (int i = 0; i < numPredictions; i++) { + + expected[i] = s + (i * b) * seasonal[seasonCounter % windowSize]; + assertThat(Double.compare(expected[i], actual[i]), equalTo(0)); + seasonCounter += 1; + } + + } + + @Test + public void testHoltWintersAdditiveModel() { + double alpha = randomDouble(); + double beta = 
randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + MovAvgModel model = new HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.ADDITIVE, false); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + for (int i = 0; i < period; i++) { + // Calculate first seasonal + seasonal[i] = vs[i] / s; + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + double expected = s + (0 * b) + seasonal[seasonCounter % windowSize];; + double actual = model.next(window); + assertThat(Double.compare(expected, actual), equalTo(0)); + } + + @Test + public void testHoltWintersAdditivePredictionModel() { + double alpha = randomDouble(); + double beta = randomDouble(); + double gamma = randomDouble(); + int period = randomIntBetween(1,10); + MovAvgModel model = new 
HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.ADDITIVE, false); + + int windowSize = randomIntBetween(period * 2, 50); // HW requires at least two periods of data + int numPredictions = randomIntBetween(1, 50); + + EvictingQueue window = EvictingQueue.create(windowSize); + for (int i = 0; i < windowSize; i++) { + window.offer(randomDouble()); + } + double actual[] = model.predict(window, numPredictions); + double expected[] = new double[numPredictions]; + + // Smoothed value + double s = 0; + double last_s = 0; + + // Trend value + double b = 0; + double last_b = 0; + + // Seasonal value + double[] seasonal = new double[windowSize]; + + int counter = 0; + double[] vs = new double[windowSize]; + for (double v : window) { + vs[counter] = v; + counter += 1; + } + + + // Initial level value is average of first season + // Calculate the slopes between first and second season for each period + for (int i = 0; i < period; i++) { + s += vs[i]; + b += (vs[i] - vs[i + period]) / 2; + } + s /= (double) period; + b /= (double) period; + last_s = s; + last_b = b; + + for (int i = 0; i < period; i++) { + // Calculate first seasonal + seasonal[i] = vs[i] / s; + } + + for (int i = period; i < vs.length; i++) { + s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b); + b = beta * (s - last_s) + (1 - beta) * last_b; + + //seasonal[i] = gamma * (vs[i] / s) + ((1 - gamma) * seasonal[i - period]); + seasonal[i] = gamma * (vs[i] - (last_s + last_b )) + (1 - gamma) * seasonal[i - period]; + last_s = s; + last_b = b; + } + + int seasonCounter = (windowSize - 1) - period; + + for (int i = 0; i < numPredictions; i++) { + + expected[i] = s + (i * b) + seasonal[seasonCounter % windowSize]; + assertThat(Double.compare(expected[i], actual[i]), equalTo(0)); + seasonCounter += 1; + } + + } } diff --git a/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java 
b/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java index 2845af198a2..3a9135cb731 100644 --- a/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java +++ b/src/test/java/org/elasticsearch/search/highlight/CustomHighlighter.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.highlight; import com.google.common.collect.Lists; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; +import org.elasticsearch.index.mapper.FieldMapper; import java.util.List; import java.util.Locale; @@ -68,6 +69,11 @@ public class CustomHighlighter implements Highlighter { return new HighlightField(highlighterContext.fieldName, responses.toArray(new Text[]{})); } + @Override + public boolean canHighlight(FieldMapper fieldMapper) { + return true; + } + private static class CacheEntry { private int position; private int docId; diff --git a/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java b/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java index b3e723f213a..7a0ebf57738 100644 --- a/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java +++ b/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java @@ -51,12 +51,124 @@ import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.*; @Slow public class HighlighterSearchTests extends ElasticsearchIntegrationTest { + @Test + public void testHighlightingWithWildcardName() throws IOException { + // test the kibana case with * as fieldname that will try 
highlight all fields including meta fields + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("properties") + .startObject("text") + .field("type", "string") + .field("analyzer", "keyword") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings)); + ensureYellow(); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("text", "text").endObject()) + .get(); + refresh(); + String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); + SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); + assertHighlight(search, 0, "text", 0, equalTo("text")); + } + + @Test + public void testPlainHighlighterWithLongUnanalyzedStringTerm() throws IOException { + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("properties") + .startObject("long_text") + .field("type", "string") + .field("analyzer", "keyword") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .field("ignore_above", 1) + .endObject() + .startObject("text") + .field("type", "string") + .field("analyzer", "keyword") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings)); + ensureYellow(); + // crate a term that is larger than the allowed 32766, index it and then try highlight on it + // the search request should still succeed + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < 32767; i++) { + builder.append('a'); + } + 
client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("long_text", builder.toString()).field("text", "text").endObject()) + .get(); + refresh(); + String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); + SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); + assertHighlight(search, 0, "text", 0, equalTo("text")); + search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("long_text").highlighterType(highlighter)).get(); + assertNoFailures(search); + assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); + } + + @Test + public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException { + XContentBuilder mappings = jsonBuilder(); + mappings.startObject(); + mappings.startObject("type") + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("unstored_field") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .field("type", "string") + .field("store", "no") + .endObject() + .startObject("text") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .field("type", "string") + .field("store", "yes") + .endObject() + .endObject() + .endObject(); + mappings.endObject(); + assertAcked(prepareCreate("test") + .addMapping("type", mappings)); + ensureYellow(); + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject()) + .get(); + refresh(); + String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); + SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new 
Field("*").highlighterType(highlighter)).get(); + assertHighlight(search, 0, "text", 0, equalTo("text")); + search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("unstored_text")).get(); + assertNoFailures(search); + assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); + } + + @Test // see #3486 public void testHighTermFrequencyDoc() throws IOException { @@ -1171,12 +1283,11 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest { RestStatus.BAD_REQUEST, containsString("the field [title] should be indexed with term vector with position offsets to be used with fast vector highlighter")); - assertFailures(client().prepareSearch() + //should not fail if there is a wildcard + assertNoFailures(client().prepareSearch() .setQuery(matchPhraseQuery("title", "this is a test")) .addHighlightedField("tit*", 50, 1, 10) - .setHighlighterType("fast-vector-highlighter"), - RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with term vector with position offsets to be used with fast vector highlighter")); + .setHighlighterType("fast-vector-highlighter").get()); } @Test @@ -2169,12 +2280,11 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest { RestStatus.BAD_REQUEST, containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); - assertFailures(client().prepareSearch() + //should not fail if there is a wildcard + assertNoFailures(client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .addHighlightedField("tit*") - .setHighlighterType("postings"), - RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); + .setHighlighterType("postings").get()); } @Test diff --git 
a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index ea05c58ae35..0655588e35d 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -145,15 +145,7 @@ import java.net.InetSocketAddress; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.IdentityHashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; +import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -906,7 +898,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase DocumentMapper documentMapper = indexService.mapperService().documentMapper(type); assertThat("document mapper doesn't exists on " + node, documentMapper, notNullValue()); for (String fieldName : fieldNames) { - List matches = documentMapper.mappers().simpleMatchToFullName(fieldName); + Collection matches = documentMapper.mappers().simpleMatchToFullName(fieldName); assertThat("field " + fieldName + " doesn't exists on " + node, matches, Matchers.not(emptyIterable())); } } @@ -1067,12 +1059,17 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase lastKnownCount.set(indexer.totalIndexedDocs()); } if (lastKnownCount.get() >= numDocs) { - long count = client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(); - if (count == lastKnownCount.get()) { - // no progress - try to refresh for the next time - client().admin().indices().prepareRefresh().get(); + try { + long count = 
client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(); + if (count == lastKnownCount.get()) { + // no progress - try to refresh for the next time + client().admin().indices().prepareRefresh().get(); + } + lastKnownCount.set(count); + } catch (Throwable e) { // count now acts like search and barfs if all shards failed... + logger.debug("failed to executed count", e); + return false; } - lastKnownCount.set(count); logger.debug("[{}] docs visible for search. waiting for [{}]", lastKnownCount.get(), numDocs); } else { logger.debug("[{}] docs indexed. waiting for [{}]", lastKnownCount.get(), numDocs); diff --git a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 3cc47378116..819c1d5ab1d 100644 --- a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -50,7 +50,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.block.ClusterBlock; @@ -152,7 +152,7 @@ public class ElasticsearchAssertions { } } - public static String formatShardStatus(BroadcastOperationResponse response) { + public static String formatShardStatus(BroadcastResponse response) { String msg = " Total shards: " + response.getTotalShards() + " Successful shards: " + response.getSuccessfulShards() + " & " + response.getFailedShards() + " shard failures:"; 
for (ShardOperationFailedException failure : response.getShardFailures()) { @@ -321,12 +321,12 @@ public class ElasticsearchAssertions { assertVersionSerializable(percolateResponse); } - public static void assertNoFailures(BroadcastOperationResponse response) { + public static void assertNoFailures(BroadcastResponse response) { assertThat("Unexpected ShardFailures: " + Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); assertVersionSerializable(response); } - public static void assertAllSuccessful(BroadcastOperationResponse response) { + public static void assertAllSuccessful(BroadcastResponse response) { assertNoFailures(response); assertThat("Expected all shards successful but got successful [" + response.getSuccessfulShards() + "] total [" + response.getTotalShards() + "]", response.getTotalShards(), equalTo(response.getSuccessfulShards())); diff --git a/src/test/java/org/elasticsearch/test/rest/client/RestClient.java b/src/test/java/org/elasticsearch/test/rest/client/RestClient.java index 495fabc8e7b..b6222948fdb 100644 --- a/src/test/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/src/test/java/org/elasticsearch/test/rest/client/RestClient.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import com.google.common.collect.Sets; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; @@ -41,6 +42,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; /** @@ -50,6 +52,8 @@ import java.util.concurrent.TimeUnit; public class RestClient implements Closeable { private static final ESLogger logger = Loggers.getLogger(RestClient.class); 
+ //query_string params that don't need to be declared in the spec, they are supported by default + private static final Set ALWAYS_ACCEPTED_QUERY_STRING_PARAMS = Sets.newHashSet("pretty", "source", "filter_path"); private final RestSpec restSpec; private final CloseableHttpClient httpClient; @@ -172,10 +176,11 @@ public class RestClient implements Closeable { if (restApi.getPathParts().contains(entry.getKey())) { pathParts.put(entry.getKey(), entry.getValue()); } else { - if (!restApi.getParams().contains(entry.getKey())) { + if (restApi.getParams().contains(entry.getKey()) || ALWAYS_ACCEPTED_QUERY_STRING_PARAMS.contains(entry.getKey())) { + httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); + } else { throw new IllegalArgumentException("param [" + entry.getKey() + "] not supported in [" + restApi.getName() + "] api"); } - httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); } } } diff --git a/src/test/resources/packaging/scripts/50_systemd.bats b/src/test/resources/packaging/scripts/50_systemd.bats new file mode 100644 index 00000000000..addd4ff1ac7 --- /dev/null +++ b/src/test/resources/packaging/scripts/50_systemd.bats @@ -0,0 +1,146 @@ +#!/usr/bin/env bats + +# This file is used to test the elasticsearch Systemd setup. + +# WARNING: This testing file must be executed as root and can +# dramatically change your system. It removes the 'elasticsearch' +# user/group and also many directories. Do not execute this file +# unless you know exactly what you are doing. + +# The test case can be executed with the Bash Automated +# Testing System tool available at https://github.com/sstephenson/bats +# Thanks to Sam Stephenson! + +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. 
Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Load test utilities +load packaging_test_utils + +# Cleans everything for the 1st execution +setup() { + if [ "$BATS_TEST_NUMBER" -eq 1 ]; then + clean_before_test + fi + + + # Installs a package before test + if is_dpkg; then + dpkg -i elasticsearch*.deb >&2 || true + fi + if is_rpm; then + rpm -i elasticsearch*.rpm >&2 || true + fi +} + +@test "[SYSTEMD] daemon reload" { + skip_not_systemd + + run systemctl daemon-reload + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] enable" { + skip_not_systemd + + run systemctl enable elasticsearch.service + [ "$status" -eq 0 ] + + run systemctl is-enabled elasticsearch.service + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] start" { + skip_not_systemd + + run systemctl start elasticsearch.service + [ "$status" -eq 0 ] + + wait_for_elasticsearch_status + + assert_file_exist "/var/run/elasticsearch/elasticsearch.pid" +} + +@test "[SYSTEMD] start (running)" { + skip_not_systemd + + run systemctl start elasticsearch.service + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] is active (running)" { + skip_not_systemd + + run systemctl is-active elasticsearch.service + [ "$status" -eq 0 ] + [ "$output" = "active" ] +} + +@test "[SYSTEMD] status (running)" { + skip_not_systemd + + run systemctl status elasticsearch.service + [ "$status" -eq 0 ] +} + +################################## +# Check that Elasticsearch is working +################################## +@test 
"[SYSTEMD] test elasticsearch" { + skip_not_systemd + + run_elasticsearch_tests +} + +@test "[SYSTEMD] restart" { + skip_not_systemd + + run systemctl restart elasticsearch.service + [ "$status" -eq 0 ] + + wait_for_elasticsearch_status + + run service elasticsearch status + [ "$status" -eq 0 ] +} + +@test "[SYSTEMD] stop (running)" { + skip_not_systemd + + run systemctl stop elasticsearch.service + [ "$status" -eq 0 ] + + run systemctl status elasticsearch.service + echo "$output" | grep "Active:" | grep "inactive" +} + +@test "[SYSTEMD] stop (stopped)" { + skip_not_systemd + + run systemctl stop elasticsearch.service + [ "$status" -eq 0 ] + + run systemctl status elasticsearch.service + echo "$output" | grep "Active:" | grep "inactive" +} + +@test "[SYSTEMD] status (stopped)" { + skip_not_systemd + + run systemctl status elasticsearch.service + echo "$output" | grep "Active:" | grep "inactive" +} diff --git a/src/test/resources/packaging/scripts/packaging_test_utils.bash b/src/test/resources/packaging/scripts/packaging_test_utils.bash index b1058c641da..b5fe262efd1 100644 --- a/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -217,6 +217,12 @@ verify_package_installation() { # License file assert_file "/usr/share/elasticsearch/LICENSE.txt" f root 644 fi + + if is_systemd; then + assert_file "/usr/lib/systemd/system/elasticsearch.service" f root 644 + assert_file "/usr/lib/tmpfiles.d/elasticsearch.conf" f root 644 + assert_file "/usr/lib/sysctl.d/elasticsearch.conf" f root 644 + fi } @@ -294,7 +300,10 @@ clean_before_test() { "/etc/sysconfig/elasticsearch" \ "/var/run/elasticsearch" \ "/usr/share/doc/elasticsearch" \ - "/tmp/elasticsearch") + "/tmp/elasticsearch" \ + "/usr/lib/systemd/system/elasticsearch.conf" \ + "/usr/lib/tmpfiles.d/elasticsearch.conf" \ + "/usr/lib/sysctl.d/elasticsearch.conf") if [ "$ES_CLEAN_BEFORE_TEST" = "true" ]; then # Kills all processes of user 
elasticsearch