Merge branch 'master' into ccr
* master:
  Move default location of dependencies report (#31228)
  Remove dependencies report task dependencies (#31227)
  Add recognition of MPL 2.0 (#31226)
  Fix unknown licenses (#31223)
  Remove version from license file name for GCS SDK (#31221)
  Fully encapsulate LocalCheckpointTracker inside of the engine (#31213)
  [DOCS] Added 'fail_on_unsupported_field' param to MLT. Closes #28008 (#31160)
  Add licenses for transport-nio (#31218)
  Remove DocumentFieldMappers#simpleMatchToFullName. (#31041)
  Allow to trim all ops above a certain seq# with a term lower than X, post backport fix (#31211)
  Compliant SAML Response destination check (#31175)
  Remove DocumentFieldMappers#smartNameFieldMapper, as it is no longer needed. (#31018)
  Remove extraneous references to 'tokenized' in the mapper code. (#31010)
  Allow to trim all ops above a certain seq# with a term lower than X (#30176)
  SQL: Make a single JDBC driver jar (#31012)
  Enhance license detection for various licenses (#31198)
  [DOCS] Add note about long-lived idle connections (#30990)
  Move number of language analyzers to analysis-common module (#31143)
  Default max concurrent search req. numNodes * 5 (#31171)
  flush job to ensure all results have been written (#31187)
commit afe02a3cac
@@ -543,7 +543,7 @@ subprojects { project ->
  }
}

/* Remove assemble on all qa projects because we don't need to publish
/* Remove assemble/dependenciesInfo on all qa projects because we don't need to publish
 * artifacts for them. */
gradle.projectsEvaluated {
  subprojects {
@@ -553,6 +553,11 @@ gradle.projectsEvaluated {
        project.tasks.remove(assemble)
        project.build.dependsOn.remove('assemble')
      }
      Task dependenciesInfo = project.tasks.findByName('dependenciesInfo')
      if (dependenciesInfo) {
        project.tasks.remove(dependenciesInfo)
        project.precommit.dependsOn.remove('dependenciesInfo')
      }
    }
  }
}
@@ -762,6 +762,10 @@ class BuildPlugin implements Plugin<Project> {

    private static configureDependenciesInfo(Project project) {
        Task deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask.class)
        deps.dependencies = project.configurations.compile.allDependencies
        deps.runtimeConfiguration = project.configurations.runtime
        deps.compileOnlyConfiguration = project.configurations.compileOnly
        project.afterEvaluate {
            deps.mappings = project.dependencyLicenses.mappings
        }
    }
}
@@ -19,14 +19,19 @@

package org.elasticsearch.gradle

import org.elasticsearch.gradle.precommit.DependencyLicensesTask
import org.gradle.api.DefaultTask
import org.gradle.api.artifacts.Configuration
import org.gradle.api.artifacts.Dependency
import org.gradle.api.artifacts.DependencyResolutionListener
import org.gradle.api.artifacts.DependencySet
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputDirectory
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.TaskAction

import java.util.regex.Matcher
import java.util.regex.Pattern

/**
 * A task to gather information about the dependencies and export them into a csv file.
@@ -44,7 +49,14 @@ public class DependenciesInfoTask extends DefaultTask {

    /** Dependencies to gather information from. */
    @Input
    public DependencySet dependencies
    public Configuration runtimeConfiguration

    /** We subtract compile-only dependencies. */
    @Input
    public Configuration compileOnlyConfiguration

    @Input
    public LinkedHashMap<String, String> mappings

    /** Directory to read license files */
    @InputDirectory
@@ -59,15 +71,34 @@ public class DependenciesInfoTask extends DefaultTask {

    @TaskAction
    public void generateDependenciesInfo() {

        final DependencySet runtimeDependencies = runtimeConfiguration.getAllDependencies()
        // we have to resolve the transitive dependencies and create a group:artifactId:version map
        final Set<String> compileOnlyArtifacts =
            compileOnlyConfiguration
                .getResolvedConfiguration()
                .resolvedArtifacts
                .collect { it -> "${it.moduleVersion.id.group}:${it.moduleVersion.id.name}:${it.moduleVersion.id.version}" }

        final StringBuilder output = new StringBuilder()

        for (Dependency dependency : dependencies) {
            // Only external dependencies are checked
            if (dependency.group != null && dependency.group.contains("elasticsearch") == false) {
                final String url = createURL(dependency.group, dependency.name, dependency.version)
                final String licenseType = getLicenseType(dependency.group, dependency.name)
                output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n")
        for (final Dependency dependency : runtimeDependencies) {
            // we do not need compile-only dependencies here
            if (compileOnlyArtifacts.contains("${dependency.group}:${dependency.name}:${dependency.version}")) {
                continue
            }
            // only external dependencies are checked
            if (dependency.group != null && dependency.group.contains("org.elasticsearch")) {
                continue
            }

            final String url = createURL(dependency.group, dependency.name, dependency.version)
            final String dependencyName = DependencyLicensesTask.getDependencyName(mappings, dependency.name)
            logger.info("mapped dependency ${dependency.group}:${dependency.name} to ${dependencyName} for license info")

            final String licenseType = getLicenseType(dependency.group, dependencyName)
            output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n")

        }
        outputFile.setText(output.toString(), 'UTF-8')
    }
@@ -109,7 +140,8 @@ public class DependenciesInfoTask extends DefaultTask {
        }

        if (license) {
            final String content = license.readLines("UTF-8").toString()
            // replace * because they are sometimes used at the beginning lines as if the license was a multi-line comment
            final String content = new String(license.readBytes(), "UTF-8").replaceAll("\\s+", " ").replaceAll("\\*", " ")
            final String spdx = checkSPDXLicense(content)
            if (spdx == null) {
                // License has not be identified as SPDX.
@@ -133,15 +165,88 @@ public class DependenciesInfoTask extends DefaultTask {
    private String checkSPDXLicense(final String licenseText) {
        String spdx = null

        final String APACHE_2_0 = "Apache.*License.*(v|V)ersion 2.0"
        final String BSD_2 = "BSD 2-clause.*License"
        final String APACHE_2_0 = "Apache.*License.*(v|V)ersion.*2\\.0"

        final String BSD_2 = """
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

 1\\. Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer\\.
 2\\. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution\\.

THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.
""".replaceAll("\\s+", "\\\\s*")

        final String BSD_3 = """
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

 (1\\.)? Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer\\.
 (2\\.)? Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution\\.
 ((3\\.)? The name of .+ may not be used to endorse or promote products
    derived from this software without specific prior written permission\\.|
 (3\\.)? Neither the name of .+ nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission\\.)

THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.
""".replaceAll("\\s+", "\\\\s*")

        final String CDDL_1_0 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.0"
        final String CDDL_1_1 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.1"
        final String ICU = "ICU License - ICU 1.8.1 and later"
        final String LGPL_3 = "GNU LESSER GENERAL PUBLIC LICENSE.*Version 3"
        final String MIT = "MIT License"

        final String MIT = """
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files \\(the "Software"\\), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software\\.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE\\.
""".replaceAll("\\s+", "\\\\s*")

        final String MOZILLA_1_1 = "Mozilla Public License.*Version 1.1"

        final String MOZILLA_2_0 = "Mozilla\\s*Public\\s*License\\s*Version\\s*2\\.0"

        switch (licenseText) {
            case ~/.*${APACHE_2_0}.*/:
                spdx = 'Apache-2.0'
@@ -152,6 +257,9 @@ public class DependenciesInfoTask extends DefaultTask {
            case ~/.*${BSD_2}.*/:
                spdx = 'BSD-2-Clause'
                break
            case ~/.*${BSD_3}.*/:
                spdx = 'BSD-3-Clause'
                break
            case ~/.*${LGPL_3}.*/:
                spdx = 'LGPL-3.0'
                break
@@ -167,6 +275,9 @@ public class DependenciesInfoTask extends DefaultTask {
            case ~/.*${MOZILLA_1_1}.*/:
                spdx = 'MPL-1.1'
                break
            case ~/.*${MOZILLA_2_0}.*/:
                spdx = 'MPL-2.0'
                break
            default:
                break
        }
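Note on the matching strategy above: the license file content is collapsed to a single whitespace-normalized line (see the readBytes/replaceAll change earlier in this diff) and then tested against whitespace-tolerant regular expressions, each mapped to an SPDX identifier. A minimal, self-contained Java sketch of that idea; the pattern and sample text below are illustrative stand-ins, not the exact constants used by the task:

import java.util.regex.Pattern;

public class SpdxSketch {
    public static void main(String[] args) {
        // Collapse '*' and runs of whitespace so a multi-line license header can be
        // matched by a single-line, whitespace-tolerant pattern.
        String raw = "Apache License\n * Version 2.0, January 2004\n";
        String normalized = raw.replaceAll("\\*", " ").replaceAll("\\s+", " ");

        // Analogous to the APACHE_2_0 constant above.
        Pattern apache2 = Pattern.compile(".*Apache.*License.*(v|V)ersion.*2\\.0.*");

        String spdx = apache2.matcher(normalized).matches() ? "Apache-2.0" : null;
        System.out.println(spdx); // prints Apache-2.0
    }
}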
@@ -109,6 +109,10 @@ public class DependencyLicensesTask extends DefaultTask {
        mappings.put(from, to)
    }

    public LinkedHashMap<String, String> getMappings() {
        return new LinkedHashMap<>(mappings)
    }

    /**
     * Add a rule which will skip SHA checking for the given dependency name. This should be used for
     * locally build dependencies, which cause the sha to change constantly.
@@ -129,10 +133,6 @@ public class DependencyLicensesTask extends DefaultTask {
            throw new GradleException("Licences dir ${licensesDir} does not exist, but there are dependencies")
        }

        // order is the same for keys and values iteration since we use a linked hashmap
        List<String> mapped = new ArrayList<>(mappings.values())
        Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
        Map<String, Integer> licenses = new HashMap<>()
        Map<String, Integer> notices = new HashMap<>()
        Set<File> shaFiles = new HashSet<File>()
@@ -151,7 +151,7 @@ public class DependencyLicensesTask extends DefaultTask {

        for (File dependency : dependencies) {
            String jarName = dependency.getName()
            String depName = jarName - ~/\-\d+.*/
            String depName = jarName - ~/\-v?\d+.*/
            if (ignoreShas.contains(depName)) {
                // local deps should not have sha files!
                if (getShaFile(jarName).exists()) {
@@ -162,16 +162,10 @@ public class DependencyLicensesTask extends DefaultTask {
                checkSha(dependency, jarName, shaFiles)
            }

            logger.info("Checking license/notice for " + depName)
            Matcher match = mappingsPattern.matcher(depName)
            if (match.matches()) {
                int i = 0
                while (i < match.groupCount() && match.group(i + 1) == null) ++i;
                logger.info("Mapped dependency name ${depName} to ${mapped.get(i)} for license check")
                depName = mapped.get(i)
            }
            checkFile(depName, jarName, licenses, 'LICENSE')
            checkFile(depName, jarName, notices, 'NOTICE')
            final String dependencyName = getDependencyName(mappings, depName)
            logger.info("mapped dependency name ${depName} to ${dependencyName} for license/notice check")
            checkFile(dependencyName, jarName, licenses, 'LICENSE')
            checkFile(dependencyName, jarName, notices, 'NOTICE')
        }

        licenses.each { license, count ->
@@ -189,6 +183,19 @@ public class DependencyLicensesTask extends DefaultTask {
            }
        }

    public static String getDependencyName(final LinkedHashMap<String, String> mappings, final String dependencyName) {
        // order is the same for keys and values iteration since we use a linked hashmap
        List<String> mapped = new ArrayList<>(mappings.values())
        Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
        Matcher match = mappingsPattern.matcher(dependencyName)
        if (match.matches()) {
            int i = 0
            while (i < match.groupCount() && match.group(i + 1) == null) ++i;
            return mapped.get(i)
        }
        return dependencyName
    }

    private File getShaFile(String jarName) {
        return new File(licensesDir, jarName + SHA_EXTENSION)
    }
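The getDependencyName helper added above maps a jar-derived dependency name to its canonical mapping value by building one alternation pattern from the mapping keys and using the index of the first non-null capture group to select the corresponding value. A small Java sketch of the same trick; the mapping entries and names here are hypothetical, for illustration only:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DependencyNameSketch {
    static String getDependencyName(LinkedHashMap<String, String> mappings, String depName) {
        // Keys and values iterate in the same order because the map is a LinkedHashMap.
        List<String> mapped = new ArrayList<>(mappings.values());
        Pattern pattern = Pattern.compile("(" + String.join(")|(", mappings.keySet()) + ")");
        Matcher match = pattern.matcher(depName);
        if (match.matches()) {
            int i = 0;
            // The first non-null group identifies which alternative (key) matched.
            while (i < match.groupCount() && match.group(i + 1) == null) i++;
            return mapped.get(i);
        }
        return depName;
    }

    public static void main(String[] args) {
        LinkedHashMap<String, String> mappings = new LinkedHashMap<>();
        mappings.put("lucene-.*", "lucene");    // hypothetical mapping entry
        mappings.put("jackson-.*", "jackson");  // hypothetical mapping entry
        System.out.println(getDependencyName(mappings, "lucene-core"));    // lucene
        System.out.println(getDependencyName(mappings, "some-other-lib")); // some-other-lib
    }
}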
@@ -31,6 +31,9 @@ esplugin {
tasks.remove(assemble)
build.dependsOn.remove('assemble')

dependencyLicenses.enabled = false
dependenciesInfo.enabled = false

compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"

// no unit tests
@@ -34,7 +34,7 @@ Collection distributions = project('archives').subprojects + project('packages')
task generateDependenciesReport(type: ConcatFilesTask) {
  files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' )
  headerLine = "name,version,url,license"
  target = new File(System.getProperty('csv')?: "${project.buildDir}/dependencies/es-dependencies.csv")
  target = new File(System.getProperty('csv')?: "${project.buildDir}/reports/dependencies/es-dependencies.csv")
}

/*****************************************************************************
@@ -44,7 +44,12 @@ time setting format). Defaults to `30s`.
|`transport.tcp.compress` |Set to `true` to enable compression (`DEFLATE`)
between all nodes. Defaults to `false`.

|`transport.ping_schedule` | Schedule a regular ping message to ensure that connections are kept alive. Defaults to `5s` in the transport client and `-1` (disabled) elsewhere.
|`transport.ping_schedule` | Schedule a regular application-level ping message
to ensure that transport connections between nodes are kept alive. Defaults to
`5s` in the transport client and `-1` (disabled) elsewhere. It is preferable to
correctly configure TCP keep-alives instead of using this feature, because TCP
keep-alives apply to all kinds of long-lived connection and not just to
transport connections.

|=======================================================================

@@ -80,6 +85,20 @@ The following parameters can be configured like that
* `tcp_send_buffer_size`: Configures the send buffer size of the socket
* `tcp_receive_buffer_size`: Configures the receive buffer size of the socket

[float]
==== Long-lived idle connections

Elasticsearch opens a number of long-lived TCP connections between each pair of
nodes in the cluster, and some of these connections may be idle for an extended
period of time. Nonetheless, Elasticsearch requires these connections to remain
open, and it can disrupt the operation of the cluster if any inter-node
connections are closed by an external influence such as a firewall. It is
important to configure your network to preserve long-lived idle connections
between Elasticsearch nodes, for instance by leaving `tcp_keep_alive` enabled
and ensuring that the keepalive interval is shorter than any timeout that might
cause idle connections to be closed, or by setting `transport.ping_schedule` if
keepalives cannot be configured.
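For reference, `transport.ping_schedule` is an ordinary setting, so a transport client can enable it when its settings are built. A minimal sketch, assuming the org.elasticsearch.common.settings.Settings builder from this codebase is on the classpath:

import org.elasticsearch.common.settings.Settings;

public class PingScheduleSketch {
    public static void main(String[] args) {
        // Enable the application-level ping every 5 seconds, e.g. for a transport
        // client in an environment where TCP keep-alives cannot be configured.
        Settings settings = Settings.builder()
                .put("transport.ping_schedule", "5s") // the setting documented above
                .build();
        System.out.println(settings.get("transport.ping_schedule")); // 5s
    }
}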
[float]
=== Transport Tracer
@@ -241,6 +241,13 @@ number of terms that must match.
The syntax is the same as the <<query-dsl-minimum-should-match,minimum should match>>.
(Defaults to `"30%"`).

`fail_on_unsupported_field`::
Controls whether the query should fail (throw an exception) if any of the
specified fields are not of the supported types
(`text` or `keyword`). Set this to `false` to ignore the field and continue
processing. Defaults to `true`.
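The same option is exposed on the Java query builders. A hedged sketch, assuming the 6.x MoreLikeThisQueryBuilder API; the field names and like-text are illustrative:

import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class MltSketch {
    public static void main(String[] args) {
        // Build a more_like_this query over two fields and skip unsupported
        // fields instead of failing, mirroring fail_on_unsupported_field: false.
        MoreLikeThisQueryBuilder mlt = QueryBuilders
                .moreLikeThisQuery(new String[] {"title", "description"}, // fields (illustrative)
                                   new String[] {"distributed search"},   // like text (illustrative)
                                   null)                                  // no like items
                .failOnUnsupportedField(false)
                .minTermFreq(1);
        System.out.println(mlt); // renders the query as JSON
    }
}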
`boost_terms`::
Each term in the formed query could be further boosted by their tf-idf score.
This sets the boost factor to use when using this feature. Defaults to
@@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider<ArabicAnalyzer> {

    private final ArabicAnalyzer arabicAnalyzer;

    public ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        arabicAnalyzer = new ArabicAnalyzer(
            Analysis.parseStopWords(env, settings, ArabicAnalyzer.getDefaultStopSet()),
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.hy.ArmenianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class ArmenianAnalyzerProvider extends AbstractIndexAnalyzerProvider<ArmenianAnalyzer> {
|
||||
|
||||
private final ArmenianAnalyzer analyzer;
|
||||
|
||||
public ArmenianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
ArmenianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new ArmenianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, ArmenianAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.eu.BasqueAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class BasqueAnalyzerProvider extends AbstractIndexAnalyzerProvider<BasqueAnalyzer> {
|
||||
|
||||
private final BasqueAnalyzer analyzer;
|
||||
|
||||
public BasqueAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
BasqueAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new BasqueAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, BasqueAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.bn.BengaliAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class BengaliAnalyzerProvider extends AbstractIndexAnalyzerProvider<BengaliAnalyzer> {
|
||||
|
||||
private final BengaliAnalyzer analyzer;
|
||||
|
||||
public BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new BengaliAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, BengaliAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.br.BrazilianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class BrazilianAnalyzerProvider extends AbstractIndexAnalyzerProvider<BrazilianAnalyzer> {
|
||||
|
||||
private final BrazilianAnalyzer analyzer;
|
||||
|
||||
public BrazilianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
BrazilianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new BrazilianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, BrazilianAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class BulgarianAnalyzerProvider extends AbstractIndexAnalyzerProvider<BulgarianAnalyzer> {
|
||||
|
||||
private final BulgarianAnalyzer analyzer;
|
||||
|
||||
public BulgarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
BulgarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new BulgarianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, BulgarianAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.ca.CatalanAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class CatalanAnalyzerProvider extends AbstractIndexAnalyzerProvider<CatalanAnalyzer> {
|
||||
|
||||
private final CatalanAnalyzer analyzer;
|
||||
|
||||
public CatalanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
CatalanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new CatalanAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, CatalanAnalyzer.getDefaultStopSet()),
|
@ -17,12 +17,13 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.standard.StandardAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
|
||||
/**
|
||||
* Only for old indexes
|
||||
@ -31,16 +32,16 @@ public class ChineseAnalyzerProvider extends AbstractIndexAnalyzerProvider<Stand
|
||||
|
||||
private final StandardAnalyzer analyzer;
|
||||
|
||||
public ChineseAnalyzerProvider(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
|
||||
ChineseAnalyzerProvider(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
// old index: best effort
|
||||
analyzer = new StandardAnalyzer();
|
||||
analyzer.setVersion(version);
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StandardAnalyzer get() {
|
||||
return this.analyzer;
|
||||
}
|
||||
}
|
||||
}
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.cjk.CJKAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class CjkAnalyzerProvider extends AbstractIndexAnalyzerProvider<CJKAnalyzer> {
|
||||
|
||||
private final CJKAnalyzer analyzer;
|
||||
|
||||
public CjkAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
CjkAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
CharArraySet stopWords = Analysis.parseStopWords(env, settings, CJKAnalyzer.getDefaultStopSet());
|
||||
|
@ -24,11 +24,17 @@ import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.LowerCaseFilter;
|
||||
import org.apache.lucene.analysis.StopFilter;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
|
||||
import org.apache.lucene.analysis.ar.ArabicNormalizationFilter;
|
||||
import org.apache.lucene.analysis.ar.ArabicStemFilter;
|
||||
import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
|
||||
import org.apache.lucene.analysis.bn.BengaliAnalyzer;
|
||||
import org.apache.lucene.analysis.bn.BengaliNormalizationFilter;
|
||||
import org.apache.lucene.analysis.br.BrazilianAnalyzer;
|
||||
import org.apache.lucene.analysis.br.BrazilianStemFilter;
|
||||
import org.apache.lucene.analysis.ca.CatalanAnalyzer;
|
||||
import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
|
||||
import org.apache.lucene.analysis.cjk.CJKAnalyzer;
|
||||
import org.apache.lucene.analysis.cjk.CJKBigramFilter;
|
||||
import org.apache.lucene.analysis.cjk.CJKWidthFilter;
|
||||
import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter;
|
||||
@ -40,14 +46,22 @@ import org.apache.lucene.analysis.core.LowerCaseTokenizer;
|
||||
import org.apache.lucene.analysis.core.StopAnalyzer;
|
||||
import org.apache.lucene.analysis.core.UpperCaseFilter;
|
||||
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
|
||||
import org.apache.lucene.analysis.cz.CzechAnalyzer;
|
||||
import org.apache.lucene.analysis.cz.CzechStemFilter;
|
||||
import org.apache.lucene.analysis.da.DanishAnalyzer;
|
||||
import org.apache.lucene.analysis.de.GermanAnalyzer;
|
||||
import org.apache.lucene.analysis.de.GermanNormalizationFilter;
|
||||
import org.apache.lucene.analysis.de.GermanStemFilter;
|
||||
import org.apache.lucene.analysis.en.EnglishAnalyzer;
|
||||
import org.apache.lucene.analysis.en.KStemFilter;
|
||||
import org.apache.lucene.analysis.en.PorterStemFilter;
|
||||
import org.apache.lucene.analysis.eu.BasqueAnalyzer;
|
||||
import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
|
||||
import org.apache.lucene.analysis.fi.FinnishAnalyzer;
|
||||
import org.apache.lucene.analysis.fr.FrenchAnalyzer;
|
||||
import org.apache.lucene.analysis.gl.GalicianAnalyzer;
|
||||
import org.apache.lucene.analysis.hi.HindiNormalizationFilter;
|
||||
import org.apache.lucene.analysis.hy.ArmenianAnalyzer;
|
||||
import org.apache.lucene.analysis.in.IndicNormalizationFilter;
|
||||
import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter;
|
||||
import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute;
|
||||
@ -64,6 +78,7 @@ import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
|
||||
import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer;
|
||||
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
|
||||
import org.apache.lucene.analysis.ngram.NGramTokenizer;
|
||||
import org.apache.lucene.analysis.nl.DutchAnalyzer;
|
||||
import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
|
||||
import org.apache.lucene.analysis.pattern.PatternTokenizer;
|
||||
import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter;
|
||||
@ -73,6 +88,7 @@ import org.apache.lucene.analysis.shingle.ShingleFilter;
|
||||
import org.apache.lucene.analysis.snowball.SnowballFilter;
|
||||
import org.apache.lucene.analysis.standard.ClassicFilter;
|
||||
import org.apache.lucene.analysis.standard.ClassicTokenizer;
|
||||
import org.apache.lucene.analysis.standard.StandardAnalyzer;
|
||||
import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer;
|
||||
import org.apache.lucene.analysis.th.ThaiTokenizer;
|
||||
import org.apache.lucene.analysis.tr.ApostropheFilter;
|
||||
@ -113,6 +129,24 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
|
||||
analyzers.put("fingerprint", FingerprintAnalyzerProvider::new);
|
||||
analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new);
|
||||
analyzers.put("pattern", PatternAnalyzerProvider::new);
|
||||
analyzers.put("snowball", SnowballAnalyzerProvider::new);
|
||||
analyzers.put("arabic", ArabicAnalyzerProvider::new);
|
||||
analyzers.put("armenian", ArmenianAnalyzerProvider::new);
|
||||
analyzers.put("basque", BasqueAnalyzerProvider::new);
|
||||
analyzers.put("bengali", BengaliAnalyzerProvider::new);
|
||||
analyzers.put("brazilian", BrazilianAnalyzerProvider::new);
|
||||
analyzers.put("bulgarian", BulgarianAnalyzerProvider::new);
|
||||
analyzers.put("catalan", CatalanAnalyzerProvider::new);
|
||||
analyzers.put("chinese", ChineseAnalyzerProvider::new);
|
||||
analyzers.put("cjk", CjkAnalyzerProvider::new);
|
||||
analyzers.put("czech", CzechAnalyzerProvider::new);
|
||||
analyzers.put("danish", DanishAnalyzerProvider::new);
|
||||
analyzers.put("dutch", DutchAnalyzerProvider::new);
|
||||
analyzers.put("english", EnglishAnalyzerProvider::new);
|
||||
analyzers.put("finnish", FinnishAnalyzerProvider::new);
|
||||
analyzers.put("french", FrenchAnalyzerProvider::new);
|
||||
analyzers.put("galician", GalicianAnalyzerProvider::new);
|
||||
analyzers.put("german", GermanAnalyzerProvider::new);
|
||||
return analyzers;
|
||||
}
|
||||
|
||||
@ -213,10 +247,108 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
|
||||
@Override
|
||||
public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
|
||||
List<PreBuiltAnalyzerProviderFactory> analyzers = new ArrayList<>();
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE,
|
||||
version -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET)));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, version ->
|
||||
new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET)));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET);
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, version -> {
|
||||
Analyzer a = new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true,
|
||||
CharArraySet.EMPTY_SET);
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET);
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new ArabicAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("armenian", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new ArmenianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("basque", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new BasqueAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("bengali", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new BengaliAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("brazilian", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new BrazilianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new BulgarianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new CatalanAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.LUCENE, version -> {
|
||||
// only for old indices, best effort
|
||||
Analyzer a = new StandardAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new CJKAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new CzechAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new DanishAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("dutch", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new DutchAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("english", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new EnglishAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("finnish", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new FinnishAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("french", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new FrenchAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("galician", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new GalicianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
analyzers.add(new PreBuiltAnalyzerProviderFactory("german", CachingStrategy.LUCENE, version -> {
|
||||
Analyzer a = new GermanAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}));
|
||||
return analyzers;
|
||||
}
|
||||
|
||||
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.cz.CzechAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class CzechAnalyzerProvider extends AbstractIndexAnalyzerProvider<CzechAnalyzer> {
|
||||
|
||||
private final CzechAnalyzer analyzer;
|
||||
|
||||
public CzechAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
CzechAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new CzechAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, CzechAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.da.DanishAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class DanishAnalyzerProvider extends AbstractIndexAnalyzerProvider<DanishAnalyzer> {
|
||||
|
||||
private final DanishAnalyzer analyzer;
|
||||
|
||||
public DanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
DanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new DanishAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, DanishAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.nl.DutchAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class DutchAnalyzerProvider extends AbstractIndexAnalyzerProvider<DutchAnalyzer> {
|
||||
|
||||
private final DutchAnalyzer analyzer;
|
||||
|
||||
public DutchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
DutchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new DutchAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, DutchAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.en.EnglishAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class EnglishAnalyzerProvider extends AbstractIndexAnalyzerProvider<EnglishAnalyzer> {
|
||||
|
||||
private final EnglishAnalyzer analyzer;
|
||||
|
||||
public EnglishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
EnglishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new EnglishAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, EnglishAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.fi.FinnishAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class FinnishAnalyzerProvider extends AbstractIndexAnalyzerProvider<FinnishAnalyzer> {
|
||||
|
||||
private final FinnishAnalyzer analyzer;
|
||||
|
||||
public FinnishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
FinnishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new FinnishAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, FinnishAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.fr.FrenchAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class FrenchAnalyzerProvider extends AbstractIndexAnalyzerProvider<FrenchAnalyzer> {
|
||||
|
||||
private final FrenchAnalyzer analyzer;
|
||||
|
||||
public FrenchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
FrenchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new FrenchAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, FrenchAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.gl.GalicianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class GalicianAnalyzerProvider extends AbstractIndexAnalyzerProvider<GalicianAnalyzer> {
|
||||
|
||||
private final GalicianAnalyzer analyzer;
|
||||
|
||||
public GalicianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
GalicianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new GalicianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, GalicianAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.de.GermanAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class GermanAnalyzerProvider extends AbstractIndexAnalyzerProvider<GermanAnalyzer> {
|
||||
|
||||
private final GermanAnalyzer analyzer;
|
||||
|
||||
public GermanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
GermanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new GermanAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, GermanAnalyzer.getDefaultStopSet()),
|
@ -1,4 +1,4 @@
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
@ -48,12 +48,12 @@ public final class SnowballAnalyzer extends Analyzer {
|
||||
private CharArraySet stopSet;
|
||||
|
||||
/** Builds the named analyzer with no stop words. */
|
||||
public SnowballAnalyzer(String name) {
|
||||
SnowballAnalyzer(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/** Builds the named analyzer with the given stop words. */
|
||||
public SnowballAnalyzer(String name, CharArraySet stopWords) {
|
||||
SnowballAnalyzer(String name, CharArraySet stopWords) {
|
||||
this(name);
|
||||
stopSet = CharArraySet.unmodifiableSet(CharArraySet.copy(stopWords));
|
||||
}
|
@ -16,7 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.core.StopAnalyzer;
|
||||
@ -26,6 +26,8 @@ import org.apache.lucene.analysis.nl.DutchAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
@ -60,7 +62,7 @@ public class SnowballAnalyzerProvider extends AbstractIndexAnalyzerProvider<Snow
|
||||
|
||||
private final SnowballAnalyzer analyzer;
|
||||
|
||||
public SnowballAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
SnowballAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
|
||||
String language = settings.get("language", settings.get("name", "English"));
|
@ -1,4 +1,4 @@
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
@ -30,14 +30,14 @@ public class SnowballAnalyzerTests extends ESTokenStreamTestCase {
|
||||
assertAnalyzesTo(a, "he abhorred accents",
|
||||
new String[]{"he", "abhor", "accent"});
|
||||
}
|
||||
|
||||
|
||||
public void testStopwords() throws Exception {
|
||||
Analyzer a = new SnowballAnalyzer("English",
|
||||
StandardAnalyzer.STOP_WORDS_SET);
|
||||
assertAnalyzesTo(a, "the quick brown fox jumped",
|
||||
new String[]{"quick", "brown", "fox", "jump"});
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Test turkish lowercasing
|
||||
*/
|
||||
@ -48,7 +48,7 @@ public class SnowballAnalyzerTests extends ESTokenStreamTestCase {
|
||||
assertAnalyzesTo(a, "AĞACI", new String[] { "ağaç" });
|
||||
}
|
||||
|
||||
|
||||
|
||||
public void testReusableTokenStream() throws Exception {
|
||||
Analyzer a = new SnowballAnalyzer("English");
|
||||
assertAnalyzesTo(a, "he abhorred accents",
|
||||
@ -56,4 +56,4 @@ public class SnowballAnalyzerTests extends ESTokenStreamTestCase {
|
||||
assertAnalyzesTo(a, "she abhorred him",
|
||||
new String[]{"she", "abhor", "him"});
|
||||
}
|
||||
}
|
||||
}
|
@ -38,6 +38,25 @@
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: বার }
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: bengali
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: বাড়ী
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: বার }
|
||||
|
||||
---
|
||||
"fingerprint":
|
||||
- do:
|
||||
@ -69,3 +88,507 @@
|
||||
- length: { tokens: 2 }
|
||||
- match: { tokens.0.token: foo }
|
||||
- match: { tokens.1.token: bar }
|
||||
|
||||
---
|
||||
"snowball":
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: the brown foxes
|
||||
analyzer: snowball
|
||||
- length: { tokens: 2 }
|
||||
- match: { tokens.0.token: brown }
|
||||
- match: { tokens.1.token: fox }
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_snowball:
|
||||
type: snowball
|
||||
language: "Dutch"
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: de bruine vossen
|
||||
analyzer: my_snowball
|
||||
- length: { tokens: 2 }
|
||||
- match: { tokens.0.token: bruin }
|
||||
- match: { tokens.1.token: voss }
|
||||
|
||||
---
|
||||
"arabic":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: arabic
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: كبيرة
|
||||
analyzer: arabic
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: كبير }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: كبيرة
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: كبير }
|
||||
|
||||
---
|
||||
"armenian":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: armenian
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: արծիվ
|
||||
analyzer: armenian
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: արծ }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: արծիվ
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: արծ }
|
||||
|
||||
---
|
||||
"basque":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: basque
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: zaldiak
|
||||
analyzer: basque
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: zaldi }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: zaldiak
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: zaldi }
|
||||
|
||||
---
|
||||
"brazilian":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: brazilian
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: boataria
|
||||
analyzer: brazilian
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: boat }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: boataria
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: boat }
|
||||
|
||||
---
|
||||
"bulgarian":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: bulgarian
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: градове
|
||||
analyzer: bulgarian
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: град }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: градове
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: град }
|
||||
|
||||
---
|
||||
"catalan":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: catalan
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: llengües
|
||||
analyzer: catalan
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: llengu }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: llengües
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: llengu }
|
||||
|
||||
---
|
||||
"chinese":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: chinese
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: only for old indices
|
||||
analyzer: chinese
|
||||
- length: { tokens: 3 }
|
||||
- match: { tokens.0.token: only }
|
||||
- match: { tokens.1.token: old }
|
||||
- match: { tokens.2.token: indices }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: only for old indices
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 3 }
|
||||
- match: { tokens.0.token: only }
|
||||
- match: { tokens.1.token: old }
|
||||
- match: { tokens.2.token: indices }
|
||||
|
||||
---
|
||||
"cjk":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: cjk
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: 多くの
|
||||
analyzer: cjk
|
||||
- length: { tokens: 2 }
|
||||
- match: { tokens.0.token: 多く }
|
||||
- match: { tokens.1.token: くの }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: 多くの
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 2 }
|
||||
- match: { tokens.0.token: 多く }
|
||||
- match: { tokens.1.token: くの }
|
||||
|
||||
---
|
||||
"czech":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: czech
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: Pokud mluvime o volnem
|
||||
analyzer: czech
|
||||
- length: { tokens: 2 }
|
||||
- match: { tokens.0.token: mluvim }
|
||||
- match: { tokens.1.token: voln }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: Pokud mluvime o volnem
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 2 }
|
||||
- match: { tokens.0.token: mluvim }
|
||||
- match: { tokens.1.token: voln }
|
||||
|
||||
---
|
||||
"danish":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: danish
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: undersøgelse
|
||||
analyzer: danish
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: undersøg }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: undersøgelse
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: undersøg }
|
||||
|
||||
---
|
||||
"dutch":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: dutch
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: lidstaten
|
||||
analyzer: dutch
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: lidstat }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: lidstaten
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: lidstat }
|
||||
|
||||
---
|
||||
"english":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: english
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: books
|
||||
analyzer: english
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: book }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: books
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: book }
|
||||
|
||||
---
|
||||
"finnish":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: finnish
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: edeltäjiinsä
|
||||
analyzer: finnish
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: edeltäj }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: edeltäjiinsä
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: edeltäj }
|
||||
|
||||
---
|
||||
"french":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: french
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: sécuritaires
|
||||
analyzer: french
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: securitair }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: sécuritaires
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: securitair }
|
||||
|
||||
---
|
||||
"galician":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: galician
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: corresponderá
|
||||
analyzer: galician
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: correspond }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: corresponderá
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: correspond }
|
||||
|
||||
---
|
||||
"german":
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
settings:
|
||||
analysis:
|
||||
analyzer:
|
||||
my_analyzer:
|
||||
type: german
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
body:
|
||||
text: Tischen
|
||||
analyzer: german
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: tisch }
|
||||
|
||||
- do:
|
||||
indices.analyze:
|
||||
index: test
|
||||
body:
|
||||
text: Tischen
|
||||
analyzer: my_analyzer
|
||||
- length: { tokens: 1 }
|
||||
- match: { tokens.0.token: tisch }
|
||||
|
@ -0,0 +1,58 @@
---
"Test query string with snowball":
    - do:
        indices.create:
          index: test
          body:
            mappings:
              test:
                properties:
                  field:
                    type: text
                  number:
                    type: integer

    - do:
        index:
          index: test
          type: test
          id: 1
          body: { field: foo bar}

    - do:
        indices.refresh:
          index: [test]

    - do:
        indices.validate_query:
          index: test
          q: field:bars
          analyzer: snowball

    - is_true: valid

    - do:
        search:
          index: test
          q: field:bars
          analyzer: snowball

    - match: {hits.total: 1}

    - do:
        explain:
          index: test
          type: test
          id: 1
          q: field:bars
          analyzer: snowball

    - is_true: matched

    - do:
        count:
          index: test
          q: field:bars
          analyzer: snowball

    - match: {count : 1}
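The REST test above drives the `analyzer=snowball` query-string parameter through `_validate_query`, `_search`, `_explain`, and `_count`. As a rough illustration only (not part of this commit), the following minimal Java sketch replays the same queries against a local node, assuming a cluster at http://localhost:9200 with the `test` index and document created as in the test; the class name is hypothetical.

// Illustrative sketch only; assumes a node at http://localhost:9200 and that the
// "test" index and document from the REST test above already exist.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class SnowballQueryStringCheck {
    public static void main(String[] args) throws Exception {
        // Same query string and analyzer that the YAML test sends.
        String query = "q=" + URLEncoder.encode("field:bars", "UTF-8") + "&analyzer=snowball";
        for (String endpoint : new String[]{"_validate_query", "_count", "_search"}) {
            URL url = new URL("http://localhost:9200/test/" + endpoint + "?" + query);
            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
                StringBuilder body = new StringBuilder();
                String line;
                while ((line = reader.readLine()) != null) {
                    body.append(line);
                }
                // With the snowball analyzer, "bars" stems to "bar" and matches the indexed doc.
                System.out.println(endpoint + " -> " + body);
            } finally {
                connection.disconnect();
            }
        }
    }
}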
@ -27,6 +27,7 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -109,7 +110,13 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase {
    }

    private void init() throws IOException {
        prepareCreate("test").addMapping("test", jsonBuilder().startObject()
        Settings.Builder settings = Settings.builder();
        settings.put(indexSettings());
        settings.put("index.analysis.analyzer.mock_english.tokenizer", "standard");
        settings.put("index.analysis.analyzer.mock_english.filter", "stop");
        prepareCreate("test")
            .setSettings(settings)
            .addMapping("test", jsonBuilder().startObject()
                .startObject("test")
                    .startObject("properties")
                        .startObject("foo")
@ -133,7 +140,7 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase {
                        .endObject()
                        .startObject("token_count_without_position_increments")
                            .field("type", "token_count")
                            .field("analyzer", "english")
                            .field("analyzer", "mock_english")
                            .field("enable_position_increments", false)
                            .field("store", true)
                        .endObject()
@ -214,13 +221,13 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase {
        assertThat(hit.field("foo.token_count"), not(nullValue()));
        assertThat(hit.field("foo.token_count").getValues().size(), equalTo(standardTermCounts.length));
        for (int i = 0; i < standardTermCounts.length; i++) {
            assertThat((Integer) hit.field("foo.token_count").getValues().get(i), equalTo(standardTermCounts[i]));
            assertThat(hit.field("foo.token_count").getValues().get(i), equalTo(standardTermCounts[i]));
        }

        assertThat(hit.field("foo.token_count_without_position_increments"), not(nullValue()));
        assertThat(hit.field("foo.token_count_without_position_increments").getValues().size(), equalTo(englishTermCounts.length));
        for (int i = 0; i < englishTermCounts.length; i++) {
            assertThat((Integer) hit.field("foo.token_count_without_position_increments").getValues().get(i),
            assertThat(hit.field("foo.token_count_without_position_increments").getValues().get(i),
                equalTo(englishTermCounts[i]));
        }


@ -79,9 +79,9 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
            new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);

        // previous mapper has not been modified
        assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));
        assertThat(((TokenCountFieldMapper) stage1.mappers().getMapper("tc")).analyzer(), equalTo("keyword"));
        // but the new one has the change
        assertThat(((TokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
        assertThat(((TokenCountFieldMapper) stage2.mappers().getMapper("tc")).analyzer(), equalTo("standard"));
    }

    /**
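The hunk above switches the `token_count_without_position_increments` field from the built-in `english` analyzer to a custom `mock_english` analyzer (standard tokenizer plus a stop filter), which is what makes `enable_position_increments` observable. As a conceptual sketch only, not Elasticsearch code, the following illustrates why the two counting modes can disagree once a stop filter removes a token; the class name and sample sentence are made up for illustration.

// Conceptual sketch: a stop filter removes "for" but leaves a position gap.
// Counting positions (increments enabled) gives 4; counting surviving tokens gives 3.
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class PositionIncrementCountSketch {
    public static void main(String[] args) {
        List<String> tokens = Arrays.asList("only", "for", "old", "indices");
        Set<String> stopwords = new HashSet<>(Arrays.asList("for"));

        int withIncrements = 0;    // counts the position gap left by "for"
        int withoutIncrements = 0; // counts only the tokens that survive the stop filter
        for (String token : tokens) {
            withIncrements++;
            if (!stopwords.contains(token)) {
                withoutIncrements++;
            }
        }
        System.out.println("enable_position_increments=true  -> " + withIncrements);   // 4
        System.out.println("enable_position_increments=false -> " + withoutIncrements); // 3
    }
}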
@ -8,14 +8,14 @@
          place:
            properties:
              name:
                type: text
                type: keyword
  - do:
      index:
        index: test
        type: place
        id: 1
        refresh: true
        body: { "name": "bob's house" }
        body: { "name": "bob! house" }

  - do:
      indices.put_mapping:
@ -24,11 +24,10 @@
        body:
          properties:
            name:
              type: text
              type: keyword
              fields:
                english:
                  type: text
                  analyzer: english

  - do:
      search:

@ -22,8 +22,6 @@
  classname 'org.elasticsearch.transport.nio.NioTransportPlugin'
}

dependencyLicenses.enabled = false

compileJava.options.compilerArgs << "-Xlint:-try"
compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked"

@ -40,6 +38,10 @@ dependencies {
  compile "io.netty:netty-transport:4.1.16.Final"
}

dependencyLicenses {
  mapping from: /netty-.*/, to: 'netty'
}

thirdPartyAudit.excludes = [
  // classes are missing

202 plugins/transport-nio/licenses/netty-LICENSE.txt Normal file
@ -0,0 +1,202 @@
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
116 plugins/transport-nio/licenses/netty-NOTICE.txt Normal file
@ -0,0 +1,116 @@
||||
|
||||
The Netty Project
|
||||
=================
|
||||
|
||||
Please visit the Netty web site for more information:
|
||||
|
||||
* http://netty.io/
|
||||
|
||||
Copyright 2011 The Netty Project
|
||||
|
||||
The Netty Project licenses this file to you under the Apache License,
|
||||
version 2.0 (the "License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at:
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
License for the specific language governing permissions and limitations
|
||||
under the License.
|
||||
|
||||
Also, please refer to each LICENSE.<component>.txt file, which is located in
|
||||
the 'license' directory of the distribution file, for the license terms of the
|
||||
components that this product depends on.
|
||||
|
||||
-------------------------------------------------------------------------------
|
||||
This product contains the extensions to Java Collections Framework which has
|
||||
been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.jsr166y.txt (Public Domain)
|
||||
* HOMEPAGE:
|
||||
* http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/
|
||||
* http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/
|
||||
|
||||
This product contains a modified version of Robert Harder's Public Domain
|
||||
Base64 Encoder and Decoder, which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.base64.txt (Public Domain)
|
||||
* HOMEPAGE:
|
||||
* http://iharder.sourceforge.net/current/java/base64/
|
||||
|
||||
This product contains a modified version of 'JZlib', a re-implementation of
|
||||
zlib in pure Java, which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.jzlib.txt (BSD Style License)
|
||||
* HOMEPAGE:
|
||||
* http://www.jcraft.com/jzlib/
|
||||
|
||||
This product contains a modified version of 'Webbit', a Java event based
|
||||
WebSocket and HTTP server:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.webbit.txt (BSD License)
|
||||
* HOMEPAGE:
|
||||
* https://github.com/joewalnes/webbit
|
||||
|
||||
This product optionally depends on 'Protocol Buffers', Google's data
|
||||
interchange format, which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.protobuf.txt (New BSD License)
|
||||
* HOMEPAGE:
|
||||
* http://code.google.com/p/protobuf/
|
||||
|
||||
This product optionally depends on 'Bouncy Castle Crypto APIs' to generate
|
||||
a temporary self-signed X.509 certificate when the JVM does not provide the
|
||||
equivalent functionality. It can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.bouncycastle.txt (MIT License)
|
||||
* HOMEPAGE:
|
||||
* http://www.bouncycastle.org/
|
||||
|
||||
This product optionally depends on 'SLF4J', a simple logging facade for Java,
|
||||
which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.slf4j.txt (MIT License)
|
||||
* HOMEPAGE:
|
||||
* http://www.slf4j.org/
|
||||
|
||||
This product optionally depends on 'Apache Commons Logging', a logging
|
||||
framework, which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.commons-logging.txt (Apache License 2.0)
|
||||
* HOMEPAGE:
|
||||
* http://commons.apache.org/logging/
|
||||
|
||||
This product optionally depends on 'Apache Log4J', a logging framework,
|
||||
which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.log4j.txt (Apache License 2.0)
|
||||
* HOMEPAGE:
|
||||
* http://logging.apache.org/log4j/
|
||||
|
||||
This product optionally depends on 'JBoss Logging', a logging framework,
|
||||
which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.jboss-logging.txt (GNU LGPL 2.1)
|
||||
* HOMEPAGE:
|
||||
* http://anonsvn.jboss.org/repos/common/common-logging-spi/
|
||||
|
||||
This product optionally depends on 'Apache Felix', an open source OSGi
|
||||
framework implementation, which can be obtained at:
|
||||
|
||||
* LICENSE:
|
||||
* license/LICENSE.felix.txt (Apache License 2.0)
|
||||
* HOMEPAGE:
|
||||
* http://felix.apache.org/
|
@ -0,0 +1 @@
63b5fa95c74785e16f2c30ce268bc222e35c8cb5
@ -0,0 +1 @@
d84a1f21768b7309c2954521cf5a1f46c2309eb1
@ -0,0 +1 @@
d64312378b438dfdad84267c599a053327c6f02a
@ -0,0 +1 @@
177a6b30cca92f6f5f9873c9befd681377a4c328
@ -0,0 +1 @@
fec0e63e7dd7f4eeef7ea8dc47a1ff32dfc7ebc2
@ -0,0 +1 @@
f6eb553b53fb3a90a8ac1170697093fed82eae28
@ -0,0 +1 @@
3c8ee2c4d4a1cbb947a5c184c7aeb2204260958b
@ -44,14 +44,6 @@
|
||||
|
||||
- match: {count : 0}
|
||||
|
||||
- do:
|
||||
count:
|
||||
index: test
|
||||
q: field:bars
|
||||
analyzer: snowball
|
||||
|
||||
- match: {count : 1}
|
||||
|
||||
- do:
|
||||
count:
|
||||
index: test
|
||||
|
@ -50,16 +50,6 @@
|
||||
|
||||
- is_false: matched
|
||||
|
||||
- do:
|
||||
explain:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
q: field:bars
|
||||
analyzer: snowball
|
||||
|
||||
- is_true: matched
|
||||
|
||||
- do:
|
||||
explain:
|
||||
index: test
|
||||
|
@ -35,14 +35,6 @@
|
||||
|
||||
- is_true: valid
|
||||
|
||||
- do:
|
||||
indices.validate_query:
|
||||
index: test
|
||||
q: field:bars
|
||||
analyzer: snowball
|
||||
|
||||
- is_true: valid
|
||||
|
||||
- do:
|
||||
indices.validate_query:
|
||||
index: test
|
||||
|
@ -44,14 +44,6 @@
|
||||
|
||||
- match: {hits.total: 0}
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: test
|
||||
q: field:bars
|
||||
analyzer: snowball
|
||||
|
||||
- match: {hits.total: 1}
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: test
|
||||
|
@ -186,7 +186,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
                }
            } else {
                // not a pattern
                FieldMapper fieldMapper = allFieldMappers.smartNameFieldMapper(field);
                FieldMapper fieldMapper = allFieldMappers.getMapper(field);
                if (fieldMapper != null) {
                    addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults());
                } else if (request.probablySingleFieldRequest()) {

@ -76,7 +76,7 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA
        MapperService mapperService = indicesService.indexServiceSafe(shardId.getIndex()).mapperService();
        Set<String> fieldNames = new HashSet<>();
        for (String field : request.fields()) {
            fieldNames.addAll(mapperService.simpleMatchToIndexNames(field));
            fieldNames.addAll(mapperService.simpleMatchToFullName(field));
        }
        Predicate<String> fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName());
        Map<String, FieldCapabilities> responseMap = new HashMap<>();

@ -22,6 +22,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.support.replication.ReplicatedWriteRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;

@ -33,17 +34,24 @@ import java.util.Arrays;
 */
public final class ResyncReplicationRequest extends ReplicatedWriteRequest<ResyncReplicationRequest> {

    private long trimAboveSeqNo;
    private Translog.Operation[] operations;

    ResyncReplicationRequest() {
        super();
    }

    public ResyncReplicationRequest(final ShardId shardId, final Translog.Operation[] operations) {
    public ResyncReplicationRequest(final ShardId shardId, final long trimAboveSeqNo,
                                    final Translog.Operation[] operations) {
        super(shardId);
        this.trimAboveSeqNo = trimAboveSeqNo;
        this.operations = operations;
    }

    public long getTrimAboveSeqNo() {
        return trimAboveSeqNo;
    }

    public Translog.Operation[] getOperations() {
        return operations;
    }
@ -60,12 +68,20 @@ public final class ResyncReplicationRequest extends ReplicatedWriteRequest<Resyn
            throw new IllegalStateException("resync replication request serialization is broken in 6.0.0");
        }
        super.readFrom(in);
        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
            trimAboveSeqNo = in.readZLong();
        } else {
            trimAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
        }
        operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new);
    }

    @Override
    public void writeTo(final StreamOutput out) throws IOException {
        super.writeTo(out);
        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
            out.writeZLong(trimAboveSeqNo);
        }
        out.writeArray(Translog.Operation::writeOperation, operations);
    }

@ -74,12 +90,13 @@ public final class ResyncReplicationRequest extends ReplicatedWriteRequest<Resyn
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final ResyncReplicationRequest that = (ResyncReplicationRequest) o;
        return Arrays.equals(operations, that.operations);
        return trimAboveSeqNo == that.trimAboveSeqNo
            && Arrays.equals(operations, that.operations);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(operations);
        return Long.hashCode(trimAboveSeqNo) + 31 * Arrays.hashCode(operations);
    }

    @Override
@ -88,6 +105,7 @@ public final class ResyncReplicationRequest extends ReplicatedWriteRequest<Resyn
            "shardId=" + shardId +
            ", timeout=" + timeout +
            ", index='" + index + '\'' +
            ", trimAboveSeqNo=" + trimAboveSeqNo +
            ", ops=" + operations.length +
            "}";
    }

@ -135,6 +135,9 @@ public class TransportResyncReplicationAction extends TransportWriteAction<Resyn
                    }
                }
            }
            if (request.getTrimAboveSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) {
                replica.trimOperationOfPreviousPrimaryTerms(request.getTrimAboveSeqNo());
            }
            return location;
        }

@ -346,7 +346,8 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
             * it sane. A single search request that fans out to lots of shards should not hit a cluster too hard while 256 is already a
             * lot.
             */
            searchRequest.setMaxConcurrentShardRequests(Math.min(256, nodeCount));
            // we use nodeCount * 5 as we used to default this to the default number of shard which used to be 5.
            searchRequest.setMaxConcurrentShardRequests(Math.min(256, nodeCount * 5));
        }
        boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators);
        searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(),
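The hunk above changes the default for max concurrent shard requests from min(256, nodeCount) to min(256, nodeCount * 5). A tiny standalone sketch of the resulting defaults (the class name is hypothetical, for illustration only):

// Worked example of the new default shown in the hunk above:
// max concurrent shard requests = min(256, number of nodes * 5).
public class DefaultMaxConcurrentShardRequests {
    static int defaultFor(int nodeCount) {
        return Math.min(256, nodeCount * 5);
    }

    public static void main(String[] args) {
        for (int nodeCount : new int[]{1, 3, 10, 60, 100}) {
            // 1 node -> 5, 3 -> 15, 10 -> 50, 60 -> 256 (capped), 100 -> 256 (capped)
            System.out.println(nodeCount + " nodes -> " + defaultFor(nodeCount));
        }
    }
}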
@ -63,7 +63,7 @@ import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.seqno.LocalCheckpointTracker;
import org.elasticsearch.index.seqno.SeqNoStats;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.Store;
@ -237,6 +237,12 @@ public abstract class Engine implements Closeable {
     */
    public abstract boolean isThrottled();

    /**
     * Trims translog for terms below <code>belowTerm</code> and seq# above <code>aboveSeqNo</code>
     * @see Translog#trimOperations(long, long)
     */
    public abstract void trimOperationsFromTranslog(long belowTerm, long aboveSeqNo) throws EngineException;

    /** A Lock implementation that always allows the lock to be acquired */
    protected static final class NoOpLock implements Lock {

@ -636,11 +642,28 @@ public abstract class Engine implements Closeable {
    }

    /**
     * The sequence number service for this engine.
     *
     * @return the sequence number service
     * @return the local checkpoint for this Engine
     */
    public abstract LocalCheckpointTracker getLocalCheckpointTracker();
    public abstract long getLocalCheckpoint();

    /**
     * Waits for all operations up to the provided sequence number to complete.
     *
     * @param seqNo the sequence number that the checkpoint must advance to before this method returns
     * @throws InterruptedException if the thread was interrupted while blocking on the condition
     */
    public abstract void waitForOpsToComplete(long seqNo) throws InterruptedException;

    /**
     * Reset the local checkpoint in the tracker to the given local checkpoint
     * @param localCheckpoint the new checkpoint to be set
     */
    public abstract void resetLocalCheckpoint(long localCheckpoint);

    /**
     * @return a {@link SeqNoStats} object, using local state and the supplied global checkpoint
     */
    public abstract SeqNoStats getSeqNoStats(long globalCheckpoint);

    /**
     * Returns the latest global checkpoint value that has been persisted in the underlying storage (i.e. translog's checkpoint)
@ -911,7 +934,7 @@ public abstract class Engine implements Closeable {
     * checks and removes translog files that no longer need to be retained. See
     * {@link org.elasticsearch.index.translog.TranslogDeletionPolicy} for details
     */
    public abstract void trimTranslog() throws EngineException;
    public abstract void trimUnreferencedTranslogFiles() throws EngineException;

    /**
     * Tests whether or not the translog generation should be rolled to a new generation.
@ -76,6 +76,7 @@ import org.elasticsearch.index.mapper.SourceFieldMapper;
|
||||
import org.elasticsearch.index.merge.MergeStats;
|
||||
import org.elasticsearch.index.merge.OnGoingMerge;
|
||||
import org.elasticsearch.index.seqno.LocalCheckpointTracker;
|
||||
import org.elasticsearch.index.seqno.SeqNoStats;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
import org.elasticsearch.index.shard.ElasticsearchMergePolicy;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
@ -1652,7 +1653,7 @@ public class InternalEngine extends Engine {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void trimTranslog() throws EngineException {
|
||||
public void trimUnreferencedTranslogFiles() throws EngineException {
|
||||
try (ReleasableLock lock = readLock.acquire()) {
|
||||
ensureOpen();
|
||||
translog.trimUnreferencedReaders();
|
||||
@ -1669,6 +1670,24 @@ public class InternalEngine extends Engine {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void trimOperationsFromTranslog(long belowTerm, long aboveSeqNo) throws EngineException {
|
||||
try (ReleasableLock lock = readLock.acquire()) {
|
||||
ensureOpen();
|
||||
translog.trimOperations(belowTerm, aboveSeqNo);
|
||||
} catch (AlreadyClosedException e) {
|
||||
failOnTragicEvent(e);
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
try {
|
||||
failEngine("translog operations trimming failed", e);
|
||||
} catch (Exception inner) {
|
||||
e.addSuppressed(inner);
|
||||
}
|
||||
throw new EngineException(shardId, "failed to trim translog operations", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void pruneDeletedTombstones() {
|
||||
/*
|
||||
* We need to deploy two different trimming strategies for GC deletes on primary and replicas. Delete operations on primary
|
||||
@ -2289,10 +2308,31 @@ public class InternalEngine extends Engine {
|
||||
return mergeScheduler.stats();
|
||||
}
|
||||
|
||||
public final LocalCheckpointTracker getLocalCheckpointTracker() {
|
||||
// Used only for testing! Package private to prevent anyone else from using it
|
||||
LocalCheckpointTracker getLocalCheckpointTracker() {
|
||||
return localCheckpointTracker;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLocalCheckpoint() {
|
||||
return localCheckpointTracker.getCheckpoint();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void waitForOpsToComplete(long seqNo) throws InterruptedException {
|
||||
localCheckpointTracker.waitForOpsToComplete(seqNo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void resetLocalCheckpoint(long localCheckpoint) {
|
||||
localCheckpointTracker.resetCheckpoint(localCheckpoint);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SeqNoStats getSeqNoStats(long globalCheckpoint) {
|
||||
return localCheckpointTracker.getStats(globalCheckpoint);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of times a version was looked up either from the index.
|
||||
* Note this is only available if assertions are enabled
|
||||
|
@ -202,7 +202,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
|
||||
|
||||
if (gFields != null && gFields.length > 0) {
|
||||
for (String field : gFields) {
|
||||
FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field);
|
||||
FieldMapper fieldMapper = docMapper.mappers().getMapper(field);
|
||||
if (fieldMapper == null) {
|
||||
if (docMapper.objectMappers().get(field) != null) {
|
||||
// Only fail if we know it is a object field, missing paths / fields shouldn't fail.
|
||||
|
@ -84,14 +84,6 @@ public class BooleanFieldMapper extends FieldMapper {
|
||||
this.builder = this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder tokenized(boolean tokenized) {
|
||||
if (tokenized) {
|
||||
throw new IllegalArgumentException("bool field can't be tokenized");
|
||||
}
|
||||
return super.tokenized(tokenized);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BooleanFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
|
@ -20,16 +20,13 @@
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
public final class DocumentFieldMappers implements Iterable<FieldMapper> {
|
||||
|
||||
@ -70,29 +67,6 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
|
||||
return fieldMappers.get(field);
|
||||
}
|
||||
|
||||
public Collection<String> simpleMatchToFullName(String pattern) {
|
||||
Set<String> fields = new HashSet<>();
|
||||
for (FieldMapper fieldMapper : this) {
|
||||
if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) {
|
||||
fields.add(fieldMapper.fieldType().name());
|
||||
}
|
||||
}
|
||||
return fields;
|
||||
}
|
||||
|
||||
public FieldMapper smartNameFieldMapper(String name) {
|
||||
FieldMapper fieldMapper = getMapper(name);
|
||||
if (fieldMapper != null) {
|
||||
return fieldMapper;
|
||||
}
|
||||
for (FieldMapper otherFieldMapper : this) {
|
||||
if (otherFieldMapper.fieldType().name().equals(name)) {
|
||||
return otherFieldMapper;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* A smart analyzer used for indexing that takes into account specific analyzers configured
|
||||
* per {@link FieldMapper}.
|
||||
|
@ -147,11 +147,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T tokenized(boolean tokenized) {
|
||||
this.fieldType.setTokenized(tokenized);
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T boost(float boost) {
|
||||
this.fieldType.setBoost(boost);
|
||||
return builder;
|
||||
@ -376,9 +371,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
||||
|
||||
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
|
||||
boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE;
|
||||
if (includeDefaults || indexed != defaultIndexed ||
|
||||
fieldType().tokenized() != defaultFieldType.tokenized()) {
|
||||
builder.field("index", indexTokenizeOption(indexed, fieldType().tokenized()));
|
||||
if (includeDefaults || indexed != defaultIndexed) {
|
||||
builder.field("index", indexed);
|
||||
}
|
||||
if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) {
|
||||
builder.field("store", fieldType().stored());
|
||||
@ -474,11 +468,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
||||
}
|
||||
}
|
||||
|
||||
/* Only protected so that string can override it */
|
||||
protected Object indexTokenizeOption(boolean indexed, boolean tokenized) {
|
||||
return indexed;
|
||||
}
|
||||
|
||||
protected abstract String contentType();
|
||||
|
||||
public static class MultiFields {
|
||||
|
@ -165,7 +165,7 @@ public abstract class MappedFieldType extends FieldType {
|
||||
boolean indexed = indexOptions() != IndexOptions.NONE;
|
||||
boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
|
||||
// TODO: should be validating if index options go "up" (but "down" is ok)
|
||||
if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) {
|
||||
if (indexed != mergeWithIndexed) {
|
||||
conflicts.add("mapper [" + name() + "] has different [index] values");
|
||||
}
|
||||
if (stored() != other.stored()) {
|
||||
|
@ -721,7 +721,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
||||
* Returns all the fields that match the given pattern. If the pattern is prefixed with a type
|
||||
* then the fields will be returned with a type prefix.
|
||||
*/
|
||||
public Collection<String> simpleMatchToIndexNames(String pattern) {
|
||||
public Collection<String> simpleMatchToFullName(String pattern) {
|
||||
if (Regex.isSimpleMatchPattern(pattern) == false) {
|
||||
// no wildcards
|
||||
return Collections.singletonList(pattern);
|
||||
|
@ -198,7 +198,7 @@ public class QueryShardContext extends QueryRewriteContext {
|
||||
* type then the fields will be returned with a type prefix.
|
||||
*/
|
||||
public Collection<String> simpleMatchToIndexNames(String pattern) {
|
||||
return mapperService.simpleMatchToIndexNames(pattern);
|
||||
return mapperService.simpleMatchToFullName(pattern);
|
||||
}
|
||||
|
||||
public MappedFieldType fieldMapper(String name) {
|
||||
|
@ -91,7 +91,7 @@ public final class QueryParserHelper {
|
||||
public static FieldMapper getFieldMapper(MapperService mapperService, String field) {
|
||||
DocumentMapper mapper = mapperService.documentMapper();
|
||||
if (mapper != null) {
|
||||
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper(field);
|
||||
FieldMapper fieldMapper = mapper.mappers().getMapper(field);
|
||||
if (fieldMapper != null) {
|
||||
return fieldMapper;
|
||||
}
|
||||
|
@ -37,7 +37,7 @@ public class SequenceNumbers {
|
||||
*/
|
||||
public static final long UNASSIGNED_SEQ_NO = -2L;
|
||||
/**
|
||||
* Represents no operations have been performed on the shard.
|
||||
* Represents no operations have been performed on the shard. Initial value of a sequence number.
|
||||
*/
|
||||
public static final long NO_OPS_PERFORMED = -1L;
|
||||
|
||||
|
@ -407,7 +407,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting;
|
||||
|
||||
if (newRouting.primary() && currentRouting.isRelocationTarget() == false) {
|
||||
replicationTracker.activatePrimaryMode(getEngine().getLocalCheckpointTracker().getCheckpoint());
|
||||
replicationTracker.activatePrimaryMode(getLocalCheckpoint());
|
||||
}
|
||||
|
||||
changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]");
|
||||
@ -481,8 +481,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
*/
|
||||
engine.rollTranslogGeneration();
|
||||
engine.fillSeqNoGaps(newPrimaryTerm);
|
||||
replicationTracker.updateLocalCheckpoint(currentRouting.allocationId().getId(),
|
||||
getEngine().getLocalCheckpointTracker().getCheckpoint());
|
||||
replicationTracker.updateLocalCheckpoint(currentRouting.allocationId().getId(), getLocalCheckpoint());
|
||||
primaryReplicaSyncer.accept(this, new ActionListener<ResyncTask>() {
|
||||
@Override
|
||||
public void onResponse(ResyncTask resyncTask) {
|
||||
@ -508,7 +507,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
}
|
||||
},
|
||||
e -> failShard("exception during primary term transition", e));
|
||||
replicationTracker.activatePrimaryMode(getEngine().getLocalCheckpointTracker().getCheckpoint());
|
||||
replicationTracker.activatePrimaryMode(getLocalCheckpoint());
|
||||
primaryTerm = newPrimaryTerm;
|
||||
}
|
||||
}
|
||||
@ -875,7 +874,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
@Nullable
|
||||
public SeqNoStats seqNoStats() {
|
||||
Engine engine = getEngineOrNull();
|
||||
return engine == null ? null : engine.getLocalCheckpointTracker().getStats(replicationTracker.getGlobalCheckpoint());
|
||||
return engine == null ? null : engine.getSeqNoStats(replicationTracker.getGlobalCheckpoint());
|
||||
}
|
||||
|
||||
public IndexingStats indexingStats(String... types) {
|
||||
@ -994,7 +993,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
public void trimTranslog() {
|
||||
verifyNotClosed();
|
||||
final Engine engine = getEngine();
|
||||
engine.trimTranslog();
|
||||
engine.trimUnreferencedTranslogFiles();
|
||||
}
|
||||
|
||||
/**
|
||||
@ -1196,6 +1195,10 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
assert currentEngineReference.get() == null;
|
||||
}
|
||||
|
||||
public void trimOperationOfPreviousPrimaryTerms(long aboveSeqNo) {
|
||||
getEngine().trimOperationsFromTranslog(primaryTerm, aboveSeqNo);
|
||||
}
|
||||
|
||||
public Engine.Result applyTranslogOperation(Translog.Operation operation, Engine.Operation.Origin origin) throws IOException {
|
||||
final Engine.Result result;
|
||||
switch (operation.opType()) {
|
||||
@ -1721,7 +1724,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
* @throws InterruptedException if the thread was interrupted while blocking on the condition
|
||||
*/
|
||||
public void waitForOpsToComplete(final long seqNo) throws InterruptedException {
|
||||
getEngine().getLocalCheckpointTracker().waitForOpsToComplete(seqNo);
|
||||
getEngine().waitForOpsToComplete(seqNo);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -1754,7 +1757,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
* @return the local checkpoint
|
||||
*/
|
||||
public long getLocalCheckpoint() {
|
||||
return getEngine().getLocalCheckpointTracker().getCheckpoint();
|
||||
return getEngine().getLocalCheckpoint();
|
||||
}
|
||||
|
||||
/**
|
||||
@ -1795,7 +1798,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
return;
|
||||
}
|
||||
// only sync if there are not operations in flight
|
||||
final SeqNoStats stats = getEngine().getLocalCheckpointTracker().getStats(replicationTracker.getGlobalCheckpoint());
|
||||
final SeqNoStats stats = getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint());
|
||||
if (stats.getMaxSeqNo() == stats.getGlobalCheckpoint()) {
|
||||
final ObjectLongMap<String> globalCheckpoints = getInSyncGlobalCheckpoints();
|
||||
final String allocationId = routingEntry().allocationId().getId();
|
||||
@ -1832,7 +1835,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
*/
|
||||
public void updateGlobalCheckpointOnReplica(final long globalCheckpoint, final String reason) {
|
||||
verifyReplicationTarget();
|
||||
final long localCheckpoint = getEngine().getLocalCheckpointTracker().getCheckpoint();
|
||||
final long localCheckpoint = getLocalCheckpoint();
|
||||
if (globalCheckpoint > localCheckpoint) {
|
||||
/*
|
||||
* This can happen during recovery when the shard has started its engine but recovery is not finalized and is receiving global
|
||||
@ -1861,8 +1864,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
verifyPrimary();
|
||||
assert shardRouting.isRelocationTarget() : "only relocation target can update allocation IDs from primary context: " + shardRouting;
|
||||
assert primaryContext.getCheckpointStates().containsKey(routingEntry().allocationId().getId()) &&
|
||||
getEngine().getLocalCheckpointTracker().getCheckpoint() ==
|
||||
primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint();
|
||||
getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint();
|
||||
synchronized (mutex) {
|
||||
replicationTracker.activateWithPrimaryContext(primaryContext); // make changes to primaryMode flag only under mutex
|
||||
}
|
||||
@ -2248,7 +2250,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
|
||||
operationPrimaryTerm,
|
||||
getLocalCheckpoint(),
|
||||
localCheckpoint);
|
||||
getEngine().getLocalCheckpointTracker().resetCheckpoint(localCheckpoint);
|
||||
getEngine().resetLocalCheckpoint(localCheckpoint);
|
||||
getEngine().rollTranslogGeneration();
|
||||
});
|
||||
globalCheckpointUpdated = true;
|
||||
|
@ -62,7 +62,7 @@ final class LocalShardSnapshot implements Closeable {
|
||||
}
|
||||
|
||||
long maxSeqNo() {
|
||||
return shard.getEngine().getLocalCheckpointTracker().getMaxSeqNo();
|
||||
return shard.getEngine().getSeqNoStats(-1).getMaxSeqNo();
|
||||
}
|
||||
|
||||
long maxUnsafeAutoIdTimestamp() {
|
||||
|
@ -35,6 +35,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.seqno.SeqNoStats;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
import org.elasticsearch.index.translog.Translog;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
@ -84,6 +85,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent {
|
||||
try {
|
||||
final long startingSeqNo = indexShard.getGlobalCheckpoint() + 1;
|
||||
Translog.Snapshot snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo);
|
||||
final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo();
|
||||
resyncListener = new ActionListener<ResyncTask>() {
|
||||
@Override
|
||||
public void onResponse(final ResyncTask resyncTask) {
|
||||
@ -135,7 +137,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent {
|
||||
}
|
||||
};
|
||||
resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPrimaryTerm(), wrappedSnapshot,
|
||||
startingSeqNo, resyncListener);
|
||||
startingSeqNo, maxSeqNo, resyncListener);
|
||||
} catch (Exception e) {
|
||||
if (resyncListener != null) {
|
||||
resyncListener.onFailure(e);
|
||||
@ -146,7 +148,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent {
|
||||
}
|
||||
|
||||
private void resync(final ShardId shardId, final String primaryAllocationId, final long primaryTerm, final Translog.Snapshot snapshot,
|
||||
long startingSeqNo, ActionListener<ResyncTask> listener) {
|
||||
long startingSeqNo, long maxSeqNo, ActionListener<ResyncTask> listener) {
|
||||
ResyncRequest request = new ResyncRequest(shardId, primaryAllocationId);
|
||||
ResyncTask resyncTask = (ResyncTask) taskManager.register("transport", "resync", request); // it's not transport :-)
|
||||
ActionListener<Void> wrappedListener = new ActionListener<Void>() {
|
||||
@ -166,7 +168,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent {
|
||||
};
|
||||
try {
|
||||
new SnapshotSender(logger, syncAction, resyncTask, shardId, primaryAllocationId, primaryTerm, snapshot, chunkSize.bytesAsInt(),
|
||||
startingSeqNo, wrappedListener).run();
|
||||
startingSeqNo, maxSeqNo, wrappedListener).run();
|
||||
} catch (Exception e) {
|
||||
wrappedListener.onFailure(e);
|
||||
}
|
||||
@ -186,14 +188,16 @@ public class PrimaryReplicaSyncer extends AbstractComponent {
|
||||
private final ShardId shardId;
|
||||
private final Translog.Snapshot snapshot;
|
||||
private final long startingSeqNo;
|
||||
private final long maxSeqNo;
|
||||
private final int chunkSizeInBytes;
|
||||
private final ActionListener<Void> listener;
|
||||
private final AtomicBoolean firstMessage = new AtomicBoolean(true);
|
||||
private final AtomicInteger totalSentOps = new AtomicInteger();
|
||||
private final AtomicInteger totalSkippedOps = new AtomicInteger();
|
||||
private AtomicBoolean closed = new AtomicBoolean();
|
||||
|
||||
SnapshotSender(Logger logger, SyncAction syncAction, ResyncTask task, ShardId shardId, String primaryAllocationId, long primaryTerm,
|
||||
Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, ActionListener<Void> listener) {
|
||||
Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, long maxSeqNo, ActionListener<Void> listener) {
|
||||
this.logger = logger;
|
||||
this.syncAction = syncAction;
|
||||
this.task = task;
|
||||
@ -203,6 +207,7 @@ public class PrimaryReplicaSyncer extends AbstractComponent {
|
||||
this.snapshot = snapshot;
|
||||
this.chunkSizeInBytes = chunkSizeInBytes;
|
||||
this.startingSeqNo = startingSeqNo;
|
||||
this.maxSeqNo = maxSeqNo;
|
||||
this.listener = listener;
|
||||
task.setTotalOperations(snapshot.totalOperations());
|
||||
}
@ -248,11 +253,15 @@ public class PrimaryReplicaSyncer extends AbstractComponent {
}
}

if (!operations.isEmpty()) {
final long trimmedAboveSeqNo = firstMessage.get() ? maxSeqNo : SequenceNumbers.UNASSIGNED_SEQ_NO;
// a sync request has to be sent even if there are no operations to sync - at the very least trimmedAboveSeqNo has to be synced
if (!operations.isEmpty() || trimmedAboveSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) {
task.setPhase("sending_ops");
ResyncReplicationRequest request = new ResyncReplicationRequest(shardId, operations.toArray(EMPTY_ARRAY));
ResyncReplicationRequest request =
new ResyncReplicationRequest(shardId, trimmedAboveSeqNo, operations.toArray(EMPTY_ARRAY));
logger.trace("{} sending batch of [{}][{}] (total sent: [{}], skipped: [{}])", shardId, operations.size(),
new ByteSizeValue(size), totalSentOps.get(), totalSkippedOps.get());
firstMessage.set(false);
syncAction.sync(request, task, primaryAllocationId, primaryTerm, this);
} else if (closed.compareAndSet(false, true)) {
logger.trace("{} resync completed (total sent: [{}], skipped: [{}])", shardId, totalSentOps.get(), totalSkippedOps.get());
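To make the batching rule above easier to follow outside the diff, here is a minimal standalone sketch of the same decision; every name is local to the sketch, and only the -2 constant mirrors SequenceNumbers.UNASSIGNED_SEQ_NO.

// Minimal sketch, not part of the change: the resync batching rule shown above.
// Only the first request advertises the trim point; an empty first batch is still
// sent so the replica learns which seq# range to trim.
final class ResyncBatchRule {
    static final long UNASSIGNED_SEQ_NO = -2L; // mirrors SequenceNumbers.UNASSIGNED_SEQ_NO

    static long trimmedAboveSeqNo(boolean firstMessage, long maxSeqNo) {
        return firstMessage ? maxSeqNo : UNASSIGNED_SEQ_NO;
    }

    static boolean shouldSend(int operationsInBatch, long trimmedAboveSeqNo) {
        return operationsInBatch > 0 || trimmedAboveSeqNo != UNASSIGNED_SEQ_NO;
    }
}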
@ -155,7 +155,7 @@ public class TermVectorsService {
|
||||
private static void handleFieldWildcards(IndexShard indexShard, TermVectorsRequest request) {
|
||||
Set<String> fieldNames = new HashSet<>();
|
||||
for (String pattern : request.selectedFields()) {
|
||||
fieldNames.addAll(indexShard.mapperService().simpleMatchToIndexNames(pattern));
|
||||
fieldNames.addAll(indexShard.mapperService().simpleMatchToFullName(pattern));
|
||||
}
|
||||
request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY));
|
||||
}
@ -45,14 +45,29 @@ final class Checkpoint
final long maxSeqNo;
final long globalCheckpoint;
final long minTranslogGeneration;
final long trimmedAboveSeqNo;

private static final int INITIAL_VERSION = 1; // start with 1, just to recognize there was some magic serialization logic before
private static final int CURRENT_VERSION = 2; // introduction of global checkpoints
private static final int VERSION_6_0_0 = 2; // introduction of global checkpoints
private static final int CURRENT_VERSION = 3; // introduction of trimmed above seq#

private static final String CHECKPOINT_CODEC = "ckp";

// size of 6.4.0 checkpoint

static final int V3_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC)
+ Integer.BYTES // ops
+ Long.BYTES // offset
+ Long.BYTES // generation
+ Long.BYTES // minimum sequence number, introduced in 6.0.0
+ Long.BYTES // maximum sequence number, introduced in 6.0.0
+ Long.BYTES // global checkpoint, introduced in 6.0.0
+ Long.BYTES // minimum translog generation in the translog - introduced in 6.0.0
+ Long.BYTES // maximum reachable (trimmed) sequence number, introduced in 6.4.0
+ CodecUtil.footerLength();

// size of 6.0.0 checkpoint
static final int FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC)
static final int V2_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC)
+ Integer.BYTES // ops
+ Long.BYTES // offset
+ Long.BYTES // generation
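Because the checkpoint is a fixed-width record, the size bump caused by the new field can be checked by hand. The standalone sketch below is not part of the change; it only assumes lucene-core on the classpath and reuses the "ckp" codec name from above, recomputing both sizes the same way the constants do.

// Minimal sketch, not part of the change: recomputes V2_FILE_SIZE and V3_FILE_SIZE.
import org.apache.lucene.codecs.CodecUtil;

public final class CheckpointSizeSketch {
    public static void main(String[] args) {
        final int v2Size = CodecUtil.headerLength("ckp")
            + Integer.BYTES  // ops
            + 6 * Long.BYTES // offset, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration
            + CodecUtil.footerLength();
        final int v3Size = v2Size + Long.BYTES; // 6.4.0 adds trimmedAboveSeqNo
        System.out.println("V2 (6.0.0) checkpoint bytes: " + v2Size);
        System.out.println("V3 (6.4.0) checkpoint bytes: " + v3Size);
    }
}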
@ -72,16 +87,20 @@ final class Checkpoint {
|
||||
/**
|
||||
* Create a new translog checkpoint.
|
||||
*
|
||||
* @param offset the current offset in the translog
|
||||
* @param numOps the current number of operations in the translog
|
||||
* @param generation the current translog generation
|
||||
* @param minSeqNo the current minimum sequence number of all operations in the translog
|
||||
* @param maxSeqNo the current maximum sequence number of all operations in the translog
|
||||
* @param globalCheckpoint the last-known global checkpoint
|
||||
* @param offset the current offset in the translog
|
||||
* @param numOps the current number of operations in the translog
|
||||
* @param generation the current translog generation
|
||||
* @param minSeqNo the current minimum sequence number of all operations in the translog
|
||||
* @param maxSeqNo the current maximum sequence number of all operations in the translog
|
||||
* @param globalCheckpoint the last-known global checkpoint
|
||||
* @param minTranslogGeneration the minimum generation referenced by the translog at this moment.
|
||||
* @param trimmedAboveSeqNo all operations with seq# above trimmedAboveSeqNo should be ignored and not read from the
|
||||
* corresponding translog file. {@link SequenceNumbers#UNASSIGNED_SEQ_NO} is used to disable trimming.
|
||||
*/
|
||||
Checkpoint(long offset, int numOps, long generation, long minSeqNo, long maxSeqNo, long globalCheckpoint, long minTranslogGeneration) {
|
||||
Checkpoint(long offset, int numOps, long generation, long minSeqNo, long maxSeqNo, long globalCheckpoint,
|
||||
long minTranslogGeneration, long trimmedAboveSeqNo) {
|
||||
assert minSeqNo <= maxSeqNo : "minSeqNo [" + minSeqNo + "] is higher than maxSeqNo [" + maxSeqNo + "]";
|
||||
assert trimmedAboveSeqNo <= maxSeqNo : "trimmedAboveSeqNo [" + trimmedAboveSeqNo + "] is higher than maxSeqNo [" + maxSeqNo + "]";
|
||||
assert minTranslogGeneration <= generation :
|
||||
"minTranslogGen [" + minTranslogGeneration + "] is higher than generation [" + generation + "]";
|
||||
this.offset = offset;
|
||||
@ -91,6 +110,7 @@ final class Checkpoint {
|
||||
this.maxSeqNo = maxSeqNo;
|
||||
this.globalCheckpoint = globalCheckpoint;
|
||||
this.minTranslogGeneration = minTranslogGeneration;
|
||||
this.trimmedAboveSeqNo = trimmedAboveSeqNo;
|
||||
}
|
||||
|
||||
private void write(DataOutput out) throws IOException {
|
||||
@ -101,26 +121,52 @@ final class Checkpoint {
|
||||
out.writeLong(maxSeqNo);
|
||||
out.writeLong(globalCheckpoint);
|
||||
out.writeLong(minTranslogGeneration);
|
||||
out.writeLong(trimmedAboveSeqNo);
|
||||
}
|
||||
|
||||
static Checkpoint emptyTranslogCheckpoint(final long offset, final long generation, final long globalCheckpoint,
|
||||
long minTranslogGeneration) {
|
||||
final long minSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
|
||||
final long maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
|
||||
return new Checkpoint(offset, 0, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration);
|
||||
final long trimmedAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
return new Checkpoint(offset, 0, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo);
|
||||
}
|
||||
|
||||
static Checkpoint readCheckpointV6_4_0(final DataInput in) throws IOException {
|
||||
final long offset = in.readLong();
|
||||
final int numOps = in.readInt();
|
||||
final long generation = in.readLong();
|
||||
final long minSeqNo = in.readLong();
|
||||
final long maxSeqNo = in.readLong();
|
||||
final long globalCheckpoint = in.readLong();
|
||||
final long minTranslogGeneration = in.readLong();
|
||||
final long trimmedAboveSeqNo = in.readLong();
|
||||
return new Checkpoint(offset, numOps, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo);
|
||||
}
|
||||
|
||||
static Checkpoint readCheckpointV6_0_0(final DataInput in) throws IOException {
|
||||
return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong());
|
||||
final long offset = in.readLong();
|
||||
final int numOps = in.readInt();
|
||||
final long generation = in.readLong();
|
||||
final long minSeqNo = in.readLong();
|
||||
final long maxSeqNo = in.readLong();
|
||||
final long globalCheckpoint = in.readLong();
|
||||
final long minTranslogGeneration = in.readLong();
|
||||
final long trimmedAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
return new Checkpoint(offset, numOps, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo);
|
||||
}
|
||||
|
||||
// reads a checksummed checkpoint introduced in ES 5.0.0
|
||||
static Checkpoint readCheckpointV5_0_0(final DataInput in) throws IOException {
|
||||
final long offset = in.readLong();
|
||||
final int numOps = in.readInt();
|
||||
final long generation = in.readLong();
|
||||
final long minSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
|
||||
final long maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
|
||||
final long globalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
final long minTranslogGeneration = -1L;
|
||||
return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration);
|
||||
final long minTranslogGeneration = -1;
|
||||
final long trimmedAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO;
|
||||
return new Checkpoint(offset, numOps, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -133,6 +179,7 @@ final class Checkpoint {
|
||||
", maxSeqNo=" + maxSeqNo +
|
||||
", globalCheckpoint=" + globalCheckpoint +
|
||||
", minTranslogGeneration=" + minTranslogGeneration +
|
||||
", trimmedAboveSeqNo=" + trimmedAboveSeqNo +
|
||||
'}';
|
||||
}
|
||||
|
||||
@ -145,17 +192,20 @@ final class Checkpoint {
|
||||
if (fileVersion == INITIAL_VERSION) {
|
||||
assert indexInput.length() == V1_FILE_SIZE : indexInput.length();
|
||||
return Checkpoint.readCheckpointV5_0_0(indexInput);
|
||||
} else if (fileVersion == VERSION_6_0_0) {
|
||||
assert indexInput.length() == V2_FILE_SIZE : indexInput.length();
|
||||
return Checkpoint.readCheckpointV6_0_0(indexInput);
|
||||
} else {
|
||||
assert fileVersion == CURRENT_VERSION : fileVersion;
|
||||
assert indexInput.length() == FILE_SIZE : indexInput.length();
|
||||
return Checkpoint.readCheckpointV6_0_0(indexInput);
|
||||
assert indexInput.length() == V3_FILE_SIZE : indexInput.length();
|
||||
return Checkpoint.readCheckpointV6_4_0(indexInput);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void write(ChannelFactory factory, Path checkpointFile, Checkpoint checkpoint, OpenOption... options) throws IOException {
|
||||
final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(FILE_SIZE) {
|
||||
final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(V3_FILE_SIZE) {
|
||||
@Override
|
||||
public synchronized byte[] toByteArray() {
|
||||
// don't clone
|
||||
@ -164,13 +214,13 @@ final class Checkpoint {
|
||||
};
|
||||
final String resourceDesc = "checkpoint(path=\"" + checkpointFile + "\", gen=" + checkpoint + ")";
|
||||
try (OutputStreamIndexOutput indexOutput =
|
||||
new OutputStreamIndexOutput(resourceDesc, checkpointFile.toString(), byteOutputStream, FILE_SIZE)) {
|
||||
new OutputStreamIndexOutput(resourceDesc, checkpointFile.toString(), byteOutputStream, V3_FILE_SIZE)) {
|
||||
CodecUtil.writeHeader(indexOutput, CHECKPOINT_CODEC, CURRENT_VERSION);
|
||||
checkpoint.write(indexOutput);
|
||||
CodecUtil.writeFooter(indexOutput);
|
||||
|
||||
assert indexOutput.getFilePointer() == FILE_SIZE :
|
||||
"get you numbers straight; bytes written: " + indexOutput.getFilePointer() + ", buffer size: " + FILE_SIZE;
|
||||
assert indexOutput.getFilePointer() == V3_FILE_SIZE :
|
||||
"get you numbers straight; bytes written: " + indexOutput.getFilePointer() + ", buffer size: " + V3_FILE_SIZE;
|
||||
assert indexOutput.getFilePointer() < 512 :
|
||||
"checkpoint files have to be smaller than 512 bytes for atomic writes; size: " + indexOutput.getFilePointer();
|
||||
|
||||
@ -196,7 +246,8 @@ final class Checkpoint {
|
||||
if (generation != that.generation) return false;
|
||||
if (minSeqNo != that.minSeqNo) return false;
|
||||
if (maxSeqNo != that.maxSeqNo) return false;
|
||||
return globalCheckpoint == that.globalCheckpoint;
|
||||
if (globalCheckpoint != that.globalCheckpoint) return false;
|
||||
return trimmedAboveSeqNo == that.trimmedAboveSeqNo;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -207,6 +258,7 @@ final class Checkpoint {
|
||||
result = 31 * result + Long.hashCode(minSeqNo);
|
||||
result = 31 * result + Long.hashCode(maxSeqNo);
|
||||
result = 31 * result + Long.hashCode(globalCheckpoint);
|
||||
result = 31 * result + Long.hashCode(trimmedAboveSeqNo);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -56,6 +56,15 @@ final class MultiSnapshot implements Translog.Snapshot {
|
||||
return totalOperations;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int skippedOperations() {
|
||||
int skippedOperations = overriddenOperations;
|
||||
for (TranslogSnapshot translog : translogs) {
|
||||
skippedOperations += translog.skippedOperations();
|
||||
}
|
||||
return skippedOperations;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int overriddenOperations() {
|
||||
return overriddenOperations;
@ -710,6 +710,41 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
return TRANSLOG_FILE_PREFIX + generation + CHECKPOINT_SUFFIX;
}

/**
* Trims translog operations that have a primary term below <code>belowTerm</code> and a seq# above <code>aboveSeqNo</code>.
* Effectively it lowers the max visible seq# ({@link Checkpoint#trimmedAboveSeqNo}), so that {@link TranslogSnapshot} skips those operations.
*/
public void trimOperations(long belowTerm, long aboveSeqNo) throws IOException {
assert aboveSeqNo >= SequenceNumbers.NO_OPS_PERFORMED : "aboveSeqNo has to be a valid sequence number";

try (ReleasableLock lock = writeLock.acquire()) {
ensureOpen();
if (current.getPrimaryTerm() < belowTerm) {
throw new IllegalArgumentException("Trimming the translog can only be done for terms lower than the current one. " +
"Trim requested for term [ " + belowTerm + " ] , current is [ " + current.getPrimaryTerm() + " ]");
}
// we assume that the current translog generation doesn't have trimmable ops. Verify that.
assert current.assertNoSeqAbove(belowTerm, aboveSeqNo);
// update all existing ones (if necessary), as checkpoint and reader are immutable
final List<TranslogReader> newReaders = new ArrayList<>(readers.size());
try {
for (TranslogReader reader : readers) {
final TranslogReader newReader =
reader.getPrimaryTerm() < belowTerm
? reader.closeIntoTrimmedReader(aboveSeqNo, getChannelFactory())
: reader;
newReaders.add(newReader);
}
} catch (IOException e) {
IOUtils.closeWhileHandlingException(newReaders);
close();
throw e;
}

this.readers.clear();
this.readers.addAll(newReaders);
}
}

/**
* Ensures that the given location has been synced / written to the underlying storage.
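Read on its own, trimOperations applies two small rules: the request must target terms strictly below the current writer's term, and only readers whose term is below that bound get a tightened checkpoint. A standalone restatement follows; all names are local to the sketch and it is not part of the change.

// Minimal sketch, not part of the change: the validation and per-reader rule used above.
final class TranslogTrimRules {
    static void validate(long currentWriterTerm, long belowTerm) {
        if (currentWriterTerm < belowTerm) {
            throw new IllegalArgumentException(
                "Trimming the translog can only be done for terms lower than the current one");
        }
    }

    static boolean shouldTrimReader(long readerPrimaryTerm, long belowTerm) {
        return readerPrimaryTerm < belowTerm;
    }
}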
@ -859,6 +894,13 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
*/
int totalOperations();

/**
* The number of operations that have been skipped (overridden or trimmed) in the snapshot so far.
*/
default int skippedOperations() {
return 0;
}

/**
* The number of operations that have been overridden (e.g. superseded) in the snapshot so far.
* If two operations have the same sequence number, the operation with a lower term will be overridden by the operation
@ -21,6 +21,8 @@ package org.elasticsearch.index.translog;
|
||||
|
||||
import org.apache.lucene.store.AlreadyClosedException;
|
||||
import org.elasticsearch.common.io.Channels;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.EOFException;
|
||||
@ -28,8 +30,11 @@ import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.channels.FileChannel;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import static org.elasticsearch.index.translog.Translog.getCommitCheckpointFileName;
|
||||
|
||||
/**
|
||||
* an immutable translog filereader
|
||||
*/
@ -70,6 +75,39 @@ public class TranslogReader extends BaseTranslogReader implements Closeable {
return new TranslogReader(checkpoint, channel, path, header);
}

/**
* Closes the current reader and creates a new one with a new checkpoint and the same file channel
*/
TranslogReader closeIntoTrimmedReader(long aboveSeqNo, ChannelFactory channelFactory) throws IOException {
if (closed.compareAndSet(false, true)) {
Closeable toCloseOnFailure = channel;
final TranslogReader newReader;
try {
if (aboveSeqNo < checkpoint.trimmedAboveSeqNo
|| aboveSeqNo < checkpoint.maxSeqNo && checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
final Path checkpointFile = path.getParent().resolve(getCommitCheckpointFileName(checkpoint.generation));
final Checkpoint newCheckpoint = new Checkpoint(checkpoint.offset, checkpoint.numOps,
checkpoint.generation, checkpoint.minSeqNo, checkpoint.maxSeqNo,
checkpoint.globalCheckpoint, checkpoint.minTranslogGeneration, aboveSeqNo);
Checkpoint.write(channelFactory, checkpointFile, newCheckpoint, StandardOpenOption.WRITE);

IOUtils.fsync(checkpointFile, false);
IOUtils.fsync(checkpointFile.getParent(), true);

newReader = new TranslogReader(newCheckpoint, channel, path, header);
} else {
newReader = new TranslogReader(checkpoint, channel, path, header);
}
toCloseOnFailure = null;
return newReader;
} finally {
IOUtils.close(toCloseOnFailure);
}
} else {
throw new AlreadyClosedException(toString() + " is already closed");
}
}

public long sizeInBytes() {
return length;
}
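The condition that decides whether a reader needs its checkpoint rewritten is easy to misread in diff form; the sketch below restates it with names local to the sketch (the -2 constant stands in for SequenceNumbers.UNASSIGNED_SEQ_NO) and is not part of the change.

// Minimal sketch: rewrite the checkpoint only when the requested trim point is tighter
// than what is already recorded for this reader.
final class TrimmedCheckpointRule {
    static final long UNASSIGNED_SEQ_NO = -2L;

    static boolean needsRewrite(long aboveSeqNo, long recordedTrimmedAboveSeqNo, long maxSeqNo) {
        return aboveSeqNo < recordedTrimmedAboveSeqNo
            || (recordedTrimmedAboveSeqNo == UNASSIGNED_SEQ_NO && aboveSeqNo < maxSeqNo);
    }
}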
@ -19,6 +19,7 @@
|
||||
package org.elasticsearch.index.translog;
|
||||
|
||||
import org.elasticsearch.common.io.Channels;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
@ -32,6 +33,7 @@ final class TranslogSnapshot extends BaseTranslogReader {
|
||||
|
||||
private final ByteBuffer reusableBuffer;
|
||||
private long position;
|
||||
private int skippedOperations;
|
||||
private int readOperations;
|
||||
private BufferedChecksumStreamInput reuse;
|
||||
@ -54,17 +56,24 @@ final class TranslogSnapshot extends BaseTranslogReader {
return totalOperations;
}

int skippedOperations(){
return skippedOperations;
}

@Override
Checkpoint getCheckpoint() {
return checkpoint;
}

public Translog.Operation next() throws IOException {
if (readOperations < totalOperations) {
return readOperation();
} else {
return null;
while (readOperations < totalOperations) {
final Translog.Operation operation = readOperation();
if (operation.seqNo() <= checkpoint.trimmedAboveSeqNo || checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) {
return operation;
}
skippedOperations++;
}
return null;
}

protected Translog.Operation readOperation() throws IOException {
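Outside the diff, the new loop in next() reduces to one visibility predicate per operation; here is a standalone restatement (names local to the sketch, -2 again standing in for SequenceNumbers.UNASSIGNED_SEQ_NO), not part of the change.

// Minimal sketch: an operation stays readable unless a trim point is set and the
// operation sits above it.
final class TrimVisibilityRule {
    static final long UNASSIGNED_SEQ_NO = -2L;

    static boolean isVisible(long operationSeqNo, long trimmedAboveSeqNo) {
        return trimmedAboveSeqNo == UNASSIGNED_SEQ_NO || operationSeqNo <= trimmedAboveSeqNo;
    }
}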
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.index.translog;
|
||||
|
||||
import org.apache.lucene.store.AlreadyClosedException;
|
||||
import org.apache.lucene.store.OutputStreamDataOutput;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
import org.elasticsearch.Assertions;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
@ -92,6 +91,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
|
||||
this.minSeqNo = initialCheckpoint.minSeqNo;
|
||||
assert initialCheckpoint.maxSeqNo == SequenceNumbers.NO_OPS_PERFORMED : initialCheckpoint.maxSeqNo;
|
||||
this.maxSeqNo = initialCheckpoint.maxSeqNo;
|
||||
assert initialCheckpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO : initialCheckpoint.trimmedAboveSeqNo;
|
||||
this.globalCheckpointSupplier = globalCheckpointSupplier;
|
||||
this.seenSequenceNumbers = Assertions.ENABLED ? new HashMap<>() : null;
|
||||
}
|
||||
@ -213,6 +213,25 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
|
||||
return true;
|
||||
}
|
||||
|
||||
synchronized boolean assertNoSeqAbove(long belowTerm, long aboveSeqNo) {
|
||||
seenSequenceNumbers.entrySet().stream().filter(e -> e.getKey().longValue() > aboveSeqNo)
|
||||
.forEach(e -> {
|
||||
final Translog.Operation op;
|
||||
try {
|
||||
op = Translog.readOperation(new BufferedChecksumStreamInput(e.getValue().v1().streamInput()));
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException(ex);
|
||||
}
|
||||
long seqNo = op.seqNo();
|
||||
long primaryTerm = op.primaryTerm();
|
||||
if (primaryTerm < belowTerm) {
|
||||
throw new AssertionError("current should not have any operations with seq#:primaryTerm ["
|
||||
+ seqNo + ":" + primaryTerm + "] > " + aboveSeqNo + ":" + belowTerm);
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* write all buffered ops to disk and fsync file.
|
||||
*
|
||||
@ -241,7 +260,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
|
||||
@Override
|
||||
synchronized Checkpoint getCheckpoint() {
|
||||
return new Checkpoint(totalOffset, operationCounter, generation, minSeqNo, maxSeqNo,
|
||||
globalCheckpointSupplier.getAsLong(), minTranslogGenerationSupplier.getAsLong());
|
||||
globalCheckpointSupplier.getAsLong(), minTranslogGenerationSupplier.getAsLong(),
|
||||
SequenceNumbers.UNASSIGNED_SEQ_NO);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -29,24 +29,7 @@ import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AnalysisRegistry;
|
||||
import org.elasticsearch.index.analysis.AnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.ArabicAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.ArmenianAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.BasqueAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.BengaliAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.BrazilianAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.BulgarianAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.CatalanAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.CharFilterFactory;
|
||||
import org.elasticsearch.index.analysis.ChineseAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.CjkAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.CzechAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.DanishAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.DutchAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.EnglishAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.FinnishAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.FrenchAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.GalicianAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.GermanAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.GreekAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.HindiAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.HungarianAnalyzerProvider;
|
||||
@ -68,7 +51,6 @@ import org.elasticsearch.index.analysis.RomanianAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.RussianAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.SimpleAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.SnowballAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.SoraniAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.SpanishAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.StandardAnalyzerProvider;
|
||||
@ -245,24 +227,6 @@ public final class AnalysisModule {
|
||||
analyzers.register("stop", StopAnalyzerProvider::new);
|
||||
analyzers.register("whitespace", WhitespaceAnalyzerProvider::new);
|
||||
analyzers.register("keyword", KeywordAnalyzerProvider::new);
|
||||
analyzers.register("snowball", SnowballAnalyzerProvider::new);
|
||||
analyzers.register("arabic", ArabicAnalyzerProvider::new);
|
||||
analyzers.register("armenian", ArmenianAnalyzerProvider::new);
|
||||
analyzers.register("basque", BasqueAnalyzerProvider::new);
|
||||
analyzers.register("bengali", BengaliAnalyzerProvider::new);
|
||||
analyzers.register("brazilian", BrazilianAnalyzerProvider::new);
|
||||
analyzers.register("bulgarian", BulgarianAnalyzerProvider::new);
|
||||
analyzers.register("catalan", CatalanAnalyzerProvider::new);
|
||||
analyzers.register("chinese", ChineseAnalyzerProvider::new);
|
||||
analyzers.register("cjk", CjkAnalyzerProvider::new);
|
||||
analyzers.register("czech", CzechAnalyzerProvider::new);
|
||||
analyzers.register("danish", DanishAnalyzerProvider::new);
|
||||
analyzers.register("dutch", DutchAnalyzerProvider::new);
|
||||
analyzers.register("english", EnglishAnalyzerProvider::new);
|
||||
analyzers.register("finnish", FinnishAnalyzerProvider::new);
|
||||
analyzers.register("french", FrenchAnalyzerProvider::new);
|
||||
analyzers.register("galician", GalicianAnalyzerProvider::new);
|
||||
analyzers.register("german", GermanAnalyzerProvider::new);
|
||||
analyzers.register("greek", GreekAnalyzerProvider::new);
|
||||
analyzers.register("hindi", HindiAnalyzerProvider::new);
|
||||
analyzers.register("hungarian", HungarianAnalyzerProvider::new);
|
||||
|
@ -20,37 +20,21 @@ package org.elasticsearch.indices.analysis;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
|
||||
import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
|
||||
import org.apache.lucene.analysis.bn.BengaliAnalyzer;
|
||||
import org.apache.lucene.analysis.br.BrazilianAnalyzer;
|
||||
import org.apache.lucene.analysis.ca.CatalanAnalyzer;
|
||||
import org.apache.lucene.analysis.cjk.CJKAnalyzer;
|
||||
import org.apache.lucene.analysis.ckb.SoraniAnalyzer;
|
||||
import org.apache.lucene.analysis.core.KeywordAnalyzer;
|
||||
import org.apache.lucene.analysis.core.SimpleAnalyzer;
|
||||
import org.apache.lucene.analysis.core.StopAnalyzer;
|
||||
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
|
||||
import org.apache.lucene.analysis.cz.CzechAnalyzer;
|
||||
import org.apache.lucene.analysis.da.DanishAnalyzer;
|
||||
import org.apache.lucene.analysis.de.GermanAnalyzer;
|
||||
import org.apache.lucene.analysis.el.GreekAnalyzer;
|
||||
import org.apache.lucene.analysis.en.EnglishAnalyzer;
|
||||
import org.apache.lucene.analysis.es.SpanishAnalyzer;
|
||||
import org.apache.lucene.analysis.eu.BasqueAnalyzer;
|
||||
import org.apache.lucene.analysis.fa.PersianAnalyzer;
|
||||
import org.apache.lucene.analysis.fi.FinnishAnalyzer;
|
||||
import org.apache.lucene.analysis.fr.FrenchAnalyzer;
|
||||
import org.apache.lucene.analysis.ga.IrishAnalyzer;
|
||||
import org.apache.lucene.analysis.gl.GalicianAnalyzer;
|
||||
import org.apache.lucene.analysis.hi.HindiAnalyzer;
|
||||
import org.apache.lucene.analysis.hu.HungarianAnalyzer;
|
||||
import org.apache.lucene.analysis.hy.ArmenianAnalyzer;
|
||||
import org.apache.lucene.analysis.id.IndonesianAnalyzer;
|
||||
import org.apache.lucene.analysis.it.ItalianAnalyzer;
|
||||
import org.apache.lucene.analysis.lt.LithuanianAnalyzer;
|
||||
import org.apache.lucene.analysis.lv.LatvianAnalyzer;
|
||||
import org.apache.lucene.analysis.nl.DutchAnalyzer;
|
||||
import org.apache.lucene.analysis.no.NorwegianAnalyzer;
|
||||
import org.apache.lucene.analysis.pt.PortugueseAnalyzer;
|
||||
import org.apache.lucene.analysis.ro.RomanianAnalyzer;
|
||||
@ -61,7 +45,6 @@ import org.apache.lucene.analysis.sv.SwedishAnalyzer;
|
||||
import org.apache.lucene.analysis.th.ThaiAnalyzer;
|
||||
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.index.analysis.SnowballAnalyzer;
|
||||
import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy;
|
||||
|
||||
import java.util.Locale;
|
||||
@ -129,168 +112,6 @@ public enum PreBuiltAnalyzers {
|
||||
}
|
||||
},
|
||||
|
||||
SNOWBALL {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer analyzer = new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET);
|
||||
analyzer.setVersion(version.luceneVersion);
|
||||
return analyzer;
|
||||
}
|
||||
},
|
||||
|
||||
ARABIC {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new ArabicAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
ARMENIAN {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new ArmenianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
BASQUE {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new BasqueAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
BENGALI {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new BengaliAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
BRAZILIAN {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new BrazilianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
BULGARIAN {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new BulgarianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
CATALAN {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new CatalanAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
CHINESE(CachingStrategy.ONE) {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new StandardAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
CJK {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new CJKAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
CZECH {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new CzechAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
DUTCH {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new DutchAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
DANISH {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new DanishAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
ENGLISH {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new EnglishAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
FINNISH {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new FinnishAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
FRENCH {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new FrenchAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
GALICIAN {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new GalicianAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
GERMAN {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
Analyzer a = new GermanAnalyzer();
|
||||
a.setVersion(version.luceneVersion);
|
||||
return a;
|
||||
}
|
||||
},
|
||||
|
||||
GREEK {
|
||||
@Override
|
||||
protected Analyzer create(Version version) {
|
||||
|
@ -615,9 +615,9 @@ public class RecoverySourceHandler {
|
||||
cancellableThreads.executeIO(sendBatch);
|
||||
}
|
||||
|
||||
assert expectedTotalOps == snapshot.overriddenOperations() + skippedOps + totalSentOps
|
||||
assert expectedTotalOps == snapshot.skippedOperations() + skippedOps + totalSentOps
|
||||
: String.format(Locale.ROOT, "expected total [%d], overridden [%d], skipped [%d], total sent [%d]",
|
||||
expectedTotalOps, snapshot.overriddenOperations(), skippedOps, totalSentOps);
|
||||
expectedTotalOps, snapshot.skippedOperations(), skippedOps, totalSentOps);
|
||||
|
||||
if (requiredOpsTracker.getCheckpoint() < endingSeqNo) {
|
||||
throw new IllegalStateException("translog replay failed to cover required sequence numbers" +
|
||||
|
@ -53,8 +53,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
|
||||
for (SearchContextHighlight.Field field : context.highlight().fields()) {
|
||||
Collection<String> fieldNamesToHighlight;
|
||||
if (Regex.isSimpleMatchPattern(field.field())) {
|
||||
DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().getType());
|
||||
fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field());
|
||||
fieldNamesToHighlight = context.mapperService().simpleMatchToFullName(field.field());
|
||||
} else {
|
||||
fieldNamesToHighlight = Collections.singletonList(field.field());
|
||||
}
|
||||
|
@ -40,7 +40,7 @@ public class ResyncReplicationRequestTests extends ESTestCase {
|
||||
final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(),
|
||||
Versions.MATCH_ANY, VersionType.INTERNAL, bytes, null, -1);
|
||||
final ShardId shardId = new ShardId(new Index("index", "uuid"), 0);
|
||||
final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, new Translog.Operation[]{index});
|
||||
final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, new Translog.Operation[]{index});
|
||||
|
||||
final BytesStreamOutput out = new BytesStreamOutput();
|
||||
before.writeTo(out);
|
||||
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.collect.ImmutableOpenIntMap;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.set.Sets;
|
||||
import org.elasticsearch.gateway.GatewayAllocator;
|
||||
import org.elasticsearch.index.engine.EngineTestCase;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.IndexShardTestCase;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
@ -350,7 +351,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
|
||||
assertThat(indexResult.getShardInfo().getSuccessful(), equalTo(numberOfReplicas + 1));
|
||||
}
|
||||
final IndexShard oldPrimaryShard = internalCluster().getInstance(IndicesService.class, oldPrimary).getShardOrNull(shardId);
|
||||
IndexShardTestCase.getEngine(oldPrimaryShard).getLocalCheckpointTracker().generateSeqNo(); // Make gap in seqno.
|
||||
EngineTestCase.generateNewSeqNo(IndexShardTestCase.getEngine(oldPrimaryShard)); // Make gap in seqno.
|
||||
long moreDocs = scaledRandomIntBetween(1, 10);
|
||||
for (int i = 0; i < moreDocs; i++) {
|
||||
IndexResponse indexResult = index("test", "doc", Long.toString(numDocs + i));
|
||||
|
@ -61,14 +61,17 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
|
||||
}
|
||||
|
||||
public void testThatInstancesAreCachedAndReused() {
|
||||
assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
|
||||
PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT));
|
||||
// same lucene version should be cached
|
||||
assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_1),
|
||||
PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_2));
|
||||
assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT),
|
||||
PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT));
|
||||
// same es version should be cached
|
||||
assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1),
|
||||
PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1));
|
||||
assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_0),
|
||||
PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_1));
|
||||
|
||||
assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_0),
|
||||
PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_1));
|
||||
// Same Lucene version should be cached:
|
||||
assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_1),
|
||||
PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_2));
|
||||
}
|
||||
|
||||
public void testThatAnalyzersAreUsedInMapping() throws IOException {
|
||||
|
@ -232,7 +232,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
|
||||
operation = randomBoolean() ?
|
||||
appendOnlyPrimary(doc, false, 1)
|
||||
: appendOnlyReplica(doc, false, 1, engine.getLocalCheckpointTracker().generateSeqNo());
|
||||
: appendOnlyReplica(doc, false, 1, generateNewSeqNo(engine));
|
||||
engine.index(operation);
|
||||
assertTrue("safe access should be required", engine.isSafeAccessRequired());
|
||||
assertEquals(1, engine.getVersionMapSize()); // now we add this to the map
|
||||
@ -1028,7 +1028,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
engine.index(indexForDoc(doc));
|
||||
boolean inSync = randomBoolean();
|
||||
if (inSync) {
|
||||
globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint());
|
||||
globalCheckpoint.set(engine.getLocalCheckpoint());
|
||||
}
|
||||
|
||||
engine.flush();
|
||||
@ -1046,7 +1046,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 4L : 1L));
|
||||
assertThat(engine.getTranslog().getDeletionPolicy().getTranslogGenerationOfLastCommit(), equalTo(4L));
|
||||
|
||||
globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint());
|
||||
globalCheckpoint.set(engine.getLocalCheckpoint());
|
||||
engine.flush(true, true);
|
||||
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(5L));
|
||||
assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(5L));
|
||||
@ -2058,12 +2058,12 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
final Engine.DeleteResult result = initialEngine.delete(delete);
|
||||
if (result.getResultType() == Engine.Result.Type.SUCCESS) {
|
||||
assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1));
|
||||
assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo + 1));
|
||||
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1));
|
||||
indexedIds.remove(id);
|
||||
primarySeqNo++;
|
||||
} else {
|
||||
assertThat(result.getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO));
|
||||
assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo));
|
||||
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
|
||||
}
|
||||
} else {
|
||||
// index a document
|
||||
@ -2076,12 +2076,12 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
final Engine.IndexResult result = initialEngine.index(index);
|
||||
if (result.getResultType() == Engine.Result.Type.SUCCESS) {
|
||||
assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1));
|
||||
assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo + 1));
|
||||
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1));
|
||||
indexedIds.add(id);
|
||||
primarySeqNo++;
|
||||
} else {
|
||||
assertThat(result.getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO));
|
||||
assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo));
|
||||
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
|
||||
}
|
||||
}
|
||||
|
||||
@ -2090,7 +2090,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
replicaLocalCheckpoint = randomIntBetween(Math.toIntExact(replicaLocalCheckpoint), Math.toIntExact(primarySeqNo));
|
||||
}
|
||||
gcpTracker.updateLocalCheckpoint(primary.allocationId().getId(),
|
||||
initialEngine.getLocalCheckpointTracker().getCheckpoint());
|
||||
initialEngine.getLocalCheckpoint());
|
||||
gcpTracker.updateLocalCheckpoint(replica.allocationId().getId(), replicaLocalCheckpoint);
|
||||
|
||||
if (rarely()) {
|
||||
@ -2103,8 +2103,8 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
logger.info("localcheckpoint {}, global {}", replicaLocalCheckpoint, primarySeqNo);
|
||||
globalCheckpoint = gcpTracker.getGlobalCheckpoint();
|
||||
|
||||
assertEquals(primarySeqNo, initialEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(primarySeqNo, initialEngine.getLocalCheckpointTracker().getCheckpoint());
|
||||
assertEquals(primarySeqNo, initialEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertEquals(primarySeqNo, initialEngine.getLocalCheckpoint());
|
||||
assertThat(globalCheckpoint, equalTo(replicaLocalCheckpoint));
|
||||
|
||||
assertThat(
|
||||
@ -2126,7 +2126,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())){
|
||||
recoveringEngine.recoverFromTranslog();
|
||||
|
||||
assertEquals(primarySeqNo, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertThat(
|
||||
Long.parseLong(recoveringEngine.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)),
|
||||
equalTo(primarySeqNo));
|
||||
@ -2139,9 +2139,9 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
// that the committed max seq no is equivalent to what the current primary seq no is, as all data
|
||||
// we have assigned sequence numbers to should be in the commit
|
||||
equalTo(primarySeqNo));
|
||||
assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(primarySeqNo));
|
||||
assertThat(recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo));
|
||||
assertThat(recoveringEngine.getLocalCheckpointTracker().generateSeqNo(), equalTo(primarySeqNo + 1));
|
||||
assertThat(recoveringEngine.getLocalCheckpoint(), equalTo(primarySeqNo));
|
||||
assertThat(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
|
||||
assertThat(generateNewSeqNo(recoveringEngine), equalTo(primarySeqNo + 1));
|
||||
}
|
||||
}
|
||||
|
||||
@ -2444,7 +2444,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
|
||||
try (InternalEngine engine = createEngine(config)) {
|
||||
engine.index(firstIndexRequest);
|
||||
globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint());
|
||||
globalCheckpoint.set(engine.getLocalCheckpoint());
|
||||
expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog());
|
||||
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
|
||||
assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY));
|
||||
@ -2607,7 +2607,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
engine.recoverFromTranslog();
|
||||
final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
|
||||
engine.index(indexForDoc(doc1));
|
||||
globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint());
|
||||
globalCheckpoint.set(engine.getLocalCheckpoint());
|
||||
throwErrorOnCommit.set(true);
|
||||
FlushFailedEngineException e = expectThrows(FlushFailedEngineException.class, engine::flush);
|
||||
assertThat(e.getCause().getMessage(), equalTo("power's out"));
|
||||
@ -2667,7 +2667,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
}
|
||||
|
||||
public void testTranslogReplay() throws IOException {
|
||||
final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getLocalCheckpointTracker().getCheckpoint();
|
||||
final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getLocalCheckpoint();
|
||||
final int numDocs = randomIntBetween(1, 10);
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
|
||||
@ -3610,7 +3610,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
final AtomicBoolean stall,
|
||||
final AtomicLong expectedLocalCheckpoint) {
|
||||
return (engine, operation) -> {
|
||||
final long seqNo = engine.getLocalCheckpointTracker().generateSeqNo();
|
||||
final long seqNo = generateNewSeqNo(engine);
|
||||
final CountDownLatch latch = latchReference.get();
|
||||
if (stall.get()) {
|
||||
try {
|
||||
@ -3662,8 +3662,8 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
assertThat(initialEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(expectedLocalCheckpoint.get()));
|
||||
assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo((long) (docs - 1)));
|
||||
assertThat(initialEngine.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint.get()));
|
||||
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo((long) (docs - 1)));
|
||||
initialEngine.flush(true, true);
|
||||
|
||||
latchReference.get().countDown();
|
||||
@ -3677,7 +3677,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
try (Engine recoveringEngine = new InternalEngine(initialEngine.config())) {
|
||||
recoveringEngine.recoverFromTranslog();
|
||||
recoveringEngine.fillSeqNoGaps(2);
|
||||
assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), greaterThanOrEqualTo((long) (docs - 1)));
|
||||
assertThat(recoveringEngine.getLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -3758,7 +3758,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
expectedLocalCheckpoint = numberOfOperations - 1;
|
||||
}
|
||||
|
||||
assertThat(engine.getLocalCheckpointTracker().getCheckpoint(), equalTo(expectedLocalCheckpoint));
|
||||
assertThat(engine.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint));
|
||||
try (Engine.GetResult result = engine.get(new Engine.Get(true, false, "type", "2", uid), searcherFactory)) {
|
||||
assertThat(result.exists(), equalTo(exists));
|
||||
}
|
||||
@ -3790,11 +3790,11 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get());
|
||||
final String reason = "filling gaps";
|
||||
noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason));
|
||||
assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 1)));
|
||||
assertThat(noOpEngine.getLocalCheckpoint(), equalTo((long) (maxSeqNo + 1)));
|
||||
assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled));
|
||||
noOpEngine.noOp(
|
||||
new Engine.NoOp(maxSeqNo + 2, primaryTerm.get(), randomFrom(PRIMARY, REPLICA, PEER_RECOVERY), System.nanoTime(), reason));
|
||||
assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 2)));
|
||||
assertThat(noOpEngine.getLocalCheckpoint(), equalTo((long) (maxSeqNo + 2)));
|
||||
assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled + 1));
|
||||
// skip to the op that we added to the translog
|
||||
Translog.Operation op;
|
||||
@ -4009,17 +4009,17 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
actualEngine.rollTranslogGeneration();
|
||||
}
|
||||
}
|
||||
final long currentLocalCheckpoint = actualEngine.getLocalCheckpointTracker().getCheckpoint();
|
||||
final long currentLocalCheckpoint = actualEngine.getLocalCheckpoint();
|
||||
final long resetLocalCheckpoint =
|
||||
randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Math.toIntExact(currentLocalCheckpoint));
|
||||
actualEngine.getLocalCheckpointTracker().resetCheckpoint(resetLocalCheckpoint);
|
||||
actualEngine.resetLocalCheckpoint(resetLocalCheckpoint);
|
||||
completedSeqNos.clear();
|
||||
actualEngine.restoreLocalCheckpointFromTranslog();
|
||||
final Set<Long> intersection = new HashSet<>(expectedCompletedSeqNos);
|
||||
intersection.retainAll(LongStream.range(resetLocalCheckpoint + 1, operations).boxed().collect(Collectors.toSet()));
|
||||
assertThat(completedSeqNos, equalTo(intersection));
|
||||
assertThat(actualEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(currentLocalCheckpoint));
|
||||
assertThat(actualEngine.getLocalCheckpointTracker().generateSeqNo(), equalTo((long) operations));
|
||||
assertThat(actualEngine.getLocalCheckpoint(), equalTo(currentLocalCheckpoint));
|
||||
assertThat(generateNewSeqNo(actualEngine), equalTo((long) operations));
|
||||
} finally {
|
||||
IOUtils.close(actualEngine);
|
||||
}
|
||||
@ -4043,7 +4043,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
replicaEngine.index(replicaIndexForDoc(doc, 1, indexResult.getSeqNo(), false));
|
||||
}
|
||||
}
|
||||
checkpointOnReplica = replicaEngine.getLocalCheckpointTracker().getCheckpoint();
|
||||
checkpointOnReplica = replicaEngine.getLocalCheckpoint();
|
||||
} finally {
|
||||
IOUtils.close(replicaEngine);
|
||||
}
|
||||
@ -4053,16 +4053,16 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
|
||||
Engine recoveringEngine = null;
|
||||
try {
|
||||
assertEquals(docs - 1, engine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(docs - 1, engine.getLocalCheckpointTracker().getCheckpoint());
|
||||
assertEquals(maxSeqIDOnReplica, replicaEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(checkpointOnReplica, replicaEngine.getLocalCheckpointTracker().getCheckpoint());
|
||||
assertEquals(docs - 1, engine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertEquals(docs - 1, engine.getLocalCheckpoint());
|
||||
assertEquals(maxSeqIDOnReplica, replicaEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertEquals(checkpointOnReplica, replicaEngine.getLocalCheckpoint());
|
||||
trimUnsafeCommits(copy(replicaEngine.config(), globalCheckpoint::get));
|
||||
recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get));
|
||||
assertEquals(numDocsOnReplica, recoveringEngine.getTranslog().stats().getUncommittedOperations());
|
||||
recoveringEngine.recoverFromTranslog();
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpoint());
|
||||
assertEquals((maxSeqIDOnReplica + 1) - numDocsOnReplica, recoveringEngine.fillSeqNoGaps(2));
|
||||
|
||||
// now snapshot the tlog and ensure the primary term is updated
|
||||
@ -4077,10 +4077,10 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
}
|
||||
|
||||
}
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint());
|
||||
if ((flushed = randomBoolean())) {
|
||||
globalCheckpoint.set(recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
globalCheckpoint.set(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
recoveringEngine.getTranslog().sync();
|
||||
recoveringEngine.flush(true, true);
|
||||
}
|
||||
@ -4097,11 +4097,11 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
assertThat(recoveringEngine.getTranslog().stats().getUncommittedOperations(), equalTo(0));
|
||||
}
|
||||
recoveringEngine.recoverFromTranslog();
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint());
|
||||
assertEquals(0, recoveringEngine.fillSeqNoGaps(3));
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
|
||||
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint());
|
||||
} finally {
|
||||
IOUtils.close(recoveringEngine);
|
||||
}
|
||||
@ -4284,7 +4284,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
// Advance the global checkpoint during the flush to create a lag between a persisted global checkpoint in the translog
|
||||
// (this value is visible to the deletion policy) and an in memory global checkpoint in the SequenceNumbersService.
|
||||
if (rarely()) {
|
||||
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getLocalCheckpointTracker().getCheckpoint()));
|
||||
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getLocalCheckpoint()));
|
||||
}
|
||||
super.commitIndexWriter(writer, translog, syncId);
|
||||
}
|
||||
@ -4296,7 +4296,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
|
||||
engine.index(indexForDoc(testParsedDocument(Integer.toString(docId), null, document, B_1, null)));
|
||||
if (frequently()) {
|
||||
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getCheckpoint()));
|
||||
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint()));
|
||||
engine.getTranslog().sync();
|
||||
}
|
||||
if (frequently()) {
|
||||
@ -4430,11 +4430,11 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
engine.flush(false, randomBoolean());
|
||||
List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
|
||||
// Global checkpoint advanced but not enough - all commits are kept.
|
||||
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getCheckpoint() - 1));
|
||||
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint() - 1));
|
||||
engine.syncTranslog();
|
||||
assertThat(DirectoryReader.listCommits(store.directory()), equalTo(commits));
|
||||
// Global checkpoint advanced enough - only the last commit is kept.
|
||||
globalCheckpoint.set(randomLongBetween(engine.getLocalCheckpointTracker().getCheckpoint(), Long.MAX_VALUE));
|
||||
globalCheckpoint.set(randomLongBetween(engine.getLocalCheckpoint(), Long.MAX_VALUE));
|
||||
engine.syncTranslog();
|
||||
assertThat(DirectoryReader.listCommits(store.directory()), contains(commits.get(commits.size() - 1)));
|
||||
}
|
||||
@ -4458,7 +4458,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
for (int i = 0; i < numSnapshots; i++) {
|
||||
snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit.
|
||||
}
|
||||
globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint());
|
||||
globalCheckpoint.set(engine.getLocalCheckpoint());
|
||||
engine.syncTranslog();
|
||||
final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
|
||||
for (int i = 0; i < numSnapshots - 1; i++) {
|
||||
@ -4508,13 +4508,13 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(0));
|
||||
// If the new index commit still points to the same translog generation as the current index commit,
|
||||
// we should not enable the periodically flush condition; otherwise we can get into an infinite loop of flushes.
|
||||
engine.getLocalCheckpointTracker().generateSeqNo(); // create a gap here
|
||||
generateNewSeqNo(engine); // create a gap here
|
||||
for (int id = 0; id < numDocs; id++) {
|
||||
if (randomBoolean()) {
|
||||
translog.rollGeneration();
|
||||
}
|
||||
final ParsedDocument doc = testParsedDocument("new" + id, null, testDocumentWithTextField(), SOURCE, null);
|
||||
engine.index(replicaIndexForDoc(doc, 2L, engine.getLocalCheckpointTracker().generateSeqNo(), false));
|
||||
engine.index(replicaIndexForDoc(doc, 2L, generateNewSeqNo(engine), false));
|
||||
if (engine.shouldPeriodicallyFlush()) {
|
||||
engine.flush();
|
||||
assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo)));
|
||||
@ -4535,7 +4535,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
engine.onSettingsChanged();
|
||||
final int numOps = scaledRandomIntBetween(100, 10_000);
|
||||
for (int i = 0; i < numOps; i++) {
|
||||
final long localCheckPoint = engine.getLocalCheckpointTracker().getCheckpoint();
|
||||
final long localCheckPoint = engine.getLocalCheckpoint();
|
||||
final long seqno = randomLongBetween(Math.max(0, localCheckPoint), localCheckPoint + 5);
|
||||
final ParsedDocument doc = testParsedDocument(Long.toString(seqno), null, testDocumentWithTextField(), SOURCE, null);
|
||||
engine.index(replicaIndexForDoc(doc, 1L, seqno, false));
|
||||
@ -4622,9 +4622,9 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
}
|
||||
final long deleteBatch = between(10, 20);
|
||||
final long gapSeqNo = randomLongBetween(
|
||||
engine.getLocalCheckpointTracker().getMaxSeqNo() + 1, engine.getLocalCheckpointTracker().getMaxSeqNo() + deleteBatch);
|
||||
engine.getSeqNoStats(-1).getMaxSeqNo() + 1, engine.getSeqNoStats(-1).getMaxSeqNo() + deleteBatch);
|
||||
for (int i = 0; i < deleteBatch; i++) {
|
||||
final long seqno = engine.getLocalCheckpointTracker().generateSeqNo();
|
||||
final long seqno = generateNewSeqNo(engine);
|
||||
if (seqno != gapSeqNo) {
|
||||
if (randomBoolean()) {
|
||||
clock.incrementAndGet();
|
||||
@ -4671,7 +4671,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
ParsedDocument doc = testParsedDocument("append-only" + i, null, testDocumentWithTextField(), SOURCE, null);
|
||||
if (randomBoolean()) {
|
||||
engine.index(appendOnlyReplica(doc, randomBoolean(), 1, engine.getLocalCheckpointTracker().generateSeqNo()));
|
||||
engine.index(appendOnlyReplica(doc, randomBoolean(), 1, generateNewSeqNo(engine)));
|
||||
} else {
|
||||
engine.index(appendOnlyPrimary(doc, randomBoolean(), randomNonNegativeLong()));
|
||||
}
|
||||
@ -4688,7 +4688,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
for (int i = 0; i < numOps; i++) {
|
||||
ParsedDocument parsedDocument = testParsedDocument(Integer.toString(i), null, testDocumentWithTextField(), SOURCE, null);
|
||||
if (randomBoolean()) { // On replica - update max_seqno for non-append-only operations
|
||||
final long seqno = engine.getLocalCheckpointTracker().generateSeqNo();
|
||||
final long seqno = generateNewSeqNo(engine);
|
||||
final Engine.Index doc = replicaIndexForDoc(parsedDocument, 1, seqno, randomBoolean());
|
||||
if (randomBoolean()) {
|
||||
engine.index(doc);
|
||||
@ -4707,7 +4707,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
}
|
||||
appendOnlyIndexer.join(120_000);
|
||||
assertThat(engine.getMaxSeqNoOfNonAppendOnlyOperations(), equalTo(maxSeqNoOfNonAppendOnly));
|
||||
globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint());
|
||||
globalCheckpoint.set(engine.getLocalCheckpoint());
|
||||
engine.syncTranslog();
|
||||
engine.flush();
|
||||
}
|
||||
@ -4719,15 +4719,14 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
|
||||
public void testSkipOptimizeForExposedAppendOnlyOperations() throws Exception {
|
||||
long lookupTimes = 0L;
|
||||
final LocalCheckpointTracker localCheckpointTracker = engine.getLocalCheckpointTracker();
|
||||
final int initDocs = between(0, 10);
|
||||
for (int i = 0; i < initDocs; i++) {
|
||||
index(engine, i);
|
||||
lookupTimes++;
|
||||
}
|
||||
// doc1 is delayed and arrived after a non-append-only op.
|
||||
final long seqNoAppendOnly1 = localCheckpointTracker.generateSeqNo();
|
||||
final long seqnoNormalOp = localCheckpointTracker.generateSeqNo();
|
||||
final long seqNoAppendOnly1 = generateNewSeqNo(engine);
|
||||
final long seqnoNormalOp = generateNewSeqNo(engine);
|
||||
if (randomBoolean()) {
|
||||
engine.index(replicaIndexForDoc(
|
||||
testParsedDocument("d", null, testDocumentWithTextField(), SOURCE, null), 1, seqnoNormalOp, false));
|
||||
@ -4746,7 +4745,7 @@ public class InternalEngineTests extends EngineTestCase {
|
||||
|
||||
// optimize for other append-only 2 (its seqno > max_seqno of non-append-only) - do not look up in version map.
|
||||
engine.index(appendOnlyReplica(testParsedDocument("append-only-2", null, testDocumentWithTextField(), SOURCE, null),
|
||||
false, randomNonNegativeLong(), localCheckpointTracker.generateSeqNo()));
|
||||
false, randomNonNegativeLong(), generateNewSeqNo(engine)));
|
||||
assertThat(engine.getNumVersionLookups(), equalTo(lookupTimes));
|
||||
}
|
||||
|
||||
|
@ -59,7 +59,7 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
|
||||
|
||||
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
|
||||
FieldMapper fieldMapper = mapper.mappers().getMapper("field");
|
||||
assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class));
|
||||
assertThat(fieldMapper.fieldType().stored(), equalTo(false));
|
||||
}
|
||||
@ -94,7 +94,7 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase {
|
||||
XContentType.JSON));
|
||||
BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
|
||||
assertEquals(new BytesRef(value), indexedValue);
|
||||
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
|
||||
FieldMapper fieldMapper = mapper.mappers().getMapper("field");
|
||||
Object originalValue = fieldMapper.fieldType().valueForDisplay(indexedValue);
|
||||
assertEquals(new BytesArray(value), originalValue);
|
||||
}
|
||||
|
@ -56,11 +56,11 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase {
|
||||
|
||||
DocumentMapper merged = stage1.merge(stage2.mapping());
|
||||
// stage1 mapping should not have been modified
|
||||
assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
|
||||
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
|
||||
assertThat(stage1.mappers().getMapper("age"), nullValue());
|
||||
assertThat(stage1.mappers().getMapper("obj1.prop1"), nullValue());
|
||||
// but merged should
|
||||
assertThat(merged.mappers().smartNameFieldMapper("age"), notNullValue());
|
||||
assertThat(merged.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
|
||||
assertThat(merged.mappers().getMapper("age"), notNullValue());
|
||||
assertThat(merged.mappers().getMapper("obj1.prop1"), notNullValue());
|
||||
}
|
||||
|
||||
public void testMergeObjectDynamic() throws Exception {
|
||||
|
@ -69,25 +69,25 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
|
||||
IndexReader reader = DirectoryReader.open(writer);
|
||||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
|
||||
TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", context), 10);
|
||||
TopDocs topDocs = searcher.search(mapper.mappers().getMapper("field1").fieldType().termQuery("value1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2L));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").fieldType().termQuery("1", context), 10);
|
||||
topDocs = searcher.search(mapper.mappers().getMapper("field2").fieldType().termQuery("1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2L));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").fieldType().termQuery("1.1", context), 10);
|
||||
topDocs = searcher.search(mapper.mappers().getMapper("field3").fieldType().termQuery("1.1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2L));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").fieldType().termQuery("2010-01-01", context), 10);
|
||||
topDocs = searcher.search(mapper.mappers().getMapper("field4").fieldType().termQuery("2010-01-01", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2L));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("1", context), 10);
|
||||
topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2L));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("2", context), 10);
|
||||
topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("2", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2L));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("3", context), 10);
|
||||
topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("3", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2L));
|
||||
writer.close();
|
||||
reader.close();
|
||||
|
@ -629,10 +629,10 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
||||
.setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
|
||||
|
||||
defaultMapper = index.mapperService().documentMapper("type");
|
||||
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
|
||||
FieldMapper mapper = defaultMapper.mappers().getMapper("s_long");
|
||||
assertThat(mapper.fieldType().typeName(), equalTo("long"));
|
||||
|
||||
mapper = defaultMapper.mappers().smartNameFieldMapper("s_double");
|
||||
mapper = defaultMapper.mappers().getMapper("s_double");
|
||||
assertThat(mapper.fieldType().typeName(), equalTo("float"));
|
||||
}
|
||||
|
||||
@ -656,10 +656,10 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
||||
.setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get());
|
||||
|
||||
defaultMapper = index.mapperService().documentMapper("type");
|
||||
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
|
||||
FieldMapper mapper = defaultMapper.mappers().getMapper("s_long");
|
||||
assertThat(mapper, instanceOf(TextFieldMapper.class));
|
||||
|
||||
mapper = defaultMapper.mappers().smartNameFieldMapper("s_double");
|
||||
mapper = defaultMapper.mappers().getMapper("s_double");
|
||||
assertThat(mapper, instanceOf(TextFieldMapper.class));
|
||||
}
|
||||
|
||||
@ -707,9 +707,9 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
|
||||
|
||||
defaultMapper = index.mapperService().documentMapper("type");
|
||||
|
||||
DateFieldMapper dateMapper1 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date1");
|
||||
DateFieldMapper dateMapper2 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date2");
|
||||
DateFieldMapper dateMapper3 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date3");
|
||||
DateFieldMapper dateMapper1 = (DateFieldMapper) defaultMapper.mappers().getMapper("date1");
|
||||
DateFieldMapper dateMapper2 = (DateFieldMapper) defaultMapper.mappers().getMapper("date2");
|
||||
DateFieldMapper dateMapper3 = (DateFieldMapper) defaultMapper.mappers().getMapper("date3");
|
||||
// inherited from dynamic date format
|
||||
assertEquals("yyyy-MM-dd", dateMapper1.fieldType().dateTimeFormatter().format());
|
||||
// inherited from dynamic date format since the mapping in the template did not specify a format
|
||||
|
@ -56,11 +56,11 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase {
|
||||
docMapper = index.mapperService().documentMapper("person");
|
||||
DocumentFieldMappers mappers = docMapper.mappers();
|
||||
|
||||
assertThat(mappers.smartNameFieldMapper("s"), Matchers.notNullValue());
|
||||
assertEquals(IndexOptions.NONE, mappers.smartNameFieldMapper("s").fieldType().indexOptions());
|
||||
assertThat(mappers.getMapper("s"), Matchers.notNullValue());
|
||||
assertEquals(IndexOptions.NONE, mappers.getMapper("s").fieldType().indexOptions());
|
||||
|
||||
assertThat(mappers.smartNameFieldMapper("l"), Matchers.notNullValue());
|
||||
assertNotSame(IndexOptions.NONE, mappers.smartNameFieldMapper("l").fieldType().indexOptions());
|
||||
assertThat(mappers.getMapper("l"), Matchers.notNullValue());
|
||||
assertNotSame(IndexOptions.NONE, mappers.getMapper("l").fieldType().indexOptions());
|
||||
|
||||
|
||||
}
|
||||
|
@ -63,7 +63,7 @@ public class FieldNamesFieldTypeTests extends FieldTypeTestCase {
|
||||
MapperService mapperService = mock(MapperService.class);
|
||||
when(mapperService.fullName("_field_names")).thenReturn(fieldNamesFieldType);
|
||||
when(mapperService.fullName("field_name")).thenReturn(fieldType);
|
||||
when(mapperService.simpleMatchToIndexNames("field_name")).thenReturn(Collections.singletonList("field_name"));
|
||||
when(mapperService.simpleMatchToFullName("field_name")).thenReturn(Collections.singletonList("field_name"));
|
||||
|
||||
QueryShardContext queryShardContext = new QueryShardContext(0,
|
||||
indexSettings, null, null, mapperService, null, null, null, null, null, null, () -> 0L, null);
|
||||
|
@ -122,7 +122,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
|
||||
|
||||
DocumentMapper builderDocMapper = new DocumentMapper.Builder(new RootObjectMapper.Builder("person").add(
|
||||
new TextFieldMapper.Builder("name").store(true)
|
||||
.addMultiField(new TextFieldMapper.Builder("indexed").index(true).tokenized(true))
|
||||
.addMultiField(new TextFieldMapper.Builder("indexed").index(true))
|
||||
.addMultiField(new TextFieldMapper.Builder("not_indexed").index(false).store(true))
|
||||
), indexService.mapperService()).build(indexService.mapperService());
|
||||
|
||||
|
@ -55,7 +55,6 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason;
|
||||
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
|
||||
import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
@ -87,6 +86,9 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
.putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto"))
|
||||
.put("index.analysis.analyzer.synonym.tokenizer", "standard")
|
||||
.put("index.analysis.analyzer.synonym.filter", "mySynonyms")
|
||||
// Stop filter remains in server as it is part of lucene-core
|
||||
.put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard")
|
||||
.put("index.analysis.analyzer.my_stop_analyzer.filter", "stop")
|
||||
.build();
|
||||
indexService = createIndex("test", settings);
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
@ -621,7 +623,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes").endObject()
|
||||
.field("index_options", "offsets")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
@ -637,7 +639,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes").endObject()
|
||||
.field("index_options", "freqs")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
@ -654,7 +656,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes").endObject()
|
||||
.field("index_options", "positions")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
@ -675,7 +677,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes").endObject()
|
||||
.field("term_vector", "with_positions_offsets")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
@ -696,7 +698,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes").endObject()
|
||||
.field("term_vector", "with_positions")
|
||||
.endObject().endObject().endObject().endObject());
|
||||
@ -725,7 +727,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
.startObject("properties")
|
||||
.startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "my_stop_analyzer")
|
||||
.field("index_phrases", true)
|
||||
.endObject()
|
||||
.startObject("synfield")
|
||||
@ -742,20 +744,20 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
queryShardContext.getMapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
|
||||
|
||||
Query q = new MatchPhraseQueryBuilder("field", "two words").toQuery(queryShardContext);
|
||||
assertThat(q, is(new PhraseQuery("field._index_phrase", "two word")));
|
||||
assertThat(q, is(new PhraseQuery("field._index_phrase", "two words")));
|
||||
|
||||
Query q2 = new MatchPhraseQueryBuilder("field", "three words here").toQuery(queryShardContext);
|
||||
assertThat(q2, is(new PhraseQuery("field._index_phrase", "three word", "word here")));
|
||||
assertThat(q2, is(new PhraseQuery("field._index_phrase", "three words", "words here")));
|
||||
|
||||
Query q3 = new MatchPhraseQueryBuilder("field", "two words").slop(1).toQuery(queryShardContext);
|
||||
assertThat(q3, is(new PhraseQuery(1, "field", "two", "word")));
|
||||
assertThat(q3, is(new PhraseQuery(1, "field", "two", "words")));
|
||||
|
||||
Query q4 = new MatchPhraseQueryBuilder("field", "singleton").toQuery(queryShardContext);
|
||||
assertThat(q4, is(new TermQuery(new Term("field", "singleton"))));
|
||||
|
||||
Query q5 = new MatchPhraseQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext);
|
||||
assertThat(q5,
|
||||
is(new PhraseQuery.Builder().add(new Term("field", "sparkl")).add(new Term("field", "stopword"), 2).build()));
|
||||
is(new PhraseQuery.Builder().add(new Term("field", "sparkle")).add(new Term("field", "stopword"), 2).build()));
|
||||
|
||||
Query q6 = new MatchPhraseQueryBuilder("synfield", "motor car").toQuery(queryShardContext);
|
||||
assertThat(q6, is(new MultiPhraseQuery.Builder()
|
||||
@ -778,7 +780,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
|
||||
ts.reset();
|
||||
assertTrue(ts.incrementToken());
|
||||
assertEquals("some english", termAtt.toString());
|
||||
assertEquals("Some English", termAtt.toString());
|
||||
}
|
||||
|
||||
{
|
||||
@ -821,7 +823,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes")
|
||||
.field("min_chars", 1)
|
||||
.field("max_chars", 10)
|
||||
@ -855,7 +857,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes").endObject()
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
@ -880,7 +882,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes")
|
||||
.field("min_chars", 1)
|
||||
.field("max_chars", 10)
|
||||
@ -903,7 +905,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes")
|
||||
.field("min_chars", 11)
|
||||
.field("max_chars", 10)
|
||||
@ -920,7 +922,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes")
|
||||
.field("min_chars", 0)
|
||||
.field("max_chars", 10)
|
||||
@ -937,7 +939,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.startObject("index_prefixes")
|
||||
.field("min_chars", 1)
|
||||
.field("max_chars", 25)
|
||||
@ -954,7 +956,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
|
||||
String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field")
|
||||
.field("type", "text")
|
||||
.field("analyzer", "english")
|
||||
.field("analyzer", "standard")
|
||||
.field("index_prefixes", (String) null)
|
||||
.endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
@ -1300,7 +1300,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
Query query = new QueryStringQueryBuilder("the quick fox")
|
||||
.field(STRING_FIELD_NAME)
|
||||
.analyzer("english")
|
||||
.analyzer("stop")
|
||||
.toQuery(createShardContext());
|
||||
BooleanQuery expected = new BooleanQuery.Builder()
|
||||
.add(new TermQuery(new Term(STRING_FIELD_NAME, "quick")), Occur.SHOULD)
|
||||
@ -1313,7 +1313,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
Query query = new QueryStringQueryBuilder("the* quick fox")
|
||||
.field(STRING_FIELD_NAME)
|
||||
.analyzer("english")
|
||||
.analyzer("stop")
|
||||
.toQuery(createShardContext());
|
||||
BooleanQuery expected = new BooleanQuery.Builder()
|
||||
.add(new PrefixQuery(new Term(STRING_FIELD_NAME, "the")), Occur.SHOULD)
|
||||
|
@ -629,7 +629,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
Query query = new SimpleQueryStringBuilder("the quick fox")
|
||||
.field(STRING_FIELD_NAME)
|
||||
.analyzer("english")
|
||||
.analyzer("stop")
|
||||
.toQuery(createShardContext());
|
||||
BooleanQuery expected = new BooleanQuery.Builder()
|
||||
.add(new TermQuery(new Term(STRING_FIELD_NAME, "quick")), BooleanClause.Occur.SHOULD)
|
||||
@ -642,7 +642,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
Query query = new SimpleQueryStringBuilder("the* quick fox")
|
||||
.field(STRING_FIELD_NAME)
|
||||
.analyzer("english")
|
||||
.analyzer("stop")
|
||||
.toQuery(createShardContext());
|
||||
BooleanQuery expected = new BooleanQuery.Builder()
|
||||
.add(new PrefixQuery(new Term(STRING_FIELD_NAME, "the")), BooleanClause.Occur.SHOULD)
|
||||
|
@ -344,9 +344,10 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
|
||||
op1 = snapshot.next();
|
||||
assertThat(op1, notNullValue());
|
||||
assertThat(snapshot.next(), nullValue());
|
||||
assertThat(snapshot.overriddenOperations(), equalTo(0));
|
||||
assertThat(snapshot.skippedOperations(), equalTo(0));
|
||||
}
|
||||
// Make sure that replica2 receives translog ops (eg. op2) from replica1 and overwrites its stale operation (op1).
|
||||
// Make sure that replica2 receives translog ops (eg. op2) from replica1
|
||||
// and does not overwrite its stale operation (op1) as it is trimmed.
|
||||
logger.info("--> Promote replica1 as the primary");
|
||||
shards.promoteReplicaToPrimary(replica1).get(); // wait until resync completed.
|
||||
shards.index(new IndexRequest(index.getName(), "type", "d2").source("{}", XContentType.JSON));
|
||||
@ -357,7 +358,8 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
|
||||
assertThat(op2.seqNo(), equalTo(op1.seqNo()));
|
||||
assertThat(op2.primaryTerm(), greaterThan(op1.primaryTerm()));
|
||||
assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations));
|
||||
assertThat(snapshot.overriddenOperations(), equalTo(1));
|
||||
assertThat(snapshot.overriddenOperations(), equalTo(0));
|
||||
assertThat(snapshot.skippedOperations(), equalTo(1));
|
||||
}
|
||||
|
||||
// Make sure that peer-recovery transfers all but non-overridden operations.
|
||||
@ -370,7 +372,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
|
||||
assertThat(snapshot.totalOperations(), equalTo(initDocs + 1));
|
||||
assertThat(snapshot.next(), equalTo(op2));
|
||||
assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations));
|
||||
assertThat("Peer-recovery should not send overridden operations", snapshot.overriddenOperations(), equalTo(0));
|
||||
assertThat("Peer-recovery should not send overridden operations", snapshot.skippedOperations(), equalTo(0));
|
||||
}
|
||||
// TODO: We should assert the content of shards in the ReplicationGroup.
|
||||
// Without rollback replicas(current implementation), we don't have the same content across shards:
|
||||
|
@ -53,8 +53,12 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
@ -65,6 +69,7 @@ import static org.hamcrest.Matchers.both;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.lessThan;
|
||||
import static org.hamcrest.Matchers.lessThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
@ -353,10 +358,19 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC
|
||||
|
||||
@TestLogging("org.elasticsearch.index.shard:TRACE,org.elasticsearch.action.resync:TRACE")
|
||||
public void testResyncAfterPrimaryPromotion() throws Exception {
|
||||
// TODO: check translog trimming functionality once it's implemented
|
||||
try (ReplicationGroup shards = createGroup(2)) {
|
||||
// TODO: check translog trimming functionality once rollback is implemented in Lucene (ES trimming is done)
|
||||
Map<String, String> mappings =
|
||||
Collections.singletonMap("type", "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}");
|
||||
try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetaData(2, mappings))) {
|
||||
shards.startAll();
|
||||
int initialDocs = shards.indexDocs(randomInt(10));
|
||||
int initialDocs = randomInt(10);
|
||||
|
||||
for (int i = 0; i < initialDocs; i++) {
|
||||
final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "initial_doc_" + i)
|
||||
.source("{ \"f\": \"normal\"}", XContentType.JSON);
|
||||
shards.index(indexRequest);
|
||||
}
|
||||
|
||||
boolean syncedGlobalCheckPoint = randomBoolean();
|
||||
if (syncedGlobalCheckPoint) {
|
||||
shards.syncGlobalCheckpoint();
|
||||
@ -364,16 +378,30 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC
|
||||
|
||||
final IndexShard oldPrimary = shards.getPrimary();
|
||||
final IndexShard newPrimary = shards.getReplicas().get(0);
|
||||
final IndexShard justReplica = shards.getReplicas().get(1);
|
||||
|
||||
// simulate docs that were inflight when primary failed
|
||||
final int extraDocs = randomIntBetween(0, 5);
|
||||
final int extraDocs = randomInt(5);
|
||||
logger.info("--> indexing {} extra docs", extraDocs);
|
||||
for (int i = 0; i < extraDocs; i++) {
|
||||
final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_" + i)
|
||||
.source("{}", XContentType.JSON);
|
||||
final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_doc_" + i)
|
||||
.source("{ \"f\": \"normal\"}", XContentType.JSON);
|
||||
final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary);
|
||||
indexOnReplica(bulkShardRequest, shards, newPrimary);
|
||||
}
|
||||
|
||||
final int extraDocsToBeTrimmed = randomIntBetween(0, 10);
|
||||
logger.info("--> indexing {} extra docs to be trimmed", extraDocsToBeTrimmed);
|
||||
for (int i = 0; i < extraDocsToBeTrimmed; i++) {
|
||||
final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_trimmed_" + i)
|
||||
.source("{ \"f\": \"trimmed\"}", XContentType.JSON);
|
||||
final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary);
|
||||
// has to be replicated to a replica other than newPrimary - that replica is the one whose translog will be trimmed
|
||||
indexOnReplica(bulkShardRequest, shards, justReplica);
|
||||
}
|
||||
|
||||
logger.info("--> seqNo primary {} replica {}", oldPrimary.seqNoStats(), newPrimary.seqNoStats());
|
||||
|
||||
logger.info("--> resyncing replicas");
|
||||
PrimaryReplicaSyncer.ResyncTask task = shards.promoteReplicaToPrimary(newPrimary).get();
|
||||
if (syncedGlobalCheckPoint) {
|
||||
@ -381,7 +409,36 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC
|
||||
} else {
|
||||
assertThat(task.getResyncedOperations(), greaterThanOrEqualTo(extraDocs));
|
||||
}
|
||||
shards.assertAllEqual(initialDocs + extraDocs);
|
||||
List<IndexShard> replicas = shards.getReplicas();
|
||||
|
||||
// check all docs on primary are available on replica
|
||||
Set<String> primaryIds = getShardDocUIDs(newPrimary);
|
||||
assertThat(primaryIds.size(), equalTo(initialDocs + extraDocs));
|
||||
for (IndexShard replica : replicas) {
|
||||
Set<String> replicaIds = getShardDocUIDs(replica);
|
||||
Set<String> temp = new HashSet<>(primaryIds);
|
||||
temp.removeAll(replicaIds);
|
||||
assertThat(replica.routingEntry() + " is missing docs", temp, empty());
|
||||
temp = new HashSet<>(replicaIds);
|
||||
temp.removeAll(primaryIds);
|
||||
// the replica may have more docs since there is no Lucene rollback on it
|
||||
assertThat(replica.routingEntry() + " has to have extra docs", temp,
|
||||
extraDocsToBeTrimmed > 0 ? not(empty()) : empty());
|
||||
}
|
||||
|
||||
// check translog on replica is trimmed
|
||||
int translogOperations = 0;
|
||||
try(Translog.Snapshot snapshot = getTranslog(justReplica).newSnapshot()) {
|
||||
Translog.Operation next;
|
||||
while ((next = snapshot.next()) != null) {
|
||||
translogOperations++;
|
||||
assertThat("unexpected op: " + next, (int)next.seqNo(), lessThan(initialDocs + extraDocs));
|
||||
assertThat("unexpected primaryTerm: " + next.primaryTerm(), next.primaryTerm(), is(oldPrimary.getPrimaryTerm()));
|
||||
final Translog.Source source = next.getSource();
|
||||
assertThat(source.source.utf8ToString(), is("{ \"f\": \"normal\"}"));
|
||||
}
|
||||
}
|
||||
assertThat(translogOperations, is(initialDocs + extraDocs));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -78,6 +78,7 @@ import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.engine.EngineException;
|
||||
import org.elasticsearch.index.engine.EngineTestCase;
|
||||
import org.elasticsearch.index.engine.InternalEngine;
|
||||
import org.elasticsearch.index.engine.InternalEngineFactory;
|
||||
import org.elasticsearch.index.engine.Segment;
|
||||
@ -857,7 +858,7 @@ public class IndexShardTests extends IndexShardTestCase {
|
||||
recoverReplica(replicaShard, primaryShard);
|
||||
final int maxSeqNo = randomIntBetween(0, 128);
|
||||
for (int i = 0; i <= maxSeqNo; i++) {
|
||||
primaryShard.getEngine().getLocalCheckpointTracker().generateSeqNo();
|
||||
EngineTestCase.generateNewSeqNo(primaryShard.getEngine());
|
||||
}
|
||||
final long checkpoint = rarely() ? maxSeqNo - scaledRandomIntBetween(0, maxSeqNo) : maxSeqNo;
|
||||
|
||||
|
@ -20,6 +20,7 @@ package org.elasticsearch.index.shard;
|
||||
|
||||
import org.apache.lucene.store.AlreadyClosedException;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.resync.ResyncReplicationRequest;
|
||||
import org.elasticsearch.action.resync.ResyncReplicationResponse;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
|
||||
@ -36,15 +37,20 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.mapper.SourceToParse;
|
||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||
import org.elasticsearch.tasks.TaskManager;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.core.IsInstanceOf.instanceOf;
|
||||
|
||||
public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
|
||||
@ -53,15 +59,17 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
|
||||
IndexShard shard = newStartedShard(true);
|
||||
TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet());
|
||||
AtomicBoolean syncActionCalled = new AtomicBoolean();
|
||||
List<ResyncReplicationRequest> resyncRequests = new ArrayList<>();
|
||||
PrimaryReplicaSyncer.SyncAction syncAction =
|
||||
(request, parentTask, allocationId, primaryTerm, listener) -> {
|
||||
logger.info("Sending off {} operations", request.getOperations().length);
|
||||
syncActionCalled.set(true);
|
||||
resyncRequests.add(request);
|
||||
assertThat(parentTask, instanceOf(PrimaryReplicaSyncer.ResyncTask.class));
|
||||
listener.onResponse(new ResyncReplicationResponse());
|
||||
};
|
||||
PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, taskManager, syncAction);
|
||||
syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 100)));
|
||||
syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 10)));
|
||||
|
||||
int numDocs = randomInt(10);
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
@ -72,7 +80,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
|
||||
}
|
||||
|
||||
long globalCheckPoint = numDocs > 0 ? randomIntBetween(0, numDocs - 1) : 0;
|
||||
boolean syncNeeded = numDocs > 0 && globalCheckPoint < numDocs - 1;
|
||||
boolean syncNeeded = numDocs > 0;
|
||||
|
||||
String allocationId = shard.routingEntry().allocationId().getId();
|
||||
shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId),
|
||||
@ -84,19 +92,29 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
|
||||
|
||||
PlainActionFuture<PrimaryReplicaSyncer.ResyncTask> fut = new PlainActionFuture<>();
|
||||
syncer.resync(shard, fut);
|
||||
fut.get();
|
||||
PrimaryReplicaSyncer.ResyncTask resyncTask = fut.get();
|
||||
|
||||
if (syncNeeded) {
|
||||
assertTrue("Sync action was not called", syncActionCalled.get());
|
||||
ResyncReplicationRequest resyncRequest = resyncRequests.remove(0);
|
||||
assertThat(resyncRequest.getTrimAboveSeqNo(), equalTo(numDocs - 1L));
|
||||
|
||||
assertThat("trimAboveSeqNo has to be specified in request #0 only", resyncRequests.stream()
|
||||
.mapToLong(ResyncReplicationRequest::getTrimAboveSeqNo)
|
||||
.filter(seqNo -> seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO)
|
||||
.findFirst()
|
||||
.isPresent(),
|
||||
is(false));
|
||||
}
|
||||
assertEquals(globalCheckPoint == numDocs - 1 ? 0 : numDocs, fut.get().getTotalOperations());
|
||||
if (syncNeeded) {
|
||||
|
||||
assertEquals(globalCheckPoint == numDocs - 1 ? 0 : numDocs, resyncTask.getTotalOperations());
|
||||
if (syncNeeded && globalCheckPoint < numDocs - 1) {
|
||||
long skippedOps = globalCheckPoint + 1; // everything up to global checkpoint included
|
||||
assertEquals(skippedOps, fut.get().getSkippedOperations());
|
||||
assertEquals(numDocs - skippedOps, fut.get().getResyncedOperations());
|
||||
assertEquals(skippedOps, resyncTask.getSkippedOperations());
|
||||
assertEquals(numDocs - skippedOps, resyncTask.getResyncedOperations());
|
||||
} else {
|
||||
assertEquals(0, fut.get().getSkippedOperations());
|
||||
assertEquals(0, fut.get().getResyncedOperations());
|
||||
assertEquals(0, resyncTask.getSkippedOperations());
|
||||
assertEquals(0, resyncTask.getResyncedOperations());
|
||||
}
|
||||
|
||||
closeShards(shard);
|
||||
|
@ -118,7 +118,8 @@ public class TranslogHeaderTests extends ESTestCase {
|
||||
assertThat("test file [" + translogFile + "] should exist", Files.exists(translogFile), equalTo(true));
|
||||
final E error = expectThrows(expectedErrorType, () -> {
|
||||
final Checkpoint checkpoint = new Checkpoint(Files.size(translogFile), 1, 1,
|
||||
SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, 1);
|
||||
SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED,
|
||||
SequenceNumbers.NO_OPS_PERFORMED, 1, SequenceNumbers.NO_OPS_PERFORMED);
|
||||
try (FileChannel channel = FileChannel.open(translogFile, StandardOpenOption.READ)) {
|
||||
TranslogReader.open(channel, translogFile, checkpoint, null);
|
||||
}
|
||||
|
@ -107,6 +107,7 @@ import java.util.concurrent.CyclicBarrier;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
import java.util.stream.LongStream;
|
||||
@ -121,8 +122,11 @@ import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.hasToString;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.isIn;
|
||||
import static org.hamcrest.Matchers.lessThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.stub;
|
||||
|
||||
@ -1475,8 +1479,8 @@ public class TranslogTests extends ESTestCase {
|
||||
fail("corrupted");
|
||||
} catch (IllegalStateException ex) {
|
||||
assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3080, " +
|
||||
"numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1} but got: Checkpoint{offset=0, numOps=0, " +
|
||||
"generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0}", ex.getMessage());
|
||||
"numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} but got: Checkpoint{offset=0, numOps=0, " +
|
||||
"generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0, trimmedAboveSeqNo=-2}", ex.getMessage());
|
||||
}
|
||||
Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
|
||||
try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)) {
|
||||
@ -1508,6 +1512,191 @@ public class TranslogTests extends ESTestCase {
|
||||
assertEquals(ops, readOperations);
|
||||
}
|
||||
|
||||
public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() throws Exception {
|
||||
int extraDocs = randomIntBetween(10, 15);
|
||||
|
||||
// increment primaryTerm to avoid potential negative numbers
|
||||
primaryTerm.addAndGet(extraDocs);
|
||||
translog.rollGeneration();
|
||||
|
||||
for (int op = 0; op < extraDocs; op++) {
|
||||
String ascii = randomAlphaOfLengthBetween(1, 50);
|
||||
Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get() - op,
|
||||
ascii.getBytes("UTF-8"));
|
||||
translog.add(operation);
|
||||
}
|
||||
|
||||
AssertionError error = expectThrows(AssertionError.class, () -> translog.trimOperations(primaryTerm.get(), 0));
|
||||
assertThat(error.getMessage(), is("current should not have any operations with seq#:primaryTerm "
|
||||
+ "[1:" + (primaryTerm.get() - 1) + "] > 0:" + primaryTerm.get()));
|
||||
|
||||
primaryTerm.incrementAndGet();
|
||||
translog.rollGeneration();
|
||||
|
||||
// add a single operation to current with seq# > trimmed seq# but higher primary term
|
||||
Translog.Index operation = new Translog.Index("test", "" + 1, 1L, primaryTerm.get(),
|
||||
randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8"));
|
||||
translog.add(operation);
|
||||
|
||||
// it is possible to trim after generation rollover
|
||||
translog.trimOperations(primaryTerm.get(), 0);
|
||||
}
|
||||
|
||||
public void testSnapshotTrimmedOperations() throws Exception {
|
||||
final InMemoryTranslog inMemoryTranslog = new InMemoryTranslog();
|
||||
final List<Translog.Operation> allOperations = new ArrayList<>();
|
||||
|
||||
for(int attempt = 0, maxAttempts = randomIntBetween(3, 10); attempt < maxAttempts; attempt++) {
|
||||
List<Long> ops = LongStream.range(0, allOperations.size() + randomIntBetween(10, 15))
|
||||
.boxed().collect(Collectors.toList());
|
||||
Randomness.shuffle(ops);
|
||||
|
||||
AtomicReference<String> source = new AtomicReference<>();
|
||||
for (final long op : ops) {
|
||||
source.set(randomAlphaOfLengthBetween(1, 50));
|
||||
|
||||
// have to use exactly the same source for same seq# if primaryTerm is not changed
|
||||
if (primaryTerm.get() == translog.getCurrent().getPrimaryTerm()) {
|
||||
// use the latest source of op with the same seq# - therefore no break
|
||||
allOperations
|
||||
.stream()
|
||||
.filter(allOp -> allOp instanceof Translog.Index && allOp.seqNo() == op)
|
||||
.map(allOp -> ((Translog.Index)allOp).source().utf8ToString())
|
||||
.reduce((a, b) -> b)
|
||||
.ifPresent(source::set);
|
||||
}
|
||||
|
||||
// use ongoing primaryTerms - or the same as it was
|
||||
Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(),
|
||||
source.get().getBytes("UTF-8"));
|
||||
translog.add(operation);
|
||||
inMemoryTranslog.add(operation);
|
||||
allOperations.add(operation);
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
primaryTerm.incrementAndGet();
|
||||
translog.rollGeneration();
|
||||
}
|
||||
|
||||
long maxTrimmedSeqNo = randomInt(allOperations.size());
|
||||
|
||||
translog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo);
|
||||
inMemoryTranslog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo);
|
||||
translog.sync();
|
||||
|
||||
Collection<Translog.Operation> effectiveOperations = inMemoryTranslog.operations();
|
||||
|
||||
try (Translog.Snapshot snapshot = translog.newSnapshot()) {
|
||||
assertThat(snapshot, containsOperationsInAnyOrder(effectiveOperations));
|
||||
assertThat(snapshot.totalOperations(), is(allOperations.size()));
|
||||
assertThat(snapshot.skippedOperations(), is(allOperations.size() - effectiveOperations.size()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * this class mimics the behaviour of the original {@link Translog}
 */
static class InMemoryTranslog {
private final Map<Long, Translog.Operation> operations = new HashMap<>();

void add(Translog.Operation operation) {
final Translog.Operation old = operations.put(operation.seqNo(), operation);
assert old == null || old.primaryTerm() <= operation.primaryTerm();
}

void trimOperations(long belowTerm, long aboveSeqNo) {
for (final Iterator<Map.Entry<Long, Translog.Operation>> it = operations.entrySet().iterator(); it.hasNext(); ) {
final Map.Entry<Long, Translog.Operation> next = it.next();
Translog.Operation op = next.getValue();
boolean drop = op.primaryTerm() < belowTerm && op.seqNo() > aboveSeqNo;
if (drop) {
it.remove();
}
}
}

Collection<Translog.Operation> operations() {
return operations.values();
}
}
public void testRandomExceptionsOnTrimOperations() throws Exception {
|
||||
Path tempDir = createTempDir();
|
||||
final FailSwitch fail = new FailSwitch();
|
||||
fail.failNever();
|
||||
TranslogConfig config = getTranslogConfig(tempDir);
|
||||
List<FileChannel> fileChannels = new ArrayList<>();
|
||||
final Translog failableTLog =
|
||||
getFailableTranslog(fail, config, randomBoolean(), false, null, createTranslogDeletionPolicy(), fileChannels);
|
||||
|
||||
IOException expectedException = null;
|
||||
int translogOperations = 0;
|
||||
final int maxAttempts = 10;
|
||||
for(int attempt = 0; attempt < maxAttempts; attempt++) {
|
||||
int maxTrimmedSeqNo;
|
||||
fail.failNever();
|
||||
int extraTranslogOperations = randomIntBetween(10, 100);
|
||||
|
||||
List<Integer> ops = IntStream.range(translogOperations, translogOperations + extraTranslogOperations)
|
||||
.boxed().collect(Collectors.toList());
|
||||
Randomness.shuffle(ops);
|
||||
for (int op : ops) {
|
||||
String ascii = randomAlphaOfLengthBetween(1, 50);
|
||||
Translog.Index operation = new Translog.Index("test", "" + op, op,
|
||||
primaryTerm.get(), ascii.getBytes("UTF-8"));
|
||||
|
||||
failableTLog.add(operation);
|
||||
}
|
||||
|
||||
translogOperations += extraTranslogOperations;
|
||||
|
||||
// at least one roll + inc of primary term has to be there - otherwise trim would not take place at all
|
||||
// last attempt we have to make roll as well - otherwise could skip trimming as it has been trimmed already
|
||||
boolean rollover = attempt == 0 || attempt == maxAttempts - 1 || randomBoolean();
|
||||
if (rollover) {
|
||||
primaryTerm.incrementAndGet();
|
||||
failableTLog.rollGeneration();
|
||||
}
|
||||
|
||||
maxTrimmedSeqNo = rollover ? translogOperations - randomIntBetween(4, 8) : translogOperations + 1;
|
||||
|
||||
// if we have reached the max attempts, always fail
|
||||
fail.failRate(attempt < maxAttempts - 1 ? 25 : 100);
|
||||
try {
|
||||
failableTLog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo);
|
||||
} catch (IOException e){
|
||||
expectedException = e;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
assertThat(expectedException, is(not(nullValue())));
|
||||
|
||||
assertThat(fileChannels, is(not(empty())));
|
||||
assertThat("all file channels have to be closed",
|
||||
fileChannels.stream().filter(f -> f.isOpen()).findFirst().isPresent(), is(false));
|
||||
|
||||
assertThat(failableTLog.isOpen(), is(false));
|
||||
final AlreadyClosedException alreadyClosedException = expectThrows(AlreadyClosedException.class, () -> failableTLog.newSnapshot());
|
||||
assertThat(alreadyClosedException.getMessage(),
|
||||
is("translog is already closed"));
|
||||
|
||||
fail.failNever();
|
||||
|
||||
// check that the translog is not corrupted despite the IO exception
|
||||
try(Translog reopenedTranslog = openTranslog(config, failableTLog.getTranslogUUID())) {
|
||||
try (Translog.Snapshot snapshot = reopenedTranslog.newSnapshot()) {
|
||||
assertThat(snapshot.totalOperations(), greaterThan(0));
|
||||
Translog.Operation operation;
|
||||
for (int i = 0; (operation = snapshot.next()) != null; i++) {
|
||||
assertNotNull("operation " + i + " must be non-null", operation);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testLocationHashCodeEquals() throws IOException {
|
||||
List<Translog.Location> locations = new ArrayList<>();
|
||||
List<Translog.Location> locations2 = new ArrayList<>();
|
||||
@ -2008,7 +2197,8 @@ public class TranslogTests extends ESTestCase {
|
||||
private volatile boolean onceFailedFailAlways = false;
|
||||
|
||||
public boolean fail() {
|
||||
boolean fail = randomIntBetween(1, 100) <= failRate;
|
||||
final int rnd = randomIntBetween(1, 100);
|
||||
boolean fail = rnd <= failRate;
|
||||
if (fail && onceFailedFailAlways) {
|
||||
failAlways();
|
||||
}
|
||||
@ -2027,17 +2217,30 @@ public class TranslogTests extends ESTestCase {
|
||||
failRate = randomIntBetween(1, 100);
|
||||
}
|
||||
|
||||
public void failRate(int rate) {
|
||||
failRate = rate;
|
||||
}
|
||||
|
||||
public void onceFailedFailAlways() {
|
||||
onceFailedFailAlways = true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites,
|
||||
final boolean throwUnknownException, String translogUUID,
|
||||
final TranslogDeletionPolicy deletionPolicy) throws IOException {
|
||||
return getFailableTranslog(fail, config, partialWrites, throwUnknownException, translogUUID, deletionPolicy, null);
|
||||
}
|
||||
|
||||
private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites,
|
||||
final boolean throwUnknownException, String translogUUID,
|
||||
final TranslogDeletionPolicy deletionPolicy,
|
||||
final List<FileChannel> fileChannels) throws IOException {
|
||||
final ChannelFactory channelFactory = (file, openOption) -> {
|
||||
FileChannel channel = FileChannel.open(file, openOption);
|
||||
if (fileChannels != null) {
|
||||
fileChannels.add(channel);
|
||||
}
|
||||
boolean success = false;
|
||||
try {
|
||||
final boolean isCkpFile = file.getFileName().toString().endsWith(".ckp"); // don't do partial writes for checkpoints; we rely on the bytes being written as an atomic operation
|
||||
@ -2394,7 +2597,7 @@ public class TranslogTests extends ESTestCase {
|
||||
}
|
||||
final long generation = randomNonNegativeLong();
|
||||
return new Checkpoint(randomLong(), randomInt(), generation, minSeqNo, maxSeqNo, randomNonNegativeLong(),
|
||||
randomLongBetween(1, generation));
|
||||
randomLongBetween(1, generation), maxSeqNo);
|
||||
}
|
||||
|
||||
public void testCheckpointOnDiskFull() throws IOException {
|
||||
@ -2618,7 +2821,7 @@ public class TranslogTests extends ESTestCase {
|
||||
assertThat(Tuple.tuple(op.seqNo(), op.primaryTerm()), isIn(seenSeqNos));
|
||||
readFromSnapshot++;
|
||||
}
|
||||
readFromSnapshot += snapshot.overriddenOperations();
|
||||
readFromSnapshot += snapshot.skippedOperations();
|
||||
}
|
||||
assertThat(readFromSnapshot, equalTo(expectedSnapshotOps));
|
||||
final long seqNoLowerBound = seqNo;
|
||||
|