Return reloaded analyzers in _reload_search_analyzers response (#43813)

Currently the response of the "_reload_search_analyzers" endpoint contains the
index names and node ids of the indices where analyzer reloading was triggered. This
change adds the names of the search-time analyzers that were reloaded.

Closes #43804
Christoph Büscher 2019-07-02 18:08:04 +02:00
parent cc7c5ab2c0
commit 31cf96e7bf
7 changed files with 155 additions and 58 deletions
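For context, a minimal sketch of how a caller could consume the enriched response (assuming a node `Client`, as in the tests further below; the helper method and its output format are illustrative and not part of this commit):

[source,java]
--------------------------------------------------
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.core.action.ReloadAnalyzerAction;
import org.elasticsearch.xpack.core.action.ReloadAnalyzersRequest;
import org.elasticsearch.xpack.core.action.ReloadAnalyzersResponse;
import org.elasticsearch.xpack.core.action.ReloadAnalyzersResponse.ReloadDetails;

// Hypothetical helper: trigger a reload for one index and print, per index,
// which analyzers were reloaded and on which nodes.
static void printReloadDetails(Client client, String index) {
    ReloadAnalyzersResponse response = client
        .execute(ReloadAnalyzerAction.INSTANCE, new ReloadAnalyzersRequest(index))
        .actionGet();
    for (ReloadDetails details : response.getReloadDetails().values()) {
        // e.g. "my_index: analyzers=[my_synonyms], nodes=[mfdqTXn_T7SGr2Ho2KT8uw]"
        System.out.println(details.getIndexName()
            + ": analyzers=" + details.getReloadedAnalyzers()
            + ", nodes=" + details.getReloadedIndicesNodes());
    }
}
--------------------------------------------------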


@@ -69,4 +69,33 @@ reload to ensure the new state of the file is reflected everywhere in the cluster
POST /my_index/_reload_search_analyzers
--------------------------------------------------
// CONSOLE
// TEST[s/^/PUT my_index\n/]
// TEST[continued]
The reload request returns information about the nodes it was executed on and the
analyzers that were reloaded:
[source,js]
--------------------------------------------------
{
"_shards" : {
"total" : 2,
"successful" : 2,
"failed" : 0
},
"reload_details" : [
{
"index" : "my_index",
"reloaded_analyzers" : [
"my_synonyms"
],
"reloaded_node_ids" : [
"mfdqTXn_T7SGr2Ho2KT8uw"
]
}
]
}
--------------------------------------------------
// TEST[continued]
// TESTRESPONSE[s/"total" : 2/"total" : $body._shards.total/]
// TESTRESPONSE[s/"successful" : 2/"successful" : $body._shards.successful/]
// TESTRESPONSE[s/mfdqTXn_T7SGr2Ho2KT8uw/$body.reload_details.0.reloaded_node_ids.0/]


@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.Analyzer;
@@ -849,19 +850,23 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
public synchronized void reloadSearchAnalyzers(AnalysisRegistry registry) throws IOException {
public synchronized List<String> reloadSearchAnalyzers(AnalysisRegistry registry) throws IOException {
logger.info("reloading search analyzers");
// refresh indexAnalyzers and search analyzers
final Map<String, TokenizerFactory> tokenizerFactories = registry.buildTokenizerFactories(indexSettings);
final Map<String, CharFilterFactory> charFilterFactories = registry.buildCharFilterFactories(indexSettings);
final Map<String, TokenFilterFactory> tokenFilterFactories = registry.buildTokenFilterFactories(indexSettings);
final Map<String, Settings> settings = indexSettings.getSettings().getGroups("index.analysis.analyzer");
final List<String> reloadedAnalyzers = new ArrayList<>();
for (NamedAnalyzer namedAnalyzer : indexAnalyzers.getAnalyzers().values()) {
if (namedAnalyzer.analyzer() instanceof ReloadableCustomAnalyzer) {
ReloadableCustomAnalyzer analyzer = (ReloadableCustomAnalyzer) namedAnalyzer.analyzer();
Settings analyzerSettings = settings.get(namedAnalyzer.name());
analyzer.reload(namedAnalyzer.name(), analyzerSettings, tokenizerFactories, charFilterFactories, tokenFilterFactories);
String analyzerName = namedAnalyzer.name();
Settings analyzerSettings = settings.get(analyzerName);
analyzer.reload(analyzerName, analyzerSettings, tokenizerFactories, charFilterFactories, tokenFilterFactories);
reloadedAnalyzers.add(analyzerName);
}
}
return reloadedAnalyzers;
}
}


@@ -8,18 +8,20 @@ package org.elasticsearch.xpack.core.action;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.action.TransportReloadAnalyzersAction.ReloadResult;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
@@ -28,16 +30,25 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
*/
public class ReloadAnalyzersResponse extends BroadcastResponse {
private final Map<String, List<String>> reloadedIndicesNodes;
private final Map<String, ReloadDetails> reloadDetails;
private static final ParseField RELOAD_DETAILS_FIELD = new ParseField("reload_details");
private static final ParseField INDEX_FIELD = new ParseField("index");
private static final ParseField RELOADED_ANALYZERS_FIELD = new ParseField("reloaded_analyzers");
private static final ParseField RELOADED_NODE_IDS_FIELD = new ParseField("reloaded_node_ids");
public ReloadAnalyzersResponse() {
reloadedIndicesNodes = Collections.emptyMap();
reloadDetails = Collections.emptyMap();
}
public ReloadAnalyzersResponse(int totalShards, int successfulShards, int failedShards,
List<DefaultShardOperationFailedException> shardFailures, Map<String, List<String>> reloadedIndicesNodes) {
List<DefaultShardOperationFailedException> shardFailures, Map<String, ReloadDetails> reloadedIndicesNodes) {
super(totalShards, successfulShards, failedShards, shardFailures);
this.reloadedIndicesNodes = reloadedIndicesNodes;
this.reloadDetails = reloadedIndicesNodes;
}
public final Map<String, ReloadDetails> getReloadDetails() {
return this.reloadDetails;
}
/**
@@ -45,11 +56,13 @@ public class ReloadAnalyzersResponse extends BroadcastResponse {
*/
@Override
protected void addCustomXContentFields(XContentBuilder builder, Params params) throws IOException {
builder.startArray("reloaded_nodes");
for (Entry<String, List<String>> indexNodesReloaded : reloadedIndicesNodes.entrySet()) {
builder.startArray(RELOAD_DETAILS_FIELD.getPreferredName());
for (Entry<String, ReloadDetails> indexDetails : reloadDetails.entrySet()) {
builder.startObject();
builder.field("index", indexNodesReloaded.getKey());
builder.field("reloaded_node_ids", indexNodesReloaded.getValue());
ReloadDetails value = indexDetails.getValue();
builder.field(INDEX_FIELD.getPreferredName(), value.getIndexName());
builder.field(RELOADED_ANALYZERS_FIELD.getPreferredName(), value.getReloadedAnalyzers());
builder.field(RELOADED_NODE_IDS_FIELD.getPreferredName(), value.getReloadedIndicesNodes());
builder.endObject();
}
builder.endArray();
@@ -59,31 +72,61 @@ public class ReloadAnalyzersResponse extends BroadcastResponse {
private static final ConstructingObjectParser<ReloadAnalyzersResponse, Void> PARSER = new ConstructingObjectParser<>("reload_analyzer",
true, arg -> {
BroadcastResponse response = (BroadcastResponse) arg[0];
List<Tuple<String, List<String>>> results = (List<Tuple<String, List<String>>>) arg[1];
Map<String, List<String>> reloadedNodeIds = new HashMap<>();
for (Tuple<String, List<String>> result : results) {
reloadedNodeIds.put(result.v1(), result.v2());
List<ReloadDetails> results = (List<ReloadDetails>) arg[1];
Map<String, ReloadDetails> reloadedNodeIds = new HashMap<>();
for (ReloadDetails result : results) {
reloadedNodeIds.put(result.getIndexName(), result);
}
return new ReloadAnalyzersResponse(response.getTotalShards(), response.getSuccessfulShards(), response.getFailedShards(),
Arrays.asList(response.getShardFailures()), reloadedNodeIds);
});
@SuppressWarnings({ "unchecked" })
private static final ConstructingObjectParser<Tuple<String, List<String>>, Void> ENTRY_PARSER = new ConstructingObjectParser<>(
private static final ConstructingObjectParser<ReloadDetails, Void> ENTRY_PARSER = new ConstructingObjectParser<>(
"reload_analyzer.entry", true, arg -> {
String index = (String) arg[0];
List<String> nodeIds = (List<String>) arg[1];
return new Tuple<>(index, nodeIds);
return new ReloadDetails((String) arg[0], new HashSet<>((List<String>) arg[1]), new HashSet<>((List<String>) arg[2]));
});
static {
declareBroadcastFields(PARSER);
PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, new ParseField("reloaded_nodes"));
ENTRY_PARSER.declareString(constructorArg(), new ParseField("index"));
ENTRY_PARSER.declareStringArray(constructorArg(), new ParseField("reloaded_node_ids"));
PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, RELOAD_DETAILS_FIELD);
ENTRY_PARSER.declareString(constructorArg(), INDEX_FIELD);
ENTRY_PARSER.declareStringArray(constructorArg(), RELOADED_ANALYZERS_FIELD);
ENTRY_PARSER.declareStringArray(constructorArg(), RELOADED_NODE_IDS_FIELD);
}
public static ReloadAnalyzersResponse fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
public static class ReloadDetails {
private final String indexName;
private final Set<String> reloadedIndicesNodes;
private final Set<String> reloadedAnalyzers;
ReloadDetails(String name, Set<String> reloadedIndicesNodes, Set<String> reloadedAnalyzers) {
this.indexName = name;
this.reloadedIndicesNodes = reloadedIndicesNodes;
this.reloadedAnalyzers = reloadedAnalyzers;
}
public String getIndexName() {
return indexName;
}
public Set<String> getReloadedIndicesNodes() {
return reloadedIndicesNodes;
}
public Set<String> getReloadedAnalyzers() {
return reloadedAnalyzers;
}
void merge(ReloadResult other) {
assert this.indexName.equals(other.index);
this.reloadedAnalyzers.addAll(other.reloadedSearchAnalyzers);
this.reloadedIndicesNodes.add(other.nodeId);
}
}
}


@@ -29,6 +29,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.action.ReloadAnalyzersResponse.ReloadDetails;
import org.elasticsearch.xpack.core.action.TransportReloadAnalyzersAction.ReloadResult;
import java.io.IOException;
@@ -67,18 +68,18 @@ public class TransportReloadAnalyzersAction
@Override
protected ReloadAnalyzersResponse newResponse(ReloadAnalyzersRequest request, int totalShards, int successfulShards, int failedShards,
List<ReloadResult> responses, List<DefaultShardOperationFailedException> shardFailures, ClusterState clusterState) {
Map<String, List<String>> reloadedIndicesNodes = new HashMap<String, List<String>>();
Map<String, ReloadDetails> reloadedIndicesDetails = new HashMap<String, ReloadDetails>();
for (ReloadResult result : responses) {
if (reloadedIndicesNodes.containsKey(result.index)) {
List<String> nodes = reloadedIndicesNodes.get(result.index);
nodes.add(result.nodeId);
if (reloadedIndicesDetails.containsKey(result.index)) {
reloadedIndicesDetails.get(result.index).merge(result);
} else {
List<String> nodes = new ArrayList<>();
nodes.add(result.nodeId);
reloadedIndicesNodes.put(result.index, nodes);
HashSet<String> nodeIds = new HashSet<String>();
nodeIds.add(result.nodeId);
ReloadDetails details = new ReloadDetails(result.index, nodeIds, new HashSet<String>(result.reloadedSearchAnalyzers));
reloadedIndicesDetails.put(result.index, details);
}
}
return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, shardFailures, reloadedIndicesNodes);
return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, shardFailures, reloadedIndicesDetails);
}
@Override
@@ -92,17 +93,19 @@ public class TransportReloadAnalyzersAction
protected ReloadResult shardOperation(ReloadAnalyzersRequest request, ShardRouting shardRouting) throws IOException {
logger.info("reloading analyzers for index shard " + shardRouting);
IndexService indexService = indicesService.indexService(shardRouting.index());
indexService.mapperService().reloadSearchAnalyzers(indicesService.getAnalysis());
return new ReloadResult(shardRouting.index().getName(), shardRouting.currentNodeId());
List<String> reloadedSearchAnalyzers = indexService.mapperService().reloadSearchAnalyzers(indicesService.getAnalysis());
return new ReloadResult(shardRouting.index().getName(), shardRouting.currentNodeId(), reloadedSearchAnalyzers);
}
public static final class ReloadResult implements Streamable {
static final class ReloadResult implements Streamable {
String index;
String nodeId;
List<String> reloadedSearchAnalyzers;
private ReloadResult(String index, String nodeId) {
private ReloadResult(String index, String nodeId, List<String> reloadedSearchAnalyzers) {
this.index = index;
this.nodeId = nodeId;
this.reloadedSearchAnalyzers = reloadedSearchAnalyzers;
}
private ReloadResult() {
@@ -112,12 +115,14 @@ public class TransportReloadAnalyzersAction
public void readFrom(StreamInput in) throws IOException {
this.index = in.readString();
this.nodeId = in.readString();
this.reloadedSearchAnalyzers = in.readStringList();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
out.writeString(nodeId);
out.writeStringCollection(this.reloadedSearchAnalyzers);
}
}
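For illustration only, a standalone sketch of the aggregation performed in `newResponse` above: per-shard results for the same index collapse into a single per-index entry whose node ids and analyzer names accumulate as sets. This uses plain JDK collections with illustrative names; none of it is part of the commit.

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class ReloadAggregationSketch {
    public static void main(String[] args) {
        // Per-shard results: index name, node id, reloaded analyzer name.
        List<String[]> shardResults = Arrays.asList(
            new String[] { "my_index", "node-1", "my_synonyms" },
            new String[] { "my_index", "node-2", "my_synonyms" });
        Map<String, Set<String>> nodeIdsPerIndex = new HashMap<>();
        Map<String, Set<String>> analyzersPerIndex = new HashMap<>();
        for (String[] result : shardResults) {
            // the first occurrence of an index creates the entry, later ones merge into it
            nodeIdsPerIndex.computeIfAbsent(result[0], k -> new HashSet<>()).add(result[1]);
            analyzersPerIndex.computeIfAbsent(result[0], k -> new HashSet<>()).add(result[2]);
        }
        // Prints one entry per index, e.g. nodes {my_index=[node-1, node-2]}, analyzers {my_index=[my_synonyms]}
        System.out.println("nodes " + nodeIdsPerIndex + ", analyzers " + analyzersPerIndex);
    }
}
--------------------------------------------------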


@@ -9,27 +9,31 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractBroadcastResponseTestCase;
import org.elasticsearch.xpack.core.action.ReloadAnalyzersResponse;
import org.elasticsearch.xpack.core.action.ReloadAnalyzersResponse.ReloadDetails;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class ReloadAnalyzersResponseTests extends AbstractBroadcastResponseTestCase<ReloadAnalyzersResponse> {
@Override
protected ReloadAnalyzersResponse createTestInstance(int totalShards, int successfulShards, int failedShards,
List<DefaultShardOperationFailedException> failures) {
Map<String, List<String>> reloadedIndicesNodes = new HashMap<>();
Map<String, ReloadDetails> reloadedIndicesDetails = new HashMap<>();
int randomIndices = randomIntBetween(0, 5);
for (int i = 0; i < randomIndices; i++) {
List<String> randomNodeIds = Arrays.asList(generateRandomStringArray(5, 5, false, true));
reloadedIndicesNodes.put(randomAlphaOfLengthBetween(5, 10), randomNodeIds);
String name = randomAlphaOfLengthBetween(5, 10);
Set<String> reloadedIndicesNodes = new HashSet<>(Arrays.asList(generateRandomStringArray(5, 5, false, true)));
Set<String> reloadedAnalyzers = new HashSet<>(Arrays.asList(generateRandomStringArray(5, 5, false, true)));
reloadedIndicesDetails.put(name, new ReloadDetails(name, reloadedIndicesNodes, reloadedAnalyzers));
}
return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, failures, reloadedIndicesNodes);
return new ReloadAnalyzersResponse(totalShards, successfulShards, failedShards, failures, reloadedIndicesDetails);
}
@Override
@@ -39,12 +43,13 @@ public class ReloadAnalyzersResponseTests extends AbstractBroadcastResponseTestCase<ReloadAnalyzersResponse> {
@Override
public void testToXContent() {
Map<String, List<String>> reloadedIndicesNodes = Collections.singletonMap("index", Collections.singletonList("nodeId"));
Map<String, ReloadDetails> reloadedIndicesNodes = Collections.singletonMap("index",
new ReloadDetails("index", Collections.singleton("nodeId"), Collections.singleton("my_analyzer")));
ReloadAnalyzersResponse response = new ReloadAnalyzersResponse(10, 5, 5, null, reloadedIndicesNodes);
String output = Strings.toString(response);
assertEquals(
"{\"_shards\":{\"total\":10,\"successful\":5,\"failed\":5},"
+ "\"reloaded_nodes\":[{\"index\":\"index\",\"reloaded_node_ids\":[\"nodeId\"]}]"
+ "\"reload_details\":[{\"index\":\"index\",\"reloaded_analyzers\":[\"my_analyzer\"],\"reloaded_node_ids\":[\"nodeId\"]}]"
+ "}",
output);
}


@@ -15,8 +15,6 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin;
import org.elasticsearch.xpack.core.action.ReloadAnalyzerAction;
import org.elasticsearch.xpack.core.action.ReloadAnalyzersRequest;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -57,24 +55,26 @@ public class ReloadSynonymAnalyzerTests extends ESSingleNodeTestCase {
out.println("foo, baz");
}
assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder()
final String indexName = "test";
final String analyzerName = "my_synonym_analyzer";
assertAcked(client().admin().indices().prepareCreate(indexName).setSettings(Settings.builder()
.put("index.number_of_shards", 5)
.put("index.number_of_replicas", 0)
.put("analysis.analyzer.my_synonym_analyzer.tokenizer", "standard")
.putList("analysis.analyzer.my_synonym_analyzer.filter", "lowercase", "my_synonym_filter")
.put("analysis.analyzer." + analyzerName + ".tokenizer", "standard")
.putList("analysis.analyzer." + analyzerName + ".filter", "lowercase", "my_synonym_filter")
.put("analysis.filter.my_synonym_filter.type", "synonym")
.put("analysis.filter.my_synonym_filter.updateable", "true")
.put("analysis.filter.my_synonym_filter.synonyms_path", synonymsFileName))
.addMapping("_doc", "field", "type=text,analyzer=standard,search_analyzer=my_synonym_analyzer"));
.addMapping("_doc", "field", "type=text,analyzer=standard,search_analyzer=" + analyzerName));
client().prepareIndex("test", "_doc", "1").setSource("field", "Foo").get();
assertNoFailures(client().admin().indices().prepareRefresh("test").execute().actionGet());
client().prepareIndex(indexName, "_doc", "1").setSource("field", "Foo").get();
assertNoFailures(client().admin().indices().prepareRefresh(indexName).execute().actionGet());
SearchResponse response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get();
SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "baz")).get();
assertHitCount(response, 1L);
response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get();
response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "buzz")).get();
assertHitCount(response, 0L);
Response analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get();
Response analyzeResponse = client().admin().indices().prepareAnalyze(indexName, "foo").setAnalyzer(analyzerName).get();
assertEquals(2, analyzeResponse.getTokens().size());
assertEquals("foo", analyzeResponse.getTokens().get(0).getTerm());
assertEquals("baz", analyzeResponse.getTokens().get(1).getTerm());
@@ -84,9 +84,14 @@ public class ReloadSynonymAnalyzerTests extends ESSingleNodeTestCase {
new OutputStreamWriter(Files.newOutputStream(synonymsFile, StandardOpenOption.WRITE), StandardCharsets.UTF_8))) {
out.println("foo, baz, buzz");
}
assertNoFailures(client().execute(ReloadAnalyzerAction.INSTANCE, new ReloadAnalyzersRequest("test")).actionGet());
ReloadAnalyzersResponse reloadResponse = client().execute(ReloadAnalyzerAction.INSTANCE, new ReloadAnalyzersRequest(indexName))
.actionGet();
assertNoFailures(reloadResponse);
Set<String> reloadedAnalyzers = reloadResponse.getReloadDetails().get(indexName).getReloadedAnalyzers();
assertEquals(1, reloadedAnalyzers.size());
assertTrue(reloadedAnalyzers.contains(analyzerName));
analyzeResponse = client().admin().indices().prepareAnalyze("test", "Foo").setAnalyzer("my_synonym_analyzer").get();
analyzeResponse = client().admin().indices().prepareAnalyze(indexName, "Foo").setAnalyzer(analyzerName).get();
assertEquals(3, analyzeResponse.getTokens().size());
Set<String> tokens = new HashSet<>();
analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t));
@@ -94,9 +99,9 @@ public class ReloadSynonymAnalyzerTests extends ESSingleNodeTestCase {
assertTrue(tokens.contains("baz"));
assertTrue(tokens.contains("buzz"));
response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get();
response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "baz")).get();
assertHitCount(response, 1L);
response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")).get();
response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "buzz")).get();
assertHitCount(response, 1L);
}
}


@@ -31,6 +31,7 @@ import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
@@ -102,6 +103,10 @@ public class ReloadSynonymAnalyzerIT extends ESIntegTestCase {
.actionGet();
assertNoFailures(reloadResponse);
assertEquals(cluster().numDataNodes(), reloadResponse.getSuccessfulShards());
assertTrue(reloadResponse.getReloadDetails().containsKey("test"));
assertEquals("test", reloadResponse.getReloadDetails().get("test").getIndexName());
assertEquals(Collections.singleton("my_synonym_analyzer"),
reloadResponse.getReloadDetails().get("test").getReloadedAnalyzers());
analyzeResponse = client().admin().indices().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get();
assertEquals(3, analyzeResponse.getTokens().size());