Rename MetaData to Metadata in all of the places (#54519)
This is a simple naming-change PR: "metadata" is a single English word, and for too long we have not followed our general naming conventions for it. We have not even been consistent about it; for example, we write METADATA rather than META_DATA, which is what consistency with MetaData would demand (METADATA is in fact the correct form, since "metadata" is one word). The change itself was a straightforward find-and-replace across the code base, taking only a few minutes to settle this naming issue for good.
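For illustration, here is a minimal sketch of how a typical call site reads after the rename. It is adapted from the AllocationBenchmark hunk below; the wrapper class name MetadataRenameExample and the literal shard/replica counts are hypothetical and not part of this commit:

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.settings.Settings;

public class MetadataRenameExample {
    // Before this commit the same code used MetaData.builder() and IndexMetaData.builder(...);
    // only the class names change, the builder API stays the same.
    static Metadata buildExampleMetadata() {
        Metadata.Builder mb = Metadata.builder();
        mb.put(IndexMetadata.builder("test_1")
            .settings(Settings.builder().put("index.version.created", Version.CURRENT))
            .numberOfShards(1)
            .numberOfReplicas(0));
        return mb.build();
    }
}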
parent 114894dd76
commit 5fcda57b37
@@ -21,8 +21,8 @@ package org.elasticsearch.benchmark.routing.allocation;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRoutingState;
@@ -127,19 +127,19 @@ public class AllocationBenchmark {
Settings.builder().put("cluster.routing.allocation.awareness.attributes", "tag").build()
);

-MetaData.Builder mb = MetaData.builder();
+Metadata.Builder mb = Metadata.builder();
for (int i = 1; i <= numIndices; i++) {
mb.put(
-IndexMetaData.builder("test_" + i)
+IndexMetadata.builder("test_" + i)
.settings(Settings.builder().put("index.version.created", Version.CURRENT))
.numberOfShards(numShards)
.numberOfReplicas(numReplicas)
);
}
-MetaData metaData = mb.build();
+Metadata metadata = mb.build();
RoutingTable.Builder rb = RoutingTable.builder();
for (int i = 1; i <= numIndices; i++) {
-rb.addAsNew(metaData.index("test_" + i));
+rb.addAsNew(metadata.index("test_" + i));
}
RoutingTable routingTable = rb.build();
DiscoveryNodes.Builder nb = DiscoveryNodes.builder();
@@ -147,7 +147,7 @@ public class AllocationBenchmark {
nb.add(Allocators.newNode("node" + i, Collections.singletonMap("tag", "tag_" + (i % numTags))));
}
initialClusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
-.metaData(metaData)
+.metadata(metadata)
.routingTable(routingTable)
.nodes(nb)
.build();

@@ -63,18 +63,18 @@ public final class RestClientBenchmark extends AbstractBenchmark<RestClient> {

private static final class RestBulkRequestExecutor implements BulkRequestExecutor {
private final RestClient client;
-private final String actionMetaData;
+private final String actionMetadata;

RestBulkRequestExecutor(RestClient client, String index, String type) {
this.client = client;
-this.actionMetaData = String.format(Locale.ROOT, "{ \"index\" : { \"_index\" : \"%s\", \"_type\" : \"%s\" } }%n", index, type);
+this.actionMetadata = String.format(Locale.ROOT, "{ \"index\" : { \"_index\" : \"%s\", \"_type\" : \"%s\" } }%n", index, type);
}

@Override
public boolean bulkIndex(List<String> bulkData) {
StringBuilder bulkRequestBody = new StringBuilder();
for (String bulkItem : bulkData) {
-bulkRequestBody.append(actionMetaData);
+bulkRequestBody.append(actionMetadata);
bulkRequestBody.append(bulkItem);
bulkRequestBody.append("\n");
}

@@ -20,7 +20,7 @@
package org.elasticsearch.client;

import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.cluster.metadata.AliasMetaData;
+import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -52,9 +52,9 @@ public class GetAliasesResponse implements StatusToXContentObject {
private final String error;
private final ElasticsearchException exception;

-private final Map<String, Set<AliasMetaData>> aliases;
+private final Map<String, Set<AliasMetadata>> aliases;

-GetAliasesResponse(RestStatus status, String error, Map<String, Set<AliasMetaData>> aliases) {
+GetAliasesResponse(RestStatus status, String error, Map<String, Set<AliasMetadata>> aliases) {
this.status = status;
this.error = error;
this.aliases = aliases;
@@ -90,7 +90,7 @@ public class GetAliasesResponse implements StatusToXContentObject {
/**
* Return the requested aliases
*/
-public Map<String, Set<AliasMetaData>> getAliases() {
+public Map<String, Set<AliasMetadata>> getAliases() {
return aliases;
}

@@ -103,13 +103,13 @@ public class GetAliasesResponse implements StatusToXContentObject {
builder.field("status", status.getStatus());
}

-for (Map.Entry<String, Set<AliasMetaData>> entry : aliases.entrySet()) {
+for (Map.Entry<String, Set<AliasMetadata>> entry : aliases.entrySet()) {
builder.startObject(entry.getKey());
{
builder.startObject("aliases");
{
-for (final AliasMetaData alias : entry.getValue()) {
-AliasMetaData.Builder.toXContent(alias, builder, ToXContent.EMPTY_PARAMS);
+for (final AliasMetadata alias : entry.getValue()) {
+AliasMetadata.Builder.toXContent(alias, builder, ToXContent.EMPTY_PARAMS);
}
}
builder.endObject();
@@ -129,7 +129,7 @@ public class GetAliasesResponse implements StatusToXContentObject {
parser.nextToken();
}
ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
-Map<String, Set<AliasMetaData>> aliases = new HashMap<>();
+Map<String, Set<AliasMetadata>> aliases = new HashMap<>();

String currentFieldName;
Token token;
@@ -159,7 +159,7 @@ public class GetAliasesResponse implements StatusToXContentObject {
} else {
String indexName = parser.currentName();
if (parser.nextToken() == Token.START_OBJECT) {
-Set<AliasMetaData> parseInside = parseAliases(parser);
+Set<AliasMetadata> parseInside = parseAliases(parser);
aliases.put(indexName, parseInside);
}
}
@@ -173,8 +173,8 @@ public class GetAliasesResponse implements StatusToXContentObject {
return new GetAliasesResponse(status, error, aliases);
}

-private static Set<AliasMetaData> parseAliases(XContentParser parser) throws IOException {
-Set<AliasMetaData> aliases = new HashSet<>();
+private static Set<AliasMetadata> parseAliases(XContentParser parser) throws IOException {
+Set<AliasMetadata> aliases = new HashSet<>();
Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
@@ -183,7 +183,7 @@ public class GetAliasesResponse implements StatusToXContentObject {
} else if (token == Token.START_OBJECT) {
if ("aliases".equals(currentFieldName)) {
while (parser.nextToken() != Token.END_OBJECT) {
-AliasMetaData fromXContent = AliasMetaData.Builder.fromXContent(parser);
+AliasMetadata fromXContent = AliasMetadata.Builder.fromXContent(parser);
aliases.add(fromXContent);
}
} else {

@@ -54,7 +54,7 @@ import org.elasticsearch.client.indices.ReloadAnalyzersRequest;
import org.elasticsearch.client.indices.ResizeRequest;
import org.elasticsearch.client.indices.UnfreezeIndexRequest;
import org.elasticsearch.client.indices.rollover.RolloverRequest;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;

import java.io.IOException;
@@ -326,7 +326,7 @@ final class IndicesRequestConverters {
}

static Request split(ResizeRequest resizeRequest) throws IOException {
-if (IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.exists(resizeRequest.getSettings()) == false) {
+if (IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.exists(resizeRequest.getSettings()) == false) {
throw new IllegalArgumentException("index.number_of_shards is required for split operations");
}
return resize(resizeRequest, ResizeType.SPLIT);

@@ -98,7 +98,7 @@ public class StringStatsAggregationBuilder extends ValuesSourceAggregationBuilde
}

@Override
-protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) {
+protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
throw new UnsupportedOperationException();
}

@@ -106,7 +106,7 @@ public class TopMetricsAggregationBuilder extends AbstractAggregationBuilder<Top
}

@Override
-protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) {
+protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
throw new UnsupportedOperationException();
}
}

@@ -43,7 +43,7 @@ public class GetFieldMappingsResponse {

private static final ParseField MAPPINGS = new ParseField("mappings");

-private static final ObjectParser<Map<String, FieldMappingMetaData>, String> PARSER =
+private static final ObjectParser<Map<String, FieldMappingMetadata>, String> PARSER =
new ObjectParser<>(MAPPINGS.getPreferredName(), true, HashMap::new);

static {
@@ -51,16 +51,16 @@ public class GetFieldMappingsResponse {
p.nextToken();
while (p.currentToken() == XContentParser.Token.FIELD_NAME) {
final String fieldName = p.currentName();
-final FieldMappingMetaData fieldMappingMetaData = FieldMappingMetaData.fromXContent(p);
-fieldMappings.put(fieldName, fieldMappingMetaData);
+final FieldMappingMetadata fieldMappingMetadata = FieldMappingMetadata.fromXContent(p);
+fieldMappings.put(fieldName, fieldMappingMetadata);
p.nextToken();
}
}, MAPPINGS, ObjectParser.ValueType.OBJECT);
}

-private Map<String, Map<String, FieldMappingMetaData>> mappings;
+private Map<String, Map<String, FieldMappingMetadata>> mappings;

-GetFieldMappingsResponse(Map<String, Map<String, FieldMappingMetaData>> mappings) {
+GetFieldMappingsResponse(Map<String, Map<String, FieldMappingMetadata>> mappings) {
this.mappings = mappings;
}

@@ -68,7 +68,7 @@ public class GetFieldMappingsResponse {
/**
* Returns the fields mapping. The return map keys are indexes and fields (as specified in the request).
*/
-public Map<String, Map<String, FieldMappingMetaData>> mappings() {
+public Map<String, Map<String, FieldMappingMetadata>> mappings() {
return mappings;
}

@@ -76,10 +76,10 @@ public class GetFieldMappingsResponse {
* Returns the mappings of a specific index and field.
*
* @param field field name as specified in the {@link GetFieldMappingsRequest}
-* @return FieldMappingMetaData for the requested field or null if not found.
+* @return FieldMappingMetadata for the requested field or null if not found.
*/
-public FieldMappingMetaData fieldMappings(String index, String field) {
-Map<String, FieldMappingMetaData> indexMapping = mappings.get(index);
+public FieldMappingMetadata fieldMappings(String index, String field) {
+Map<String, FieldMappingMetadata> indexMapping = mappings.get(index);
if (indexMapping == null) {
return null;
}
@@ -89,11 +89,11 @@ public class GetFieldMappingsResponse {

public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
-final Map<String, Map<String, FieldMappingMetaData>> mappings = new HashMap<>();
+final Map<String, Map<String, FieldMappingMetadata>> mappings = new HashMap<>();
if (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
while (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
final String index = parser.currentName();
-final Map<String, FieldMappingMetaData> fieldMappings = PARSER.parse(parser, index);
+final Map<String, FieldMappingMetadata> fieldMappings = PARSER.parse(parser, index);
mappings.put(index, fieldMappings);
parser.nextToken();
}
@@ -101,13 +101,13 @@ public class GetFieldMappingsResponse {
return new GetFieldMappingsResponse(mappings);
}

-public static class FieldMappingMetaData {
+public static class FieldMappingMetadata {
private static final ParseField FULL_NAME = new ParseField("full_name");
private static final ParseField MAPPING = new ParseField("mapping");

-private static final ConstructingObjectParser<FieldMappingMetaData, String> PARSER =
+private static final ConstructingObjectParser<FieldMappingMetadata, String> PARSER =
new ConstructingObjectParser<>("field_mapping_meta_data", true,
-a -> new FieldMappingMetaData((String)a[0], (BytesReference)a[1])
+a -> new FieldMappingMetadata((String)a[0], (BytesReference)a[1])
);

static {
@@ -124,7 +124,7 @@ public class GetFieldMappingsResponse {
private String fullName;
private BytesReference source;

-public FieldMappingMetaData(String fullName, BytesReference source) {
+public FieldMappingMetadata(String fullName, BytesReference source) {
this.fullName = fullName;
this.source = source;
}
@@ -145,20 +145,20 @@ public class GetFieldMappingsResponse {
return source;
}

-public static FieldMappingMetaData fromXContent(XContentParser parser) throws IOException {
+public static FieldMappingMetadata fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}

@Override
public String toString() {
-return "FieldMappingMetaData{fullName='" + fullName + '\'' + ", source=" + source + '}';
+return "FieldMappingMetadata{fullName='" + fullName + '\'' + ", source=" + source + '}';
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
-if (!(o instanceof FieldMappingMetaData)) return false;
-FieldMappingMetaData that = (FieldMappingMetaData) o;
+if (!(o instanceof FieldMappingMetadata)) return false;
+FieldMappingMetadata that = (FieldMappingMetadata) o;
return Objects.equals(fullName, that.fullName) && Objects.equals(source, that.source);
}

@@ -20,8 +20,8 @@
package org.elasticsearch.client.indices;

import org.apache.lucene.util.CollectionUtil;
-import org.elasticsearch.cluster.metadata.AliasMetaData;
-import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.cluster.metadata.AliasMetadata;
+import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -43,15 +43,15 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
*/
public class GetIndexResponse {

-private Map<String, MappingMetaData> mappings;
-private Map<String, List<AliasMetaData>> aliases;
+private Map<String, MappingMetadata> mappings;
+private Map<String, List<AliasMetadata>> aliases;
private Map<String, Settings> settings;
private Map<String, Settings> defaultSettings;
private String[] indices;

GetIndexResponse(String[] indices,
-Map<String, MappingMetaData> mappings,
-Map<String, List<AliasMetaData>> aliases,
+Map<String, MappingMetadata> mappings,
+Map<String, List<AliasMetadata>> aliases,
Map<String, Settings> settings,
Map<String, Settings> defaultSettings) {
this.indices = indices;
@@ -75,11 +75,11 @@ public class GetIndexResponse {
return indices;
}

-public Map<String, MappingMetaData> getMappings() {
+public Map<String, MappingMetadata> getMappings() {
return mappings;
}

-public Map<String, List<AliasMetaData>> getAliases() {
+public Map<String, List<AliasMetadata>> getAliases() {
return aliases;
}

@@ -123,23 +123,23 @@ public class GetIndexResponse {
}
}

-private static List<AliasMetaData> parseAliases(XContentParser parser) throws IOException {
-List<AliasMetaData> indexAliases = new ArrayList<>();
+private static List<AliasMetadata> parseAliases(XContentParser parser) throws IOException {
+List<AliasMetadata> indexAliases = new ArrayList<>();
// We start at START_OBJECT since parseIndexEntry ensures that
while (parser.nextToken() != Token.END_OBJECT) {
ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
-indexAliases.add(AliasMetaData.Builder.fromXContent(parser));
+indexAliases.add(AliasMetadata.Builder.fromXContent(parser));
}
return indexAliases;
}

-private static MappingMetaData parseMappings(XContentParser parser) throws IOException {
-return new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, parser.map());
+private static MappingMetadata parseMappings(XContentParser parser) throws IOException {
+return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, parser.map());
}

private static IndexEntry parseIndexEntry(XContentParser parser) throws IOException {
-List<AliasMetaData> indexAliases = null;
-MappingMetaData indexMappings = null;
+List<AliasMetadata> indexAliases = null;
+MappingMetadata indexMappings = null;
Settings indexSettings = null;
Settings indexDefaultSettings = null;
// We start at START_OBJECT since fromXContent ensures that
@@ -172,11 +172,11 @@ public class GetIndexResponse {

// This is just an internal container to make stuff easier for returning
private static class IndexEntry {
-List<AliasMetaData> indexAliases = new ArrayList<>();
-MappingMetaData indexMappings;
+List<AliasMetadata> indexAliases = new ArrayList<>();
+MappingMetadata indexMappings;
Settings indexSettings = Settings.EMPTY;
Settings indexDefaultSettings = Settings.EMPTY;
-IndexEntry(List<AliasMetaData> indexAliases, MappingMetaData indexMappings, Settings indexSettings, Settings indexDefaultSettings) {
+IndexEntry(List<AliasMetadata> indexAliases, MappingMetadata indexMappings, Settings indexSettings, Settings indexDefaultSettings) {
if (indexAliases != null) this.indexAliases = indexAliases;
if (indexMappings != null) this.indexMappings = indexMappings;
if (indexSettings != null) this.indexSettings = indexSettings;
@@ -185,8 +185,8 @@ public class GetIndexResponse {
}

public static GetIndexResponse fromXContent(XContentParser parser) throws IOException {
-Map<String, List<AliasMetaData>> aliases = new HashMap<>();
-Map<String, MappingMetaData> mappings = new HashMap<>();
+Map<String, List<AliasMetadata>> aliases = new HashMap<>();
+Map<String, MappingMetadata> mappings = new HashMap<>();
Map<String, Settings> settings = new HashMap<>();
Map<String, Settings> defaultSettings = new HashMap<>();
List<String> indices = new ArrayList<>();
@@ -204,7 +204,7 @@ public class GetIndexResponse {
indices.add(indexName);
IndexEntry indexEntry = parseIndexEntry(parser);
// make the order deterministic
-CollectionUtil.timSort(indexEntry.indexAliases, Comparator.comparing(AliasMetaData::alias));
+CollectionUtil.timSort(indexEntry.indexAliases, Comparator.comparing(AliasMetadata::alias));
aliases.put(indexName, Collections.unmodifiableList(indexEntry.indexAliases));
mappings.put(indexName, indexEntry.indexMappings);
settings.put(indexName, indexEntry.indexSettings);

@@ -31,32 +31,32 @@ public class GetIndexTemplatesResponse {

@Override
public String toString() {
-List<IndexTemplateMetaData> thisList = new ArrayList<>(this.indexTemplates);
-thisList.sort(Comparator.comparing(IndexTemplateMetaData::name));
+List<IndexTemplateMetadata> thisList = new ArrayList<>(this.indexTemplates);
+thisList.sort(Comparator.comparing(IndexTemplateMetadata::name));
return "GetIndexTemplatesResponse [indexTemplates=" + thisList + "]";
}

-private final List<IndexTemplateMetaData> indexTemplates;
+private final List<IndexTemplateMetadata> indexTemplates;

GetIndexTemplatesResponse() {
indexTemplates = new ArrayList<>();
}

-GetIndexTemplatesResponse(List<IndexTemplateMetaData> indexTemplates) {
+GetIndexTemplatesResponse(List<IndexTemplateMetadata> indexTemplates) {
this.indexTemplates = indexTemplates;
}

-public List<IndexTemplateMetaData> getIndexTemplates() {
+public List<IndexTemplateMetadata> getIndexTemplates() {
return indexTemplates;
}


public static GetIndexTemplatesResponse fromXContent(XContentParser parser) throws IOException {
-final List<IndexTemplateMetaData> templates = new ArrayList<>();
+final List<IndexTemplateMetadata> templates = new ArrayList<>();
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
if (token == XContentParser.Token.FIELD_NAME) {
-final IndexTemplateMetaData templateMetaData = IndexTemplateMetaData.Builder.fromXContent(parser, parser.currentName());
-templates.add(templateMetaData);
+final IndexTemplateMetadata templateMetadata = IndexTemplateMetadata.Builder.fromXContent(parser, parser.currentName());
+templates.add(templateMetadata);
}
}
return new GetIndexTemplatesResponse(templates);
@@ -64,8 +64,8 @@ public class GetIndexTemplatesResponse {

@Override
public int hashCode() {
-List<IndexTemplateMetaData> sortedList = new ArrayList<>(this.indexTemplates);
-sortedList.sort(Comparator.comparing(IndexTemplateMetaData::name));
+List<IndexTemplateMetadata> sortedList = new ArrayList<>(this.indexTemplates);
+sortedList.sort(Comparator.comparing(IndexTemplateMetadata::name));
return Objects.hash(sortedList);
}

@@ -79,10 +79,10 @@ public class GetIndexTemplatesResponse {
return false;
// To compare results we need to make sure the templates are listed in the same order
GetIndexTemplatesResponse other = (GetIndexTemplatesResponse) obj;
-List<IndexTemplateMetaData> thisList = new ArrayList<>(this.indexTemplates);
-List<IndexTemplateMetaData> otherList = new ArrayList<>(other.indexTemplates);
-thisList.sort(Comparator.comparing(IndexTemplateMetaData::name));
-otherList.sort(Comparator.comparing(IndexTemplateMetaData::name));
+List<IndexTemplateMetadata> thisList = new ArrayList<>(this.indexTemplates);
+List<IndexTemplateMetadata> otherList = new ArrayList<>(other.indexTemplates);
+thisList.sort(Comparator.comparing(IndexTemplateMetadata::name));
+otherList.sort(Comparator.comparing(IndexTemplateMetadata::name));
return Objects.equals(thisList, otherList);
}

@@ -19,7 +19,7 @@

package org.elasticsearch.client.indices;

-import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
@@ -33,13 +33,13 @@ public class GetMappingsResponse {

static final ParseField MAPPINGS = new ParseField("mappings");

-private Map<String, MappingMetaData> mappings;
+private Map<String, MappingMetadata> mappings;

-public GetMappingsResponse(Map<String, MappingMetaData> mappings) {
+public GetMappingsResponse(Map<String, MappingMetadata> mappings) {
this.mappings = mappings;
}

-public Map<String, MappingMetaData> mappings() {
+public Map<String, MappingMetadata> mappings() {
return mappings;
}

@@ -54,7 +54,7 @@ public class GetMappingsResponse {

Map<String, Object> parts = parser.map();

-Map<String, MappingMetaData> mappings = new HashMap<>();
+Map<String, MappingMetadata> mappings = new HashMap<>();
for (Map.Entry<String, Object> entry : parts.entrySet()) {
String indexName = entry.getKey();
assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass();
@@ -63,7 +63,7 @@ public class GetMappingsResponse {
final Map<String, Object> fieldMappings = (Map<String, Object>) ((Map<String, ?>) entry.getValue())
.get(MAPPINGS.getPreferredName());

-mappings.put(indexName, new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, fieldMappings));
+mappings.put(indexName, new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, fieldMappings));
}

return new GetMappingsResponse(mappings);

@@ -18,9 +18,9 @@
*/
package org.elasticsearch.client.indices;

-import org.elasticsearch.cluster.metadata.AliasMetaData;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.cluster.metadata.AliasMetadata;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.collect.ImmutableOpenMap;
@@ -38,23 +38,23 @@ import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

-public class IndexTemplateMetaData {
+public class IndexTemplateMetadata {

@SuppressWarnings("unchecked")
-private static final ConstructingObjectParser<IndexTemplateMetaData, String> PARSER = new ConstructingObjectParser<>(
-"IndexTemplateMetaData", true, (a, name) -> {
-List<Map.Entry<String, AliasMetaData>> alias = (List<Map.Entry<String, AliasMetaData>>) a[5];
-ImmutableOpenMap<String, AliasMetaData> aliasMap =
-new ImmutableOpenMap.Builder<String, AliasMetaData>()
+private static final ConstructingObjectParser<IndexTemplateMetadata, String> PARSER = new ConstructingObjectParser<>(
+"IndexTemplateMetadata", true, (a, name) -> {
+List<Map.Entry<String, AliasMetadata>> alias = (List<Map.Entry<String, AliasMetadata>>) a[5];
+ImmutableOpenMap<String, AliasMetadata> aliasMap =
+new ImmutableOpenMap.Builder<String, AliasMetadata>()
.putAll(alias.stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
.build();
-return new IndexTemplateMetaData(
+return new IndexTemplateMetadata(
name,
(Integer) a[0],
(Integer) a[1],
(List<String>) a[2],
(Settings) a[3],
-(MappingMetaData) a[4],
+(MappingMetadata) a[4],
aliasMap);
});

@@ -65,7 +65,7 @@ public class IndexTemplateMetaData {
PARSER.declareObject(optionalConstructorArg(), (p, c) -> {
Settings.Builder templateSettingsBuilder = Settings.builder();
templateSettingsBuilder.put(Settings.fromXContent(p));
-templateSettingsBuilder.normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
+templateSettingsBuilder.normalizePrefix(IndexMetadata.INDEX_SETTING_PREFIX);
return templateSettingsBuilder.build();
}, new ParseField("settings"));
PARSER.declareObject(optionalConstructorArg(), (p, c) -> {
@@ -73,10 +73,10 @@ public class IndexTemplateMetaData {
if (mapping.isEmpty()) {
return null;
}
-return new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, mapping);
+return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mapping);
}, new ParseField("mappings"));
PARSER.declareNamedObjects(optionalConstructorArg(),
-(p, c, name) -> new AbstractMap.SimpleEntry<>(name, AliasMetaData.Builder.fromXContent(p)), new ParseField("aliases"));
+(p, c, name) -> new AbstractMap.SimpleEntry<>(name, AliasMetadata.Builder.fromXContent(p)), new ParseField("aliases"));
}

private final String name;
@@ -107,14 +107,14 @@ public class IndexTemplateMetaData {

private final Settings settings;

-private final MappingMetaData mappings;
+private final MappingMetadata mappings;

-private final ImmutableOpenMap<String, AliasMetaData> aliases;
+private final ImmutableOpenMap<String, AliasMetadata> aliases;

-public IndexTemplateMetaData(String name, int order, Integer version,
+public IndexTemplateMetadata(String name, int order, Integer version,
List<String> patterns, Settings settings,
-MappingMetaData mappings,
-ImmutableOpenMap<String, AliasMetaData> aliases) {
+MappingMetadata mappings,
+ImmutableOpenMap<String, AliasMetadata> aliases) {
if (patterns == null || patterns.isEmpty()) {
throw new IllegalArgumentException("Index patterns must not be null or empty; got " + patterns);
}
@@ -148,11 +148,11 @@ public class IndexTemplateMetaData {
return this.settings;
}

-public MappingMetaData mappings() {
+public MappingMetadata mappings() {
return this.mappings;
}

-public ImmutableOpenMap<String, AliasMetaData> aliases() {
+public ImmutableOpenMap<String, AliasMetadata> aliases() {
return this.aliases;
}

@@ -164,7 +164,7 @@ public class IndexTemplateMetaData {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
-IndexTemplateMetaData that = (IndexTemplateMetaData) o;
+IndexTemplateMetadata that = (IndexTemplateMetadata) o;
return order == that.order &&
Objects.equals(name, that.name) &&
Objects.equals(version, that.version) &&
@@ -191,9 +191,9 @@ public class IndexTemplateMetaData {

private Settings settings = Settings.Builder.EMPTY_SETTINGS;

-private MappingMetaData mappings;
+private MappingMetadata mappings;

-private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases;
+private final ImmutableOpenMap.Builder<String, AliasMetadata> aliases;

public Builder(String name) {
this.name = name;
@@ -201,15 +201,15 @@ public class IndexTemplateMetaData {
aliases = ImmutableOpenMap.builder();
}

-public Builder(IndexTemplateMetaData indexTemplateMetaData) {
-this.name = indexTemplateMetaData.name();
-order(indexTemplateMetaData.order());
-version(indexTemplateMetaData.version());
-patterns(indexTemplateMetaData.patterns());
-settings(indexTemplateMetaData.settings());
+public Builder(IndexTemplateMetadata indexTemplateMetadata) {
+this.name = indexTemplateMetadata.name();
+order(indexTemplateMetadata.order());
+version(indexTemplateMetadata.version());
+patterns(indexTemplateMetadata.patterns());
+settings(indexTemplateMetadata.settings());

-mappings = indexTemplateMetaData.mappings();
-aliases = ImmutableOpenMap.builder(indexTemplateMetaData.aliases());
+mappings = indexTemplateMetadata.mappings();
+aliases = ImmutableOpenMap.builder(indexTemplateMetadata.aliases());
}

public Builder order(int order) {
@@ -237,27 +237,27 @@ public class IndexTemplateMetaData {
return this;
}

-public Builder mapping(MappingMetaData mappings) {
+public Builder mapping(MappingMetadata mappings) {
this.mappings = mappings;
return this;
}

-public Builder putAlias(AliasMetaData aliasMetaData) {
-aliases.put(aliasMetaData.alias(), aliasMetaData);
+public Builder putAlias(AliasMetadata aliasMetadata) {
+aliases.put(aliasMetadata.alias(), aliasMetadata);
return this;
}

-public Builder putAlias(AliasMetaData.Builder aliasMetaData) {
-aliases.put(aliasMetaData.alias(), aliasMetaData.build());
+public Builder putAlias(AliasMetadata.Builder aliasMetadata) {
+aliases.put(aliasMetadata.alias(), aliasMetadata.build());
return this;
}

-public IndexTemplateMetaData build() {
-return new IndexTemplateMetaData(name, order, version, indexPatterns, settings, mappings, aliases.build());
+public IndexTemplateMetadata build() {
+return new IndexTemplateMetadata(name, order, version, indexPatterns, settings, mappings, aliases.build());
}


-public static IndexTemplateMetaData fromXContent(XContentParser parser, String templateName) throws IOException {
+public static IndexTemplateMetadata fromXContent(XContentParser parser, String templateName) throws IOException {
return PARSER.parse(parser, templateName);
}
}

@@ -19,7 +19,7 @@
package org.elasticsearch.client.rollup;

import org.elasticsearch.client.Validatable;
-import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -33,7 +33,7 @@ public class GetRollupCapsRequest implements Validatable, ToXContentObject {

public GetRollupCapsRequest(final String indexPattern) {
if (Strings.isNullOrEmpty(indexPattern) || indexPattern.equals("*")) {
-this.indexPattern = MetaData.ALL;
+this.indexPattern = Metadata.ALL;
} else {
this.indexPattern = indexPattern;
}

@@ -21,11 +21,11 @@ package org.elasticsearch.client.watcher;

import java.util.Objects;

-public class WatcherMetaData {
+public class WatcherMetadata {

private final boolean manuallyStopped;

-public WatcherMetaData(boolean manuallyStopped) {
+public WatcherMetadata(boolean manuallyStopped) {
this.manuallyStopped = manuallyStopped;
}

@@ -42,7 +42,7 @@ public class WatcherMetaData {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;

-WatcherMetaData action = (WatcherMetaData) o;
+WatcherMetadata action = (WatcherMetadata) o;

return manuallyStopped == action.manuallyStopped;
}

@@ -40,21 +40,21 @@ public class WatcherStatsResponse {
private final NodesResponseHeader header;
private final String clusterName;

-private final WatcherMetaData watcherMetaData;
+private final WatcherMetadata watcherMetadata;

-public WatcherStatsResponse(NodesResponseHeader header, String clusterName, WatcherMetaData watcherMetaData, List<Node> nodes) {
+public WatcherStatsResponse(NodesResponseHeader header, String clusterName, WatcherMetadata watcherMetadata, List<Node> nodes) {
this.nodes = nodes;
this.header = header;
this.clusterName = clusterName;
-this.watcherMetaData = watcherMetaData;
+this.watcherMetadata = watcherMetadata;
}

/**
* @return the status of the requested watch. If an action was
* successfully acknowledged, this will be reflected in its status.
*/
-public WatcherMetaData getWatcherMetaData() {
-return watcherMetaData;
+public WatcherMetadata getWatcherMetadata() {
+return watcherMetadata;
}

/**
@@ -84,7 +84,7 @@ public class WatcherStatsResponse {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<WatcherStatsResponse, Void> PARSER =
new ConstructingObjectParser<>("watcher_stats_response", true,
-a -> new WatcherStatsResponse((NodesResponseHeader) a[0], (String) a[1], new WatcherMetaData((boolean) a[2]),
+a -> new WatcherStatsResponse((NodesResponseHeader) a[0], (String) a[1], new WatcherMetadata((boolean) a[2]),
(List<Node>) a[3]));

static {
@@ -107,13 +107,13 @@ public class WatcherStatsResponse {
return Objects.equals(nodes, that.nodes) &&
Objects.equals(header, that.header) &&
Objects.equals(clusterName, that.clusterName) &&
-Objects.equals(watcherMetaData, that.watcherMetaData);
+Objects.equals(watcherMetadata, that.watcherMetadata);
}

@Override
public int hashCode() {

-return Objects.hash(nodes, header, clusterName, watcherMetaData);
+return Objects.hash(nodes, header, clusterName, watcherMetadata);
}

public static class Node {

@@ -19,7 +19,7 @@

package org.elasticsearch.client;

-import org.elasticsearch.cluster.metadata.AliasMetaData;
+import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.rest.RestStatus;
@@ -44,23 +44,23 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
return new GetAliasesResponse(status, errorMessage, createIndicesAliasesMap(0, 5));
}

-private static Map<String, Set<AliasMetaData>> createIndicesAliasesMap(int min, int max) {
-Map<String, Set<AliasMetaData>> map = new HashMap<>();
+private static Map<String, Set<AliasMetadata>> createIndicesAliasesMap(int min, int max) {
+Map<String, Set<AliasMetadata>> map = new HashMap<>();
int indicesNum = randomIntBetween(min, max);
for (int i = 0; i < indicesNum; i++) {
String index = randomAlphaOfLength(5);
-Set<AliasMetaData> aliasMetaData = new HashSet<>();
+Set<AliasMetadata> aliasMetadata = new HashSet<>();
int aliasesNum = randomIntBetween(0, 3);
for (int alias = 0; alias < aliasesNum; alias++) {
-aliasMetaData.add(createAliasMetaData());
+aliasMetadata.add(createAliasMetadata());
}
-map.put(index, aliasMetaData);
+map.put(index, aliasMetadata);
}
return map;
}

-public static AliasMetaData createAliasMetaData() {
-AliasMetaData.Builder builder = AliasMetaData.builder(randomAlphaOfLengthBetween(3, 10));
+public static AliasMetadata createAliasMetadata() {
+AliasMetadata.Builder builder = AliasMetadata.builder(randomAlphaOfLengthBetween(3, 10));
if (randomBoolean()) {
builder.routing(randomAlphaOfLengthBetween(3, 10));
}
@@ -85,7 +85,7 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
protected Predicate<String> getRandomFieldsExcludeFilter() {
return p -> p.equals("") // do not add elements at the top-level as any element at this level is parsed as a new index
|| p.endsWith(".aliases") // do not add new alias
-|| p.contains(".filter"); // do not insert random data into AliasMetaData#filter
+|| p.contains(".filter"); // do not insert random data into AliasMetadata#filter
}

@Override
@@ -167,8 +167,8 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
assertThat(response.getError(), equalTo("alias [something] missing"));
assertThat(response.getAliases().size(), equalTo(1));
assertThat(response.getAliases().get(index).size(), equalTo(1));
-AliasMetaData aliasMetaData = response.getAliases().get(index).iterator().next();
-assertThat(aliasMetaData.alias(), equalTo("alias"));
+AliasMetadata aliasMetadata = response.getAliases().get(index).iterator().next();
+assertThat(aliasMetadata.alias(), equalTo("alias"));
assertThat(response.getException(), nullValue());
}
}

@ -71,7 +71,7 @@ import org.elasticsearch.client.indices.GetIndexTemplatesRequest;
|
|||
import org.elasticsearch.client.indices.GetIndexTemplatesResponse;
|
||||
import org.elasticsearch.client.indices.GetMappingsRequest;
|
||||
import org.elasticsearch.client.indices.GetMappingsResponse;
|
||||
import org.elasticsearch.client.indices.IndexTemplateMetaData;
|
||||
import org.elasticsearch.client.indices.IndexTemplateMetadata;
|
||||
import org.elasticsearch.client.indices.IndexTemplatesExistRequest;
|
||||
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
|
||||
import org.elasticsearch.client.indices.PutMappingRequest;
|
||||
|
@ -80,9 +80,9 @@ import org.elasticsearch.client.indices.ReloadAnalyzersResponse;
|
|||
import org.elasticsearch.client.indices.UnfreezeIndexRequest;
|
||||
import org.elasticsearch.client.indices.rollover.RolloverRequest;
|
||||
import org.elasticsearch.client.indices.rollover.RolloverResponse;
|
||||
import org.elasticsearch.cluster.metadata.AliasMetaData;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.metadata.MappingMetaData;
|
||||
import org.elasticsearch.cluster.metadata.AliasMetadata;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||
import org.elasticsearch.cluster.metadata.MappingMetadata;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.ValidationException;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
|
@ -119,8 +119,8 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractRawValues;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
|
||||
import static org.hamcrest.CoreMatchers.hasItem;
|
||||
|
@ -445,11 +445,11 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS));
|
||||
assertNotNull(getIndexResponse.getMappings().get(indexName));
|
||||
assertNotNull(getIndexResponse.getMappings().get(indexName));
|
||||
MappingMetaData mappingMetaData = getIndexResponse.getMappings().get(indexName);
|
||||
assertNotNull(mappingMetaData);
|
||||
assertEquals("_doc", mappingMetaData.type());
|
||||
assertEquals("{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}", mappingMetaData.source().string());
|
||||
Object o = mappingMetaData.getSourceAsMap().get("properties");
|
||||
MappingMetadata mappingMetadata = getIndexResponse.getMappings().get(indexName);
|
||||
assertNotNull(mappingMetadata);
|
||||
assertEquals("_doc", mappingMetadata.type());
|
||||
assertEquals("{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}", mappingMetadata.source().string());
|
||||
Object o = mappingMetadata.getSourceAsMap().get("properties");
|
||||
assertThat(o, instanceOf(Map.class));
|
||||
//noinspection unchecked
|
||||
assertThat(((Map<String, Object>) o).get("field-1"), instanceOf(Map.class));
|
||||
|
@ -479,10 +479,10 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS));
|
||||
assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS));
|
||||
assertNotNull(getIndexResponse.getMappings().get(indexName));
|
||||
MappingMetaData mappingMetaData = getIndexResponse.getMappings().get(indexName).get("_doc");
|
||||
assertNotNull(mappingMetaData);
|
||||
assertEquals("_doc", mappingMetaData.type());
|
||||
assertEquals("{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}", mappingMetaData.source().string());
|
||||
MappingMetadata mappingMetadata = getIndexResponse.getMappings().get(indexName).get("_doc");
|
||||
assertNotNull(mappingMetadata);
|
||||
assertEquals("_doc", mappingMetadata.type());
|
||||
assertEquals("{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}", mappingMetadata.source().string());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
@ -667,13 +667,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
highLevelClient().indices()::getFieldMapping,
|
||||
highLevelClient().indices()::getFieldMappingAsync);
|
||||
|
||||
final Map<String, GetFieldMappingsResponse.FieldMappingMetaData> fieldMappingMap =
|
||||
final Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fieldMappingMap =
|
||||
getFieldMappingsResponse.mappings().get(indexName);
|
||||
|
||||
final GetFieldMappingsResponse.FieldMappingMetaData metaData =
|
||||
new GetFieldMappingsResponse.FieldMappingMetaData("field",
|
||||
final GetFieldMappingsResponse.FieldMappingMetadata metadata =
|
||||
new GetFieldMappingsResponse.FieldMappingMetadata("field",
|
||||
new BytesArray("{\"field\":{\"type\":\"text\"}}"));
|
||||
assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metaData)));
|
||||
assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metadata)));
|
||||
}
|
||||
|
||||
public void testGetFieldMappingWithTypes() throws IOException {
|
||||
|
@ -703,13 +703,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
highLevelClient().indices()::getFieldMappingAsync,
|
||||
expectWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE));
|
||||
|
||||
final Map<String, org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData>
|
||||
final Map<String, org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata>
|
||||
fieldMappingMap = getFieldMappingsResponse.mappings().get(indexName).get("_doc");
|
||||
|
||||
final org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData metaData =
|
||||
new org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData("field",
|
||||
final org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata metadata =
|
||||
new org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata("field",
|
||||
new BytesArray("{\"field\":{\"type\":\"text\"}}"));
|
||||
assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metaData)));
|
||||
assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metadata)));
|
||||
}
|
||||
|
||||
public void testDeleteIndex() throws IOException {
|
||||
|
@ -1258,12 +1258,12 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
assertThat(getAliasesResponse.getAliases().size(), equalTo(1));
|
||||
assertThat(getAliasesResponse.getAliases().get("index1").size(), equalTo(1));
|
||||
AliasMetaData aliasMetaData = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetaData, notNullValue());
|
||||
assertThat(aliasMetaData.alias(), equalTo("alias1"));
|
||||
assertThat(aliasMetaData.getFilter(), nullValue());
|
||||
assertThat(aliasMetaData.getIndexRouting(), nullValue());
|
||||
assertThat(aliasMetaData.getSearchRouting(), nullValue());
|
||||
AliasMetadata aliasMetadata = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetadata, notNullValue());
|
||||
assertThat(aliasMetadata.alias(), equalTo("alias1"));
|
||||
assertThat(aliasMetadata.getFilter(), nullValue());
|
||||
assertThat(aliasMetadata.getIndexRouting(), nullValue());
|
||||
assertThat(aliasMetadata.getSearchRouting(), nullValue());
|
||||
}
|
||||
{
|
||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest().aliases("alias*");
|
||||
|
@ -1272,13 +1272,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
assertThat(getAliasesResponse.getAliases().size(), equalTo(2));
|
||||
assertThat(getAliasesResponse.getAliases().get("index1").size(), equalTo(1));
|
||||
AliasMetaData aliasMetaData1 = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetaData1, notNullValue());
|
||||
assertThat(aliasMetaData1.alias(), equalTo("alias1"));
|
||||
AliasMetadata aliasMetadata1 = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetadata1, notNullValue());
|
||||
assertThat(aliasMetadata1.alias(), equalTo("alias1"));
|
||||
assertThat(getAliasesResponse.getAliases().get("index2").size(), equalTo(1));
|
||||
AliasMetaData aliasMetaData2 = getAliasesResponse.getAliases().get("index2").iterator().next();
|
||||
assertThat(aliasMetaData2, notNullValue());
|
||||
assertThat(aliasMetaData2.alias(), equalTo("alias2"));
|
||||
AliasMetadata aliasMetadata2 = getAliasesResponse.getAliases().get("index2").iterator().next();
|
||||
assertThat(aliasMetadata2, notNullValue());
|
||||
assertThat(aliasMetadata2.alias(), equalTo("alias2"));
|
||||
}
|
||||
{
|
||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest().aliases("_all");
|
||||
|
@ -1287,13 +1287,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
assertThat(getAliasesResponse.getAliases().size(), equalTo(2));
|
||||
assertThat(getAliasesResponse.getAliases().get("index1").size(), equalTo(1));
|
||||
AliasMetaData aliasMetaData1 = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetaData1, notNullValue());
|
||||
assertThat(aliasMetaData1.alias(), equalTo("alias1"));
|
||||
AliasMetadata aliasMetadata1 = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetadata1, notNullValue());
|
||||
assertThat(aliasMetadata1.alias(), equalTo("alias1"));
|
||||
assertThat(getAliasesResponse.getAliases().get("index2").size(), equalTo(1));
|
||||
AliasMetaData aliasMetaData2 = getAliasesResponse.getAliases().get("index2").iterator().next();
|
||||
assertThat(aliasMetaData2, notNullValue());
|
||||
assertThat(aliasMetaData2.alias(), equalTo("alias2"));
|
||||
AliasMetadata aliasMetadata2 = getAliasesResponse.getAliases().get("index2").iterator().next();
|
||||
assertThat(aliasMetadata2, notNullValue());
|
||||
assertThat(aliasMetadata2.alias(), equalTo("alias2"));
|
||||
}
|
||||
{
|
||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest().aliases("*");
|
||||
|
@ -1302,13 +1302,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
|
|||
|
||||
assertThat(getAliasesResponse.getAliases().size(), equalTo(2));
|
||||
assertThat(getAliasesResponse.getAliases().get("index1").size(), equalTo(1));
|
||||
AliasMetaData aliasMetaData1 = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetaData1, notNullValue());
|
||||
assertThat(aliasMetaData1.alias(), equalTo("alias1"));
|
||||
AliasMetadata aliasMetadata1 = getAliasesResponse.getAliases().get("index1").iterator().next();
|
||||
assertThat(aliasMetadata1, notNullValue());
|
||||
assertThat(aliasMetadata1.alias(), equalTo("alias1"));
assertThat(getAliasesResponse.getAliases().get("index2").size(), equalTo(1));
AliasMetaData aliasMetaData2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetaData2, notNullValue());
assertThat(aliasMetaData2.alias(), equalTo("alias2"));
AliasMetadata aliasMetadata2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetadata2, notNullValue());
assertThat(aliasMetadata2.alias(), equalTo("alias2"));
}
{
GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices("_all");
@@ -1318,13 +1318,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertThat("Unexpected number of aliases, got: " + getAliasesResponse.getAliases().toString(),
getAliasesResponse.getAliases().size(), equalTo(3));
assertThat(getAliasesResponse.getAliases().get("index1").size(), equalTo(1));
AliasMetaData aliasMetaData1 = getAliasesResponse.getAliases().get("index1").iterator().next();
assertThat(aliasMetaData1, notNullValue());
assertThat(aliasMetaData1.alias(), equalTo("alias1"));
AliasMetadata aliasMetadata1 = getAliasesResponse.getAliases().get("index1").iterator().next();
assertThat(aliasMetadata1, notNullValue());
assertThat(aliasMetadata1.alias(), equalTo("alias1"));
assertThat(getAliasesResponse.getAliases().get("index2").size(), equalTo(1));
AliasMetaData aliasMetaData2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetaData2, notNullValue());
assertThat(aliasMetaData2.alias(), equalTo("alias2"));
AliasMetadata aliasMetadata2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetadata2, notNullValue());
assertThat(aliasMetadata2.alias(), equalTo("alias2"));
assertThat(getAliasesResponse.getAliases().get("index3").size(), equalTo(0));
}
{
@@ -1334,13 +1334,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertThat(getAliasesResponse.getAliases().size(), equalTo(3));
assertThat(getAliasesResponse.getAliases().get("index1").size(), equalTo(1));
AliasMetaData aliasMetaData1 = getAliasesResponse.getAliases().get("index1").iterator().next();
assertThat(aliasMetaData1, notNullValue());
assertThat(aliasMetaData1.alias(), equalTo("alias1"));
AliasMetadata aliasMetadata1 = getAliasesResponse.getAliases().get("index1").iterator().next();
assertThat(aliasMetadata1, notNullValue());
assertThat(aliasMetadata1.alias(), equalTo("alias1"));
assertThat(getAliasesResponse.getAliases().get("index2").size(), equalTo(1));
AliasMetaData aliasMetaData2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetaData2, notNullValue());
assertThat(aliasMetaData2.alias(), equalTo("alias2"));
AliasMetadata aliasMetadata2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetadata2, notNullValue());
assertThat(aliasMetadata2.alias(), equalTo("alias2"));
assertThat(getAliasesResponse.getAliases().get("index3").size(), equalTo(0));
}
{
@@ -1350,13 +1350,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertThat(getAliasesResponse.getAliases().size(), equalTo(3));
assertThat(getAliasesResponse.getAliases().get("index1").size(), equalTo(1));
AliasMetaData aliasMetaData1 = getAliasesResponse.getAliases().get("index1").iterator().next();
assertThat(aliasMetaData1, notNullValue());
assertThat(aliasMetaData1.alias(), equalTo("alias1"));
AliasMetadata aliasMetadata1 = getAliasesResponse.getAliases().get("index1").iterator().next();
assertThat(aliasMetadata1, notNullValue());
assertThat(aliasMetadata1.alias(), equalTo("alias1"));
assertThat(getAliasesResponse.getAliases().get("index2").size(), equalTo(1));
AliasMetaData aliasMetaData2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetaData2, notNullValue());
assertThat(aliasMetaData2.alias(), equalTo("alias2"));
AliasMetadata aliasMetadata2 = getAliasesResponse.getAliases().get("index2").iterator().next();
assertThat(aliasMetadata2, notNullValue());
assertThat(aliasMetadata2.alias(), equalTo("alias2"));
assertThat(getAliasesResponse.getAliases().get("index3").size(), equalTo(0));
}
}
@@ -1423,9 +1423,9 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertThat(getAliasesResponse.getAliases().size(), equalTo(1));
assertThat(getAliasesResponse.getAliases().get(index).size(), equalTo(1));
AliasMetaData aliasMetaData = getAliasesResponse.getAliases().get(index).iterator().next();
assertThat(aliasMetaData, notNullValue());
assertThat(aliasMetaData.alias(), equalTo(alias));
AliasMetadata aliasMetadata = getAliasesResponse.getAliases().get(index).iterator().next();
assertThat(aliasMetadata, notNullValue());
assertThat(aliasMetadata.alias(), equalTo(alias));
/*
This is the above response in json format:
{
@@ -1450,16 +1450,16 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testIndexPutSettings() throws IOException {
final Setting<Integer> dynamicSetting = IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING;
final String dynamicSettingKey = IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
final Setting<Integer> dynamicSetting = IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING;
final String dynamicSettingKey = IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
final int dynamicSettingValue = 0;
final Setting<String> staticSetting = IndexSettings.INDEX_CHECK_ON_STARTUP;
final String staticSettingKey = IndexSettings.INDEX_CHECK_ON_STARTUP.getKey();
final String staticSettingValue = "true";
final Setting<Integer> unmodifiableSetting = IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING;
final String unmodifiableSettingKey = IndexMetaData.SETTING_NUMBER_OF_SHARDS;
final Setting<Integer> unmodifiableSetting = IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING;
final String unmodifiableSettingKey = IndexMetadata.SETTING_NUMBER_OF_SHARDS;
final int unmodifiableSettingValue = 3;
String index = "index";
@@ -1783,7 +1783,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
client.indices()::getTemplate, client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE));
assertThat(getTemplate1.getIndexTemplates(), hasSize(1));
org.elasticsearch.cluster.metadata.IndexTemplateMetaData template1 = getTemplate1.getIndexTemplates().get(0);
org.elasticsearch.cluster.metadata.IndexTemplateMetadata template1 = getTemplate1.getIndexTemplates().get(0);
assertThat(template1.name(), equalTo("template-1"));
assertThat(template1.patterns(), contains("pattern-1", "name-1"));
assertTrue(template1.aliases().containsKey("alias-1"));
@@ -1794,7 +1794,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
client.indices()::getTemplate, client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE));
assertThat(getTemplate2.getIndexTemplates(), hasSize(1));
org.elasticsearch.cluster.metadata.IndexTemplateMetaData template2 = getTemplate2.getIndexTemplates().get(0);
org.elasticsearch.cluster.metadata.IndexTemplateMetadata template2 = getTemplate2.getIndexTemplates().get(0);
assertThat(template2.name(), equalTo("template-2"));
assertThat(template2.patterns(), contains("pattern-2", "name-2"));
assertTrue(template2.aliases().isEmpty());
@@ -1811,7 +1811,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
getBothRequest, client.indices()::getTemplate, client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE));
assertThat(getBoth.getIndexTemplates(), hasSize(2));
assertThat(getBoth.getIndexTemplates().stream().map(org.elasticsearch.cluster.metadata.IndexTemplateMetaData::getName).toArray(),
assertThat(getBoth.getIndexTemplates().stream().map(org.elasticsearch.cluster.metadata.IndexTemplateMetadata::getName).toArray(),
arrayContainingInAnyOrder("template-1", "template-2"));
GetIndexTemplatesRequest getAllRequest = new GetIndexTemplatesRequest();
@@ -1819,7 +1819,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
getAllRequest, client.indices()::getTemplate, client.indices()::getTemplateAsync,
expectWarnings(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE));
assertThat(getAll.getIndexTemplates().size(), greaterThanOrEqualTo(2));
assertThat(getAll.getIndexTemplates().stream().map(org.elasticsearch.cluster.metadata.IndexTemplateMetaData::getName)
assertThat(getAll.getIndexTemplates().stream().map(org.elasticsearch.cluster.metadata.IndexTemplateMetadata::getName)
.collect(Collectors.toList()),
hasItems("template-1", "template-2"));
@@ -1864,7 +1864,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
new GetIndexTemplatesRequest("template-1"),
client.indices()::getIndexTemplate, client.indices()::getIndexTemplateAsync);
assertThat(getTemplate1.getIndexTemplates(), hasSize(1));
IndexTemplateMetaData template1 = getTemplate1.getIndexTemplates().get(0);
IndexTemplateMetadata template1 = getTemplate1.getIndexTemplates().get(0);
assertThat(template1.name(), equalTo("template-1"));
assertThat(template1.patterns(), contains("pattern-1", "name-1"));
assertTrue(template1.aliases().containsKey("alias-1"));
@@ -1872,13 +1872,13 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
GetIndexTemplatesResponse getTemplate2 = execute(new GetIndexTemplatesRequest("template-2"),
client.indices()::getIndexTemplate, client.indices()::getIndexTemplateAsync);
assertThat(getTemplate2.getIndexTemplates(), hasSize(1));
IndexTemplateMetaData template2 = getTemplate2.getIndexTemplates().get(0);
IndexTemplateMetadata template2 = getTemplate2.getIndexTemplates().get(0);
assertThat(template2.name(), equalTo("template-2"));
assertThat(template2.patterns(), contains("pattern-2", "name-2"));
assertTrue(template2.aliases().isEmpty());
assertThat(template2.settings().get("index.number_of_shards"), equalTo("2"));
assertThat(template2.settings().get("index.number_of_replicas"), equalTo("0"));
// New API returns a MappingMetaData class rather than CompressedXContent for the mapping
// New API returns a MappingMetadata class rather than CompressedXContent for the mapping
assertTrue(template2.mappings().sourceAsMap().containsKey("properties"));
@SuppressWarnings("unchecked")
Map<String, Object> props = (Map<String, Object>) template2.mappings().sourceAsMap().get("properties");
@@ -1893,14 +1893,14 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
GetIndexTemplatesResponse getBoth = execute(
getBothRequest, client.indices()::getIndexTemplate, client.indices()::getIndexTemplateAsync);
assertThat(getBoth.getIndexTemplates(), hasSize(2));
assertThat(getBoth.getIndexTemplates().stream().map(IndexTemplateMetaData::name).toArray(),
assertThat(getBoth.getIndexTemplates().stream().map(IndexTemplateMetadata::name).toArray(),
arrayContainingInAnyOrder("template-1", "template-2"));
GetIndexTemplatesRequest getAllRequest = new GetIndexTemplatesRequest();
GetIndexTemplatesResponse getAll = execute(
getAllRequest, client.indices()::getIndexTemplate, client.indices()::getIndexTemplateAsync);
assertThat(getAll.getIndexTemplates().size(), greaterThanOrEqualTo(2));
assertThat(getAll.getIndexTemplates().stream().map(IndexTemplateMetaData::name)
assertThat(getAll.getIndexTemplates().stream().map(IndexTemplateMetadata::name)
.collect(Collectors.toList()),
hasItems("template-1", "template-2"));
@@ -61,7 +61,7 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
assertTrue(response.isAcknowledged());
WatcherStatsResponse stats = highLevelClient().watcher().watcherStats(new WatcherStatsRequest(), RequestOptions.DEFAULT);
assertFalse(stats.getWatcherMetaData().manuallyStopped());
assertFalse(stats.getWatcherMetadata().manuallyStopped());
assertThat(stats.getNodes(), not(empty()));
for(WatcherStatsResponse.Node node : stats.getNodes()) {
assertEquals(WatcherState.STARTED, node.getWatcherState());
@@ -74,7 +74,7 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
assertTrue(response.isAcknowledged());
WatcherStatsResponse stats = highLevelClient().watcher().watcherStats(new WatcherStatsRequest(), RequestOptions.DEFAULT);
assertTrue(stats.getWatcherMetaData().manuallyStopped());
assertTrue(stats.getWatcherMetadata().manuallyStopped());
}
public void testPutWatch() throws Exception {
@@ -20,7 +20,7 @@
package org.elasticsearch.client.core;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -91,7 +91,7 @@ public class CountResponseTests extends ESTestCase {
String nodeId = randomAlphaOfLengthBetween(5, 10);
String indexName = randomAlphaOfLengthBetween(5, 10);
searchShardTarget = new SearchShardTarget(nodeId,
new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), randomInt()), null, null);
new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), randomInt()), null, null);
}
return new ShardSearchFailure(ex, searchShardTarget);
}
@@ -69,7 +69,7 @@ import org.elasticsearch.client.slm.SnapshotLifecycleStats;
import org.elasticsearch.client.slm.SnapshotRetentionConfiguration;
import org.elasticsearch.client.slm.StartSLMRequest;
import org.elasticsearch.client.slm.StopSLMRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
@@ -361,7 +361,7 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
CreateIndexRequest createIndexRequest = new CreateIndexRequest("my_index-1")
.settings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put("index.lifecycle.name", "my_policy")
.put("index.lifecycle.rollover_alias", "my_alias")
.build());
@@ -369,7 +369,7 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
CreateIndexRequest createOtherIndexRequest = new CreateIndexRequest("other_index")
.settings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build());
client.indices().create(createOtherIndexRequest, RequestOptions.DEFAULT);
@@ -623,7 +623,7 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
CreateIndexRequest createIndexRequest = new CreateIndexRequest("my_index")
.settings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2)
.put("index.lifecycle.name", "my_policy")
.build());
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
@@ -688,7 +688,7 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
client.indexLifecycle().putLifecyclePolicy(putRequest, RequestOptions.DEFAULT);
CreateIndexRequest createIndexRequest = new CreateIndexRequest("my_index")
.settings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put("index.lifecycle.name", "my_policy")
.build());
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
@@ -76,7 +76,7 @@ import org.elasticsearch.client.indices.GetIndexTemplatesRequest;
import org.elasticsearch.client.indices.GetIndexTemplatesResponse;
import org.elasticsearch.client.indices.GetMappingsRequest;
import org.elasticsearch.client.indices.GetMappingsResponse;
import org.elasticsearch.client.indices.IndexTemplateMetaData;
import org.elasticsearch.client.indices.IndexTemplateMetadata;
import org.elasticsearch.client.indices.IndexTemplatesExistRequest;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.elasticsearch.client.indices.PutMappingRequest;
@@ -86,8 +86,8 @@ import org.elasticsearch.client.indices.ReloadAnalyzersResponse.ReloadDetails;
import org.elasticsearch.client.indices.UnfreezeIndexRequest;
import org.elasticsearch.client.indices.rollover.RolloverRequest;
import org.elasticsearch.client.indices.rollover.RolloverResponse;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
@@ -593,8 +593,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::get-mappings-execute
// tag::get-mappings-response
Map<String, MappingMetaData> allMappings = getMappingResponse.mappings(); // <1>
MappingMetaData indexMapping = allMappings.get("twitter"); // <2>
Map<String, MappingMetadata> allMappings = getMappingResponse.mappings(); // <1>
MappingMetadata indexMapping = allMappings.get("twitter"); // <2>
Map<String, Object> mapping = indexMapping.sourceAsMap(); // <3>
// end::get-mappings-response
@@ -645,8 +645,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
final CountDownLatch latch = new CountDownLatch(1);
final ActionListener<GetMappingsResponse> latchListener = new LatchedActionListener<>(listener, latch);
listener = ActionListener.wrap(r -> {
Map<String, MappingMetaData> allMappings = r.mappings();
MappingMetaData indexMapping = allMappings.get("twitter");
Map<String, MappingMetadata> allMappings = r.mappings();
MappingMetadata indexMapping = allMappings.get("twitter");
Map<String, Object> mapping = indexMapping.sourceAsMap();
Map<String, String> type = new HashMap<>();
@@ -716,15 +716,15 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::get-field-mappings-execute
// tag::get-field-mappings-response
final Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetaData>> mappings =
final Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> mappings =
response.mappings();// <1>
final Map<String, GetFieldMappingsResponse.FieldMappingMetaData> fieldMappings =
final Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fieldMappings =
mappings.get("twitter"); // <2>
final GetFieldMappingsResponse.FieldMappingMetaData metaData =
final GetFieldMappingsResponse.FieldMappingMetadata metadata =
fieldMappings.get("message");// <3>
final String fullName = metaData.fullName();// <4>
final Map<String, Object> source = metaData.sourceAsMap(); // <5>
final String fullName = metadata.fullName();// <4>
final Map<String, Object> source = metadata.sourceAsMap(); // <5>
// end::get-field-mappings-response
}
@@ -748,14 +748,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
final CountDownLatch latch = new CountDownLatch(1);
final ActionListener<GetFieldMappingsResponse> latchListener = new LatchedActionListener<>(listener, latch);
listener = ActionListener.wrap(r -> {
final Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetaData>> mappings =
final Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> mappings =
r.mappings();
final Map<String, GetFieldMappingsResponse.FieldMappingMetaData> fieldMappings =
final Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fieldMappings =
mappings.get("twitter");
final GetFieldMappingsResponse.FieldMappingMetaData metaData1 = fieldMappings.get("message");
final GetFieldMappingsResponse.FieldMappingMetadata metadata1 = fieldMappings.get("message");
final String fullName = metaData1.fullName();
final Map<String, Object> source = metaData1.sourceAsMap();
final String fullName = metadata1.fullName();
final Map<String, Object> source = metadata1.sourceAsMap();
latchListener.onResponse(r);
}, e -> {
latchListener.onFailure(e);
@@ -1234,9 +1234,9 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::get-index-execute
// tag::get-index-response
MappingMetaData indexMappings = getIndexResponse.getMappings().get("index"); // <1>
MappingMetadata indexMappings = getIndexResponse.getMappings().get("index"); // <1>
Map<String, Object> indexTypeMappings = indexMappings.getSourceAsMap(); // <2>
List<AliasMetaData> indexAliases = getIndexResponse.getAliases().get("index"); // <3>
List<AliasMetadata> indexAliases = getIndexResponse.getAliases().get("index"); // <3>
String numberOfShardsString = getIndexResponse.getSetting("index", "index.number_of_shards"); // <4>
Settings indexSettings = getIndexResponse.getSettings().get("index"); // <5>
Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null); // <6>
@@ -1998,7 +1998,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::get-alias-execute
// tag::get-alias-response
Map<String, Set<AliasMetaData>> aliases = response.getAliases(); // <1>
Map<String, Set<AliasMetadata>> aliases = response.getAliases(); // <1>
// end::get-alias-response
// tag::get-alias-response-error
@@ -2320,7 +2320,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::get-templates-execute
// tag::get-templates-response
List<IndexTemplateMetaData> templates = getTemplatesResponse.getIndexTemplates(); // <1>
List<IndexTemplateMetadata> templates = getTemplatesResponse.getIndexTemplates(); // <1>
// end::get-templates-response
assertThat(templates, hasSize(1));
@@ -47,7 +47,7 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -234,10 +234,10 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::get-repository-execute
// tag::get-repository-response
List<RepositoryMetaData> repositoryMetaDataResponse = response.repositories();
List<RepositoryMetadata> repositoryMetadataResponse = response.repositories();
// end::get-repository-response
assertThat(1, equalTo(repositoryMetaDataResponse.size()));
assertThat(repositoryName, equalTo(repositoryMetaDataResponse.get(0).name()));
assertThat(1, equalTo(repositoryMetadataResponse.size()));
assertThat(repositoryName, equalTo(repositoryMetadataResponse.get(0).name()));
}
public void testSnapshotGetRepositoryAsync() throws InterruptedException {
@@ -455,14 +455,14 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::verify-repository-execute
// tag::verify-repository-response
List<VerifyRepositoryResponse.NodeView> repositoryMetaDataResponse = response.getNodes();
List<VerifyRepositoryResponse.NodeView> repositoryMetadataResponse = response.getNodes();
// end::verify-repository-response
assertThat(1, equalTo(repositoryMetaDataResponse.size()));
assertThat(1, equalTo(repositoryMetadataResponse.size()));
final boolean async = Booleans.parseBoolean(System.getProperty("tests.rest.async", "false"));
if (async) {
assertThat("asyncIntegTest-0", equalTo(repositoryMetaDataResponse.get(0).getName()));
assertThat("asyncIntegTest-0", equalTo(repositoryMetadataResponse.get(0).getName()));
} else {
assertThat("integTest-0", equalTo(repositoryMetaDataResponse.get(0).getName()));
assertThat("integTest-0", equalTo(repositoryMetadataResponse.get(0).getName()));
}
}
@@ -19,7 +19,7 @@
package org.elasticsearch.client.indices;
import org.elasticsearch.client.indices.GetFieldMappingsResponse.FieldMappingMetaData;
import org.elasticsearch.client.indices.GetFieldMappingsResponse.FieldMappingMetadata;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
@@ -52,18 +52,18 @@ public class GetFieldMappingsResponseTests extends ESTestCase {
}
private static GetFieldMappingsResponse createTestInstance() {
Map<String, Map<String, FieldMappingMetaData>> mappings = new HashMap<>();
Map<String, Map<String, FieldMappingMetadata>> mappings = new HashMap<>();
// if mappings is empty, means that fields are not found
if (randomBoolean()) {
int indices = randomInt(10);
for (int i = 0; i < indices; i++) {
Map<String, FieldMappingMetaData> fieldMappings = new HashMap<>();
Map<String, FieldMappingMetadata> fieldMappings = new HashMap<>();
int fields = randomInt(10);
for (int k = 0; k < fields; k++) {
final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}";
final String fieldName = randomAlphaOfLength(8);
FieldMappingMetaData metaData = new FieldMappingMetaData(fieldName, new BytesArray(mapping));
fieldMappings.put(fieldName, metaData);
FieldMappingMetadata metadata = new FieldMappingMetadata(fieldName, new BytesArray(mapping));
fieldMappings.put(fieldName, metadata);
}
mappings.put(randomAlphaOfLength(8), fieldMappings);
}
@@ -74,10 +74,10 @@ public class GetFieldMappingsResponseTests extends ESTestCase {
// As the client class GetFieldMappingsResponse doesn't have toXContent method, adding this method here only for the test
private static void toXContent(GetFieldMappingsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
for (Map.Entry<String, Map<String, FieldMappingMetaData>> indexEntry : response.mappings().entrySet()) {
for (Map.Entry<String, Map<String, FieldMappingMetadata>> indexEntry : response.mappings().entrySet()) {
builder.startObject(indexEntry.getKey());
builder.startObject("mappings");
for (Map.Entry<String, FieldMappingMetaData> fieldEntry : indexEntry.getValue().entrySet()) {
for (Map.Entry<String, FieldMappingMetadata> fieldEntry : indexEntry.getValue().entrySet()) {
builder.startObject(fieldEntry.getKey());
builder.field("full_name", fieldEntry.getValue().fullName());
builder.field("mapping", fieldEntry.getValue().sourceAsMap());
@@ -21,8 +21,8 @@ package org.elasticsearch.client.indices;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.client.GetAliasesResponseTests;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
@@ -72,8 +72,8 @@ public class GetIndexResponseTests extends ESTestCase {
private static GetIndexResponse createTestInstance() {
String[] indices = generateRandomStringArray(5, 5, false, false);
Map<String, MappingMetaData> mappings = new HashMap<>();
Map<String, List<AliasMetaData>> aliases = new HashMap<>();
Map<String, MappingMetadata> mappings = new HashMap<>();
Map<String, List<AliasMetadata>> aliases = new HashMap<>();
Map<String, Settings> settings = new HashMap<>();
Map<String, Settings> defaultSettings = new HashMap<>();
IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS;
@@ -81,13 +81,13 @@ public class GetIndexResponseTests extends ESTestCase {
for (String index: indices) {
mappings.put(index, createMappingsForIndex());
List<AliasMetaData> aliasMetaDataList = new ArrayList<>();
List<AliasMetadata> aliasMetadataList = new ArrayList<>();
int aliasesNum = randomIntBetween(0, 3);
for (int i=0; i<aliasesNum; i++) {
aliasMetaDataList.add(GetAliasesResponseTests.createAliasMetaData());
aliasMetadataList.add(GetAliasesResponseTests.createAliasMetadata());
}
CollectionUtil.timSort(aliasMetaDataList, Comparator.comparing(AliasMetaData::alias));
aliases.put(index, Collections.unmodifiableList(aliasMetaDataList));
CollectionUtil.timSort(aliasMetadataList, Comparator.comparing(AliasMetadata::alias));
aliases.put(index, Collections.unmodifiableList(aliasMetadataList));
Settings.Builder builder = Settings.builder();
builder.put(RandomCreateIndexGenerator.randomIndexSettings());
@@ -100,11 +100,11 @@ public class GetIndexResponseTests extends ESTestCase {
return new GetIndexResponse(indices, mappings, aliases, settings, defaultSettings);
}
private static MappingMetaData createMappingsForIndex() {
private static MappingMetadata createMappingsForIndex() {
int typeCount = rarely() ? 0 : 1;
MappingMetaData mmd;
MappingMetadata mmd;
try {
mmd = new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap());
mmd = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap());
} catch (IOException e) {
throw new RuntimeException(e);
}
@@ -118,7 +118,7 @@ public class GetIndexResponseTests extends ESTestCase {
try {
String typeName = MapperService.SINGLE_MAPPING_NAME;
mmd = new MappingMetaData(typeName, mappings);
mmd = new MappingMetadata(typeName, mappings);
} catch (IOException e) {
fail("shouldn't have failed " + e);
}
@@ -162,15 +162,15 @@ public class GetIndexResponseTests extends ESTestCase {
private static void toXContent(GetIndexResponse response, XContentBuilder builder) throws IOException {
// first we need to repackage from GetIndexResponse to org.elasticsearch.action.admin.indices.get.GetIndexResponse
ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> allMappings = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, List<AliasMetaData>> aliases = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetadata>> allMappings = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, List<AliasMetadata>> aliases = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, Settings> settings = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, Settings> defaultSettings = ImmutableOpenMap.builder();
Map<String, MappingMetaData> indexMappings = response.getMappings();
Map<String, MappingMetadata> indexMappings = response.getMappings();
for (String index : response.getIndices()) {
MappingMetaData mmd = indexMappings.get(index);
ImmutableOpenMap.Builder<String, MappingMetaData> typedMappings = ImmutableOpenMap.builder();
MappingMetadata mmd = indexMappings.get(index);
ImmutableOpenMap.Builder<String, MappingMetadata> typedMappings = ImmutableOpenMap.builder();
if (mmd != null) {
typedMappings.put(MapperService.SINGLE_MAPPING_NAME, mmd);
}
@@ -19,8 +19,8 @@
package org.elasticsearch.client.indices;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -78,15 +78,15 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
XContentType xContentType = randomFrom(XContentType.values());
int numTemplates = randomIntBetween(0, 32);
for (int i = 0; i < numTemplates; i++) {
org.elasticsearch.cluster.metadata.IndexTemplateMetaData.Builder esIMD =
new org.elasticsearch.cluster.metadata.IndexTemplateMetaData.Builder(String.format(Locale.ROOT, "%02d ", i) +
org.elasticsearch.cluster.metadata.IndexTemplateMetadata.Builder esIMD =
new org.elasticsearch.cluster.metadata.IndexTemplateMetadata.Builder(String.format(Locale.ROOT, "%02d ", i) +
randomAlphaOfLength(4));
esIMD.patterns(Arrays.asList(generateRandomStringArray(32, 4, false, false)));
esIMD.settings(randomIndexSettings());
esIMD.putMapping("_doc", new CompressedXContent(BytesReference.bytes(randomMapping("_doc", xContentType))));
int numAliases = randomIntBetween(0, 8);
for (int j = 0; j < numAliases; j++) {
esIMD.putAlias(randomAliasMetaData(String.format(Locale.ROOT, "%02d ", j) + randomAlphaOfLength(4)));
esIMD.putAlias(randomAliasMetadata(String.format(Locale.ROOT, "%02d ", j) + randomAlphaOfLength(4)));
}
esIMD.order(randomIntBetween(0, Integer.MAX_VALUE));
esIMD.version(randomIntBetween(0, Integer.MAX_VALUE));
@@ -101,10 +101,10 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
GetIndexTemplatesResponse response = GetIndexTemplatesResponse.fromXContent(parser);
assertThat(response.getIndexTemplates().size(), equalTo(numTemplates));
response.getIndexTemplates().sort(Comparator.comparing(IndexTemplateMetaData::name));
response.getIndexTemplates().sort(Comparator.comparing(IndexTemplateMetadata::name));
for (int i = 0; i < numTemplates; i++) {
org.elasticsearch.cluster.metadata.IndexTemplateMetaData esIMD = esResponse.getIndexTemplates().get(i);
IndexTemplateMetaData result = response.getIndexTemplates().get(i);
org.elasticsearch.cluster.metadata.IndexTemplateMetadata esIMD = esResponse.getIndexTemplates().get(i);
IndexTemplateMetadata result = response.getIndexTemplates().get(i);
assertThat(result.patterns(), equalTo(esIMD.patterns()));
assertThat(result.settings(), equalTo(esIMD.settings()));
@@ -118,11 +118,11 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
assertThat(result.mappings().sourceAsMap(), equalTo(expectedMapping.get("_doc")));
assertThat(result.aliases().size(), equalTo(esIMD.aliases().size()));
List<AliasMetaData> expectedAliases = Arrays.stream(esIMD.aliases().values().toArray(AliasMetaData.class))
.sorted(Comparator.comparing(AliasMetaData::alias))
List<AliasMetadata> expectedAliases = Arrays.stream(esIMD.aliases().values().toArray(AliasMetadata.class))
.sorted(Comparator.comparing(AliasMetadata::alias))
.collect(Collectors.toList());
List<AliasMetaData> actualAliases = Arrays.stream(result.aliases().values().toArray(AliasMetaData.class))
.sorted(Comparator.comparing(AliasMetaData::alias))
List<AliasMetadata> actualAliases = Arrays.stream(result.aliases().values().toArray(AliasMetadata.class))
.sorted(Comparator.comparing(AliasMetadata::alias))
.collect(Collectors.toList());
for (int j = 0; j < result.aliases().size(); j++) {
assertThat(actualAliases.get(j), equalTo(expectedAliases.get(j)));
@@ -148,8 +148,8 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
// Check there's no doc types at the root of the mapping
Map<String, Object> expectedMap = XContentHelper.convertToMap(
new BytesArray(mappingString), true, XContentType.JSON).v2();
for (IndexTemplateMetaData template : newInstance.getIndexTemplates()) {
MappingMetaData mappingMD = template.mappings();
for (IndexTemplateMetadata template : newInstance.getIndexTemplates()) {
MappingMetadata mappingMD = template.mappings();
if(mappingMD != null) {
Map<String, Object> mappingAsMap = mappingMD.sourceAsMap();
assertEquals(expectedMap, mappingAsMap);
@@ -158,14 +158,14 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
}
static GetIndexTemplatesResponse createTestInstance() {
List<IndexTemplateMetaData> templates = new ArrayList<>();
List<IndexTemplateMetadata> templates = new ArrayList<>();
int numTemplates = between(0, 10);
for (int t = 0; t < numTemplates; t++) {
IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder("template-" + t);
IndexTemplateMetadata.Builder templateBuilder = IndexTemplateMetadata.builder("template-" + t);
templateBuilder.patterns(IntStream.range(0, between(1, 5)).mapToObj(i -> "pattern-" + i).collect(Collectors.toList()));
int numAlias = between(0, 5);
for (int i = 0; i < numAlias; i++) {
templateBuilder.putAlias(AliasMetaData.builder(randomAlphaOfLengthBetween(1, 10)));
templateBuilder.putAlias(AliasMetadata.builder(randomAlphaOfLengthBetween(1, 10)));
}
if (randomBoolean()) {
templateBuilder.settings(Settings.builder().put("index.setting-1", randomLong()));
@@ -179,7 +179,7 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
if (randomBoolean()) {
try {
Map<String, Object> map = XContentHelper.convertToMap(new BytesArray(mappingString), true, XContentType.JSON).v2();
MappingMetaData mapping = new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, map);
MappingMetadata mapping = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, map);
templateBuilder.mapping(mapping);
} catch (IOException ex) {
throw new UncheckedIOException(ex);
@@ -195,15 +195,15 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
//Create a server-side counterpart for the client-side class and call toXContent on it
List<org.elasticsearch.cluster.metadata.IndexTemplateMetaData> serverIndexTemplates = new ArrayList<>();
List<IndexTemplateMetaData> clientIndexTemplates = response.getIndexTemplates();
for (IndexTemplateMetaData clientITMD : clientIndexTemplates) {
org.elasticsearch.cluster.metadata.IndexTemplateMetaData.Builder serverTemplateBuilder =
org.elasticsearch.cluster.metadata.IndexTemplateMetaData.builder(clientITMD.name());
List<org.elasticsearch.cluster.metadata.IndexTemplateMetadata> serverIndexTemplates = new ArrayList<>();
List<IndexTemplateMetadata> clientIndexTemplates = response.getIndexTemplates();
for (IndexTemplateMetadata clientITMD : clientIndexTemplates) {
org.elasticsearch.cluster.metadata.IndexTemplateMetadata.Builder serverTemplateBuilder =
org.elasticsearch.cluster.metadata.IndexTemplateMetadata.builder(clientITMD.name());
serverTemplateBuilder.patterns(clientITMD.patterns());
Iterator<AliasMetaData> aliases = clientITMD.aliases().valuesIt();
Iterator<AliasMetadata> aliases = clientITMD.aliases().valuesIt();
aliases.forEachRemaining((a)->serverTemplateBuilder.putAlias(a));
serverTemplateBuilder.settings(clientITMD.settings());
@@ -220,8 +220,8 @@ public class GetIndexTemplatesResponseTests extends ESTestCase {
serverResponse.toXContent(builder, ToXContent.EMPTY_PARAMS);
}
private static AliasMetaData randomAliasMetaData(String name) {
AliasMetaData.Builder alias = AliasMetaData.builder(name);
private static AliasMetadata randomAliasMetadata(String name) {
AliasMetadata.Builder alias = AliasMetadata.builder(name);
if (randomBoolean()) {
if (randomBoolean()) {
alias.routing(randomAlphaOfLength(5));
@@ -19,7 +19,7 @@
package org.elasticsearch.client.indices;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContent.Params;
@@ -56,8 +56,8 @@ public class GetMappingsResponseTests extends ESTestCase {
}
private static GetMappingsResponse createTestInstance() {
Map<String, MappingMetaData> mappings = Collections.singletonMap(
"index-" + randomAlphaOfLength(5), randomMappingMetaData());
Map<String, MappingMetadata> mappings = Collections.singletonMap(
"index-" + randomAlphaOfLength(5), randomMappingMetadata());
return new GetMappingsResponse(mappings);
}
@@ -69,7 +69,7 @@ public class GetMappingsResponseTests extends ESTestCase {
return field -> !field.equals(MAPPINGS.getPreferredName());
}
public static MappingMetaData randomMappingMetaData() {
public static MappingMetadata randomMappingMetadata() {
Map<String, Object> mappings = new HashMap<>();
if (frequently()) { // rarely have no fields
@@ -80,7 +80,7 @@ public class GetMappingsResponseTests extends ESTestCase {
}
try {
return new MappingMetaData(MapperService.SINGLE_MAPPING_NAME, mappings);
return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mappings);
} catch (IOException e) {
throw new RuntimeException(e);
}
@@ -102,10 +102,10 @@ public class GetMappingsResponseTests extends ESTestCase {
private static void toXContent(GetMappingsResponse response, XContentBuilder builder) throws IOException {
Params params = new ToXContent.MapParams(
Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false"));
ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> allMappings = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetadata>> allMappings = ImmutableOpenMap.builder();
for (Map.Entry<String, MappingMetaData> indexEntry : response.mappings().entrySet()) {
ImmutableOpenMap.Builder<String, MappingMetaData> mappings = ImmutableOpenMap.builder();
for (Map.Entry<String, MappingMetadata> indexEntry : response.mappings().entrySet()) {
ImmutableOpenMap.Builder<String, MappingMetadata> mappings = ImmutableOpenMap.builder();
mappings.put(MapperService.SINGLE_MAPPING_NAME, indexEntry.getValue());
allMappings.put(indexEntry.getKey(), mappings.build());
}
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.client.rollup;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.equalTo;
@@ -26,8 +26,8 @@ import static org.hamcrest.Matchers.equalTo;
public class GetRollupCapsRequestTests extends ESTestCase {
public void testImplicitIndexPattern() {
String pattern = randomFrom("", "*", MetaData.ALL, null);
String pattern = randomFrom("", "*", Metadata.ALL, null);
GetRollupCapsRequest request = new GetRollupCapsRequest(pattern);
assertThat(request.getIndexPattern(), equalTo(MetaData.ALL));
assertThat(request.getIndexPattern(), equalTo(Metadata.ALL));
}
}
@@ -52,7 +52,7 @@ public class WatcherStatsResponseTests extends ESTestCase {
private void toXContent(WatcherStatsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
NodesResponseHeaderTestUtils.toXContent(response.getHeader(), response.getClusterName(), builder);
toXContent(response.getWatcherMetaData(), builder);
toXContent(response.getWatcherMetadata(), builder);
builder.startArray("stats");
for (WatcherStatsResponse.Node node : response.getNodes()) {
toXContent(node, builder);
@@ -61,8 +61,8 @@ public class WatcherStatsResponseTests extends ESTestCase {
builder.endObject();
}
private void toXContent(WatcherMetaData metaData, XContentBuilder builder) throws IOException {
builder.field("manually_stopped", metaData.manuallyStopped());
private void toXContent(WatcherMetadata metadata, XContentBuilder builder) throws IOException {
builder.field("manually_stopped", metadata.manuallyStopped());
}
private void toXContent(WatcherStatsResponse.Node node, XContentBuilder builder) throws IOException {
@@ -182,7 +182,7 @@ public class WatcherStatsResponseTests extends ESTestCase {
}
NodesResponseHeader nodesResponseHeader = new NodesResponseHeader(randomInt(10), randomInt(10),
randomInt(10), Collections.emptyList());
WatcherMetaData watcherMetaData = new WatcherMetaData(randomBoolean());
return new WatcherStatsResponse(nodesResponseHeader, randomAlphaOfLength(10), watcherMetaData, nodes);
WatcherMetadata watcherMetadata = new WatcherMetadata(randomBoolean());
return new WatcherStatsResponse(nodesResponseHeader, randomAlphaOfLength(10), watcherMetadata, nodes);
}
}
@@ -1766,7 +1766,7 @@ this:
"count" : 26
},
{
"value" : "o.e.c.m.MetaDataIndexTemplateService",
"value" : "o.e.c.m.MetadataIndexTemplateService",
"count" : 8
},
{
@@ -185,7 +185,7 @@ CRUD::
Distributed::
* [Close Index API] Mark shard copy as stale if needed during shard verification {pull}36755[#36755]
* [Close Index API] Refactor MetaDataIndexStateService {pull}36354[#36354] (issue: {issue}36249[#36249])
* [Close Index API] Refactor MetadataIndexStateService {pull}36354[#36354] (issue: {issue}36249[#36249])
* [Close Index API] Add TransportShardCloseAction for pre-closing verifications {pull}36249[#36249]
* TransportResyncReplicationAction should not honour blocks {pull}35795[#35795] (issues: {issue}35332[#35332], {issue}35597[#35597])
* Expose all permits acquisition in IndexShard and TransportReplicationAction {pull}35540[#35540] (issue: {issue}33888[#33888])
@@ -399,7 +399,7 @@ ZenDiscovery::
* [Zen2] PersistedState interface implementation {pull}35819[#35819]
* [Zen2] Support rolling upgrades from Zen1 {pull}35737[#35737]
* [Zen2] Add lag detector {pull}35685[#35685]
* [Zen2] Move ClusterState fields to be persisted to ClusterState.MetaData {pull}35625[#35625]
* [Zen2] Move ClusterState fields to be persisted to ClusterState.Metadata {pull}35625[#35625]
* [Zen2] Introduce ClusterBootstrapService {pull}35488[#35488]
* [Zen2] Introduce vote withdrawal {pull}35446[#35446]
* Zen2: Add basic Zen1 transport-level BWC {pull}35443[#35443]
@@ -514,7 +514,7 @@ Features/Ingest::
* ingest: support default pipelines + bulk upserts {pull}36618[#36618] (issue: {issue}36219[#36219])
Features/Java High Level REST Client::
* Update IndexTemplateMetaData to allow unknown fields {pull}38448[#38448] (issue: {issue}36938[#36938])
* Update IndexTemplateMetadata to allow unknown fields {pull}38448[#38448] (issue: {issue}36938[#36938])
* `if_seq_no` and `if_primary_term` parameters aren't wired correctly in REST Client's CRUD API {pull}38411[#38411]
* Update Rollup Caps to allow unknown fields {pull}38339[#38339] (issue: {issue}36938[#36938])
* Fix ILM explain response to allow unknown fields {pull}38054[#38054] (issue: {issue}36938[#36938])
@@ -571,7 +571,7 @@ Discovery-Plugins::
Distributed::
* [Close Index API] Mark shard copy as stale if needed during shard verification {pull}36755[#36755]
* [Close Index API] Refactor MetaDataIndexStateService {pull}36354[#36354] (issue: {issue}36249[#36249])
* [Close Index API] Refactor MetadataIndexStateService {pull}36354[#36354] (issue: {issue}36249[#36249])
* [Close Index API] Add TransportShardCloseAction for pre-closing verifications {pull}36249[#36249]
* TransportResyncReplicationAction should not honour blocks {pull}35795[#35795] (issues: {issue}35332[#35332], {issue}35597[#35597])
* Expose all permits acquisition in IndexShard and TransportReplicationAction {pull}35540[#35540] (issue: {issue}33888[#33888])
@@ -1076,7 +1076,7 @@ ZenDiscovery::
* PersistedState interface implementation {pull}35819[#35819]
* Support rolling upgrades from Zen1 {pull}35737[#35737]
* Add lag detector {pull}35685[#35685]
* Move ClusterState fields to be persisted to ClusterState.MetaData {pull}35625[#35625]
* Move ClusterState fields to be persisted to ClusterState.Metadata {pull}35625[#35625]
* Introduce ClusterBootstrapService {pull}35488[#35488]
* Introduce vote withdrawal {pull}35446[#35446]
* Add basic Zen1 transport-level BWC {pull}35443[#35443]
@@ -1251,7 +1251,7 @@ Features/Ingest::
Features/Java High Level REST Client::
* Drop extra level from user parser {pull}34932[#34932]
* Update IndexTemplateMetaData to allow unknown fields {pull}38448[#38448] (issue: {issue}36938[#36938])
* Update IndexTemplateMetadata to allow unknown fields {pull}38448[#38448] (issue: {issue}36938[#36938])
* `if_seq_no` and `if_primary_term` parameters aren't wired correctly in REST Client's CRUD API {pull}38411[#38411]
* Update Rollup Caps to allow unknown fields {pull}38339[#38339] (issue: {issue}36938[#36938])
* Fix ILM explain response to allow unknown fields {pull}38054[#38054] (issue: {issue}36938[#36938])
@@ -285,7 +285,7 @@ Engine::
* Add a merge policy that prunes ID postings for soft-deleted but retained documents {pull}40741[#40741]
Features/Indices APIs::
* Remove "template" field in IndexTemplateMetaData {pull}42099[#42099] (issue: {issue}38502[#38502])
* Remove "template" field in IndexTemplateMetadata {pull}42099[#42099] (issue: {issue}38502[#38502])
Features/Ingest::
* Avoid HashMap construction on Grok non-match {pull}42444[#42444]
@@ -43,8 +43,8 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
/** per shard ctor */
InternalMatrixStats(String name, long count, RunningStats multiFieldStatsResults, MatrixStatsResults results,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
super(name, pipelineAggregators, metaData);
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
super(name, pipelineAggregators, metadata);
assert count >= 0;
this.stats = multiFieldStatsResults;
this.results = results;
@@ -240,7 +240,7 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
// return empty result iff all stats are null
if (aggs.isEmpty()) {
return new InternalMatrixStats(name, 0, null, new MatrixStatsResults(), pipelineAggregators(), getMetaData());
return new InternalMatrixStats(name, 0, null, new MatrixStatsResults(), pipelineAggregators(), getMetadata());
}
RunningStats runningStats = new RunningStats();
@@ -250,9 +250,9 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
if (reduceContext.isFinalReduce()) {
MatrixStatsResults results = new MatrixStatsResults(runningStats);
return new InternalMatrixStats(name, results.getDocCount(), runningStats, results, pipelineAggregators(), getMetaData());
return new InternalMatrixStats(name, results.getDocCount(), runningStats, results, pipelineAggregators(), getMetadata());
}
return new InternalMatrixStats(name, runningStats.docCount, runningStats, null, pipelineAggregators(), getMetaData());
return new InternalMatrixStats(name, runningStats.docCount, runningStats, null, pipelineAggregators(), getMetadata());
}
@Override
@@ -48,14 +48,14 @@ public class MatrixStatsAggregationBuilder
}
protected MatrixStatsAggregationBuilder(MatrixStatsAggregationBuilder clone,
AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metaData) {
super(clone, factoriesBuilder, metaData);
AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) {
super(clone, factoriesBuilder, metadata);
this.multiValueMode = clone.multiValueMode;
}
@Override
protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metaData) {
return new MatrixStatsAggregationBuilder(this, factoriesBuilder, metaData);
protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) {
return new MatrixStatsAggregationBuilder(this, factoriesBuilder, metadata);
}
/**
@@ -84,7 +84,7 @@ public class MatrixStatsAggregationBuilder
Map<String, ValuesSourceConfig<Numeric>> configs,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder) throws IOException {
return new MatrixStatsAggregatorFactory(name, configs, multiValueMode, queryShardContext, parent, subFactoriesBuilder, metaData);
return new MatrixStatsAggregatorFactory(name, configs, multiValueMode, queryShardContext, parent, subFactoriesBuilder, metadata);
}
@Override
@@ -51,8 +51,8 @@ final class MatrixStatsAggregator extends MetricsAggregator {
MatrixStatsAggregator(String name, Map<String, ValuesSource.Numeric> valuesSources, SearchContext context,
Aggregator parent, MultiValueMode multiValueMode, List<PipelineAggregator> pipelineAggregators,
Map<String,Object> metaData) throws IOException {
super(name, context, parent, pipelineAggregators, metaData);
Map<String,Object> metadata) throws IOException {
super(name, context, parent, pipelineAggregators, metadata);
if (valuesSources != null && !valuesSources.isEmpty()) {
this.valuesSources = new NumericArrayValuesSource(valuesSources, multiValueMode);
stats = context.bigArrays().newObjectArray(1);
@@ -126,12 +126,12 @@ final class MatrixStatsAggregator extends MetricsAggregator {
if (valuesSources == null || bucket >= stats.size()) {
return buildEmptyAggregation();
}
return new InternalMatrixStats(name, stats.size(), stats.get(bucket), null, pipelineAggregators(), metaData());
return new InternalMatrixStats(name, stats.size(), stats.get(bucket), null, pipelineAggregators(), metadata());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalMatrixStats(name, 0, null, null, pipelineAggregators(), metaData());
return new InternalMatrixStats(name, 0, null, null, pipelineAggregators(), metadata());
}
@Override
@@ -43,8 +43,8 @@ final class MatrixStatsAggregatorFactory extends ArrayValuesSourceAggregatorFact
QueryShardContext queryShardContext,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metaData) throws IOException {
super(name, configs, queryShardContext, parent, subFactoriesBuilder, metaData);
Map<String, Object> metadata) throws IOException {
super(name, configs, queryShardContext, parent, subFactoriesBuilder, metadata);
this.multiValueMode = multiValueMode;
}
@@ -52,9 +52,9 @@ final class MatrixStatsAggregatorFactory extends ArrayValuesSourceAggregatorFact
protected Aggregator createUnmapped(SearchContext searchContext,
Aggregator parent,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData)
Map<String, Object> metadata)
throws IOException {
return new MatrixStatsAggregator(name, null, searchContext, parent, multiValueMode, pipelineAggregators, metaData);
return new MatrixStatsAggregator(name, null, searchContext, parent, multiValueMode, pipelineAggregators, metadata);
}
@Override
@@ -63,7 +63,7 @@ final class MatrixStatsAggregatorFactory extends ArrayValuesSourceAggregatorFact
Aggregator parent,
boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
return new MatrixStatsAggregator(name, valuesSources, searchContext, parent, multiValueMode, pipelineAggregators, metaData);
Map<String, Object> metadata) throws IOException {
return new MatrixStatsAggregator(name, valuesSources, searchContext, parent, multiValueMode, pipelineAggregators, metadata);
}
}
|
|
|
@@ -56,8 +56,8 @@ public abstract class ArrayValuesSourceAggregationBuilder<VS extends ValuesSourc
super(name, valuesSourceType, targetValueType);
}
protected LeafOnly(LeafOnly<VS, AB> clone, Builder factoriesBuilder, Map<String, Object> metaData) {
super(clone, factoriesBuilder, metaData);
protected LeafOnly(LeafOnly<VS, AB> clone, Builder factoriesBuilder, Map<String, Object> metadata) {
super(clone, factoriesBuilder, metadata);
if (factoriesBuilder.count() > 0) {
throw new AggregationInitializationException("Aggregator [" + name + "] of type ["
+ getType() + "] cannot accept sub-aggregations");
@@ -109,8 +109,8 @@ public abstract class ArrayValuesSourceAggregationBuilder<VS extends ValuesSourc
}
protected ArrayValuesSourceAggregationBuilder(ArrayValuesSourceAggregationBuilder<VS, AB> clone,
Builder factoriesBuilder, Map<String, Object> metaData) {
super(clone, factoriesBuilder, metaData);
Builder factoriesBuilder, Map<String, Object> metadata) {
super(clone, factoriesBuilder, metadata);
this.valuesSourceType = clone.valuesSourceType;
this.targetValueType = clone.targetValueType;
this.fields = new ArrayList<>(clone.fields);

@@ -39,8 +39,8 @@ public abstract class ArrayValuesSourceAggregatorFactory<VS extends ValuesSource
public ArrayValuesSourceAggregatorFactory(String name, Map<String, ValuesSourceConfig<VS>> configs,
QueryShardContext queryShardContext, AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metaData) throws IOException {
super(name, queryShardContext, parent, subFactoriesBuilder, metaData);
Map<String, Object> metadata) throws IOException {
super(name, queryShardContext, parent, subFactoriesBuilder, metadata);
this.configs = configs;
}
@@ -49,7 +49,7 @@ public abstract class ArrayValuesSourceAggregatorFactory<VS extends ValuesSource
Aggregator parent,
boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
Map<String, Object> metadata) throws IOException {
HashMap<String, VS> valuesSources = new HashMap<>();
for (Map.Entry<String, ValuesSourceConfig<VS>> config : configs.entrySet()) {
@@ -59,22 +59,22 @@ public abstract class ArrayValuesSourceAggregatorFactory<VS extends ValuesSource
}
}
if (valuesSources.isEmpty()) {
return createUnmapped(searchContext, parent, pipelineAggregators, metaData);
return createUnmapped(searchContext, parent, pipelineAggregators, metadata);
}
return doCreateInternal(valuesSources, searchContext, parent,
collectsFromSingleBucket, pipelineAggregators, metaData);
collectsFromSingleBucket, pipelineAggregators, metadata);
}
protected abstract Aggregator createUnmapped(SearchContext searchContext,
Aggregator parent,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException;
Map<String, Object> metadata) throws IOException;
protected abstract Aggregator doCreateInternal(Map<String, VS> valuesSources,
SearchContext searchContext,
Aggregator parent,
boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException;
Map<String, Object> metadata) throws IOException;
}
@@ -70,7 +70,7 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern
@Override
protected InternalMatrixStats createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
Map<String, Object> metadata) {
double[] values = new double[fields.length];
for (int i = 0; i < fields.length; i++) {
values[i] = randomDouble();
@@ -79,7 +79,7 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern
RunningStats runningStats = new RunningStats();
runningStats.add(fields, values);
MatrixStatsResults matrixStatsResults = hasMatrixStatsResults ? new MatrixStatsResults(runningStats) : null;
return new InternalMatrixStats(name, 1L, runningStats, matrixStatsResults, Collections.emptyList(), metaData);
return new InternalMatrixStats(name, 1L, runningStats, matrixStatsResults, Collections.emptyList(), metadata);
}
@Override
@@ -93,7 +93,7 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern
long docCount = instance.getDocCount();
RunningStats runningStats = instance.getStats();
MatrixStatsResults matrixStatsResults = instance.getResults();
Map<String, Object> metaData = instance.getMetaData();
Map<String, Object> metadata = instance.getMetadata();
switch (between(0, 3)) {
case 0:
name += randomAlphaOfLength(5);
@@ -117,15 +117,15 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern
break;
case 3:
default:
if (metaData == null) {
metaData = new HashMap<>(1);
if (metadata == null) {
metadata = new HashMap<>(1);
} else {
metaData = new HashMap<>(instance.getMetaData());
metadata = new HashMap<>(instance.getMetadata());
}
metaData.put(randomAlphaOfLength(15), randomInt());
metadata.put(randomAlphaOfLength(15), randomInt());
break;
}
return new InternalMatrixStats(name, docCount, runningStats, matrixStatsResults, Collections.emptyList(), metaData);
return new InternalMatrixStats(name, docCount, runningStats, matrixStatsResults, Collections.emptyList(), metadata);
}
@Override
@@ -22,7 +22,7 @@ package org.elasticsearch.analysis.common;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,7 +43,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testNGramDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
@@ -63,7 +63,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testNGramDeprecationError() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, null))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, null))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "nGram")
@@ -82,7 +82,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testEdgeNGramDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_4_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
@@ -101,7 +101,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testEdgeNGramDeprecationError() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, null))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, null))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "edgeNGram")
@@ -120,7 +120,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testStandardHtmlStripAnalyzerDeprecationError() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
.putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b")
@@ -139,7 +139,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testStandardHtmlStripAnalyzerDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
VersionUtils.getPreviousVersion(Version.V_7_0_0)))
.put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
@@ -162,7 +162,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testnGramFilterInCustomAnalyzerDeprecationError() throws IOException {
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
@@ -182,7 +182,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
*/
public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOException {
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
@@ -221,7 +221,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
public void doTestPrebuiltTokenizerDeprecation(String deprecatedName, String replacement, Version version, boolean expectWarning)
throws IOException {
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
.put(IndexMetadata.SETTING_VERSION_CREATED, version).build();
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
Map<String, TokenizerFactory> tokenizers = createTestAnalysis(
@@ -240,7 +240,7 @@ public class CommonAnalysisPluginTests extends ESTestCase {
public void doTestCustomTokenizerDeprecation(String deprecatedName, String replacement, Version version, boolean expectWarning)
throws IOException {
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED, version)
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "my_tokenizer")
.put("index.analysis.tokenizer.my_tokenizer.type", deprecatedName)
@@ -23,7 +23,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
@@ -100,7 +100,7 @@ public class CompoundAnalysisTests extends ESTestCase {
String json = "/org/elasticsearch/analysis/common/test1.json";
return Settings.builder()
.loadFromStream(json, getClass().getResourceAsStream(json), false)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
}
@@ -109,7 +109,7 @@ public class CompoundAnalysisTests extends ESTestCase {
String yaml = "/org/elasticsearch/analysis/common/test1.yml";
return Settings.builder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml), false)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
}
@@ -21,7 +21,7 @@ package org.elasticsearch.analysis.common;
import org.apache.lucene.analysis.Tokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
@@ -45,7 +45,7 @@ public class EdgeNGramTokenizerTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, version)
.put(IndexMetadata.SETTING_VERSION_CREATED, version)
.put("index.analysis.analyzer.my_analyzer.tokenizer", tokenizer)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
@@ -20,7 +20,7 @@
package org.elasticsearch.analysis.common;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -41,7 +41,7 @@ public class HtmlStripCharFilterFactoryTests extends ESTestCase {
*/
public void testDeprecationWarning() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_3_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_3_0, Version.CURRENT))
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
@@ -59,7 +59,7 @@ public class HtmlStripCharFilterFactoryTests extends ESTestCase {
*/
public void testNoDeprecationWarningPre6_3() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_2_4))
.build();
@@ -20,7 +20,7 @@
package org.elasticsearch.analysis.common;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
@@ -41,7 +41,7 @@ public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.t.type", "truncate")
.put("index.analysis.filter.t.length", "2")
.put("index.analysis.filter.multiplexFilter.type", "multiplexer")
@@ -77,7 +77,7 @@ public class MultiplexerTokenFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.t.type", "truncate")
.put("index.analysis.filter.t.length", "2")
.put("index.analysis.filter.multiplexFilter.type", "multiplexer")
@@ -25,7 +25,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.index.Index;
@@ -154,7 +154,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
builder.put("side", "back");
}
Settings settings = builder.build();
Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, v.id).build();
Settings indexSettings = newAnalysisSettingsBuilder().put(IndexMetadata.SETTING_VERSION_CREATED, v.id).build();
Tokenizer tokenizer = new MockTokenizer();
tokenizer.setReader(new StringReader("foo bar"));
TokenStream edgeNGramTokenFilter =
@@ -20,7 +20,7 @@
package org.elasticsearch.analysis.common;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -38,7 +38,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.loadFromStream(json, getClass().getResourceAsStream(json), false)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

@@ -20,7 +20,7 @@
package org.elasticsearch.analysis.common;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
@@ -44,7 +44,7 @@ public class PredicateTokenScriptFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.f.type", "predicate_token_filter")
.put("index.analysis.filter.f.script.source", "my_script")
.put("index.analysis.analyzer.myAnalyzer.type", "custom")
@@ -20,7 +20,7 @@
package org.elasticsearch.analysis.common;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
@@ -43,7 +43,7 @@ public class ScriptedConditionTokenFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.cond.type", "condition")
.put("index.analysis.filter.cond.script.source", "token.getPosition() > 1")
.putList("index.analysis.filter.cond.filter", "uppercase")

@@ -38,7 +38,7 @@ import java.io.IOException;
import java.io.StringReader;
import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_VERSION_CREATED;
import static org.hamcrest.Matchers.instanceOf;
public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
@@ -25,7 +25,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -69,7 +69,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
Settings settings = Settings.builder().
loadFromStream(json, getClass().getResourceAsStream(json), false)
.put(Environment.PATH_HOME_SETTING.getKey(), home)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers;
@@ -89,7 +89,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testSynonymWordDeleteByAnalyzer() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonym.type", "synonym")
.putList("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
@@ -110,7 +110,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testExpandSynonymWordDeleteByAnalyzer() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonym_expand.type", "synonym")
.putList("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
@@ -131,7 +131,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testSynonymsWrappedByMultiplexer() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms.type", "synonym")
.putList("index.analysis.filter.synonyms.synonyms", "programmer, developer")
@@ -152,7 +152,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testAsciiFoldingFilterForSynonyms() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms.type", "synonym")
.putList("index.analysis.filter.synonyms.synonyms", "hoj, height")
@@ -169,7 +169,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testPreconfigured() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms.type", "synonym")
.putList("index.analysis.filter.synonyms.synonyms", "würst, sausage")
@@ -186,7 +186,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testChainedSynonymFilters() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms1.type", "synonym")
.putList("index.analysis.filter.synonyms1.synonyms", "term1, term2")
@@ -205,7 +205,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testShingleFilters() {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms.type", "synonym")
@@ -225,7 +225,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testTokenFiltersBypassSynonymAnalysis() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.putList("word_list", "a")
.put("hyphenation_patterns_path", "foo")
@@ -257,7 +257,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
));
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
@@ -281,7 +281,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
}
Settings settings2 = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
@@ -305,7 +305,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
public void testDisallowedTokenFilters() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
@@ -334,7 +334,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
}
settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
@@ -356,7 +356,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
assertWarnings(expectedWarnings.toArray(new String[0]));
settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
.put("path.home", createTempDir().toString())
.put("preserve_original", "false")
@@ -23,7 +23,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
@@ -39,7 +39,7 @@ import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStre
public class WhitespaceTokenizerFactoryTests extends ESTestCase {
public void testSimpleWhiteSpaceTokenizer() throws IOException {
final Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
final Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(new Index("test", "_na_"), indexSettings);
WhitespaceTokenizer tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen",
Settings.EMPTY).create();
@@ -51,7 +51,7 @@ public class WhitespaceTokenizerFactoryTests extends ESTestCase {
}
public void testMaxTokenLength() throws IOException {
final Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
final Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(new Index("test", "_na_"), indexSettings);
final Settings settings = Settings.builder().put(WhitespaceTokenizerFactory.MAX_TOKEN_LENGTH, 2).build();
WhitespaceTokenizer tokenizer = (WhitespaceTokenizer) new WhitespaceTokenizerFactory(indexProperties, null, "whitespace_maxlen",
@@ -21,7 +21,7 @@ package org.elasticsearch.analysis.common;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
@@ -125,7 +125,7 @@ public class WordDelimiterGraphTokenFilterFactoryTests
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED,
.put(IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, VersionUtils.getPreviousVersion(Version.V_7_3_0)))
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
@@ -148,7 +148,7 @@ public class WordDelimiterGraphTokenFilterFactoryTests
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
.build();
@@ -102,7 +102,7 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
.append('}')
.append('>');
String dynamicIndexName = builder.toString();
ingestDocument.setFieldValue(IngestDocument.MetaData.INDEX.getFieldName(), dynamicIndexName);
ingestDocument.setFieldValue(IngestDocument.Metadata.INDEX.getFieldName(), dynamicIndexName);
return ingestDocument;
}
@@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.IngestDocument.MetaData;
import org.elasticsearch.ingest.IngestDocument.Metadata;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestTemplateService;
@@ -126,25 +126,25 @@ public class AppendProcessorTests extends ESTestCase {
public void testAppendMetadataExceptVersion() throws Exception {
// here any metadata field value becomes a list, which won't make sense in most of the cases,
// but support for append is streamlined like for set so we test it
MetaData randomMetaData = randomFrom(MetaData.INDEX, MetaData.TYPE, MetaData.ID, MetaData.ROUTING);
Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.TYPE, Metadata.ID, Metadata.ROUTING);
List<String> values = new ArrayList<>();
Processor appendProcessor;
if (randomBoolean()) {
String value = randomAlphaOfLengthBetween(1, 10);
values.add(value);
appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), value);
appendProcessor = createAppendProcessor(randomMetadata.getFieldName(), value);
} else {
int valuesSize = randomIntBetween(0, 10);
for (int i = 0; i < valuesSize; i++) {
values.add(randomAlphaOfLengthBetween(1, 10));
}
appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), values);
appendProcessor = createAppendProcessor(randomMetadata.getFieldName(), values);
}
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetaData.getFieldName());
Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetadata.getFieldName());
appendProcessor.execute(ingestDocument);
List<?> list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class);
List<?> list = ingestDocument.getFieldValue(randomMetadata.getFieldName(), List.class);
if (initialValue == null) {
assertThat(list, equalTo(values));
} else {
@@ -102,7 +102,7 @@ public class ForEachProcessorTests extends ESTestCase {
assertThat(ingestDocument.getFieldValue("values", List.class), equalTo(Arrays.asList("A", "B", "c")));
}
public void testMetaDataAvailable() throws Exception {
public void testMetadataAvailable() throws Exception {
List<Map<String, Object>> values = new ArrayList<>();
values.add(new HashMap<>());
values.add(new HashMap<>());
@@ -20,7 +20,7 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.IngestDocument.MetaData;
import org.elasticsearch.ingest.IngestDocument.Metadata;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestTemplateService;
@@ -101,27 +101,27 @@ public class SetProcessorTests extends ESTestCase {
}
public void testSetMetadataExceptVersion() throws Exception {
MetaData randomMetaData = randomFrom(MetaData.INDEX, MetaData.TYPE, MetaData.ID, MetaData.ROUTING);
Processor processor = createSetProcessor(randomMetaData.getFieldName(), "_value", true);
Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.TYPE, Metadata.ID, Metadata.ROUTING);
Processor processor = createSetProcessor(randomMetadata.getFieldName(), "_value", true);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(randomMetaData.getFieldName(), String.class), Matchers.equalTo("_value"));
assertThat(ingestDocument.getFieldValue(randomMetadata.getFieldName(), String.class), Matchers.equalTo("_value"));
}
public void testSetMetadataVersion() throws Exception {
long version = randomNonNegativeLong();
Processor processor = createSetProcessor(MetaData.VERSION.getFieldName(), version, true);
Processor processor = createSetProcessor(Metadata.VERSION.getFieldName(), version, true);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(MetaData.VERSION.getFieldName(), Long.class), Matchers.equalTo(version));
assertThat(ingestDocument.getFieldValue(Metadata.VERSION.getFieldName(), Long.class), Matchers.equalTo(version));
}
public void testSetMetadataVersionType() throws Exception {
String versionType = randomFrom("internal", "external", "external_gte");
Processor processor = createSetProcessor(MetaData.VERSION_TYPE.getFieldName(), versionType, true);
Processor processor = createSetProcessor(Metadata.VERSION_TYPE.getFieldName(), versionType, true);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(MetaData.VERSION_TYPE.getFieldName(), String.class), Matchers.equalTo(versionType));
assertThat(ingestDocument.getFieldValue(Metadata.VERSION_TYPE.getFieldName(), String.class), Matchers.equalTo(versionType));
}
private static Processor createSetProcessor(String fieldName, Object fieldValue, boolean overrideEnabled) {
@@ -31,7 +31,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.LeafNumericFieldData;
@@ -181,7 +181,7 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
doc.add(new SortedNumericDocValuesField("scaled_float2", 12));
w.addDocument(doc);
try (DirectoryReader reader = DirectoryReader.open(w)) {
IndexMetaData indexMetadata = new IndexMetaData.Builder("index").settings(
IndexMetadata indexMetadata = new IndexMetadata.Builder("index").settings(
Settings.builder()
.put("index.version.created", Version.CURRENT)
.put("index.number_of_shards", 1)
@@ -69,16 +69,16 @@ public class ChildrenAggregationBuilder
}
protected ChildrenAggregationBuilder(ChildrenAggregationBuilder clone,
Builder factoriesBuilder, Map<String, Object> metaData) {
super(clone, factoriesBuilder, metaData);
Builder factoriesBuilder, Map<String, Object> metadata) {
super(clone, factoriesBuilder, metadata);
this.childType = clone.childType;
this.childFilter = clone.childFilter;
this.parentFilter = clone.parentFilter;
}
@Override
protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) {
return new ChildrenAggregationBuilder(this, factoriesBuilder, metaData);
protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
return new ChildrenAggregationBuilder(this, factoriesBuilder, metadata);
}
/**
@@ -105,7 +105,7 @@ public class ChildrenAggregationBuilder
AggregatorFactory parent,
Builder subFactoriesBuilder) throws IOException {
return new ChildrenAggregatorFactory(name, config, childFilter, parentFilter, queryShardContext, parent,
subFactoriesBuilder, metaData);
subFactoriesBuilder, metadata);
}
@Override
@@ -48,8 +48,8 @@ public class ChildrenAggregatorFactory extends ValuesSourceAggregatorFactory<Wit
QueryShardContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metaData) throws IOException {
super(name, config, context, parent, subFactoriesBuilder, metaData);
Map<String, Object> metadata) throws IOException {
super(name, config, context, parent, subFactoriesBuilder, metadata);
this.childFilter = childFilter;
this.parentFilter = parentFilter;
@@ -57,11 +57,11 @@ public class ChildrenAggregatorFactory extends ValuesSourceAggregatorFactory<Wit
@Override
protected Aggregator createUnmapped(SearchContext searchContext, Aggregator parent,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
return new NonCollectingAggregator(name, searchContext, parent, pipelineAggregators, metaData) {
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) throws IOException {
return new NonCollectingAggregator(name, searchContext, parent, pipelineAggregators, metadata) {
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalChildren(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
return new InternalChildren(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metadata());
}
};
}
@@ -71,12 +71,12 @@ public class ChildrenAggregatorFactory extends ValuesSourceAggregatorFactory<Wit
SearchContext searchContext, Aggregator parent,
boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
Map<String, Object> metadata) throws IOException {
long maxOrd = valuesSource.globalMaxOrd(searchContext.searcher());
if (collectsFromSingleBucket) {
return new ParentToChildrenAggregator(name, factories, searchContext, parent, childFilter,
parentFilter, valuesSource, maxOrd, pipelineAggregators, metaData);
parentFilter, valuesSource, maxOrd, pipelineAggregators, metadata);
} else {
return asMultiBucketAggregator(this, searchContext, parent);
}
@@ -42,19 +42,19 @@ public class ChildrenToParentAggregator extends ParentJoinAggregator {
public ChildrenToParentAggregator(String name, AggregatorFactories factories,
SearchContext context, Aggregator parent, Query childFilter,
Query parentFilter, ValuesSource.Bytes.WithOrdinals valuesSource,
long maxOrd, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, childFilter, parentFilter, valuesSource, maxOrd, pipelineAggregators, metaData);
long maxOrd, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) throws IOException {
super(name, factories, context, parent, childFilter, parentFilter, valuesSource, maxOrd, pipelineAggregators, metadata);
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalParent(name, bucketDocCount(owningBucketOrdinal),
bucketAggregations(owningBucketOrdinal), pipelineAggregators(), metaData());
bucketAggregations(owningBucketOrdinal), pipelineAggregators(), metadata());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalParent(name, 0, buildEmptySubAggregations(), pipelineAggregators(),
metaData());
metadata());
}
}
@@ -33,8 +33,8 @@ import java.util.Map;
*/
public class InternalChildren extends InternalSingleBucketAggregation implements Children {
public InternalChildren(String name, long docCount, InternalAggregations aggregations, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
super(name, docCount, aggregations, pipelineAggregators, metaData);
Map<String, Object> metadata) {
super(name, docCount, aggregations, pipelineAggregators, metadata);
}
/**
@@ -51,6 +51,6 @@ public class InternalChildren extends InternalSingleBucketAggregation implements
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalChildren(name, docCount, subAggregations, pipelineAggregators(), getMetaData());
return new InternalChildren(name, docCount, subAggregations, pipelineAggregators(), getMetadata());
}
}

@@ -33,8 +33,8 @@ import java.util.Map;
*/
public class InternalParent extends InternalSingleBucketAggregation implements Parent {
public InternalParent(String name, long docCount, InternalAggregations aggregations, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
super(name, docCount, aggregations, pipelineAggregators, metaData);
Map<String, Object> metadata) {
super(name, docCount, aggregations, pipelineAggregators, metadata);
}
/**
@@ -51,6 +51,6 @@ public class InternalParent extends InternalSingleBucketAggregation implements P
@Override
protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) {
return new InternalParent(name, docCount, subAggregations, pipelineAggregators(), getMetaData());
return new InternalParent(name, docCount, subAggregations, pipelineAggregators(), getMetadata());
}
}
@@ -69,16 +69,16 @@ public class ParentAggregationBuilder
}
protected ParentAggregationBuilder(ParentAggregationBuilder clone,
Builder factoriesBuilder, Map<String, Object> metaData) {
super(clone, factoriesBuilder, metaData);
Builder factoriesBuilder, Map<String, Object> metadata) {
super(clone, factoriesBuilder, metadata);
this.childType = clone.childType;
this.childFilter = clone.childFilter;
this.parentFilter = clone.parentFilter;
}
@Override
protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) {
return new ParentAggregationBuilder(this, factoriesBuilder, metaData);
protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
return new ParentAggregationBuilder(this, factoriesBuilder, metadata);
}
/**
@@ -105,7 +105,7 @@ public class ParentAggregationBuilder
AggregatorFactory parent,
Builder subFactoriesBuilder) throws IOException {
return new ParentAggregatorFactory(name, config, childFilter, parentFilter, queryShardContext, parent,
subFactoriesBuilder, metaData);
subFactoriesBuilder, metadata);
}
@Override
@@ -48,8 +48,8 @@ public class ParentAggregatorFactory extends ValuesSourceAggregatorFactory<WithO
QueryShardContext queryShardContext,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metaData) throws IOException {
super(name, config, queryShardContext, parent, subFactoriesBuilder, metaData);
Map<String, Object> metadata) throws IOException {
super(name, config, queryShardContext, parent, subFactoriesBuilder, metadata);
this.childFilter = childFilter;
this.parentFilter = parentFilter;
@@ -57,11 +57,11 @@ public class ParentAggregatorFactory extends ValuesSourceAggregatorFactory<WithO
@Override
protected Aggregator createUnmapped(SearchContext searchContext, Aggregator parent,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
return new NonCollectingAggregator(name, searchContext, parent, pipelineAggregators, metaData) {
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) throws IOException {
return new NonCollectingAggregator(name, searchContext, parent, pipelineAggregators, metadata) {
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalParent(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
return new InternalParent(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metadata());
}
};
}
@@ -71,12 +71,12 @@ public class ParentAggregatorFactory extends ValuesSourceAggregatorFactory<WithO
SearchContext searchContext, Aggregator children,
boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
Map<String, Object> metadata) throws IOException {
long maxOrd = valuesSource.globalMaxOrd(searchContext.searcher());
if (collectsFromSingleBucket) {
return new ChildrenToParentAggregator(name, factories, searchContext, children, childFilter,
parentFilter, valuesSource, maxOrd, pipelineAggregators, metaData);
parentFilter, valuesSource, maxOrd, pipelineAggregators, metadata);
} else {
return asMultiBucketAggregator(this, searchContext, children);
}
@@ -74,8 +74,8 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements
ValuesSource.Bytes.WithOrdinals valuesSource,
long maxOrd,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, pipelineAggregators, metaData);
Map<String, Object> metadata) throws IOException {
super(name, factories, context, parent, pipelineAggregators, metadata);
if (maxOrd > Integer.MAX_VALUE) {
throw new IllegalStateException("the number of parent [" + maxOrd + "] + is greater than the allowed limit " +

@@ -38,19 +38,19 @@ public class ParentToChildrenAggregator extends ParentJoinAggregator {
public ParentToChildrenAggregator(String name, AggregatorFactories factories,
SearchContext context, Aggregator parent, Query childFilter,
Query parentFilter, ValuesSource.Bytes.WithOrdinals valuesSource,
long maxOrd, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
super(name, factories, context, parent, parentFilter, childFilter, valuesSource, maxOrd, pipelineAggregators, metaData);
long maxOrd, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) throws IOException {
super(name, factories, context, parent, parentFilter, childFilter, valuesSource, maxOrd, pipelineAggregators, metadata);
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalChildren(name, bucketDocCount(owningBucketOrdinal),
bucketAggregations(owningBucketOrdinal), pipelineAggregators(), metaData());
bucketAggregations(owningBucketOrdinal), pipelineAggregators(), metadata());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalChildren(name, 0, buildEmptySubAggregations(), pipelineAggregators(),
metaData());
metadata());
}
}
@@ -94,7 +94,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
}
private static void checkIndexCompatibility(IndexSettings settings, String name) {
if (settings.getIndexMetaData().isRoutingPartitionedIndex()) {
if (settings.getIndexMetadata().isRoutingPartitionedIndex()) {
throw new IllegalStateException("cannot create join field [" + name + "] " +
"for the partitioned index " + "[" + settings.getIndex().getName() + "]");
}

@@ -23,7 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -220,8 +220,8 @@ public class ChildrenIT extends AbstractParentChildTestCase {
String childType = "variantsku";
assertAcked(
prepareCreate(indexName)
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0))
.setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0))
.addMapping("doc",
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true,
masterType, childType),
@ -33,7 +33,7 @@ import org.apache.lucene.search.TermInSetQuery;
|
|||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -282,7 +282,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
|
|||
}
|
||||
|
||||
private static ParentJoinFieldMapper createJoinFieldMapper() {
|
||||
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||
return new ParentJoinFieldMapper.Builder("join_field")
|
||||
.addParent(PARENT_TYPE, Collections.singleton(CHILD_TYPE))
|
||||
.build(new Mapper.BuilderContext(settings, new ContentPath(0)));
|
||||
|
|
|
@ -45,8 +45,8 @@ public class InternalChildrenTests extends InternalSingleBucketAggregationTestCa
|
|||
|
||||
@Override
|
||||
protected InternalChildren createTestInstance(String name, long docCount, InternalAggregations aggregations,
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
|
||||
return new InternalChildren(name, docCount, aggregations, pipelineAggregators, metaData);
|
||||
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
|
||||
return new InternalChildren(name, docCount, aggregations, pipelineAggregators, metadata);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -44,8 +44,8 @@ public class InternalParentTests extends InternalSingleBucketAggregationTestCase

@Override
protected InternalParent createTestInstance(String name, long docCount, InternalAggregations aggregations,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
return new InternalParent(name, docCount, aggregations, pipelineAggregators, metaData);
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
return new InternalParent(name, docCount, aggregations, pipelineAggregators, metadata);
}

@Override

@ -33,7 +33,7 @@ import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;

@ -170,7 +170,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
}

private static ParentJoinFieldMapper createJoinFieldMapper() {
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
return new ParentJoinFieldMapper.Builder("join_field")
.addParent(PARENT_TYPE, Collections.singleton(CHILD_TYPE))
.build(new Mapper.BuilderContext(settings, new ContentPath(0)));

@ -33,7 +33,7 @@ import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;

@ -92,7 +92,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
protected Settings createTestIndexSettings() {
return Settings.builder()
.put(super.createTestIndexSettings())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
}
@ -24,7 +24,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;

@ -77,7 +77,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
protected Settings createTestIndexSettings() {
return Settings.builder()
.put(super.createTestIndexSettings())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
}

@ -27,7 +27,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;

@ -68,7 +68,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
protected Settings createTestIndexSettings() {
return Settings.builder()
.put(super.createTestIndexSettings())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
}

@ -151,11 +151,11 @@ public class PercolatorFieldMapper extends FieldMapper {
}

static KeywordFieldMapper createExtractQueryFieldBuilder(String name, BuilderContext context) {
KeywordFieldMapper.Builder queryMetaDataFieldBuilder = new KeywordFieldMapper.Builder(name);
queryMetaDataFieldBuilder.docValues(false);
queryMetaDataFieldBuilder.store(false);
queryMetaDataFieldBuilder.indexOptions(IndexOptions.DOCS);
return queryMetaDataFieldBuilder.build(context);
KeywordFieldMapper.Builder queryMetadataFieldBuilder = new KeywordFieldMapper.Builder(name);
queryMetadataFieldBuilder.docValues(false);
queryMetadataFieldBuilder.store(false);
queryMetadataFieldBuilder.indexOptions(IndexOptions.DOCS);
return queryMetadataFieldBuilder.build(context);
}

static BinaryFieldMapper createQueryBuilderFieldBuilder(BuilderContext context) {
@ -78,7 +78,7 @@ import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;

@ -1115,8 +1115,8 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
}

private void addQuery(Query query, List<ParseContext.Document> docs) {
IndexMetaData build = IndexMetaData.builder("")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
IndexMetadata build = IndexMetadata.builder("")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1).numberOfReplicas(0).build();
IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,
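For reference, the hunk above shows the recurring test pattern in this commit: build an IndexMetadata (formerly IndexMetaData) and wrap it in IndexSettings. Below is a minimal sketch of that pattern using only the builders visible in these hunks; the class and method names (MetadataRenameExample, newTestIndexSettings) are illustrative and not part of this diff.

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;

public class MetadataRenameExample {
    // Build index metadata with the renamed IndexMetadata class, then derive IndexSettings,
    // exactly as the tests in this diff do.
    static IndexSettings newTestIndexSettings(String indexName) {
        IndexMetadata indexMetadata = IndexMetadata.builder(indexName)
            .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();
        return new IndexSettings(indexMetadata, Settings.EMPTY);
    }
}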
@ -42,7 +42,7 @@ import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

@ -183,8 +183,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {

DocumentMapper documentMapper = mapperService.documentMapper("doc");
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
IndexMetaData build = IndexMetaData.builder("")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
IndexMetadata build = IndexMetadata.builder("")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1).numberOfReplicas(0).build();
IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,

@ -237,8 +237,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
bq.add(rangeQuery2, Occur.MUST);

DocumentMapper documentMapper = mapperService.documentMapper("doc");
IndexMetaData build = IndexMetaData.builder("")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
IndexMetadata build = IndexMetadata.builder("")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1).numberOfReplicas(0).build();
IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);

@ -292,8 +292,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true);
DocumentMapper documentMapper = mapperService.documentMapper("doc");
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
IndexMetaData build = IndexMetaData.builder("")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
IndexMetadata build = IndexMetadata.builder("")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1).numberOfReplicas(0).build();
IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,

@ -311,8 +311,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
PhraseQuery phraseQuery = new PhraseQuery("field", "term");
DocumentMapper documentMapper = mapperService.documentMapper("doc");
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
IndexMetaData build = IndexMetaData.builder("")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
IndexMetadata build = IndexMetadata.builder("")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1).numberOfReplicas(0).build();
IndexSettings settings = new IndexSettings(build, Settings.EMPTY);
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(settings,

@ -29,7 +29,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;

@ -72,7 +72,7 @@ public class QueryBuilderStoreTests extends ESTestCase {
TermQueryBuilder[] queryBuilders = new TermQueryBuilder[randomIntBetween(1, 16)];
IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer());
config.setMergePolicy(NoMergePolicy.INSTANCE);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder(
new Mapper.BuilderContext(settings, new ContentPath(0)));
@ -41,8 +41,8 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

@ -26,10 +26,10 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

@ -47,7 +47,7 @@ import static org.hamcrest.Matchers.containsString;
* cluster....
*/
public class ReindexSourceTargetValidationTests extends ESTestCase {
private static final ClusterState STATE = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()
private static final ClusterState STATE = ClusterState.builder(new ClusterName("test")).metadata(Metadata.builder()
.put(index("target", "target_alias", "target_multi"), true)
.put(index("target2", "target_multi"), true)
.put(index("target_with_write_index", true, "target_multi_with_write_index"), true)

@ -130,17 +130,17 @@ public class ReindexSourceTargetValidationTests extends ESTestCase {
INDEX_NAME_EXPRESSION_RESOLVER, AUTO_CREATE_INDEX, STATE);
}

private static IndexMetaData index(String name, String... aliases) {
private static IndexMetadata index(String name, String... aliases) {
return index(name, null, aliases);
}

private static IndexMetaData index(String name, @Nullable Boolean writeIndex, String... aliases) {
IndexMetaData.Builder builder = IndexMetaData.builder(name).settings(Settings.builder()
private static IndexMetadata index(String name, @Nullable Boolean writeIndex, String... aliases) {
IndexMetadata.Builder builder = IndexMetadata.builder(name).settings(Settings.builder()
.put("index.version.created", Version.CURRENT.id)
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1));
for (String alias: aliases) {
builder.putAlias(AliasMetaData.builder(alias).writeIndex(writeIndex).build());
builder.putAlias(AliasMetadata.builder(alias).writeIndex(writeIndex).build());
}
return builder.build();
}
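The reindex test above also shows how cluster state is assembled after the rename: a Metadata.Builder collects IndexMetadata entries, and aliases use AliasMetadata. Below is a minimal sketch of that assembly, limited to the builder calls visible in the hunk; ClusterStateSketch and singleIndexState are illustrative names and not part of this diff.

import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.settings.Settings;

public class ClusterStateSketch {
    // One index named "target" with alias "target_alias", registered in the cluster metadata.
    static ClusterState singleIndexState() {
        IndexMetadata index = IndexMetadata.builder("target")
            .settings(Settings.builder()
                .put("index.version.created", Version.CURRENT.id)
                .put("index.number_of_shards", 1)
                .put("index.number_of_replicas", 1))
            .putAlias(AliasMetadata.builder("target_alias").build())
            .build();
        return ClusterState.builder(new ClusterName("test"))
            .metadata(Metadata.builder().put(index, true).build())
            .build();
    }
}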
@ -21,7 +21,7 @@ package org.elasticsearch.common.blobstore.url;

import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.DeleteResult;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;

@ -73,7 +73,7 @@ public class URLBlobContainer extends AbstractBlobContainer {
* This operation is not supported by URLBlobContainer
*/
@Override
public Map<String, BlobMetaData> listBlobs() throws IOException {
public Map<String, BlobMetadata> listBlobs() throws IOException {
throw new UnsupportedOperationException("URL repository doesn't support this operation");
}

@ -86,7 +86,7 @@ public class URLBlobContainer extends AbstractBlobContainer {
* This operation is not supported by URLBlobContainer
*/
@Override
public Map<String, BlobMetaData> listBlobsByPrefix(String blobNamePrefix) throws IOException {
public Map<String, BlobMetadata> listBlobsByPrefix(String blobNamePrefix) throws IOException {
throw new UnsupportedOperationException("URL repository doesn't support this operation");
}

@ -21,7 +21,7 @@ package org.elasticsearch.repositories.url;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobPath;

@ -82,7 +82,7 @@ public class URLRepository extends BlobStoreRepository {
/**
* Constructs a read-only URL-based repository
*/
public URLRepository(RepositoryMetaData metadata, Environment environment,
public URLRepository(RepositoryMetadata metadata, Environment environment,
NamedXContentRegistry namedXContentRegistry, ClusterService clusterService) {
super(metadata, false, namedXContentRegistry, clusterService);
@ -19,7 +19,7 @@

package org.elasticsearch.repositories.url;

import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;

@ -38,8 +38,8 @@ import static org.hamcrest.CoreMatchers.nullValue;

public class URLRepositoryTests extends ESTestCase {

private URLRepository createRepository(Settings baseSettings, RepositoryMetaData repositoryMetaData) {
return new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings),
private URLRepository createRepository(Settings baseSettings, RepositoryMetadata repositoryMetadata) {
return new URLRepository(repositoryMetadata, TestEnvironment.newEnvironment(baseSettings),
new NamedXContentRegistry(Collections.emptyList()), BlobStoreTestUtil.mockClusterService()) {
@Override
protected void assertSnapshotOrGenericThread() {

@ -55,8 +55,8 @@ public class URLRepositoryTests extends ESTestCase {
.put(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoPath)
.put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), repoPath)
.build();
RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetaData);
RepositoryMetadata repositoryMetadata = new RepositoryMetadata("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetadata);
repository.start();

assertThat("blob store has to be lazy initialized", repository.getBlobStore(), is(nullValue()));

@ -70,8 +70,8 @@ public class URLRepositoryTests extends ESTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), repoPath)
.build();
RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetaData);
RepositoryMetadata repositoryMetadata = new RepositoryMetadata("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetadata);
repository.start();
try {
repository.blobContainer();

@ -92,8 +92,8 @@ public class URLRepositoryTests extends ESTestCase {
.put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), repoPath)
.put(URLRepository.SUPPORTED_PROTOCOLS_SETTING.getKey(), "http,https")
.build();
RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetaData);
RepositoryMetadata repositoryMetadata = new RepositoryMetadata("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetadata);
repository.start();
try {
repository.blobContainer();

@ -109,8 +109,8 @@ public class URLRepositoryTests extends ESTestCase {
.put(URLRepository.ALLOWED_URLS_SETTING.getKey(), "file:/tmp/")
.put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), "file:/var/" )
.build();
RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetaData);
RepositoryMetadata repositoryMetadata = new RepositoryMetadata("url", URLRepository.TYPE, baseSettings);
final URLRepository repository = createRepository(baseSettings, repositoryMetadata);
repository.start();
try {
repository.blobContainer();
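The URL repository tests above all follow one construction path: build Settings, wrap them in a RepositoryMetadata (formerly RepositoryMetaData), then pass that to the repository constructor. A minimal sketch of that path follows; UrlRepositorySketch, newUrlRepository and the "/tmp/es-home" path are illustrative only, and the ClusterService is assumed to be supplied by the caller (the tests use a mock).

import java.util.Collections;

import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.repositories.url.URLRepository;

public class UrlRepositorySketch {
    // Builds a read-only URL repository from a RepositoryMetadata, mirroring the
    // constructor call exercised by URLRepositoryTests above.
    static URLRepository newUrlRepository(String repoPath, ClusterService clusterService) {
        Settings baseSettings = Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), "/tmp/es-home") // illustrative home path
            .put(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoPath)
            .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), repoPath)
            .build();
        RepositoryMetadata repositoryMetadata = new RepositoryMetadata("url", URLRepository.TYPE, baseSettings);
        return new URLRepository(repositoryMetadata, TestEnvironment.newEnvironment(baseSettings),
            new NamedXContentRegistry(Collections.emptyList()), clusterService);
    }
}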
@ -28,7 +28,7 @@ import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;

@ -61,8 +61,8 @@ public class Zen2RestApiIT extends ESNetty4IntegTestCase {
createIndex("test",
Settings.builder()
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.ZERO) // assign shards
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2) // causes rebalancing
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) // causes rebalancing
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
.build());
ensureGreen("test");

@ -22,7 +22,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin;

@ -37,7 +37,7 @@ public class IcuAnalyzerTests extends BaseTokenStreamTestCase {
public void testMixedAlphabetTokenization() throws IOException {

Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

@ -52,7 +52,7 @@ public class IcuAnalyzerTests extends BaseTokenStreamTestCase {

public void testMiddleDots() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

@ -66,7 +66,7 @@ public class IcuAnalyzerTests extends BaseTokenStreamTestCase {
public void testUnicodeNumericCharacters() throws IOException {

Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

@ -80,7 +80,7 @@ public class IcuAnalyzerTests extends BaseTokenStreamTestCase {
public void testBadSettings() {

Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put("mode", "wrong")
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
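The analysis-plugin tests in this part of the diff keep repeating the same two lines: a Settings object whose index version is keyed by IndexMetadata.SETTING_VERSION_CREATED, turned into IndexSettings via a test helper. A minimal sketch of that step, assuming IndexSettingsModule lives in org.elasticsearch.test as in the Elasticsearch test framework; AnalysisSettingsSketch is an illustrative name and not part of this diff.

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.IndexSettingsModule;

public class AnalysisSettingsSketch {
    // Per-index settings on the current version, keyed by the renamed IndexMetadata constant.
    static IndexSettings currentVersionIndexSettings() {
        Settings settings = Settings.builder()
            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
            .build();
        return IndexSettingsModule.newIndexSettings("index", settings);
    }
}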
@ -21,7 +21,7 @@ package org.elasticsearch.index.analysis;

import org.apache.lucene.analysis.icu.segmentation.ICUTokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;

@ -96,7 +96,7 @@ public class IcuTokenizerFactoryTests extends ESTestCase {

Settings settings = Settings.builder()
.loadFromStream(json, IcuTokenizerFactoryTests.class.getResourceAsStream(json), false)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();

@ -26,7 +26,7 @@ import org.apache.lucene.analysis.ja.JapaneseAnalyzer;
import org.apache.lucene.analysis.ja.JapaneseTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;

@ -214,7 +214,7 @@ public class KuromojiAnalysisTests extends ESTestCase {

Settings settings = Settings.builder()
.loadFromStream(json, KuromojiAnalysisTests.class.getResourceAsStream(json), false)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
return createTestAnalysis(new Index("test", "_na_"), nodeSettings, settings, new AnalysisKuromojiPlugin());

@ -355,7 +355,7 @@ public class KuromojiAnalysisTests extends ESTestCase {
Files.createDirectory(config);
Files.copy(dict, config.resolve("user_dict.txt"));
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), home)
.put(analysisSettings)
.build();

@ -25,7 +25,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ko.KoreanAnalyzer;
import org.apache.lucene.analysis.ko.KoreanTokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugin.analysis.nori.AnalysisNoriPlugin;

@ -177,7 +177,7 @@ public class NoriAnalysisTests extends ESTokenStreamTestCase {

public void testNoriReadingForm() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_filter.type", "nori_readingform")
.build();

@ -191,7 +191,7 @@ public class NoriAnalysisTests extends ESTokenStreamTestCase {

public void testNoriNumber() throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_filter.type", "nori_number")
.build();

@ -217,7 +217,7 @@ public class NoriAnalysisTests extends ESTokenStreamTestCase {
Files.createDirectory(config);
Files.copy(dict, config.resolve("user_dict.txt"));
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), home)
.put(analysisSettings)
.build();
@ -20,7 +20,7 @@
package org.elasticsearch.index.analysis;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase;

@ -51,7 +51,7 @@ public class AnalysisPhoneticFactoryTests extends AnalysisFactoryTestCase {
AnalysisPhoneticPlugin plugin = new AnalysisPhoneticPlugin();

Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

@ -62,7 +62,7 @@ public class AnalysisPhoneticFactoryTests extends AnalysisFactoryTestCase {
assertEquals("Token filter [phonetic] cannot be used to parse synonyms", e.getMessage());

settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(),
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(),
Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)))
.put("path.home", createTempDir().toString())
.build();

@ -24,7 +24,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.phonetic.DaitchMokotoffSoundexFilter;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.plugin.analysis.AnalysisPhoneticPlugin;

@ -45,7 +45,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase {
public void setup() throws IOException {
String yaml = "/org/elasticsearch/index/analysis/phonetic-1.yml";
Settings settings = Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml), false)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
this.analysis = createTestAnalysis(new Index("test", "_na_"), settings, new AnalysisPhoneticPlugin());
}

@ -24,7 +24,7 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@ -54,15 +54,15 @@ public class AnalysisPolishFactoryTests extends AnalysisFactoryTestCase {
// TODO: is this the right boilerplate? I forked this out of TransportAnalyzeAction.java:
Settings settings = Settings.builder()
// for _na_
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Environment environment = TestEnvironment.newEnvironment(settings);
IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build();
IndexSettings indexSettings = new IndexSettings(metaData, Settings.EMPTY);
IndexMetadata metadata = IndexMetadata.builder(IndexMetadata.INDEX_UUID_NA_VALUE).settings(settings).build();
IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY);
testThreadSafety(new PolishStemTokenFilterFactory(indexSettings, environment, "stempelpolishstem", settings));
}
@ -24,7 +24,7 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.metadata.IndexAbstraction.Index;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.TransportRequest;

@ -131,7 +131,7 @@ public class CustomAuthorizationEngineTests extends ESTestCase {
public void testAuthorizeIndexAction() {
CustomAuthorizationEngine engine = new CustomAuthorizationEngine();
Map<String, IndexAbstraction> indicesMap = new HashMap<>();
indicesMap.put("index", new Index(IndexMetaData.builder("index")
indicesMap.put("index", new Index(IndexMetadata.builder("index")
.settings(Settings.builder().put("index.version.created", Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(0)

@ -29,7 +29,7 @@ import org.elasticsearch.action.support.GroupedActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.DeleteResult;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;

@ -152,7 +152,7 @@ public class AzureBlobContainer extends AbstractBlobContainer {
}

@Override
public Map<String, BlobMetaData> listBlobsByPrefix(@Nullable String prefix) throws IOException {
public Map<String, BlobMetadata> listBlobsByPrefix(@Nullable String prefix) throws IOException {
logger.trace("listBlobsByPrefix({})", prefix);

try {

@ -164,7 +164,7 @@ public class AzureBlobContainer extends AbstractBlobContainer {
}

@Override
public Map<String, BlobMetaData> listBlobs() throws IOException {
public Map<String, BlobMetadata> listBlobs() throws IOException {
logger.trace("listBlobs()");
return listBlobsByPrefix(null);
}

@ -21,9 +21,9 @@ package org.elasticsearch.repositories.azure;

import com.microsoft.azure.storage.LocationMode;
import com.microsoft.azure.storage.StorageException;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.DeleteResult;

@ -50,7 +50,7 @@ public class AzureBlobStore implements BlobStore {
private final String container;
private final LocationMode locationMode;

public AzureBlobStore(RepositoryMetaData metadata, AzureStorageService service, ThreadPool threadPool) {
public AzureBlobStore(RepositoryMetadata metadata, AzureStorageService service, ThreadPool threadPool) {
this.container = Repository.CONTAINER_SETTING.get(metadata.settings());
this.clientName = Repository.CLIENT_NAME.get(metadata.settings());
this.service = service;

@ -104,7 +104,7 @@ public class AzureBlobStore implements BlobStore {
return service.getInputStream(clientName, container, blob);
}

public Map<String, BlobMetaData> listBlobsByPrefix(String keyPath, String prefix)
public Map<String, BlobMetadata> listBlobsByPrefix(String keyPath, String prefix)
throws URISyntaxException, StorageException, IOException {
return service.listBlobsByPrefix(clientName, container, keyPath, prefix);
}
@ -23,7 +23,7 @@ import com.microsoft.azure.storage.LocationMode;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobPath;

@ -78,7 +78,7 @@ public class AzureRepository extends BlobStoreRepository {
private final boolean readonly;

public AzureRepository(
final RepositoryMetaData metadata,
final RepositoryMetadata metadata,
final NamedXContentRegistry namedXContentRegistry,
final AzureStorageService storageService,
final ClusterService clusterService) {

@ -43,10 +43,10 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.DeleteResult;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.blobstore.support.PlainBlobMetadata;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;

@ -267,12 +267,12 @@ public class AzureStorageService {
return giveSocketPermissionsToStream(is);
}

public Map<String, BlobMetaData> listBlobsByPrefix(String account, String container, String keyPath, String prefix)
public Map<String, BlobMetadata> listBlobsByPrefix(String account, String container, String keyPath, String prefix)
throws URISyntaxException, StorageException, IOException {
// NOTE: this should be here: if (prefix == null) prefix = "";
// however, this is really inefficient since deleteBlobsByPrefix enumerates everything and
// then does a prefix match on the result; it should just call listBlobsByPrefix with the prefix!
final MapBuilder<String, BlobMetaData> blobsBuilder = MapBuilder.newMapBuilder();
final MapBuilder<String, BlobMetadata> blobsBuilder = MapBuilder.newMapBuilder();
final EnumSet<BlobListingDetails> enumBlobListingDetails = EnumSet.of(BlobListingDetails.METADATA);
final Tuple<CloudBlobClient, Supplier<OperationContext>> client = client(account);
final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);

@ -289,7 +289,7 @@ public class AzureStorageService {
final BlobProperties properties = ((CloudBlob) blobItem).getProperties();
final String name = blobPath.substring(keyPath.length());
logger.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength()));
blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength()));
blobsBuilder.put(name, new PlainBlobMetadata(name, properties.getLength()));
}
}
});
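The Azure listing code above returns blob listings as a map from blob name to BlobMetadata (formerly BlobMetaData), with PlainBlobMetadata as the concrete value. A minimal sketch of that shape follows, using a plain HashMap instead of the MapBuilder used in the hunk; BlobListingSketch and listingOf are illustrative names and not part of this diff.

import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.support.PlainBlobMetadata;

public class BlobListingSketch {
    // One entry per blob: the key is the blob name, the value carries the name and length.
    static Map<String, BlobMetadata> listingOf(String name, long lengthInBytes) {
        Map<String, BlobMetadata> blobs = new HashMap<>();
        blobs.put(name, new PlainBlobMetadata(name, lengthInBytes));
        return blobs;
    }
}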
@ -25,7 +25,7 @@ import com.microsoft.azure.storage.blob.BlobRequestOptions;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpServer;
import fixture.azure.AzureHttpHandler;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.blobstore.BlobContainer;

@ -139,13 +139,13 @@ public class AzureBlobContainerRetriesTests extends ESTestCase {
}
};

final RepositoryMetaData repositoryMetaData = new RepositoryMetaData("repository", AzureRepository.TYPE,
final RepositoryMetadata repositoryMetadata = new RepositoryMetadata("repository", AzureRepository.TYPE,
Settings.builder()
.put(CONTAINER_SETTING.getKey(), "container")
.put(ACCOUNT_SETTING.getKey(), clientName)
.build());

return new AzureBlobContainer(BlobPath.cleanPath(), new AzureBlobStore(repositoryMetaData, service, threadPool), threadPool);
return new AzureBlobContainer(BlobPath.cleanPath(), new AzureBlobStore(repositoryMetadata, service, threadPool), threadPool);
}

public void testReadNonexistentBlobThrowsNoSuchFileException() {

@ -20,7 +20,7 @@
package org.elasticsearch.repositories.azure;

import com.microsoft.azure.storage.LocationMode;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

@ -41,7 +41,7 @@ public class AzureRepositorySettingsTests extends ESTestCase {
.putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
.put(settings)
.build();
final AzureRepository azureRepository = new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings),
final AzureRepository azureRepository = new AzureRepository(new RepositoryMetadata("foo", "azure", internalSettings),
NamedXContentRegistry.EMPTY, mock(AzureStorageService.class), BlobStoreTestUtil.mockClusterService());
assertThat(azureRepository.getBlobStore(), is(nullValue()));
return azureRepository;

@ -20,7 +20,7 @@
package org.elasticsearch.repositories.gcs;

import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.DeleteResult;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;

@ -43,7 +43,7 @@ class GoogleCloudStorageBlobContainer extends AbstractBlobContainer {
}

@Override
public Map<String, BlobMetaData> listBlobs() throws IOException {
public Map<String, BlobMetadata> listBlobs() throws IOException {
return blobStore.listBlobs(path);
}

@ -53,7 +53,7 @@ class GoogleCloudStorageBlobContainer extends AbstractBlobContainer {
}

@Override
public Map<String, BlobMetaData> listBlobsByPrefix(String prefix) throws IOException {
public Map<String, BlobMetadata> listBlobsByPrefix(String prefix) throws IOException {
return blobStore.listBlobsByPrefix(path, prefix);
}
Some files were not shown because too many files have changed in this diff.