Deprecate slicing on `_uid`. (#29353)

`_id` should be used instead on 6.x.
Adrien Grand, 2018-04-10 14:28:30 +02:00, committed by GitHub
commit a091d950a7 (parent 03d1a7e132)
5 changed files with 102 additions and 14 deletions
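
For readers landing on this commit: after this change a sliced scroll should name `_id` (or simply rely on the default, which is now `_id`) instead of `_uid` on 6.x indices. A minimal usage sketch follows; the index name, query, size, and scroll timeout are illustrative placeholders, not part of this commit:

    // Hypothetical example: slice a scrolled search into 4 partitions by _id.
    SearchRequest request = new SearchRequest("my-index");
    SearchSourceBuilder source = new SearchSourceBuilder()
        .query(QueryBuilders.matchAllQuery())
        .size(1000)
        .slice(new SliceBuilder("_id", 0, 4)); // previously new SliceBuilder("_uid", 0, 4)
    request.source(source).scroll(TimeValue.timeValueMinutes(1));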

File: BulkByScrollParallelizationHelper.java

@@ -27,7 +27,7 @@ import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.index.Index;
-import org.elasticsearch.index.mapper.UidFieldMapper;
+import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.slice.SliceBuilder;
 import org.elasticsearch.tasks.TaskId;
@@ -127,7 +127,7 @@ class BulkByScrollParallelizationHelper {
         LeaderBulkByScrollTaskState worker = task.getLeaderState();
         int totalSlices = worker.getSlices();
         TaskId parentTaskId = new TaskId(localNodeId, task.getId());
-        for (final SearchRequest slice : sliceIntoSubRequests(request.getSearchRequest(), UidFieldMapper.NAME, totalSlices)) {
+        for (final SearchRequest slice : sliceIntoSubRequests(request.getSearchRequest(), IdFieldMapper.NAME, totalSlices)) {
             // TODO move the request to the correct node. maybe here or somehow do it as part of startup for reindex in general....
             Request requestForSlice = request.forSlice(parentTaskId, slice, totalSlices);
             ActionListener<BulkByScrollResponse> sliceListener = ActionListener.wrap(
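
The change above only swaps the field handed to `sliceIntoSubRequests`. For orientation, here is a simplified sketch of the fan-out idea; the method name and copy details below are assumptions for illustration, not the actual helper body:

    // Illustrative sketch only: fan one search request out into per-slice copies,
    // each carrying a SliceBuilder over the given field (now _id).
    static List<SearchRequest> fanOutIntoSlices(SearchRequest original, String field, int totalSlices) {
        List<SearchRequest> slices = new ArrayList<>(totalSlices);
        for (int sliceId = 0; sliceId < totalSlices; sliceId++) {
            SliceBuilder slice = new SliceBuilder(field, sliceId, totalSlices);
            SearchRequest copy = new SearchRequest(original.indices());
            copy.scroll(original.scroll());
            // Simplified: only the query is carried over here; the real helper copies the full source.
            copy.source(new SearchSourceBuilder().query(original.source().query()).slice(slice));
            slices.add(copy);
        }
        return slices;
    }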

File: REST test for sliced scroll (YAML)

@@ -42,8 +42,8 @@ setup:
 
 ---
 "Sliced scroll":
   - skip:
-      version: " - 5.3.0"
-      reason: Prior version uses a random seed per node to compute the hash of the keys.
+      version: " - 6.99.99"
+      reason: Disabled until pr 29353 is backported
   - do:
       search:

File: IdFieldMapper.java

@@ -161,7 +161,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
         @Override
         public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
             if (indexOptions() == IndexOptions.NONE) {
-                throw new IllegalArgumentException("Fielddata access on the _uid field is disallowed");
+                throw new IllegalArgumentException("Fielddata access on the _id field is disallowed");
             }
             final IndexFieldData.Builder fieldDataBuilder = new PagedBytesIndexFieldData.Builder(
                 TextFieldMapper.Defaults.FIELDDATA_MIN_FREQUENCY,

File: SliceBuilder.java

@@ -22,11 +22,14 @@ package org.elasticsearch.search.slice;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -53,6 +56,9 @@ import java.util.Objects;
  * {@link org.elasticsearch.search.slice.DocValuesSliceQuery} is used to filter the results.
  */
 public class SliceBuilder implements Writeable, ToXContentObject {
+
+    private static final DeprecationLogger DEPRECATION_LOG = new DeprecationLogger(Loggers.getLogger(SliceBuilder.class));
+
     public static final ParseField FIELD_FIELD = new ParseField("field");
     public static final ParseField ID_FIELD = new ParseField("id");
     public static final ParseField MAX_FIELD = new ParseField("max");
@@ -66,7 +72,7 @@ public class SliceBuilder implements Writeable, ToXContentObject {
     }
 
     /** Name of field to slice against (_uid by default) */
-    private String field = UidFieldMapper.NAME;
+    private String field = IdFieldMapper.NAME;
     /** The id of the slice */
     private int id = -1;
     /** Max number of slices */
@@ -75,7 +81,7 @@ public class SliceBuilder implements Writeable, ToXContentObject {
     private SliceBuilder() {}
 
     public SliceBuilder(int id, int max) {
-        this(UidFieldMapper.NAME, id, max);
+        this(IdFieldMapper.NAME, id, max);
     }
 
     /**
@@ -91,14 +97,23 @@ public class SliceBuilder implements Writeable, ToXContentObject {
     }
 
     public SliceBuilder(StreamInput in) throws IOException {
-        this.field = in.readString();
+        String field = in.readString();
+        if (UidFieldMapper.NAME.equals(field) && in.getVersion().before(Version.V_6_3_0)) {
+            // This is safe because _id and _uid are handled the same way in #toFilter
+            field = IdFieldMapper.NAME;
+        }
+        this.field = field;
         this.id = in.readVInt();
         this.max = in.readVInt();
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(field);
+        if (IdFieldMapper.NAME.equals(field) && out.getVersion().before(Version.V_6_3_0)) {
+            out.writeString(UidFieldMapper.NAME);
+        } else {
+            out.writeString(field);
+        }
         out.writeVInt(id);
         out.writeVInt(max);
     }
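
A quick sketch of how this wire compatibility plays out, mirroring what `testSerializationBackcompat` at the end of this commit verifies; the scenario and stream plumbing below are illustrative, using standard `org.elasticsearch.common.io.stream` classes:

    // Illustrative round trip against a pre-6.3 peer: _id is written as _uid on
    // the wire and mapped back to _id when read, so both sides agree.
    SliceBuilder original = new SliceBuilder(1, 5);       // field now defaults to _id
    BytesStreamOutput out = new BytesStreamOutput();
    out.setVersion(Version.V_6_2_0);
    original.writeTo(out);                                // writes "_uid" on the wire
    StreamInput in = out.bytes().streamInput();
    in.setVersion(Version.V_6_2_0);
    SliceBuilder read = new SliceBuilder(in);             // reads "_uid", stores _id
    assert IdFieldMapper.NAME.equals(read.getField());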
@@ -201,6 +216,14 @@ public class SliceBuilder implements Writeable, ToXContentObject {
             if (context.getIndexSettings().isSingleType()) {
                 // on new indices, the _id acts as a _uid
                 field = IdFieldMapper.NAME;
+                DEPRECATION_LOG.deprecated("Computing slices on the [_uid] field is deprecated for 6.x indices, use [_id] instead");
+            }
+            useTermQuery = true;
+        } else if (IdFieldMapper.NAME.equals(field)) {
+            if (context.getIndexSettings().isSingleType() == false) {
+                // on old indices, we need _uid. We maintain this so that users
+                // can use _id to slice even if they still have 5.x indices.
+                field = UidFieldMapper.NAME;
             }
             useTermQuery = true;
         } else if (type.hasDocValues() == false) {
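
Summarized outside the diff, the field-resolution rule this hunk implements looks like the following; the standalone method is an illustrative restatement, not code from the commit:

    // _uid on a single-type (6.x) index is rewritten to _id and a deprecation
    // warning is emitted; _id on a multi-type (5.x) index falls back to _uid so
    // that _id requests keep working against old indices.
    static String resolveSliceField(String requested, boolean singleTypeIndex) {
        if (UidFieldMapper.NAME.equals(requested) && singleTypeIndex) {
            // deprecation warning logged here
            return IdFieldMapper.NAME;
        }
        if (IdFieldMapper.NAME.equals(requested) && singleTypeIndex == false) {
            return UidFieldMapper.NAME;
        }
        return requested;
    }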

File: SliceBuilderTests.java

@@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
+import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.UidFieldMapper;
 import org.elasticsearch.index.query.QueryShardContext;
@@ -158,9 +159,9 @@ public class SliceBuilderTests extends ESTestCase {
                     return null;
                 }
             };
-            fieldType.setName(UidFieldMapper.NAME);
+            fieldType.setName(IdFieldMapper.NAME);
             fieldType.setHasDocValues(false);
-            when(context.fieldMapper(UidFieldMapper.NAME)).thenReturn(fieldType);
+            when(context.fieldMapper(IdFieldMapper.NAME)).thenReturn(fieldType);
             when(context.getIndexReader()).thenReturn(reader);
             Settings settings = Settings.builder()
                 .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -225,7 +226,7 @@ public class SliceBuilderTests extends ESTestCase {
             Map<Integer, AtomicInteger> numSliceMap = new HashMap<>();
             for (int i = 0; i < numSlices; i++) {
                 for (int j = 0; j < numShards; j++) {
-                    SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
+                    SliceBuilder slice = new SliceBuilder("_id", i, numSlices);
                     Query q = slice.toFilter(context, j, numShards);
                     if (q instanceof TermsSliceQuery || q instanceof MatchAllDocsQuery) {
                         AtomicInteger count = numSliceMap.get(j);
@@ -254,7 +255,7 @@ public class SliceBuilderTests extends ESTestCase {
             List<Integer> targetShards = new ArrayList<>();
             for (int i = 0; i < numSlices; i++) {
                 for (int j = 0; j < numShards; j++) {
-                    SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
+                    SliceBuilder slice = new SliceBuilder("_id", i, numSlices);
                     Query q = slice.toFilter(context, j, numShards);
                     if (q instanceof MatchNoDocsQuery == false) {
                         assertThat(q, instanceOf(MatchAllDocsQuery.class));
@@ -270,7 +271,7 @@ public class SliceBuilderTests extends ESTestCase {
             numSlices = numShards;
             for (int i = 0; i < numSlices; i++) {
                 for (int j = 0; j < numShards; j++) {
-                    SliceBuilder slice = new SliceBuilder("_uid", i, numSlices);
+                    SliceBuilder slice = new SliceBuilder("_id", i, numSlices);
                     Query q = slice.toFilter(context, j, numShards);
                     if (i == j) {
                         assertThat(q, instanceOf(MatchAllDocsQuery.class));
@@ -311,4 +312,68 @@ public class SliceBuilderTests extends ESTestCase {
                 assertThat(exc.getMessage(), containsString("cannot load numeric doc values"));
             }
         }
     }
+
+    public void testToFilterDeprecationMessage() throws IOException {
+        Directory dir = new RAMDirectory();
+        try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())))) {
+            writer.commit();
+        }
+        QueryShardContext context = mock(QueryShardContext.class);
+        try (IndexReader reader = DirectoryReader.open(dir)) {
+            MappedFieldType fieldType = new MappedFieldType() {
+                @Override
+                public MappedFieldType clone() {
+                    return null;
+                }
+
+                @Override
+                public String typeName() {
+                    return null;
+                }
+
+                @Override
+                public Query termQuery(Object value, @Nullable QueryShardContext context) {
+                    return null;
+                }
+
+                public Query existsQuery(QueryShardContext context) {
+                    return null;
+                }
+            };
+            fieldType.setName(UidFieldMapper.NAME);
+            fieldType.setHasDocValues(false);
+            when(context.fieldMapper(UidFieldMapper.NAME)).thenReturn(fieldType);
+            when(context.getIndexReader()).thenReturn(reader);
+            Settings settings = Settings.builder()
+                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2)
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                .build();
+            IndexMetaData indexState = IndexMetaData.builder("index").settings(settings).build();
+            IndexSettings indexSettings = new IndexSettings(indexState, Settings.EMPTY);
+            when(context.getIndexSettings()).thenReturn(indexSettings);
+            SliceBuilder builder = new SliceBuilder("_uid", 5, 10);
+            Query query = builder.toFilter(context, 0, 1);
+            assertThat(query, instanceOf(TermsSliceQuery.class));
+            assertThat(builder.toFilter(context, 0, 1), equalTo(query));
+            assertWarnings("Computing slices on the [_uid] field is deprecated for 6.x indices, use [_id] instead");
+        }
+    }
+
+    public void testSerializationBackcompat() throws IOException {
+        SliceBuilder sliceBuilder = new SliceBuilder(1, 5);
+        assertEquals(IdFieldMapper.NAME, sliceBuilder.getField());
+
+        SliceBuilder copy62 = copyWriteable(sliceBuilder,
+            new NamedWriteableRegistry(Collections.emptyList()),
+            SliceBuilder::new, Version.V_6_2_0);
+        assertEquals(sliceBuilder, copy62);
+
+        SliceBuilder copy63 = copyWriteable(copy62,
+            new NamedWriteableRegistry(Collections.emptyList()),
+            SliceBuilder::new, Version.V_6_3_0);
+        assertEquals(sliceBuilder, copy63);
+    }
 }