[Remove] types from SearchHit and Explain API (#2205)
* [Remove] types from SearchHit and Explain API

  Removes type support from SearchHit and the Explain API.

  Signed-off-by: Nicholas Walter Knize <nknize@apache.org>

* fix failing tests

  Signed-off-by: Nicholas Walter Knize <nknize@apache.org>

* move TYPES_DEPRECATION_MESSAGE from RestExplainAction to FullClusterRestartIT

  Signed-off-by: Nicholas Walter Knize <nknize@apache.org>

* fix FullClusterRestartIT

  Signed-off-by: Nicholas Walter Knize <nknize@apache.org>

* fix failing tests

  Signed-off-by: Nicholas Walter Knize <nknize@apache.org>

* fix testSoftDelete

  Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
parent 1b571ece28
commit 3445befd36
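In practical terms the commit collapses the typed constructors and endpoints into typeless ones. A minimal before/after sketch (the constructor shape and calls are taken from the diffs below; the wrapper class is illustrative only):

    import java.util.Collections;

    import org.opensearch.search.SearchHit;

    public class TypelessSearchHitSketch {
        public static void main(String[] args) {
            // Before: new SearchHit(1, "id", new Text("type"), Collections.emptyMap(), Collections.emptyMap())
            // After:  the mapping-type argument is gone from the constructor.
            SearchHit hit = new SearchHit(1, "id", Collections.emptyMap(), Collections.emptyMap());
            hit.score(2.0f);
            System.out.println(hit.getId()); // prints "id"
        }
    }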
@@ -542,9 +542,7 @@ final class RequestConverters {
     }
 
     static Request explain(ExplainRequest explainRequest) throws IOException {
-        String endpoint = explainRequest.type().equals(MapperService.SINGLE_MAPPING_NAME)
-            ? endpoint(explainRequest.index(), "_explain", explainRequest.id())
-            : endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain");
+        String endpoint = endpoint(explainRequest.index(), "_explain", explainRequest.id());
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
         Params params = new Params();
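With the ternary removed, the converter always renders the typeless endpoint /{index}/_explain/{id}. A hedged sketch of the request it produces (index and id values are made up; the two-argument ExplainRequest constructor is the one the remaining tests exercise):

    import org.opensearch.action.explain.ExplainRequest;
    import org.opensearch.index.query.QueryBuilders;

    public class ExplainEndpointSketch {
        public static void main(String[] args) {
            ExplainRequest explainRequest = new ExplainRequest("index1", "1");
            explainRequest.query(QueryBuilders.termQuery("field", "value"));
            // RequestConverters.explain(...) now always yields: GET /index1/_explain/1
        }
    }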
@@ -1529,21 +1529,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
         assertToXContentBody(explainRequest, request.getEntity());
     }
 
-    public void testExplainWithType() throws IOException {
-        String index = randomAlphaOfLengthBetween(3, 10);
-        String type = randomAlphaOfLengthBetween(3, 10);
-        String id = randomAlphaOfLengthBetween(3, 10);
-
-        ExplainRequest explainRequest = new ExplainRequest(index, type, id);
-        explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10)));
-
-        Request request = RequestConverters.explain(explainRequest);
-        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
-        assertEquals("/" + index + "/" + type + "/" + id + "/_explain", request.getEndpoint());
-
-        assertToXContentBody(explainRequest, request.getEntity());
-    }
-
     public void testTermVectors() throws IOException {
         String index = randomAlphaOfLengthBetween(3, 10);
         String id = randomAlphaOfLengthBetween(3, 10);
@@ -1134,7 +1134,6 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
 
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertTrue(explainResponse.isMatch());

@@ -1149,7 +1148,6 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
 
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertTrue(explainResponse.isMatch());

@@ -1164,7 +1162,6 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
 
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertFalse(explainResponse.isMatch());

@@ -1180,7 +1177,6 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
 
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertFalse(explainResponse.isMatch());

@@ -1212,7 +1208,6 @@ public class SearchIT extends OpenSearchRestHighLevelClientTestCase {
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
 
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(explainResponse.getId(), equalTo("999"));
         assertFalse(explainResponse.isExists());
         assertFalse(explainResponse.isMatch());
@@ -36,7 +36,6 @@ import org.apache.lucene.search.TotalHits;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.action.search.ShardSearchFailure;
 import org.opensearch.common.bytes.BytesReference;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;

@@ -183,7 +182,7 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase<Search
     }
 
     public void testSearchResponseToXContent() throws IOException {
-        SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
+        SearchHit hit = new SearchHit(1, "id", Collections.emptyMap(), Collections.emptyMap());
         hit.score(2.0f);
         SearchHit[] hits = new SearchHit[] { hit };
 

@@ -229,7 +228,6 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase<Search
             .field("max_score", 1.5F)
             .startArray("hits")
             .startObject()
-            .field("_type", "type")
             .field("_id", "id")
             .field("_score", 2.0F)
             .endObject()
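The serialized hit therefore loses its "_type" field. A short sketch of the expected hit body from the test above (mirrors the expected-output builder; only "_type" disappears):

    import java.io.IOException;

    import org.opensearch.common.Strings;
    import org.opensearch.common.xcontent.XContentBuilder;
    import org.opensearch.common.xcontent.XContentFactory;

    public class TypelessHitXContentSketch {
        public static void main(String[] args) throws IOException {
            // Expected hit body after the change: {"_id":"id","_score":2.0}
            XContentBuilder expected = XContentFactory.jsonBuilder()
                .startObject()
                .field("_id", "id")     // "_type" is no longer written
                .field("_score", 2.0F)
                .endObject();
            System.out.println(Strings.toString(expected));
        }
    }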
@@ -132,7 +132,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
             TopHits topHits = childrenBucket.getAggregations().get("top_comments");
             logger.info("total_hits={}", topHits.getHits().getTotalHits().value);
             for (SearchHit searchHit : topHits.getHits()) {
-                logger.info("hit= {} {} {}", searchHit.getSortValues()[0], searchHit.getType(), searchHit.getId());
+                logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId());
             }
         }
 
@@ -608,7 +608,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
         assertHitCount(searchResponse, 1L);
         assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1"));
 
-        ExplainResponse explainResponse = client().prepareExplain("test", "doc", parentId)
+        ExplainResponse explainResponse = client().prepareExplain("test", parentId)
             .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max))
             .get();
         assertThat(explainResponse.isExists(), equalTo(true));
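The client-side explain builder follows suit: index plus document id is enough. A minimal sketch, assuming an integration-test context like the one above (static imports for termQuery, assertThat, and equalTo come from the test's own imports; the term query stands in for the hasChildQuery the test actually uses):

    // Inside a test extending ParentChildTestCase, as above:
    ExplainResponse explainResponse = client().prepareExplain("test", parentId)
        .setQuery(termQuery("c_field", "1"))
        .get();
    assertThat(explainResponse.isExists(), equalTo(true));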
@@ -151,9 +151,7 @@ public class InnerHitsIT extends ParentChildTestCase {
         assertThat(innerHits.getTotalHits().value, equalTo(2L));
 
         assertThat(innerHits.getAt(0).getId(), equalTo("c1"));
-        assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
         assertThat(innerHits.getAt(1).getId(), equalTo("c2"));
-        assertThat(innerHits.getAt(1).getType(), equalTo("doc"));
 
         final boolean seqNoAndTerm = randomBoolean();
         response = client().prepareSearch("articles")

@@ -172,11 +170,8 @@ public class InnerHitsIT extends ParentChildTestCase {
         assertThat(innerHits.getTotalHits().value, equalTo(3L));
 
         assertThat(innerHits.getAt(0).getId(), equalTo("c4"));
-        assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
         assertThat(innerHits.getAt(1).getId(), equalTo("c5"));
-        assertThat(innerHits.getAt(1).getType(), equalTo("doc"));
         assertThat(innerHits.getAt(2).getId(), equalTo("c6"));
-        assertThat(innerHits.getAt(2).getType(), equalTo("doc"));
 
         if (seqNoAndTerm) {
             assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(1L));

@@ -301,7 +296,6 @@ public class InnerHitsIT extends ParentChildTestCase {
         int offset2 = 0;
         for (int parent = 0; parent < numDocs; parent++) {
             SearchHit searchHit = searchResponse.getHits().getAt(parent);
-            assertThat(searchHit.getType(), equalTo("doc"));
             assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "p_%03d", parent)));
             assertThat(searchHit.getShard(), notNullValue());
 

@@ -309,7 +303,6 @@ public class InnerHitsIT extends ParentChildTestCase {
             assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent]));
             for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
                 SearchHit innerHit = inner.getAt(child);
-                assertThat(innerHit.getType(), equalTo("doc"));
                 String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child);
                 assertThat(innerHit.getId(), equalTo(childId));
                 assertThat(innerHit.getNestedIdentity(), nullValue());

@@ -320,7 +313,6 @@ public class InnerHitsIT extends ParentChildTestCase {
             assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent]));
             for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) {
                 SearchHit innerHit = inner.getAt(child);
-                assertThat(innerHit.getType(), equalTo("doc"));
                 String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child);
                 assertThat(innerHit.getId(), equalTo(childId));
                 assertThat(innerHit.getNestedIdentity(), nullValue());

@@ -376,16 +368,12 @@ public class InnerHitsIT extends ParentChildTestCase {
 
         SearchHit searchHit = response.getHits().getAt(0);
         assertThat(searchHit.getId(), equalTo("3"));
-        assertThat(searchHit.getType(), equalTo("doc"));
         assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L));
-        assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc"));
         assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1"));
 
         searchHit = response.getHits().getAt(1);
         assertThat(searchHit.getId(), equalTo("4"));
-        assertThat(searchHit.getType(), equalTo("doc"));
         assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L));
-        assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc"));
         assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2"));
     }
 

@@ -430,12 +418,10 @@ public class InnerHitsIT extends ParentChildTestCase {
         SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
         assertThat(innerHits.getTotalHits().value, equalTo(1L));
         assertThat(innerHits.getAt(0).getId(), equalTo("3"));
-        assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
 
         innerHits = innerHits.getAt(0).getInnerHits().get("remark");
         assertThat(innerHits.getTotalHits().value, equalTo(1L));
         assertThat(innerHits.getAt(0).getId(), equalTo("5"));
-        assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
 
         response = client().prepareSearch("articles")
             .setQuery(

@@ -455,12 +441,10 @@ public class InnerHitsIT extends ParentChildTestCase {
         innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
         assertThat(innerHits.getTotalHits().value, equalTo(1L));
         assertThat(innerHits.getAt(0).getId(), equalTo("4"));
-        assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
 
         innerHits = innerHits.getAt(0).getInnerHits().get("remark");
         assertThat(innerHits.getTotalHits().value, equalTo(1L));
         assertThat(innerHits.getAt(0).getId(), equalTo("6"));
-        assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
     }
 
     public void testRoyals() throws Exception {

@@ -613,7 +597,7 @@ public class InnerHitsIT extends ParentChildTestCase {
         assertHitCount(response, 1);
     }
 
-    public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception {
+    public void testNestedInnerHitWrappedInParentChildInnerhit() {
         assertAcked(
             prepareCreate("test").addMapping(
                 "doc",

@@ -646,7 +630,7 @@ public class InnerHitsIT extends ParentChildTestCase {
         assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue());
     }
 
-    public void testInnerHitsWithIgnoreUnmapped() throws Exception {
+    public void testInnerHitsWithIgnoreUnmapped() {
         assertAcked(
             prepareCreate("index1").addMapping(
                 "doc",

@@ -676,7 +660,7 @@ public class InnerHitsIT extends ParentChildTestCase {
         assertSearchHits(response, "1", "3");
     }
 
-    public void testTooHighResultWindow() throws Exception {
+    public void testTooHighResultWindow() {
         assertAcked(
             prepareCreate("index1").addMapping(
                 "doc",
@@ -49,35 +49,29 @@ teardown:
 
   - match: { hits.total: 6 }
   - match: { hits.hits.0._index: "test" }
-  - match: { hits.hits.0._type: "_doc" }
   - match: { hits.hits.0._id: "3" }
   - match: { hits.hits.0._source.join_field.name: "child" }
   - match: { hits.hits.0._source.join_field.parent: "1" }
   - is_false: hits.hits.0.fields.join_field#child }
   - match: { hits.hits.1._index: "test" }
-  - match: { hits.hits.1._type: "_doc" }
   - match: { hits.hits.1._id: "4" }
   - match: { hits.hits.1._source.join_field.name: "child" }
   - match: { hits.hits.1._source.join_field.parent: "1" }
   - is_false: hits.hits.1.fields.join_field#child }
   - match: { hits.hits.2._index: "test" }
-  - match: { hits.hits.2._type: "_doc" }
   - match: { hits.hits.2._id: "5" }
   - match: { hits.hits.2._source.join_field.name: "child" }
   - match: { hits.hits.2._source.join_field.parent: "2" }
   - is_false: hits.hits.2.fields.join_field#child }
   - match: { hits.hits.3._index: "test" }
-  - match: { hits.hits.3._type: "_doc" }
   - match: { hits.hits.3._id: "6" }
   - match: { hits.hits.3._source.join_field.name: "grand_child" }
   - match: { hits.hits.3._source.join_field.parent: "5" }
   - match: { hits.hits.4._index: "test" }
-  - match: { hits.hits.4._type: "_doc" }
   - match: { hits.hits.4._id: "1" }
   - match: { hits.hits.4._source.join_field.name: "parent" }
   - is_false: hits.hits.4._source.join_field.parent
   - match: { hits.hits.5._index: "test" }
-  - match: { hits.hits.5._type: "_doc" }
   - match: { hits.hits.5._id: "2" }
   - match: { hits.hits.5._source.join_field.name: "parent" }
   - is_false: hits.hits.5._source.join_field.parent

@@ -96,12 +90,10 @@ teardown:
 
   - match: { hits.total: 2 }
   - match: { hits.hits.0._index: "test" }
-  - match: { hits.hits.0._type: "_doc" }
   - match: { hits.hits.0._id: "3" }
   - match: { hits.hits.0._source.join_field.name: "child" }
   - match: { hits.hits.0._source.join_field.parent: "1" }
   - match: { hits.hits.1._index: "test" }
-  - match: { hits.hits.1._type: "_doc" }
   - match: { hits.hits.1._id: "4" }
   - match: { hits.hits.1._source.join_field.name: "child" }
   - match: { hits.hits.1._source.join_field.parent: "1" }
@@ -889,7 +889,6 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase {
             .get();
         assertHitCount(response, 1);
         assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getType(), equalTo("type"));
         assertThat(response.getHits().getAt(0).getIndex(), equalTo("test1"));
 
         response = client().prepareSearch()

@@ -898,7 +897,6 @@ public class PercolatorQuerySearchIT extends OpenSearchIntegTestCase {
             .get();
         assertHitCount(response, 1);
         assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getType(), equalTo("type"));
         assertThat(response.getHits().getAt(0).getIndex(), equalTo("test2"));
 
         // Unacceptable:
@@ -38,7 +38,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryVisitor;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.document.DocumentField;
-import org.opensearch.common.text.Text;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.fetch.FetchContext;
 import org.opensearch.search.fetch.FetchSubPhase;

@@ -108,13 +107,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
                 int slot = (int) matchedSlot;
                 BytesReference document = percolateQuery.getDocuments().get(slot);
                 HitContext subContext = new HitContext(
-                    new SearchHit(
-                        slot,
-                        "unknown",
-                        new Text(hit.hit().getType()),
-                        Collections.emptyMap(),
-                        Collections.emptyMap()
-                    ),
+                    new SearchHit(slot, "unknown", Collections.emptyMap(), Collections.emptyMap()),
                     percolatorLeafReaderContext,
                     slot,
                     new SourceLookup()
@@ -36,7 +36,6 @@ import org.opensearch.action.OriginalIndices;
 import org.opensearch.common.Strings;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;

@@ -44,7 +43,6 @@ import org.opensearch.common.xcontent.XContentParseException;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.shard.ShardId;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchShardTarget;

@@ -87,13 +85,7 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase {
         SearchHit[] hits = new SearchHit[6];
         for (int i = 0; i < 6; i++) {
             rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
-            hits[i] = new SearchHit(
-                i,
-                Integer.toString(i),
-                new Text(MapperService.SINGLE_MAPPING_NAME),
-                Collections.emptyMap(),
-                Collections.emptyMap()
-            );
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();

@@ -143,13 +135,7 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase {
                     rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
                 }
             }
-            hits[i] = new SearchHit(
-                i,
-                Integer.toString(i),
-                new Text(MapperService.SINGLE_MAPPING_NAME),
-                Collections.emptyMap(),
-                Collections.emptyMap()
-            );
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();

@@ -206,13 +192,7 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase {
         // only create four hits
         SearchHit[] hits = new SearchHit[4];
         for (int i = 0; i < 4; i++) {
-            hits[i] = new SearchHit(
-                i,
-                Integer.toString(i),
-                new Text(MapperService.SINGLE_MAPPING_NAME),
-                Collections.emptyMap(),
-                Collections.emptyMap()
-            );
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -35,7 +35,6 @@ package org.opensearch.index.rankeval;
 import org.opensearch.action.OriginalIndices;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;

@@ -43,7 +42,6 @@ import org.opensearch.common.xcontent.XContentParseException;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.shard.ShardId;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchShardTarget;

@@ -130,13 +128,7 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase {
             if (relevanceRatings[i] != null) {
                 rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
             }
-            hits[i] = new SearchHit(
-                i,
-                Integer.toString(i),
-                new Text(MapperService.SINGLE_MAPPING_NAME),
-                Collections.emptyMap(),
-                Collections.emptyMap()
-            );
+            hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -35,7 +35,6 @@ package org.opensearch.index.rankeval;
 import org.opensearch.action.OriginalIndices;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;

@@ -216,7 +215,7 @@ public class MeanReciprocalRankTests extends OpenSearchTestCase {
     private static SearchHit[] createSearchHits(int from, int to, String index) {
         SearchHit[] hits = new SearchHit[to + 1 - from];
         for (int i = from; i <= to; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -35,7 +35,6 @@ package org.opensearch.index.rankeval;
 import org.opensearch.action.OriginalIndices;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;

@@ -43,7 +42,6 @@ import org.opensearch.common.xcontent.XContentParseException;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.common.xcontent.json.JsonXContent;
-import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.shard.ShardId;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchShardTarget;

@@ -127,7 +125,7 @@ public class PrecisionAtKTests extends OpenSearchTestCase {
         rated.add(createRatedDoc("test", "1", RELEVANT_RATING));
         // add an unlabeled search hit
         SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3);
-        searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
+        searchHits[2] = new SearchHit(2, "2", Collections.emptyMap(), Collections.emptyMap());
         searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
 
         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);

@@ -146,7 +144,7 @@ public class PrecisionAtKTests extends OpenSearchTestCase {
     public void testNoRatedDocs() throws Exception {
         SearchHit[] hits = new SearchHit[5];
         for (int i = 0; i < 5; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap(), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
         EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());

@@ -283,7 +281,7 @@ public class PrecisionAtKTests extends OpenSearchTestCase {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -44,14 +44,12 @@ import org.opensearch.common.breaker.CircuitBreakingException;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.BytesStreamOutput;
 import org.opensearch.common.io.stream.StreamInput;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.common.xcontent.XContentLocation;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.shard.ShardId;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchParseException;

@@ -188,9 +186,9 @@ public class RankEvalResponseTests extends OpenSearchTestCase {
                 + "    \"coffee_query\": {"
                 + "        \"metric_score\": 0.1,"
                 + "        \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}],"
-                + "        \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"123\",\"_score\":1.0},"
+                + "        \"hits\":[{\"hit\":{\"_index\":\"index\",\"_id\":\"123\",\"_score\":1.0},"
                 + "            \"rating\":5},"
-                + "            {\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"456\",\"_score\":1.0},"
+                + "            {\"hit\":{\"_index\":\"index\",\"_id\":\"456\",\"_score\":1.0},"
                 + "            \"rating\":null}"
                 + "        ]"
                 + "    }"

@@ -210,13 +208,7 @@ public class RankEvalResponseTests extends OpenSearchTestCase {
     }
 
     private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
-        SearchHit hit = new SearchHit(
-            docId,
-            docId + "",
-            new Text(MapperService.SINGLE_MAPPING_NAME),
-            Collections.emptyMap(),
-            Collections.emptyMap()
-        );
+        SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap(), Collections.emptyMap());
         hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         hit.score(1.0f);
         return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());
@@ -34,11 +34,9 @@ package org.opensearch.index.rankeval;
 
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.mapper.MapperService;
 import org.opensearch.search.SearchHit;
 import org.opensearch.test.OpenSearchTestCase;
 

@@ -55,7 +53,6 @@ public class RatedSearchHitTests extends OpenSearchTestCase {
         SearchHit searchHit = new SearchHit(
             randomIntBetween(0, 10),
             randomAlphaOfLength(10),
-            new Text(MapperService.SINGLE_MAPPING_NAME),
             Collections.emptyMap(),
             Collections.emptyMap()
         );

@@ -71,13 +68,7 @@ public class RatedSearchHitTests extends OpenSearchTestCase {
                 rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5));
                 break;
             case 1:
-                hit = new SearchHit(
-                    hit.docId(),
-                    hit.getId() + randomAlphaOfLength(10),
-                    new Text(MapperService.SINGLE_MAPPING_NAME),
-                    Collections.emptyMap(),
-                    Collections.emptyMap()
-                );
+                hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap(), Collections.emptyMap());
                 break;
             default:
                 throw new IllegalStateException("The test should only allow two parameters mutated");
@@ -35,7 +35,6 @@ package org.opensearch.index.rankeval;
 import org.opensearch.action.OriginalIndices;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;

@@ -128,7 +127,7 @@ public class RecallAtKTests extends OpenSearchTestCase {
         int k = 5;
         SearchHit[] hits = new SearchHit[k];
         for (int i = 0; i < k; i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
         }
 

@@ -252,7 +251,7 @@ public class RecallAtKTests extends OpenSearchTestCase {
     private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
         SearchHit[] hits = new SearchHit[rated.size()];
         for (int i = 0; i < rated.size(); i++) {
-            hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap(), Collections.emptyMap());
+            hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
             hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
         }
         return hits;
@@ -60,7 +60,6 @@ import org.opensearch.index.mapper.IdFieldMapper;
 import org.opensearch.index.mapper.IndexFieldMapper;
 import org.opensearch.index.mapper.RoutingFieldMapper;
 import org.opensearch.index.mapper.SourceFieldMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.index.mapper.VersionFieldMapper;
 import org.opensearch.index.reindex.ScrollableHitSource.SearchFailure;
 import org.opensearch.script.Script;

@@ -249,7 +248,7 @@ public abstract class AbstractAsyncBulkByScrollAction<
              * change the "fields" part of the search request it is unlikely that we got here because we didn't fetch _source.
              * Thus the error message assumes that it wasn't stored.
              */
-            throw new IllegalArgumentException("[" + doc.getIndex() + "][" + doc.getType() + "][" + doc.getId() + "] didn't store _source");
+            throw new IllegalArgumentException("[" + doc.getIndex() + "][" + doc.getId() + "] didn't store _source");
         }
         return true;
     }

@@ -597,10 +596,6 @@ public abstract class AbstractAsyncBulkByScrollAction<
 
         String getIndex();
 
-        void setType(String type);
-
-        String getType();
-
         void setId(String id);
 
         String getId();

@@ -643,16 +638,6 @@ public abstract class AbstractAsyncBulkByScrollAction<
             return request.index();
         }
 
-        @Override
-        public void setType(String type) {
-            request.type(type);
-        }
-
-        @Override
-        public String getType() {
-            return request.type();
-        }
-
         @Override
         public void setId(String id) {
             request.id(id);

@@ -732,16 +717,6 @@ public abstract class AbstractAsyncBulkByScrollAction<
             return request.index();
         }
 
-        @Override
-        public void setType(String type) {
-            request.type(type);
-        }
-
-        @Override
-        public String getType() {
-            return request.type();
-        }
-
         @Override
         public void setId(String id) {
             request.id(id);

@@ -831,7 +806,6 @@ public abstract class AbstractAsyncBulkByScrollAction<
 
             Map<String, Object> context = new HashMap<>();
             context.put(IndexFieldMapper.NAME, doc.getIndex());
-            context.put(TypeFieldMapper.NAME, doc.getType());
             context.put(IdFieldMapper.NAME, doc.getId());
             Long oldVersion = doc.getVersion();
             context.put(VersionFieldMapper.NAME, oldVersion);

@@ -861,10 +835,6 @@ public abstract class AbstractAsyncBulkByScrollAction<
             if (false == doc.getIndex().equals(newValue)) {
                 scriptChangedIndex(request, newValue);
             }
-            newValue = context.remove(TypeFieldMapper.NAME);
-            if (false == doc.getType().equals(newValue)) {
-                scriptChangedType(request, newValue);
-            }
             newValue = context.remove(IdFieldMapper.NAME);
             if (false == doc.getId().equals(newValue)) {
                 scriptChangedId(request, newValue);

@@ -899,7 +869,7 @@ public abstract class AbstractAsyncBulkByScrollAction<
                     taskWorker.countNoop();
                     return null;
                 case DELETE:
-                    RequestWrapper<DeleteRequest> delete = wrap(new DeleteRequest(request.getIndex(), request.getType(), request.getId()));
+                    RequestWrapper<DeleteRequest> delete = wrap(new DeleteRequest(request.getIndex(), request.getId()));
                     delete.setVersion(request.getVersion());
                     delete.setVersionType(VersionType.INTERNAL);
                     delete.setRouting(request.getRouting());

@@ -911,8 +881,6 @@ public abstract class AbstractAsyncBulkByScrollAction<
 
     protected abstract void scriptChangedIndex(RequestWrapper<?> request, Object to);
 
-    protected abstract void scriptChangedType(RequestWrapper<?> request, Object to);
-
    protected abstract void scriptChangedId(RequestWrapper<?> request, Object to);
 
     protected abstract void scriptChangedVersion(RequestWrapper<?> request, Object to);
@@ -67,7 +67,6 @@ public class AsyncDeleteByQueryAction extends AbstractAsyncBulkByScrollAction<De
     protected RequestWrapper<DeleteRequest> buildRequest(ScrollableHitSource.Hit doc) {
         DeleteRequest delete = new DeleteRequest();
         delete.index(doc.getIndex());
-        delete.type(doc.getType());
         delete.id(doc.getId());
         delete.setIfSeqNo(doc.getSeqNo());
         delete.setIfPrimaryTerm(doc.getPrimaryTerm());
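Delete-by-query now rebuilds its per-document requests from index and id alone. A standalone sketch of the typeless request (the values are illustrative; every call appears in the hunk above):

    import org.opensearch.action.delete.DeleteRequest;

    public class TypelessDeleteSketch {
        public static void main(String[] args) {
            DeleteRequest delete = new DeleteRequest();
            delete.index("my-index");    // from doc.getIndex(); delete.type(...) is gone
            delete.id("1");              // from doc.getId()
            delete.setIfSeqNo(7L);       // illustrative sequence number
            delete.setIfPrimaryTerm(1L); // illustrative primary term
        }
    }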
@@ -352,13 +352,6 @@ public class Reindexer {
             // Copy the index from the request so we always write where it asked to write
             index.index(mainRequest.getDestination().index());
 
-            // If the request override's type then the user wants all documents in that type. Otherwise keep the doc's type.
-            if (mainRequest.getDestination().type() == null) {
-                index.type(doc.getType());
-            } else {
-                index.type(mainRequest.getDestination().type());
-            }
-
             /*
              * Internal versioning can just use what we copied from the destination request. Otherwise we assume we're using external
              * versioning and use the doc's version.

@@ -460,12 +453,6 @@ public class Reindexer {
             request.setIndex(to.toString());
         }
 
-        @Override
-        protected void scriptChangedType(RequestWrapper<?> request, Object to) {
-            requireNonNull(to, "Can't reindex without a destination type!");
-            request.setType(to.toString());
-        }
-
         @Override
         protected void scriptChangedId(RequestWrapper<?> request, Object to) {
             request.setId(Objects.toString(to, null));
@@ -46,7 +46,6 @@ import org.opensearch.common.io.stream.Writeable;
 import org.opensearch.index.mapper.IdFieldMapper;
 import org.opensearch.index.mapper.IndexFieldMapper;
 import org.opensearch.index.mapper.RoutingFieldMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.script.Script;
 import org.opensearch.script.ScriptService;
 import org.opensearch.tasks.Task;

@@ -138,7 +137,6 @@ public class TransportUpdateByQueryAction extends HandledTransportAction<UpdateB
         protected RequestWrapper<IndexRequest> buildRequest(ScrollableHitSource.Hit doc) {
             IndexRequest index = new IndexRequest();
             index.index(doc.getIndex());
-            index.type(doc.getType());
             index.id(doc.getId());
             index.source(doc.getSource(), doc.getXContentType());
             index.setIfSeqNo(doc.getSeqNo());

@@ -163,11 +161,6 @@ public class TransportUpdateByQueryAction extends HandledTransportAction<UpdateB
             throw new IllegalArgumentException("Modifying [" + IndexFieldMapper.NAME + "] not allowed");
         }
 
-        @Override
-        protected void scriptChangedType(RequestWrapper<?> request, Object to) {
-            throw new IllegalArgumentException("Modifying [" + TypeFieldMapper.NAME + "] not allowed");
-        }
-
         @Override
         protected void scriptChangedId(RequestWrapper<?> request, Object to) {
             throw new IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed");
@@ -75,14 +75,12 @@ final class RemoteResponseParsers {
     public static final ConstructingObjectParser<BasicHit, XContentType> HIT_PARSER = new ConstructingObjectParser<>("hit", true, a -> {
         int i = 0;
         String index = (String) a[i++];
-        String type = (String) a[i++];
         String id = (String) a[i++];
         Long version = (Long) a[i++];
-        return new BasicHit(index, type, id, version == null ? -1 : version);
+        return new BasicHit(index, id, version == null ? -1 : version);
     });
     static {
         HIT_PARSER.declareString(constructorArg(), new ParseField("_index"));
-        HIT_PARSER.declareString(constructorArg(), new ParseField("_type"));
         HIT_PARSER.declareString(constructorArg(), new ParseField("_id"));
         HIT_PARSER.declareLong(optionalConstructorArg(), new ParseField("_version"));
         HIT_PARSER.declareObject(((basicHit, tuple) -> basicHit.setSource(tuple.v1(), tuple.v2())), (p, s) -> {
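A remote hit now parses without a "_type" constructor argument. A hedged sketch of the JSON shape HIT_PARSER accepts after this change (field names come from the parser above; the values are borrowed from the RemoteScrollableHitSourceTests fixtures further down):

    public class RemoteHitJsonSketch {
        public static void main(String[] args) {
            // Parsed into BasicHit(index, id, version); a leftover "_type" field from an
            // old remote cluster is simply ignored, since the parser is constructed with
            // ignoreUnknownFields = true (the second constructor argument above).
            String remoteHit = "{\"_index\":\"test\",\"_id\":\"AVToMiC250DjIiBO3yJ_\",\"_version\":1,\"_source\":{\"test\":\"test2\"}}";
            System.out.println(remoteHit);
        }
    }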
@@ -37,7 +37,7 @@ public abstract class AbstractAsyncBulkByScrollActionMetadataTestCase<
     Response extends BulkByScrollResponse> extends AbstractAsyncBulkByScrollActionTestCase<Request, Response> {
 
     protected ScrollableHitSource.BasicHit doc() {
-        return new ScrollableHitSource.BasicHit("index", "type", "id", 0);
+        return new ScrollableHitSource.BasicHit("index", "id", 0);
     }
 
     protected abstract AbstractAsyncBulkByScrollAction<Request, ?> action();
@@ -65,8 +65,8 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
 
     @SuppressWarnings("unchecked")
     protected <T extends ActionRequest> T applyScript(Consumer<Map<String, Object>> scriptBody) {
-        IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar"));
-        ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0);
+        IndexRequest index = new IndexRequest("index").id("1").source(singletonMap("foo", "bar"));
+        ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "id", 0);
         UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) {
             @Override
             public void execute() {

@@ -79,11 +79,6 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
         return (result != null) ? (T) result.self() : null;
     }
 
-    public void testTypeDeprecation() {
-        applyScript((Map<String, Object> ctx) -> ctx.get("_type"));
-        assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated.");
-    }
-
     public void testScriptAddingJunkToCtxIsError() {
         try {
             applyScript((Map<String, Object> ctx) -> ctx.put("junk", "junk"));

@@ -102,16 +97,9 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
         assertEquals("cat", index.sourceAsMap().get("bar"));
     }
 
-    public void testSetOpTypeNoop() throws Exception {
-        assertThat(task.getStatus().getNoops(), equalTo(0L));
-        assertNull(applyScript((Map<String, Object> ctx) -> ctx.put("op", OpType.NOOP.toString())));
-        assertThat(task.getStatus().getNoops(), equalTo(1L));
-    }
-
     public void testSetOpTypeDelete() throws Exception {
         DeleteRequest delete = applyScript((Map<String, Object> ctx) -> ctx.put("op", OpType.DELETE.toString()));
         assertThat(delete.index(), equalTo("index"));
-        assertThat(delete.type(), equalTo("type"));
         assertThat(delete.id(), equalTo("1"));
     }
 
@@ -73,7 +73,6 @@ import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.common.CheckedConsumer;
 import org.opensearch.common.bytes.BytesArray;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
 import org.opensearch.common.util.concurrent.OpenSearchRejectedExecutionException;

@@ -288,7 +287,7 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase {
     public void testScrollResponseBatchingBehavior() throws Exception {
         int maxBatches = randomIntBetween(0, 100);
         for (int batches = 1; batches < maxBatches; batches++) {
-            Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0);
+            Hit hit = new ScrollableHitSource.BasicHit("index", "id", 0);
             ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null);
             DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
             simulateScrollResponse(action, System.nanoTime(), 0, response);

@@ -456,7 +455,7 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase {
                 throw new RuntimeException("surprise");
             }
         };
-        ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0);
+        ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "id", 0);
         hit.setSource(new BytesArray("{}"), XContentType.JSON);
         ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null);
         simulateScrollResponse(action, System.nanoTime(), 0, response);

@@ -541,7 +540,7 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase {
         action.start();
 
         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(
             IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),
@@ -45,6 +45,7 @@ import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.index.IndexModule;
 import org.opensearch.index.engine.Engine;
 import org.opensearch.index.engine.Engine.Operation.Origin;
+import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.index.shard.IndexingOperationListener;
 import org.opensearch.index.shard.ShardId;

@@ -77,7 +78,6 @@ import static org.hamcrest.Matchers.hasSize;
 public class CancelTests extends ReindexTestCase {
 
     protected static final String INDEX = "reindex-cancel-index";
-    protected static final String TYPE = "reindex-cancel-type";
 
     // Semaphore used to allow & block indexing operations during the test
     private static final Semaphore ALLOWED_OPERATIONS = new Semaphore(0);

@@ -116,7 +116,7 @@ public class CancelTests extends ReindexTestCase {
             false,
             true,
             IntStream.range(0, numDocs)
-                .mapToObj(i -> client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i))
+                .mapToObj(i -> client().prepareIndex(INDEX, MapperService.SINGLE_MAPPING_NAME, String.valueOf(i)).setSource("n", i))
                 .collect(Collectors.toList())
         );
 

@@ -247,12 +247,17 @@ public class CancelTests extends ReindexTestCase {
     }
 
     public void testReindexCancel() throws Exception {
-        testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest", TYPE), (response, total, modified) -> {
-            assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")));
-
-            refresh("dest");
-            assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
-        }, equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]"));
+        testCancel(
+            ReindexAction.NAME,
+            reindex().source(INDEX).destination("dest", MapperService.SINGLE_MAPPING_NAME),
+            (response, total, modified) -> {
+                assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")));
+
+                refresh("dest");
+                assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
+            },
+            equalTo("reindex from [" + INDEX + "] to [dest][" + MapperService.SINGLE_MAPPING_NAME + "]")
+        );
     }
 
     public void testUpdateByQueryCancel() throws Exception {

@@ -289,13 +294,16 @@ public class CancelTests extends ReindexTestCase {
     public void testReindexCancelWithWorkers() throws Exception {
         testCancel(
             ReindexAction.NAME,
-            reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest", TYPE).setSlices(5),
+            reindex().source(INDEX)
+                .filter(QueryBuilders.matchAllQuery())
+                .destination("dest", MapperService.SINGLE_MAPPING_NAME)
+                .setSlices(5),
             (response, total, modified) -> {
                 assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5)));
                 refresh("dest");
                 assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
             },
-            equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")
+            equalTo("reindex from [" + INDEX + "] to [dest][" + MapperService.SINGLE_MAPPING_NAME + "]")
        );
     }
 

@@ -355,16 +363,16 @@ public class CancelTests extends ReindexTestCase {
 
         @Override
         public Engine.Index preIndex(ShardId shardId, Engine.Index index) {
-            return preCheck(index, index.type());
+            return preCheck(index);
         }
 
         @Override
         public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
-            return preCheck(delete, delete.type());
+            return preCheck(delete);
         }
 
-        private <T extends Engine.Operation> T preCheck(T operation, String type) {
-            if ((TYPE.equals(type) == false) || (operation.origin() != Origin.PRIMARY)) {
+        private <T extends Engine.Operation> T preCheck(T operation) {
+            if ((operation.origin() != Origin.PRIMARY)) {
                 return operation;
             }
 
@@ -47,7 +47,6 @@ import org.opensearch.client.ParentTaskAssigningClient;
 import org.opensearch.client.support.AbstractClient;
 import org.opensearch.common.bytes.BytesArray;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.OpenSearchRejectedExecutionException;
 import org.opensearch.search.SearchHit;

@@ -183,7 +182,7 @@ public class ClientScrollableHitSourceTests extends OpenSearchTestCase {
 
     private SearchResponse createSearchResponse() {
         // create a simulated response.
-        SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
+        SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}"));
         SearchHits hits = new SearchHits(
             IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new),
             new TotalHits(0, TotalHits.Relation.EQUAL_TO),
@@ -60,20 +60,6 @@ public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTes
         }
     }
 
-    public void testSetType() throws Exception {
-        Object type = randomFrom(new Object[] { 234, 234L, "pancake" });
-        IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_type", type));
-        assertEquals(type.toString(), index.type());
-    }
-
-    public void testSettingTypeToNullIsError() throws Exception {
-        try {
-            applyScript((Map<String, Object> ctx) -> ctx.put("_type", null));
-        } catch (NullPointerException e) {
-            assertThat(e.getMessage(), containsString("Can't reindex without a destination type!"));
-        }
-    }
-
     public void testSetId() throws Exception {
         Object id = randomFrom(new Object[] { null, 234, 234L, "pancake" });
         IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_id", id));
@@ -56,7 +56,7 @@ public class UpdateByQueryWithScriptTests extends AbstractAsyncBulkByScrollActio
          * error message to the user, not some ClassCastException.
          */
         Object[] options = new Object[] { "cat", new Object(), 123, new Date(), Math.PI };
-        for (String ctxVar : new String[] { "_index", "_type", "_id", "_version", "_routing" }) {
+        for (String ctxVar : new String[] { "_index", "_id", "_version", "_routing" }) {
             try {
                 applyScript((Map<String, Object> ctx) -> ctx.put(ctxVar, randomFrom(options)));
             } catch (IllegalArgumentException e) {
@@ -178,7 +178,6 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase {
             assertThat(r.getFailures(), empty());
             assertThat(r.getHits(), hasSize(1));
             assertEquals("test", r.getHits().get(0).getIndex());
-            assertEquals("test", r.getHits().get(0).getType());
             assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId());
             assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString());
             assertNull(r.getHits().get(0).getRouting());

@@ -196,7 +195,6 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase {
             assertThat(r.getFailures(), empty());
             assertThat(r.getHits(), hasSize(1));
             assertEquals("test", r.getHits().get(0).getIndex());
-            assertEquals("test", r.getHits().get(0).getType());
             assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
             assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
             assertNull(r.getHits().get(0).getRouting());

@@ -246,7 +244,6 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase {
             assertThat(r.getFailures(), empty());
             assertThat(r.getHits(), hasSize(1));
             assertEquals("test", r.getHits().get(0).getIndex());
-            assertEquals("test", r.getHits().get(0).getType());
             assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
             assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
             assertNull(r.getHits().get(0).getRouting());

@@ -277,7 +274,6 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase {
             );
             assertThat(r.getHits(), hasSize(1));
             assertEquals("test", r.getHits().get(0).getIndex());
-            assertEquals("test", r.getHits().get(0).getType());
             assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId());
             assertEquals("{\"test\":\"test1\"}", r.getHits().get(0).getSource().utf8ToString());
             called.set(true);

@@ -308,7 +304,6 @@ public class RemoteScrollableHitSourceTests extends OpenSearchTestCase {
             );
             assertThat(r.getHits(), hasSize(1));
             assertEquals("test", r.getHits().get(0).getIndex());
-            assertEquals("test", r.getHits().get(0).getType());
             assertEquals("10000", r.getHits().get(0).getId());
             assertEquals("{\"test\":\"test10000\"}", r.getHits().get(0).getSource().utf8ToString());
             called.set(true);
@@ -285,7 +285,7 @@
       indices.refresh: {}
 
   - do:
-      catch: /\[test\]\[_doc\]\[1\] didn't store _source/
+      catch: /\[test\]\[1\] didn't store _source/
      reindex:
        body:
          source:
@@ -150,7 +150,7 @@
       indices.refresh: {}
 
   - do:
-      catch: /\[test\]\[_doc\]\[1\] didn't store _source/
+      catch: /\[test\]\[1\] didn't store _source/
      update_by_query:
        index: test
 
@ -49,8 +49,6 @@ import org.opensearch.common.xcontent.XContentBuilder;
|
|||
import org.opensearch.common.xcontent.json.JsonXContent;
|
||||
import org.opensearch.common.xcontent.support.XContentMapValues;
|
||||
import org.opensearch.index.IndexSettings;
|
||||
import org.opensearch.rest.action.document.RestBulkAction;
|
||||
import org.opensearch.rest.action.search.RestExplainAction;
|
||||
import org.opensearch.test.NotEqualMessageBuilder;
|
||||
import org.opensearch.test.XContentTestUtils;
|
||||
import org.opensearch.test.rest.OpenSearchRestTestCase;
|
||||
|
@ -95,11 +93,10 @@ import static org.hamcrest.Matchers.nullValue;
|
|||
* with {@code tests.is_old_cluster} set to {@code false}.
|
||||
*/
|
||||
public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
|
||||
|
||||
private String index;
|
||||
private String type;
|
||||
|
||||
public static final String TYPES_DEPRECATION_MESSAGE_BULK = "[types removal]" + " Specifying types in bulk requests is deprecated.";
|
||||
|
||||
@Before
|
||||
public void setIndex() {
|
||||
index = getTestName().toLowerCase(Locale.ROOT);
|
||||
|
@ -163,6 +160,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
count,
true,
true,
randomBoolean(),
i -> JsonXContent.contentBuilder().startObject()
.field("string", randomAlphaOfLength(10))
.field("int", randomInt(100))

@ -182,7 +180,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
assertBasicSearchWorks(count);
assertAllSearchWorks(count);
assertBasicAggregationWorks();
assertRealtimeGetWorks(type);
assertRealtimeGetWorks();
assertStoredBinaryFields(count);
}

@ -198,9 +196,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
{
mappingsAndSettings.startObject("mappings");
if (isRunningAgainstAncientCluster()) {
mappingsAndSettings.startObject(type);
}
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");

@ -208,21 +203,17 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
if (isRunningAgainstAncientCluster()) {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();

Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
createIndex.setOptions(allowTypesRemovalWarnings());
client().performRequest(createIndex);

int numDocs = randomIntBetween(2000, 3000);
indexRandomDocuments(
numDocs, true, false, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
numDocs, true, false, randomBoolean(), i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
logger.info("Refreshing [{}]", index);
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
} else {

@ -304,9 +295,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
{
mappingsAndSettings.startObject("mappings");
{
if (isRunningAgainstAncientCluster()) {
mappingsAndSettings.startObject(type);
}
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");

@ -316,30 +304,23 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
if (isRunningAgainstAncientCluster()) {
mappingsAndSettings.endObject();
}
}
mappingsAndSettings.endObject();
if (isRunningAgainstAncientCluster() == false) {
// the default number of shards is now one so we have to set the number of shards to be more than one explicitly
mappingsAndSettings.startObject("settings");
{
mappingsAndSettings.field("index.number_of_shards", 5);
}
mappingsAndSettings.endObject();
mappingsAndSettings.startObject("settings");
{
mappingsAndSettings.field("index.number_of_shards", 5);
}
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();

Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
createIndex.setOptions(allowTypesRemovalWarnings());
client().performRequest(createIndex);

numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(
numDocs, true, true, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
numDocs, true, true, randomBoolean(), i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());

ensureGreen(index); // wait for source index to be available on both nodes before starting shrink

@ -387,9 +368,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
{
mappingsAndSettings.startObject("mappings");
{
if (isRunningAgainstAncientCluster()) {
mappingsAndSettings.startObject(type);
}
mappingsAndSettings.startObject("properties");
{
mappingsAndSettings.startObject("field");

@ -399,23 +377,17 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
if (isRunningAgainstAncientCluster()) {
mappingsAndSettings.endObject();
}
}
mappingsAndSettings.endObject();
if (isRunningAgainstAncientCluster() == false) {
// the default number of shards is now one so we have to set the number of shards to be more than one explicitly
mappingsAndSettings.startObject("settings");
mappingsAndSettings.field("index.number_of_shards", 5);
mappingsAndSettings.endObject();
}
// the default number of shards is now one so we have to set the number of shards to be more than one explicitly
mappingsAndSettings.startObject("settings");
mappingsAndSettings.field("index.number_of_shards", 5);
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();

Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
createIndex.setOptions(allowTypesRemovalWarnings());
client().performRequest(createIndex);

numDocs = randomIntBetween(512, 1024);

@ -423,6 +395,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
numDocs,
true,
true,
randomBoolean(),
i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()
);
} else {

@ -567,12 +540,10 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
// the 'string' field has a boost of 4 in the mappings so it should get a payload boost
String stringValue = (String) XContentMapValues.extractValue("_source.string", bestHit);
assertNotNull(stringValue);
String type = (String) bestHit.get("_type");
String id = (String) bestHit.get("_id");

Request explainRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain");
Request explainRequest = new Request("GET", "/" + index + "/_explain" + "/" + id);
explainRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
explainRequest.setOptions(expectWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE));
String explanation = toStr(client().performRequest(explainRequest));
assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost"));

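The hunk above swaps the legacy /{index}/{type}/{id}/_explain path for the typeless /{index}/_explain/{id} form. A minimal low-level REST client sketch of the new endpoint; the index name "test" and id "1" are illustrative, not taken from this diff:

// Sketch only: exercises the typeless explain endpoint against a running test cluster.
Request explain = new Request("GET", "/test/_explain/1");        // no type segment in the path
explain.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");   // same request body as before the change
String explanation = toStr(client().performRequest(explain));    // toStr(...) as used elsewhere in this test
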
@ -612,7 +583,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
assertTotalHits(termsCount, boolTerms);
}

void assertRealtimeGetWorks(final String typeName) throws IOException {
void assertRealtimeGetWorks() throws IOException {
Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}");
client().performRequest(disableAutoRefresh);

@ -627,7 +598,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}");
client().performRequest(updateRequest);

Request getRequest = new Request("GET", "/" + index + "/" + typeName + "/" + docId);
Request getRequest = new Request("GET", "/" + index + "/" + type + "/" + docId);
Map<String, Object> getRsp = entityAsMap(client().performRequest(getRequest));
Map<?, ?> source = (Map<?, ?>) getRsp.get("_source");
assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo"));

@ -715,14 +686,13 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
* an index without a translog so we randomize whether
* or not we have one. */
shouldHaveTranslog = randomBoolean();

Settings.Builder settings = Settings.builder();
if (minimumNodeVersion().before(Version.V_2_0_0) && randomBoolean()) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
createIndex(index, settings.build());

indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
final String mappings = randomBoolean() ? "\"_source\": { \"enabled\": false}" : null;
createIndex(index, settings.build(), mappings);
indexRandomDocuments(count, true, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());

// make sure all recoveries are done
ensureGreen(index);

@ -733,28 +703,26 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
flushRequest.addParameter("wait_if_ongoing", "true");
assertOK(client().performRequest(flushRequest));

if (randomBoolean()) {
syncedFlush(index, randomBoolean());
}
if (shouldHaveTranslog) {
// Update a few documents so we are sure to have a translog
indexRandomDocuments(
count / 10,
false, // flushing here would invalidate the whole thing
false,
i -> jsonBuilder().startObject().field("field", "value").endObject()
count / 10,
false, // flushing here would invalidate the whole thing
false,
true,
i -> jsonBuilder().startObject().field("field", "value").endObject()
);
}
saveInfoDocument("should_have_translog", Boolean.toString(shouldHaveTranslog));
saveInfoDocument(index + "_should_have_translog", Boolean.toString(shouldHaveTranslog));
} else {
count = countOfIndexedRandomDocuments();
shouldHaveTranslog = Booleans.parseBoolean(loadInfoDocument("should_have_translog"));
shouldHaveTranslog = Booleans.parseBoolean(loadInfoDocument(index + "_should_have_translog"));
}

// Count the documents in the index to make sure we have as many as we put there
Request countRequest = new Request("GET", "/" + index + "/_search");
countRequest.addParameter("size", "0");
refresh();
refreshAllIndices();
Map<String, Object> countResponse = entityAsMap(client().performRequest(countRequest));
assertTotalHits(count, countResponse);

@ -787,6 +755,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {

String currentLuceneVersion = Version.CURRENT.luceneVersion.toString();
String bwcLuceneVersion = getOldClusterVersion().luceneVersion.toString();
String minCompatibleBWCVersion = Version.CURRENT.minimumCompatibilityVersion().luceneVersion.toString();
if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) {
int numCurrentVersion = 0;
int numBwcVersion = 0;

@ -805,6 +774,10 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
numCurrentVersion++;
} else if (bwcLuceneVersion.equals(version)) {
numBwcVersion++;
} else if (minCompatibleBWCVersion.equals(version) && minCompatibleBWCVersion.equals(bwcLuceneVersion) == false) {
// Our upgrade path from 7.non-last always goes through 7.last, which depending on timing can create 7.last
// index segment. We ignore those.
continue;
} else {
fail("expected version to be one of [" + currentLuceneVersion + "," + bwcLuceneVersion + "] but was " + line);
}

@ -835,7 +808,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
createIndex(index, settings.build());
indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
indexRandomDocuments(count, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject());
} else {
count = countOfIndexedRandomDocuments();
}

@ -978,10 +951,10 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
int numDocs = between(10, 100);
for (int i = 0; i < numDocs; i++) {
String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject());
Request request = new Request("POST", "/" + index + "/" + type + "/" + i);
Request request = new Request("POST", "/" + index + "/_doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
refresh();
refreshAllIndices();
}
client().performRequest(new Request("POST", "/" + index + "/_flush"));
int liveDocs = numDocs;

@ -989,19 +962,19 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
for (int i = 0; i < numDocs; i++) {
if (randomBoolean()) {
String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v2").endObject());
Request request = new Request("POST", "/" + index + "/" + type + "/" + i);
Request request = new Request("POST", "/" + index + "/_doc/" + i);
request.setJsonEntity(doc);
client().performRequest(request);
} else if (randomBoolean()) {
client().performRequest(new Request("DELETE", "/" + index + "/" + type + "/" + i));
client().performRequest(new Request("DELETE", "/" + index + "/_doc/" + i));
liveDocs--;
}
}
refresh();
refreshAllIndices();
assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))));
saveInfoDocument("doc_count", Integer.toString(liveDocs));
saveInfoDocument(index + "_doc_count", Integer.toString(liveDocs));
} else {
int liveDocs = Integer.parseInt(loadInfoDocument("doc_count"));
int liveDocs = Integer.parseInt(loadInfoDocument(index + "_doc_count"));
assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))));
}
}

@ -1185,19 +1158,20 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
// TODO tests for upgrades after shrink. We've had trouble with shrink in the past.

private void indexRandomDocuments(
final int count,
final boolean flushAllowed,
final boolean saveInfo,
final CheckedFunction<Integer, XContentBuilder, IOException> docSupplier)
throws IOException {
final int count,
final boolean flushAllowed,
final boolean saveInfo,
final boolean specifyId,
final CheckedFunction<Integer, XContentBuilder, IOException> docSupplier
) throws IOException {
logger.info("Indexing {} random documents", count);
for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i);
Request createDocument = new Request("POST", "/" + index + "/" + type + "/" + i);
Request createDocument = new Request("POST", "/" + index + "/_doc/" + (specifyId ? i : ""));
createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
client().performRequest(createDocument);
if (rarely()) {
refresh();
refreshAllIndices();
}
if (flushAllowed && rarely()) {
logger.debug("Flushing [{}]", index);

@ -1205,7 +1179,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}
}
if (saveInfo) {
saveInfoDocument("count", Integer.toString(count));
saveInfoDocument(index + "_count", Integer.toString(count));
}
}

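For orientation, callers of the widened helper above now pass the extra specifyId flag. A minimal caller sketch; the argument values are illustrative, taken from the call sites in this diff:

// Sketch only: invoking the new five-argument indexRandomDocuments(...)
indexRandomDocuments(
    100,              // count
    true,             // flushAllowed
    true,             // saveInfo
    randomBoolean(),  // specifyId: POST /{index}/_doc/{i} when true, /{index}/_doc/ (auto id) when false
    i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()
);
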
@ -1216,22 +1190,22 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
}

private int countOfIndexedRandomDocuments() throws IOException {
return Integer.parseInt(loadInfoDocument("count"));
return Integer.parseInt(loadInfoDocument(index + "_count"));
}

private void saveInfoDocument(String type, String value) throws IOException {
private void saveInfoDocument(String id, String value) throws IOException {
XContentBuilder infoDoc = JsonXContent.contentBuilder().startObject();
infoDoc.field("value", value);
infoDoc.endObject();
// Only create the first version so we know how many documents are created when the index is first created
Request request = new Request("PUT", "/info/" + this.type + "/" + index + "_" + type);
Request request = new Request("PUT", "/info/" + type + "/" + id);
request.addParameter("op_type", "create");
request.setJsonEntity(Strings.toString(infoDoc));
client().performRequest(request);
}

private String loadInfoDocument(String type) throws IOException {
Request request = new Request("GET", "/info/" + this.type + "/" + index + "_" + type);
private String loadInfoDocument(String id) throws IOException {
Request request = new Request("GET", "/info/_doc/" + id);
request.addParameter("filter_path", "_source");
String doc = toStr(client().performRequest(request));
Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc);

@ -1239,10 +1213,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
return m.group(1);
}

private Object randomLenientBoolean() {
return randomFrom(new Object[] {"off", "no", "0", 0, "false", false, "on", "yes", "1", 1, "true", true});
}

private void refresh() throws IOException {
logger.debug("Refreshing [{}]", index);
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
@ -1581,7 +1551,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
createIndex(index, settings.build());
ensureGreen(index);
int numDocs = randomIntBetween(0, 100);
indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
indexRandomDocuments(numDocs, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject());
// create repo
XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject();
{

@ -1635,7 +1605,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
createIndex(index, settings.build());
ensureGreen(index);
int numDocs = randomIntBetween(0, 100);
indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
indexRandomDocuments(numDocs, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject());
// create repo
XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject();
{

@ -30,7 +30,6 @@ setup:
- is_true: matched
- match: { explanation.value: 1 }
- match: { _index: test_1 }
- match: { _type: _doc }
- match: { _id: id_1 }

---

@ -47,7 +46,6 @@ setup:
- is_true: matched
- match: { explanation.value: 1 }
- match: { _index: test_1 }
- match: { _type: _doc }
- match: { _id: id_1 }

---

@ -12,7 +12,6 @@
- do:
explain: { index: test_1, id: 1, _source: false, body: { query: { match_all: {}} } }
- match: { _index: test_1 }
- match: { _type: _doc }
- match: { _id: "1" }
- is_false: get._source

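These YAML tests now drive the explain API without a type parameter; on the transport side the same shape corresponds to the two-argument ExplainRequest constructor introduced later in this diff. A minimal sketch, with "test_1" and "1" mirroring the fixture above:

// Sketch only: the typeless explain request equivalent of the YAML call above.
ExplainRequest request = new ExplainRequest("test_1", "1");   // (index, id), no type argument
request.query(QueryBuilders.matchAllQuery());
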
@ -1,74 +0,0 @@
---
"Test Create and update mapping":
- do:
indices.create:
index: test_index

- do:
indices.put_mapping:
include_type_name: true
index: test_index
type: test_type
body:
test_type:
properties:
text1:
type: text
analyzer: whitespace
text2:
type: text
analyzer: whitespace
subfield.text3:
type: text

- do:
indices.get_mapping:
include_type_name: true
index: test_index

- match: {test_index.mappings.test_type.properties.text1.type: text}
- match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace}
- match: {test_index.mappings.test_type.properties.text2.type: text}
- match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace}

- do:
indices.put_mapping:
include_type_name: true
index: test_index
type: test_type
body:
test_type:
properties:
text1:
type: text
analyzer: whitespace
fields:
text_raw:
type: keyword


- do:
indices.get_mapping:
include_type_name: true
index: test_index

- match: {test_index.mappings.test_type.properties.text1.type: text}
- match: {test_index.mappings.test_type.properties.subfield.properties.text3.type: text}
- match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword}

---
"Create index with invalid mappings":
- do:
indices.create:
index: test_index
- do:
catch: /illegal_argument_exception/
indices.put_mapping:
include_type_name: true
index: test_index
type: test_type
body:
test_type:
properties:
"":
type: keyword

@ -1,77 +0,0 @@
---
"PUT mapping with typeless API on an index that has types":

- do:
indices.create: # not using include_type_name: false on purpose
include_type_name: true
index: index
body:
mappings:
not_doc:
properties:
foo:
type: "keyword"

- do:
indices.put_mapping:
include_type_name: false
index: index
body:
properties:
bar:
type: "long"

- do:
indices.get_mapping:
include_type_name: false
index: index

- match: { index.mappings.properties.foo.type: "keyword" }
- match: { index.mappings.properties.bar.type: "long" }

- do:
indices.put_mapping:
include_type_name: false
index: index
body:
properties:
foo:
type: "keyword" # also test no-op updates that trigger special logic wrt the mapping version

- do:
catch: /the final mapping would have more than 1 type/
indices.put_mapping:
include_type_name: true
index: index
type: some_other_type
body:
some_other_type:
properties:
bar:
type: "long"


---
"PUT mapping with _doc on an index that has types":
- do:
indices.create:
include_type_name: true
index: index
body:
mappings:
my_type:
properties:
foo:
type: "keyword"

- do:
catch: /the final mapping would have more than 1 type/
indices.put_mapping:
include_type_name: true
index: index
type: _doc
body:
_doc:
properties:
bar:
type: "long"

@ -1,227 +0,0 @@
setup:
- do:
indices.create:
index: test_index1
- do:
indices.create:
index: test_index2
- do:
indices.create:
index: foo


---
"put one mapping per index":
- do:
indices.put_mapping:
include_type_name: true
index: test_index1
type: test_type
body:
test_type:
properties:
text:
type: text
analyzer: whitespace
- do:
indices.put_mapping:
include_type_name: true
index: test_index2
type: test_type
body:
test_type:
properties:
text:
type: text
analyzer: whitespace


- do:
indices.get_mapping:
include_type_name: true

- match: {test_index1.mappings.test_type.properties.text.type: text}
- match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}

- match: {test_index2.mappings.test_type.properties.text.type: text}
- match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}

- match: { foo.mappings: {} }

---
"put mapping in _all index":

- do:
indices.put_mapping:
include_type_name: true
index: _all
type: test_type
body:
test_type:
properties:
text:
type: text
analyzer: whitespace

- do:
indices.get_mapping:
include_type_name: true

- match: {test_index1.mappings.test_type.properties.text.type: text}
- match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}

- match: {test_index2.mappings.test_type.properties.text.type: text}
- match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}

- match: {foo.mappings.test_type.properties.text.type: text}
- match: {foo.mappings.test_type.properties.text.analyzer: whitespace}

---
"put mapping in * index":
- do:
indices.put_mapping:
include_type_name: true
index: "*"
type: test_type
body:
test_type:
properties:
text:
type: text
analyzer: whitespace

- do:
indices.get_mapping:
include_type_name: true

- match: {test_index1.mappings.test_type.properties.text.type: text}
- match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}

- match: {test_index2.mappings.test_type.properties.text.type: text}
- match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}

- match: {foo.mappings.test_type.properties.text.type: text}
- match: {foo.mappings.test_type.properties.text.analyzer: whitespace}

---
"put mapping in prefix* index":
- do:
indices.put_mapping:
include_type_name: true
index: "test_index*"
type: test_type
body:
test_type:
properties:
text:
type: text
analyzer: whitespace

- do:
indices.get_mapping:
include_type_name: true

- match: {test_index1.mappings.test_type.properties.text.type: text}
- match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}

- match: {test_index2.mappings.test_type.properties.text.type: text}
- match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}

- match: { foo.mappings: {} }

---
"put mapping in list of indices":
- do:
indices.put_mapping:
include_type_name: true
index: [test_index1, test_index2]
type: test_type
body:
test_type:
properties:
text:
type: text
analyzer: whitespace

- do:
indices.get_mapping:
include_type_name: true

- match: {test_index1.mappings.test_type.properties.text.type: text}
- match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}

- match: {test_index2.mappings.test_type.properties.text.type: text}
- match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}

- match: { foo.mappings: {} }

---
"put mapping with blank index":
- do:
indices.put_mapping:
include_type_name: true
type: test_type
body:
test_type:
properties:
text:
type: text
analyzer: whitespace

- do:
indices.get_mapping:
include_type_name: true

- match: {test_index1.mappings.test_type.properties.text.type: text}
- match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace}

- match: {test_index2.mappings.test_type.properties.text.type: text}
- match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace}

- match: {foo.mappings.test_type.properties.text.type: text}
- match: {foo.mappings.test_type.properties.text.analyzer: whitespace}

---
"put mapping with missing type":


- do:
catch: param
indices.put_mapping:
include_type_name: true

---
"post a mapping with default analyzer twice":

- do:
indices.put_mapping:
include_type_name: true
index: test_index1
type: test_type
body:
test_type:
dynamic: false
properties:
text:
analyzer: default
type: text

- do:
indices.put_mapping:
include_type_name: true
index: test_index1
type: test_type
body:
test_type:
dynamic: false
properties:
text:
analyzer: default
type: text

- do:
indices.get_mapping:
include_type_name: true

- match: {test_index1.mappings.test_type.properties.text.type: text}

@ -30,10 +30,8 @@ setup:
body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : {} } } }
- match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" }
- match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "1" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._index: "test" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._type: "_doc" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._id: "1" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.field: "nested_field" }
- match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.offset: 0 }

@ -63,7 +61,6 @@ setup:

- match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" }
- match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "1" }
- match: { hits.hits.0._version: 1 }
- match: { hits.hits.0.fields._seq_no: [0] }

@ -86,7 +83,6 @@ setup:

- match: { hits.total: 1 }
- match: { hits.hits.0._index: "test" }
- match: { hits.hits.0._type: "_doc" }
- match: { hits.hits.0._id: "1" }
- match: { hits.hits.0._version: 2 }
- match: { hits.hits.0.fields._seq_no: [1] }

@ -19,7 +19,6 @@ setup:
index: test

- is_true: hits.hits.0._id
- is_true: hits.hits.0._type
- is_true: hits.hits.0._source

- do:

@ -30,7 +29,6 @@ setup:
stored_fields: []

- is_true: hits.hits.0._id
- is_true: hits.hits.0._type
- is_false: hits.hits.0._source

- do:

@ -73,19 +73,16 @@ setup:
- match: {hits.total: 6 }
- length: {hits.hits: 3 }
- match: {hits.hits.0._index: test }
- match: {hits.hits.0._type: _doc }
- match: {hits.hits.0.fields.numeric_group: [3] }
- match: {hits.hits.0.sort: [36] }
- match: {hits.hits.0._id: "6" }
- is_false: hits.hits.0.inner_hits
- match: {hits.hits.1._index: test }
- match: {hits.hits.1._type: _doc }
- match: {hits.hits.1.fields.numeric_group: [1] }
- match: {hits.hits.1.sort: [24] }
- match: {hits.hits.1._id: "3" }
- is_false: hits.hits.1.inner_hits
- match: {hits.hits.2._index: test }
- match: {hits.hits.2._type: _doc }
- match: {hits.hits.2.fields.numeric_group: [25] }
- match: {hits.hits.2.sort: [10] }
- match: {hits.hits.2._id: "4" }

@ -111,7 +108,6 @@ setup:
- match: {hits.total: 6 }
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- match: {hits.hits.0._type: _doc }
- match: {hits.hits.0.fields.numeric_group: [25]}
- match: {hits.hits.0.sort: [10] }
- match: {hits.hits.0._id: "4" }

@ -140,7 +136,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }

@ -148,7 +143,6 @@ setup:
- length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" }
- match: { hits.hits.1._index: test }
- match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }

@ -157,7+151,6 @@ setup:
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" }
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" }
- match: { hits.hits.2._index: test }
- match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }

@ -219,7 +212,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }

@ -227,7 +219,6 @@ setup:
- length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" }
- match: { hits.hits.1._index: test }
- match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }

@ -236,7 +227,6 @@ setup:
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" }
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" }
- match: { hits.hits.2._index: test }
- match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }

@ -348,7 +338,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }

@ -359,7 +348,6 @@ setup:
- length: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits.0._id: "6" }
- match: { hits.hits.1._index: test }
- match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }

@ -371,7 +359,6 @@ setup:
- length: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits: 1 }
- match: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits.0._id: "3" }
- match: { hits.hits.2._index: test }
- match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }

@ -402,7 +389,6 @@ setup:
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- match: { hits.hits.0._type: _doc }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }

@ -412,7 +398,6 @@ setup:
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" }
- match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._version: 66 }
- match: { hits.hits.1._index: test }
- match: { hits.hits.1._type: _doc }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }

@ -424,7 +409,6 @@ setup:
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" }
- match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._version: 11 }
- match: { hits.hits.2._index: test }
- match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }

@ -517,7 +501,6 @@ setup:
- gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._seq_no: 0 }
- gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._primary_term: 1 }
- match: { hits.hits.2._index: test }
- match: { hits.hits.2._type: _doc }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }

@ -46,7 +46,6 @@ setup:

- match: {hits.total: 1}
- match: {hits.hits.0._index: test_1 }
- match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "1" }

- do:

@ -60,7 +59,6 @@ setup:

- match: {hits.total: 1}
- match: {hits.hits.0._index: test_2 }
- match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "42" }

---

@ -31,10 +31,8 @@
- is_true: _shards.total
- is_true: hits.total
- is_true: hits.hits.0._index
- is_true: hits.hits.0._type
- is_true: hits.hits.0._id
- is_true: hits.hits.1._index
- is_true: hits.hits.1._type
- is_true: hits.hits.1._id

- do:

@ -48,10 +46,8 @@
- is_false: _shards.total
- is_false: hits.total
- is_false: hits.hits.0._index
- is_false: hits.hits.0._type
- is_false: hits.hits.0._id
- is_false: hits.hits.1._index
- is_false: hits.hits.1._type
- is_false: hits.hits.1._id

- do:

@ -65,10 +61,8 @@
- is_true: _shards.total
- is_false: hits.total
- is_false: hits.hits.0._index
- is_false: hits.hits.0._type
- is_false: hits.hits.0._id
- is_false: hits.hits.1._index
- is_false: hits.hits.1._type
- is_false: hits.hits.1._id

- do:

@ -82,10 +76,8 @@
- is_true: _shards.total
- is_true: hits.total
- is_true: hits.hits.0._index
- is_false: hits.hits.0._type
- is_true: hits.hits.0._id
- is_true: hits.hits.1._index
- is_false: hits.hits.1._type
- is_true: hits.hits.1._id

---

@ -38,7 +38,6 @@
- match: {hits.total: 3 }
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "172" }
- match: {hits.hits.0.sort: [24, 172] }

@ -57,7 +56,6 @@
- match: {hits.total: 3 }
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "42" }
- match: {hits.hits.0.sort: [18, 42] }

@ -76,7 +74,6 @@
- match: {hits.total: 3}
- length: {hits.hits: 1 }
- match: {hits.hits.0._index: test }
- match: {hits.hits.0._type: _doc }
- match: {hits.hits.0._id: "1" }
- match: {hits.hits.0.sort: [18, 1] }

@ -281,12 +281,10 @@ setup:
- length: { suggest.result.0.options: 2 }
- match: { suggest.result.0.options.0.text: "baz" }
- match: { suggest.result.0.options.0._index: "test" }
- match: { suggest.result.0.options.0._type: "_doc" }
- match: { suggest.result.0.options.0._source.title: "title_baz" }
- match: { suggest.result.0.options.0._source.count: 3 }
- match: { suggest.result.0.options.1.text: "bar" }
- match: { suggest.result.0.options.1._index: "test" }
- match: { suggest.result.0.options.1._type: "_doc" }
- match: { suggest.result.0.options.1._source.title: "title_bar" }
- match: { suggest.result.0.options.1._source.count: 4 }

@ -349,7 +349,7 @@ public class IndicesRequestIT extends OpenSearchIntegTestCase {
String explainShardAction = ExplainAction.NAME + "[s]";
interceptTransportActions(explainShardAction);

ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "type", "id").query(QueryBuilders.matchAllQuery());
ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "id").query(QueryBuilders.matchAllQuery());
internalCluster().coordOnlyNodeClient().explain(explainRequest).actionGet();

clearInterceptedActions();

@ -65,36 +65,33 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {

client().prepareIndex("test", "test", "1").setSource("field", "value1").get();

ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.matchAllQuery()).get();
ExplainResponse response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertFalse(response.isExists()); // not a match b/c not realtime
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertFalse(response.isMatch()); // not a match b/c not realtime

refresh();
response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.matchAllQuery()).get();
response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertTrue(response.isMatch());
assertNotNull(response.getExplanation());
assertTrue(response.getExplanation().isMatch());
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getExplanation().getValue(), equalTo(1.0f));

response = client().prepareExplain(indexOrAlias(), "test", "1").setQuery(QueryBuilders.termQuery("field", "value2")).get();
response = client().prepareExplain(indexOrAlias(), "1").setQuery(QueryBuilders.termQuery("field", "value2")).get();
assertNotNull(response);
assertTrue(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertNotNull(response.getExplanation());
assertFalse(response.getExplanation().isMatch());

response = client().prepareExplain(indexOrAlias(), "test", "1")
response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(
QueryBuilders.boolQuery().must(QueryBuilders.termQuery("field", "value1")).must(QueryBuilders.termQuery("field", "value2"))
)

@ -103,18 +100,16 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
assertTrue(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertNotNull(response.getExplanation());
assertFalse(response.getExplanation().isMatch());
assertThat(response.getExplanation().getDetails().length, equalTo(2));

response = client().prepareExplain(indexOrAlias(), "test", "2").setQuery(QueryBuilders.matchAllQuery()).get();
response = client().prepareExplain(indexOrAlias(), "2").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertFalse(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("2"));
}

@ -132,7 +127,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
.get();

refresh();
ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setStoredFields("obj1.field1")
.get();

@ -149,7 +144,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
assertThat(response.getGetResult().isSourceEmpty(), equalTo(true));

refresh();
response = client().prepareExplain(indexOrAlias(), "test", "1")
response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setStoredFields("obj1.field1")
.setFetchSource(true)

@ -166,20 +161,20 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1"));
assertThat(response.getGetResult().isSourceEmpty(), equalTo(false));

response = client().prepareExplain(indexOrAlias(), "test", "1")
response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setStoredFields("obj1.field1", "obj1.field2")
.get();
assertNotNull(response);
assertTrue(response.isMatch());
String v1 = (String) response.getGetResult().field("obj1.field1").getValue();
String v2 = (String) response.getGetResult().field("obj1.field2").getValue();
String v1 = response.getGetResult().field("obj1.field1").getValue();
String v2 = response.getGetResult().field("obj1.field2").getValue();
assertThat(v1, equalTo("value1"));
assertThat(v2, equalTo("value2"));
}

@SuppressWarnings("unchecked")
public void testExplainWitSource() throws Exception {
public void testExplainWithSource() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
ensureGreen("test");

@ -190,7 +185,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
.get();

refresh();
ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFetchSource("obj1.field1", null)
.get();

@ -204,7 +199,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
assertThat(response.getGetResult().getSource().size(), equalTo(1));
assertThat(((Map<String, Object>) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1"));

response = client().prepareExplain(indexOrAlias(), "test", "1")
response = client().prepareExplain(indexOrAlias(), "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFetchSource(null, "obj1.field2")
.get();

@ -213,7 +208,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
assertThat(((Map<String, Object>) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1"));
}

public void testExplainWithFilteredAlias() throws Exception {
public void testExplainWithFilteredAlias() {
assertAcked(
prepareCreate("test").addMapping("test", "field2", "type=text")
.addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2")))

@ -223,7 +218,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get();
refresh();

ExplainResponse response = client().prepareExplain("alias1", "test", "1").setQuery(QueryBuilders.matchAllQuery()).get();
ExplainResponse response = client().prepareExplain("alias1", "1").setQuery(QueryBuilders.matchAllQuery()).get();
assertNotNull(response);
assertTrue(response.isExists());
assertFalse(response.isMatch());

@ -242,7 +237,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get();
refresh();

ExplainResponse response = client().prepareExplain("alias1", "test", "1")
ExplainResponse response = client().prepareExplain("alias1", "1")
.setQuery(QueryBuilders.matchAllQuery())
.setFetchSource(true)
.get();

@ -251,7 +246,6 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {
assertTrue(response.isExists());
assertFalse(response.isMatch());
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getType(), equalTo("test"));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getGetResult(), notNullValue());
assertThat(response.getGetResult().getIndex(), equalTo("test"));

@ -271,9 +265,7 @@ public class ExplainActionIT extends OpenSearchIntegTestCase {

refresh();

ExplainResponse explainResponse = client().prepareExplain("test", "type", "1")
.setQuery(queryStringQuery("past:[now-2M/d TO now/d]"))
.get();
ExplainResponse explainResponse = client().prepareExplain("test", "1").setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get();
assertThat(explainResponse.isExists(), equalTo(true));
assertThat(explainResponse.isMatch(), equalTo(true));
}

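Throughout ExplainActionIT the builder now takes only an index and an id. A minimal standalone sketch of the new call shape; the index, id, and field values are illustrative:

// Sketch only: typeless explain via the request builder; the type argument is gone.
ExplainResponse response = client().prepareExplain("test", "1")   // (index, id)
    .setQuery(QueryBuilders.termQuery("field", "value1"))
    .get();
assertTrue(response.isExists());
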
@ -590,7 +590,7 @@ public class SimpleRoutingIT extends OpenSearchIntegTestCase {
}

logger.info("--> verifying explain with id [2], with routing [0], should succeed");
ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "type1", "2")
ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "2")
.setQuery(QueryBuilders.matchAllQuery())
.setRouting(routingValue)
.get();

@ -599,7 +599,7 @@ public class SimpleRoutingIT extends OpenSearchIntegTestCase {

logger.info("--> verifying explain with id [2], with no routing, should fail");
try {
client().prepareExplain(indexOrAlias(), "type1", "2").setQuery(QueryBuilders.matchAllQuery()).get();
client().prepareExplain(indexOrAlias(), "2").setQuery(QueryBuilders.matchAllQuery()).get();
fail();
} catch (RoutingMissingException e) {
assertThat(e.getMessage(), equalTo("routing is required for [test]/[2]"));

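The routing behavior exercised above is unchanged by the type removal. A minimal sketch of a typeless explain with explicit routing; the routing value "0" mirrors what this test logs:

// Sketch only: explain for a doc indexed with custom routing.
ExplainResponse r = client().prepareExplain("test", "2")
    .setQuery(QueryBuilders.matchAllQuery())
    .setRouting("0")   // omitting this throws RoutingMissingException, as asserted above
    .get();
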
@ -1123,7 +1123,6 @@ public class TopHitsIT extends OpenSearchIntegTestCase {
for (SearchHit hit : hits) {
assertThat(hit.getSourceAsMap(), nullValue());
assertThat(hit.getId(), nullValue());
assertThat(hit.getType(), equalTo("type"));
}
}
}

@ -156,9 +156,8 @@ public class ExistsIT extends OpenSearchIntegTestCase {
} catch (AssertionError e) {
for (SearchHit searchHit : allDocs.getHits()) {
final String index = searchHit.getIndex();
final String type = searchHit.getType();
final String id = searchHit.getId();
final ExplainResponse explanation = client().prepareExplain(index, type, id)
final ExplainResponse explanation = client().prepareExplain(index, id)
.setQuery(QueryBuilders.existsQuery(fieldName))
.get();
logger.info(

@ -60,13 +60,11 @@ public class MetadataFetchingIT extends OpenSearchIntegTestCase {

SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true).get();
assertThat(response.getHits().getAt(0).getId(), nullValue());
assertThat(response.getHits().getAt(0).getType(), equalTo("_doc"));
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
assertThat(response.getHits().getAt(0).getVersion(), notNullValue());

response = client().prepareSearch("test").storedFields("_none_").get();
assertThat(response.getHits().getAt(0).getId(), nullValue());
assertThat(response.getHits().getAt(0).getType(), equalTo("_doc"));
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
}

@ -88,13 +86,11 @@ public class MetadataFetchingIT extends OpenSearchIntegTestCase {
.get();
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
assertThat(response.getHits().getAt(0).getId(), nullValue());
assertThat(response.getHits().getAt(0).getType(), equalTo("_doc"));
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested");
assertThat(hits.getTotalHits().value, equalTo(1L));
assertThat(hits.getAt(0).getId(), nullValue());
assertThat(hits.getAt(0).getType(), equalTo("_doc"));
assertThat(hits.getAt(0).getSourceAsString(), nullValue());
}

@ -107,13 +103,11 @@ public class MetadataFetchingIT extends OpenSearchIntegTestCase {

SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).get();
assertThat(response.getHits().getAt(0).getId(), nullValue());
assertThat(response.getHits().getAt(0).getType(), equalTo("_doc"));
assertThat(response.getHits().getAt(0).field("_routing"), nullValue());
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());

response = client().prepareSearch("test").storedFields("_none_").get();
assertThat(response.getHits().getAt(0).getId(), nullValue());
assertThat(response.getHits().getAt(0).getType(), equalTo("_doc"));
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
}

@ -32,6 +32,7 @@

package org.opensearch.action.explain;

import org.opensearch.Version;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.ValidateActions;
import org.opensearch.action.support.single.shard.SingleShardRequest;

@ -57,7 +58,6 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> implement

private static final ParseField QUERY_FIELD = new ParseField("query");

private String type = MapperService.SINGLE_MAPPING_NAME;
private String id;
private String routing;
private String preference;

@ -71,16 +71,6 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> implement

public ExplainRequest() {}

/**
* @deprecated Types are in the process of being removed. Use {@link ExplainRequest(String, String) instead.}
*/
@Deprecated
public ExplainRequest(String index, String type, String id) {
this.index = index;
this.type = type;
this.id = id;
}

public ExplainRequest(String index, String id) {
this.index = index;
this.id = id;

@ -88,7 +78,9 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> implement

ExplainRequest(StreamInput in) throws IOException {
super(in);
type = in.readString();
if (in.getVersion().before(Version.V_2_0_0)) {
in.readString();
}
id = in.readString();
routing = in.readOptionalString();
preference = in.readOptionalString();
@ -99,23 +91,6 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> implement
|
|||
nowInMillis = in.readVLong();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Types are in the process of being removed.
|
||||
*/
|
||||
@Deprecated
|
||||
public String type() {
|
||||
return type;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Types are in the process of being removed.
|
||||
*/
|
||||
@Deprecated
|
||||
public ExplainRequest type(String type) {
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String id() {
|
||||
return id;
|
||||
}
|
||||
|
@ -196,9 +171,6 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> implement
|
|||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
ActionRequestValidationException validationException = super.validateNonNullIndex();
|
||||
if (Strings.isEmpty(type)) {
|
||||
validationException = addValidationError("type is missing", validationException);
|
||||
}
|
||||
if (Strings.isEmpty(id)) {
|
||||
validationException = addValidationError("id is missing", validationException);
|
||||
}
|
||||
|
@ -211,7 +183,9 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> implement
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(type);
|
||||
if (out.getVersion().before(Version.V_2_0_0)) {
|
||||
out.writeString(MapperService.SINGLE_MAPPING_NAME);
|
||||
}
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing);
|
||||
out.writeOptionalString(preference);
|
||||
|
|
|
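The net effect of the ExplainRequest changes above: construction is index plus id only, validation no longer checks a type, and a dummy type string is still written to (and skipped from) the stream for pre-2.0 peers. A minimal usage sketch, assuming the classes shown in this diff; the index name, id, field, and value below are hypothetical placeholders:

import org.opensearch.action.explain.ExplainRequest;
import org.opensearch.index.query.QueryBuilders;

final class ExplainRequestSketch {
    static ExplainRequest build() {
        // Typeless construction: the deprecated (index, type, id) constructor is gone.
        ExplainRequest request = new ExplainRequest("index1", "1");
        // Query whose score should be explained for document "1".
        request.query(QueryBuilders.termQuery("field", "value"));
        // validate() now checks only the index and id; "type is missing" can no longer occur.
        assert request.validate() == null;
        return request;
    }
}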
@ -48,16 +48,8 @@ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<Ex
super(client, action, new ExplainRequest());
}

public ExplainRequestBuilder(OpenSearchClient client, ExplainAction action, String index, String type, String id) {
super(client, action, new ExplainRequest().index(index).type(type).id(id));
}

/**
* Sets the type to get a score explanation for.
*/
public ExplainRequestBuilder setType(String type) {
request().type(type);
return this;
public ExplainRequestBuilder(OpenSearchClient client, ExplainAction action, String index, String id) {
super(client, action, new ExplainRequest().index(index).id(id));
}

/**
|
|
|
@ -33,6 +33,7 @@
package org.opensearch.action.explain;

import org.apache.lucene.search.Explanation;
import org.opensearch.Version;
import org.opensearch.action.ActionResponse;
import org.opensearch.common.ParseField;
import org.opensearch.common.io.stream.StreamInput;

@ -42,6 +43,7 @@ import org.opensearch.common.xcontent.StatusToXContentObject;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.index.get.GetResult;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.rest.RestStatus;

import java.io.IOException;

@ -57,7 +59,6 @@ import static org.opensearch.common.lucene.Lucene.writeExplanation;
public class ExplainResponse extends ActionResponse implements StatusToXContentObject {

private static final ParseField _INDEX = new ParseField("_index");
private static final ParseField _TYPE = new ParseField("_type");
private static final ParseField _ID = new ParseField("_id");
private static final ParseField MATCHED = new ParseField("matched");
private static final ParseField EXPLANATION = new ParseField("explanation");

@ -67,36 +68,33 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO
private static final ParseField GET = new ParseField("get");

private String index;
private String type;
private String id;
private boolean exists;
private Explanation explanation;
private GetResult getResult;

// TODO(talevy): remove dependency on empty constructor from ExplainResponseTests
ExplainResponse() {}

public ExplainResponse(String index, String type, String id, boolean exists) {
public ExplainResponse(String index, String id, boolean exists) {
this.index = index;
this.type = type;
this.id = id;
this.exists = exists;
}

public ExplainResponse(String index, String type, String id, boolean exists, Explanation explanation) {
this(index, type, id, exists);
public ExplainResponse(String index, String id, boolean exists, Explanation explanation) {
this(index, id, exists);
this.explanation = explanation;
}

public ExplainResponse(String index, String type, String id, boolean exists, Explanation explanation, GetResult getResult) {
this(index, type, id, exists, explanation);
public ExplainResponse(String index, String id, boolean exists, Explanation explanation, GetResult getResult) {
this(index, id, exists, explanation);
this.getResult = getResult;
}

public ExplainResponse(StreamInput in) throws IOException {
super(in);
index = in.readString();
type = in.readString();
if (in.getVersion().before(Version.V_2_0_0)) {
in.readString();
}
id = in.readString();
exists = in.readBoolean();
if (in.readBoolean()) {

@ -111,14 +109,6 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO
return index;
}

/**
* @deprecated Types are in the process of being removed.
*/
@Deprecated
public String getType() {
return type;
}

public String getId() {
return id;
}

@ -151,7 +141,9 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
out.writeString(type);
if (out.getVersion().before(Version.V_2_0_0)) {
out.writeString(MapperService.SINGLE_MAPPING_NAME);
}
out.writeString(id);
out.writeBoolean(exists);
if (explanation == null) {

@ -171,19 +163,11 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO
private static final ConstructingObjectParser<ExplainResponse, Boolean> PARSER = new ConstructingObjectParser<>(
"explain",
true,
(arg, exists) -> new ExplainResponse(
(String) arg[0],
(String) arg[1],
(String) arg[2],
exists,
(Explanation) arg[3],
(GetResult) arg[4]
)
(arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], exists, (Explanation) arg[2], (GetResult) arg[3])
);

static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX);
PARSER.declareString(ConstructingObjectParser.constructorArg(), _TYPE);
PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID);
final ConstructingObjectParser<Explanation, Boolean> explanationParser = new ConstructingObjectParser<>(
"explanation",

@ -211,7 +195,6 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(_INDEX.getPreferredName(), index);
builder.field(_TYPE.getPreferredName(), type);
builder.field(_ID.getPreferredName(), id);
builder.field(MATCHED.getPreferredName(), isMatch());
if (hasExplanation()) {

@ -253,7 +236,6 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO
}
ExplainResponse other = (ExplainResponse) obj;
return index.equals(other.index)
&& type.equals(other.type)
&& id.equals(other.id)
&& Objects.equals(explanation, other.explanation)
&& getResult.isExists() == other.getResult.isExists()

@ -263,6 +245,6 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO

@Override
public int hashCode() {
return Objects.hash(index, type, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields());
return Objects.hash(index, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields());
}
}
|
|
|
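One subtle consequence in the ExplainResponse hunk above: ConstructingObjectParser passes constructor arguments in declaration order, so dropping the _TYPE declaration renumbers everything after it — the Explanation moves from arg[3] to arg[2] and the GetResult from arg[4] to arg[3]. A toy sketch of that idiom, assuming the same xcontent packages this file imports; the Doc class and its fields are hypothetical, not part of the commit:

import org.opensearch.common.ParseField;
import org.opensearch.common.xcontent.ConstructingObjectParser;

// Hypothetical two-field class, used only to illustrate the arg[] renumbering.
final class Doc {
    final String index;
    final String id;

    Doc(String index, String id) {
        this.index = index;
        this.id = id;
    }

    static final ConstructingObjectParser<Doc, Void> PARSER = new ConstructingObjectParser<>(
        "doc",
        true,
        args -> new Doc((String) args[0], (String) args[1]) // indices follow the declaration order below
    );

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("_index"));
        // Removing a declaration here (as _TYPE was removed above) shifts all later indices down by one.
        PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("_id"));
    }
}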
@ -114,7 +114,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
request.request().filteringAlias(aliasFilter);
// Fail fast on the node that received the request.
if (request.request().routing() == null && state.getMetadata().routingRequired(request.concreteIndex())) {
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());
throw new RoutingMissingException(request.concreteIndex(), request.request().id());
}
}

@ -142,7 +142,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(request.id()));
result = context.indexShard().get(new Engine.Get(false, false, request.id(), uidTerm));
if (!result.exists()) {
return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), false);
return new ExplainResponse(shardId.getIndexName(), request.id(), false);
}
context.parsedQuery(context.getQueryShardContext().toQuery(request.query()));
context.preProcess(true);

@ -159,9 +159,9 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
GetResult getResult = context.indexShard()
.getService()
.get(result, request.id(), request.storedFields(), request.fetchSourceContext());
return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation, getResult);
return new ExplainResponse(shardId.getIndexName(), request.id(), true, explanation, getResult);
} else {
return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation);
return new ExplainResponse(shardId.getIndexName(), request.id(), true, explanation);
}
} catch (IOException e) {
throw new OpenSearchException("Could not explain", e);
|
|
|
@ -398,10 +398,9 @@ public interface Client extends OpenSearchClient, Releasable {
* Computes a score explanation for the specified request.
*
* @param index The index this explain is targeted for
* @param type The type this explain is targeted for
* @param id The document identifier this explain is targeted for
*/
ExplainRequestBuilder prepareExplain(String index, String type, String id);
ExplainRequestBuilder prepareExplain(String index, String id);

/**
* Computes a score explanation for the specified request.
|
|
|
@ -632,8 +632,8 @@ public abstract class AbstractClient implements Client {
}

@Override
public ExplainRequestBuilder prepareExplain(String index, String type, String id) {
return new ExplainRequestBuilder(this, ExplainAction.INSTANCE, index, type, id);
public ExplainRequestBuilder prepareExplain(String index, String id) {
return new ExplainRequestBuilder(this, ExplainAction.INSTANCE, index, id);
}

@Override
|
|
|
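Call sites on the client interface migrate accordingly. A minimal sketch under the assumption that a Client instance is already available; the index name, id, and term query are hypothetical, and setQuery is the existing ExplainRequestBuilder method left untouched by this commit:

import org.opensearch.action.explain.ExplainResponse;
import org.opensearch.client.Client;
import org.opensearch.index.query.QueryBuilders;

final class PrepareExplainSketch {
    // Obtaining the Client is outside the scope of this diff.
    static ExplainResponse explain(Client client) {
        return client.prepareExplain("index1", "1")              // was prepareExplain("index1", "_doc", "1")
            .setQuery(QueryBuilders.termQuery("field", "value")) // query whose score gets explained
            .get();
    }
}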
@ -57,17 +57,14 @@ import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.MapperService.MergeReason;
import org.opensearch.indices.IndicesService;
import org.opensearch.indices.InvalidTypeNameException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.opensearch.index.mapper.MapperService.isMappingSourceTyped;
import static org.opensearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.NO_LONGER_ASSIGNED;

/**

@ -263,7 +260,6 @@ public class MetadataMappingService {
PutMappingClusterStateUpdateRequest request,
Map<Index, MapperService> indexMapperServices
) throws IOException {
String mappingType = request.type();
CompressedXContent mappingUpdateSource = new CompressedXContent(request.source());
final Metadata metadata = currentState.metadata();
final List<IndexMetadata> updateList = new ArrayList<>();

@ -278,34 +274,11 @@ public class MetadataMappingService {
updateList.add(indexMetadata);
// try and parse it (no need to add it here) so we can bail early in case of parsing exception
DocumentMapper existingMapper = mapperService.documentMapper();

String typeForUpdate = mapperService.getTypeForUpdate(mappingType, mappingUpdateSource);
if (existingMapper != null && existingMapper.type().equals(typeForUpdate) == false) {
throw new IllegalArgumentException(
"Rejecting mapping update to ["
+ mapperService.index().getName()
+ "] as the final mapping would have more than 1 type: "
+ Arrays.asList(existingMapper.type(), typeForUpdate)
);
}

DocumentMapper newMapper = mapperService.parse(request.type(), mappingUpdateSource);
if (existingMapper != null) {
// first, simulate: just call merge and ignore the result
existingMapper.merge(newMapper.mapping(), MergeReason.MAPPING_UPDATE);
}
if (mappingType == null) {
mappingType = newMapper.type();
} else if (mappingType.equals(newMapper.type()) == false
&& (isMappingSourceTyped(request.type(), mappingUpdateSource)
|| mapperService.resolveDocumentType(mappingType).equals(newMapper.type()) == false)) {
throw new InvalidTypeNameException("Type name provided does not match type name within mapping definition.");
}
}
assert mappingType != null;

if (MapperService.SINGLE_MAPPING_NAME.equals(mappingType) == false && mappingType.charAt(0) == '_') {
throw new InvalidTypeNameException("Document mapping type name can't start with '_', found: [" + mappingType + "]");
}
Metadata.Builder builder = Metadata.builder(metadata);
boolean updated = false;

@ -316,13 +289,12 @@ public class MetadataMappingService {
final Index index = indexMetadata.getIndex();
final MapperService mapperService = indexMapperServices.get(index);

String typeForUpdate = mapperService.getTypeForUpdate(mappingType, mappingUpdateSource);
CompressedXContent existingSource = null;
DocumentMapper existingMapper = mapperService.documentMapper(typeForUpdate);
DocumentMapper existingMapper = mapperService.documentMapper();
if (existingMapper != null) {
existingSource = existingMapper.mappingSource();
}
DocumentMapper mergedMapper = mapperService.merge(typeForUpdate, mappingUpdateSource, MergeReason.MAPPING_UPDATE);
DocumentMapper mergedMapper = mapperService.merge(request.type(), mappingUpdateSource, MergeReason.MAPPING_UPDATE);
CompressedXContent updatedSource = mergedMapper.mappingSource();

if (existingSource != null) {

@ -341,9 +313,9 @@ public class MetadataMappingService {
} else {
updatedMapping = true;
if (logger.isDebugEnabled()) {
logger.debug("{} create_mapping [{}] with source [{}]", index, mappingType, updatedSource);
logger.debug("{} create_mapping with source [{}]", index, updatedSource);
} else if (logger.isInfoEnabled()) {
logger.info("{} create_mapping [{}]", index, mappingType);
logger.info("{} create_mapping", index);
}
}
|
|
|
@ -80,7 +80,6 @@ import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.BooleanSupplier;
import java.util.function.Function;

@ -411,7 +410,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}

public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason) {
return internalMerge(Collections.singletonMap(type, mappingSource), reason).get(type);
return internalMerge(Collections.singletonMap(type, mappingSource), reason).values().iterator().next();
}

private synchronized Map<String, DocumentMapper> internalMerge(IndexMetadata indexMetadata, MergeReason reason) {

@ -468,20 +467,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}

private synchronized Map<String, DocumentMapper> internalMerge(DocumentMapper mapper, MergeReason reason) {

Map<String, DocumentMapper> results = new LinkedHashMap<>(2);

{
if (mapper != null && this.mapper != null && Objects.equals(this.mapper.type(), mapper.type()) == false) {
throw new IllegalArgumentException(
"Rejecting mapping update to ["
+ index().getName()
+ "] as the final mapping would have more than 1 type: "
+ Arrays.asList(this.mapper.type(), mapper.type())
);
}
}

DocumentMapper newMapper = null;
if (mapper != null) {
// check naming

@ -567,15 +553,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return isMappingSourceTyped(type, root);
}

/**
* If the _type name is _doc and there is no _doc top-level key then this means that we
* are handling a typeless call. In such a case, we override _doc with the actual type
* name in the mappings. This allows to use typeless APIs on typed indices.
*/
public String getTypeForUpdate(String type, CompressedXContent mappingSource) {
return isMappingSourceTyped(type, mappingSource) == false ? resolveDocumentType(type) : type;
}

/**
* Resolves a type from a mapping-related request into the type that should be used when
* merging and updating mappings.
|
|
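Because an index now holds at most one mapping type, merge can return the single merged mapper directly instead of looking it up by the caller's type name. A minimal sketch of a typeless merge call, assuming the classes this file already imports; the mapping JSON is a hypothetical example:

import java.io.IOException;

import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.MapperService.MergeReason;

final class MergeSketch {
    static DocumentMapper addTextField(MapperService mapperService) throws IOException {
        // Hypothetical single-type mapping adding one text field.
        String mapping = "{\"_doc\":{\"properties\":{\"field1\":{\"type\":\"text\"}}}}";
        // The returned mapper is the map's only entry, whatever type name was passed.
        return mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
    }
}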
@ -191,11 +191,6 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
return delegate.getIndex();
}

@Override
public String getType() {
return delegate.getType();
}

@Override
public String getId() {
return delegate.getId();
|
|
|
@ -253,11 +253,6 @@ public abstract class ScrollableHitSource {
*/
String getIndex();

/**
* The type that the hit has.
*/
String getType();

/**
* The document id of the hit.
*/

@ -304,7 +299,6 @@ public abstract class ScrollableHitSource {
*/
public static class BasicHit implements Hit {
private final String index;
private final String type;
private final String id;
private final long version;

@ -314,9 +308,8 @@ public abstract class ScrollableHitSource {
private long seqNo;
private long primaryTerm;

public BasicHit(String index, String type, String id, long version) {
public BasicHit(String index, String id, long version) {
this.index = index;
this.type = type;
this.id = id;
this.version = version;
}

@ -326,11 +319,6 @@ public abstract class ScrollableHitSource {
return index;
}

@Override
public String getType() {
return type;
}

@Override
public String getId() {
return id;
|
|
|
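Reindex-style consumers construct hits without a type from here on. A small sketch; the index, id, and version values are hypothetical, and the import path assumes ScrollableHitSource's usual org.opensearch.index.reindex package:

import org.opensearch.index.reindex.ScrollableHitSource.BasicHit;

final class BasicHitSketch {
    static BasicHit make() {
        // was: new BasicHit("source-index", "_doc", "1", 3L)
        return new BasicHit("source-index", "1", 3L);
    }
}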
@ -35,7 +35,6 @@ package org.opensearch.rest.action.search;
import org.opensearch.action.explain.ExplainRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;

@ -55,20 +54,10 @@ import static org.opensearch.rest.RestRequest.Method.POST;
* Rest action for computing a score explanation for specific documents.
*/
public class RestExplainAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestExplainAction.class);
public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying a type in explain requests is deprecated.";

@Override
public List<Route> routes() {
return unmodifiableList(
asList(
new Route(GET, "/{index}/_explain/{id}"),
new Route(POST, "/{index}/_explain/{id}"),
// Deprecated typed endpoints.
new Route(GET, "/{index}/{type}/{id}/_explain"),
new Route(POST, "/{index}/{type}/{id}/_explain")
)
);
return unmodifiableList(asList(new Route(GET, "/{index}/_explain/{id}"), new Route(POST, "/{index}/_explain/{id}")));
}

@Override

@ -78,14 +67,7 @@ public class RestExplainAction extends BaseRestHandler {

@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
ExplainRequest explainRequest;
if (request.hasParam("type")) {
deprecationLogger.deprecate("explain_with_types", TYPES_DEPRECATION_MESSAGE);
explainRequest = new ExplainRequest(request.param("index"), request.param("type"), request.param("id"));
} else {
explainRequest = new ExplainRequest(request.param("index"), request.param("id"));
}

ExplainRequest explainRequest = new ExplainRequest(request.param("index"), request.param("id"));
explainRequest.parent(request.param("parent"));
explainRequest.routing(request.param("routing"));
explainRequest.preference(request.param("preference"));
|
|
|
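With the typed routes deleted, only /{index}/_explain/{id} remains, and the type-deprecation logger and message constant go with them. A sketch of building the surviving endpoint with the low-level REST client Request class; the HTTP method, index, and id here are hypothetical placeholders:

import org.opensearch.client.Request;

final class ExplainEndpointSketch {
    static Request typeless(String index, String id) {
        // was: GET /{index}/{type}/{id}/_explain (deprecated typed form)
        return new Request("GET", "/" + index + "/_explain/" + id);
    }
}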
@ -35,6 +35,7 @@ package org.opensearch.search;
import org.apache.lucene.search.Explanation;
import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchParseException;
import org.opensearch.Version;
import org.opensearch.action.OriginalIndices;
import org.opensearch.common.Nullable;
import org.opensearch.common.ParseField;

@ -98,7 +99,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
private float score = DEFAULT_SCORE;

private final Text id;
private final Text type;

private final NestedIdentity nestedIdentity;

@ -134,17 +134,16 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do

// used only in tests
public SearchHit(int docId) {
this(docId, null, null, null, null);
this(docId, null, null, null);
}

public SearchHit(int docId, String id, Text type, Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
this(docId, id, type, null, documentFields, metaFields);
public SearchHit(int docId, String id, Map<String, DocumentField> documentFields, Map<String, DocumentField> metaFields) {
this(docId, id, null, documentFields, metaFields);
}

public SearchHit(
int nestedTopDocId,
String id,
Text type,
NestedIdentity nestedIdentity,
Map<String, DocumentField> documentFields,
Map<String, DocumentField> metaFields

@ -155,7 +154,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
} else {
this.id = null;
}
this.type = type;
this.nestedIdentity = nestedIdentity;
this.documentFields = documentFields == null ? emptyMap() : documentFields;
this.metaFields = metaFields == null ? emptyMap() : metaFields;

@ -165,7 +163,9 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
docId = -1;
score = in.readFloat();
id = in.readOptionalText();
type = in.readOptionalText();
if (in.getVersion().before(Version.V_2_0_0)) {
in.readOptionalText();
}
nestedIdentity = in.readOptionalWriteable(NestedIdentity::new);
version = in.readLong();
seqNo = in.readZLong();

@ -261,11 +261,15 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
}
}

private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME);

@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeFloat(score);
out.writeOptionalText(id);
out.writeOptionalText(type);
if (out.getVersion().before(Version.V_2_0_0)) {
out.writeOptionalText(SINGLE_MAPPING_TYPE);
}
out.writeOptionalWriteable(nestedIdentity);
out.writeLong(version);
out.writeZLong(seqNo);

@ -376,17 +380,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
return id != null ? id.string() : null;
}

/**
* The type of the document.
*
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public String getType() {
return type != null ? type.string() : null;
}

/**
* If this is a nested hit then nested reference information is returned otherwise <code>null</code> is returned.
*/

@ -597,7 +590,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do

public static class Fields {
static final String _INDEX = "_index";
static final String _TYPE = "_type";
static final String _ID = "_id";
static final String _VERSION = "_version";
static final String _SEQ_NO = "_seq_no";

@ -641,9 +633,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
if (index != null) {
builder.field(Fields._INDEX, RemoteClusterAware.buildRemoteIndexName(clusterAlias, index));
}
if (type != null) {
builder.field(Fields._TYPE, type);
}
if (id != null) {
builder.field(Fields._ID, id);
}

@ -762,7 +751,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
}

public static void declareInnerHitsParseFields(ObjectParser<Map<String, Object>, Void> parser) {
parser.declareString((map, value) -> map.put(Fields._TYPE, new Text(value)), new ParseField(Fields._TYPE));
parser.declareString((map, value) -> map.put(Fields._INDEX, value), new ParseField(Fields._INDEX));
parser.declareString((map, value) -> map.put(Fields._ID, value), new ParseField(Fields._ID));
parser.declareString((map, value) -> map.put(Fields._NODE, value), new ParseField(Fields._NODE));

@ -822,12 +810,11 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do

public static SearchHit createFromMap(Map<String, Object> values) {
String id = get(Fields._ID, values, null);
Text type = get(Fields._TYPE, values, null);
NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null);
Map<String, DocumentField> metaFields = get(METADATA_FIELDS, values, Collections.emptyMap());
Map<String, DocumentField> documentFields = get(DOCUMENT_FIELDS, values, Collections.emptyMap());

SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, documentFields, metaFields);
SearchHit searchHit = new SearchHit(-1, id, nestedIdentity, documentFields, metaFields);
String index = get(Fields._INDEX, values, null);
String clusterAlias = null;
if (index != null) {

@ -972,7 +959,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
}
SearchHit other = (SearchHit) obj;
return Objects.equals(id, other.id)
&& Objects.equals(type, other.type)
&& Objects.equals(nestedIdentity, other.nestedIdentity)
&& Objects.equals(version, other.version)
&& Objects.equals(seqNo, other.seqNo)
|
|
|
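For SearchHit the public surface shrinks the same way: the Text type constructor parameter, getType(), and the _type key in toXContent all disappear, while pre-2.0 peers still receive an optional _doc text on the wire. A minimal construction sketch using the four-argument constructor shown in the diff; the doc id and id value are hypothetical:

import java.util.Collections;

import org.opensearch.search.SearchHit;

final class SearchHitSketch {
    static SearchHit make() {
        // was: new SearchHit(1, "id1", new Text("_doc"), Collections.emptyMap(), Collections.emptyMap());
        // the hit now renders as {"_id":"id1",...} with no "_type" key.
        return new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap());
    }
}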
@ -330,7 +330,7 @@ public class FetchPhase {
Text typeText = documentMapper.typeText();

if (fieldsVisitor == null) {
SearchHit hit = new SearchHit(docId, null, typeText, null, null);
SearchHit hit = new SearchHit(docId, null, null, null);
return new HitContext(hit, subReaderContext, subDocId, lookup.source());
} else {
SearchHit hit;

@ -340,9 +340,9 @@ public class FetchPhase {
Map<String, DocumentField> docFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
fillDocAndMetaFields(context, fieldsVisitor, storedToRequestedFields, docFields, metaFields);
hit = new SearchHit(docId, uid.id(), typeText, docFields, metaFields);
hit = new SearchHit(docId, uid.id(), docFields, metaFields);
} else {
hit = new SearchHit(docId, uid.id(), typeText, emptyMap(), emptyMap());
hit = new SearchHit(docId, uid.id(), emptyMap(), emptyMap());
}

HitContext hitContext = new HitContext(hit, subReaderContext, subDocId, lookup.source());

@ -420,7 +420,6 @@ public class FetchPhase {
}

DocumentMapper documentMapper = context.mapperService().documentMapper();
Text typeText = documentMapper.typeText();

ObjectMapper nestedObjectMapper = documentMapper.findNestedObjectMapper(nestedDocId, context, subReaderContext);
assert nestedObjectMapper != null;

@ -432,7 +431,7 @@ public class FetchPhase {
nestedObjectMapper
);

SearchHit hit = new SearchHit(nestedTopDocId, rootId.id(), typeText, nestedIdentity, docFields, metaFields);
SearchHit hit = new SearchHit(nestedTopDocId, rootId.id(), nestedIdentity, docFields, metaFields);
HitContext hitContext = new HitContext(hit, subReaderContext, nestedDocId, new SourceLookup()); // Use a clean, fresh SourceLookup
// for the nested context
|
|
|
@ -36,6 +36,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.Uid;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchHits;

@ -95,7 +96,7 @@ public final class InnerHitsPhase implements FetchSubPhase {
docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc;
}
innerHitsContext.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
innerHitsContext.setRootId(new Uid(hit.getType(), hit.getId()));
innerHitsContext.setRootId(new Uid(MapperService.SINGLE_MAPPING_NAME, hit.getId()));
innerHitsContext.setRootLookup(rootLookup);

fetchPhase.execute(innerHitsContext);
|
|
|
@ -116,7 +116,7 @@ public class PlainHighlighter implements Highlighter {
int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments();
ArrayList<TextFragment> fragsList = new ArrayList<>();
List<Object> textsToHighlight;
Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
Analyzer analyzer = context.mapperService().documentMapper().mappers().indexAnalyzer();
final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset();

textsToHighlight = HighlightUtils.loadFieldValues(fieldType, context.getQueryShardContext(), hitContext, fieldContext.forceSource);
|
|
|
@ -68,9 +68,9 @@ public class ExplainRequestTests extends OpenSearchTestCase {

public void testSerialize() throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
ExplainRequest request = new ExplainRequest("index", "type", "id");
ExplainRequest request = new ExplainRequest("index", "id");
request.fetchSourceContext(new FetchSourceContext(true, new String[] { "field1.*" }, new String[] { "field2.*" }));
request.filteringAlias(new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), new String[] { "alias0", "alias1" }));
request.filteringAlias(new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), "alias0", "alias1"));
request.preference("the_preference");
request.query(QueryBuilders.termQuery("field", "value"));
request.storedFields(new String[] { "field1", "field2" });

@ -90,7 +90,7 @@ public class ExplainRequestTests extends OpenSearchTestCase {

public void testValidation() {
{
final ExplainRequest request = new ExplainRequest("index4", "_doc", "0");
final ExplainRequest request = new ExplainRequest("index4", "0");
request.query(QueryBuilders.termQuery("field", "value"));

final ActionRequestValidationException validate = request.validate();

@ -99,12 +99,12 @@ public class ExplainRequestTests extends OpenSearchTestCase {
}

{
final ExplainRequest request = new ExplainRequest("index4", randomBoolean() ? "" : null, randomBoolean() ? "" : null);
final ExplainRequest request = new ExplainRequest("index4", randomBoolean() ? "" : null);
request.query(QueryBuilders.termQuery("field", "value"));
final ActionRequestValidationException validate = request.validate();

assertThat(validate, not(nullValue()));
assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing"));
assertThat(validate.validationErrors(), hasItems("id is missing"));
}
}
}
|
|
|
@ -71,7 +71,6 @@ public class ExplainResponseTests extends AbstractSerializingTestCase<ExplainRes
@Override
protected ExplainResponse createTestInstance() {
String index = randomAlphaOfLength(5);
String type = randomAlphaOfLength(5);
String id = String.valueOf(randomIntBetween(1, 100));
boolean exist = randomBoolean();
Explanation explanation = randomExplanation(randomExplanation(randomExplanation()), randomExplanation());

@ -88,7 +87,7 @@ public class ExplainResponseTests extends AbstractSerializingTestCase<ExplainRes
singletonMap(fieldName, new DocumentField(fieldName, values)),
null
);
return new ExplainResponse(index, type, id, exist, explanation, getResult);
return new ExplainResponse(index, id, exist, explanation, getResult);
}

@Override

@ -113,7 +112,7 @@ public class ExplainResponseTests extends AbstractSerializingTestCase<ExplainRes
singletonMap("field1", new DocumentField("field1", singletonList("value1"))),
null
);
ExplainResponse response = new ExplainResponse(index, type, id, exist, explanation, getResult);
ExplainResponse response = new ExplainResponse(index, id, exist, explanation, getResult);

XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
response.toXContent(builder, ToXContent.EMPTY_PARAMS);

@ -122,7 +121,6 @@ public class ExplainResponseTests extends AbstractSerializingTestCase<ExplainRes

String expectedResponse = ("{\n"
+ " \"_index\":\"index\",\n"
+ " \"_type\":\"type\",\n"
+ " \"_id\":\"1\",\n"
+ " \"matched\":true,\n"
+ " \"explanation\":{\n"
|
|
|
@ -35,7 +35,6 @@ package org.opensearch.action.search;
import org.apache.lucene.search.TotalHits;
import org.opensearch.action.ActionListener;
import org.opensearch.common.document.DocumentField;
import org.opensearch.common.text.Text;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.InnerHitBuilder;
import org.opensearch.index.query.QueryBuilder;

@ -66,8 +65,8 @@ public class ExpandSearchPhaseTests extends OpenSearchTestCase {
for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
SearchHits hits = new SearchHits(
new SearchHit[] {
new SearchHit(innerHitNum, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
new SearchHit(innerHitNum + 1, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) },
new SearchHit(innerHitNum, "ID", Collections.emptyMap(), Collections.emptyMap()),
new SearchHit(innerHitNum + 1, "ID", Collections.emptyMap(), Collections.emptyMap()) },
new TotalHits(2, TotalHits.Relation.EQUAL_TO),
1.0F
);

@ -136,7 +135,6 @@ public class ExpandSearchPhaseTests extends OpenSearchTestCase {
new SearchHit(
1,
"ID",
new Text("type"),
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))),
Collections.emptyMap()
) },

@ -165,8 +163,8 @@ public class ExpandSearchPhaseTests extends OpenSearchTestCase {

SearchHits collapsedHits = new SearchHits(
new SearchHit[] {
new SearchHit(2, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
new SearchHit(3, "ID", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) },
new SearchHit(2, "ID", Collections.emptyMap(), Collections.emptyMap()),
new SearchHit(3, "ID", Collections.emptyMap(), Collections.emptyMap()) },
new TotalHits(1, TotalHits.Relation.EQUAL_TO),
1.0F
);

@ -209,14 +207,12 @@ public class ExpandSearchPhaseTests extends OpenSearchTestCase {
new SearchHit(
1,
"ID",
new Text("type"),
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))),
Collections.emptyMap()
),
new SearchHit(
2,
"ID2",
new Text("type"),
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))),
Collections.emptyMap()
) },

@ -246,14 +242,12 @@ public class ExpandSearchPhaseTests extends OpenSearchTestCase {
new SearchHit(
1,
"ID",
new Text("type"),
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))),
Collections.emptyMap()
),
new SearchHit(
2,
"ID2",
new Text("type"),
Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))),
Collections.emptyMap()
) },
|
|
|
@ -424,7 +424,7 @@ public class SearchPhaseControllerTests extends OpenSearchTestCase {
List<SearchHit> searchHits = new ArrayList<>();
for (ScoreDoc scoreDoc : mergedSearchDocs) {
if (scoreDoc.shardIndex == shardIndex) {
searchHits.add(new SearchHit(scoreDoc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap()));
searchHits.add(new SearchHit(scoreDoc.doc, "", Collections.emptyMap(), Collections.emptyMap()));
if (scoreDoc.score > maxScore) {
maxScore = scoreDoc.score;
}

@ -435,7 +435,7 @@ public class SearchPhaseControllerTests extends OpenSearchTestCase {
for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) {
ScoreDoc doc = option.getDoc();
if (doc.shardIndex == shardIndex) {
searchHits.add(new SearchHit(doc.doc, "", new Text(""), Collections.emptyMap(), Collections.emptyMap()));
searchHits.add(new SearchHit(doc.doc, "", Collections.emptyMap(), Collections.emptyMap()));
if (doc.score > maxScore) {
maxScore = doc.score;
}
|
|
|
@ -39,7 +39,6 @@ import org.opensearch.common.Strings;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.NamedXContentRegistry;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;

@ -241,7 +240,7 @@ public class SearchResponseTests extends OpenSearchTestCase {
}

public void testToXContent() {
SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap());
hit.score(2.0f);
SearchHit[] hits = new SearchHit[] { hit };
{

@ -279,7 +278,7 @@ public class SearchResponseTests extends OpenSearchTestCase {
{
expectedString.append("{\"total\":{\"value\":100,\"relation\":\"eq\"},");
expectedString.append("\"max_score\":1.5,");
expectedString.append("\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":2.0}]}");
expectedString.append("\"hits\":[{\"_id\":\"id1\",\"_score\":2.0}]}");
}
}
expectedString.append("}");

@ -326,7 +325,7 @@ public class SearchResponseTests extends OpenSearchTestCase {
{
expectedString.append("{\"total\":{\"value\":100,\"relation\":\"eq\"},");
expectedString.append("\"max_score\":1.5,");
expectedString.append("\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":2.0}]}");
expectedString.append("\"hits\":[{\"_id\":\"id1\",\"_score\":2.0}]}");
}
}
expectedString.append("}");
|
|
|
@ -32,15 +32,11 @@

package org.opensearch.cluster.metadata;

import org.opensearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.opensearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.ClusterStateTaskExecutor;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.Strings;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.index.Index;
import org.opensearch.index.IndexService;
import org.opensearch.index.mapper.MapperService;

@ -51,7 +47,6 @@ import org.opensearch.test.InternalSettingsPlugin;
import java.util.Collection;
import java.util.Collections;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

@ -155,86 +150,4 @@ public class MetadataMappingServiceTests extends OpenSearchSingleNodeTestCase {
mappingMetadata.sourceAsMap()
);
}

public void testForbidMultipleTypes() throws Exception {
CreateIndexRequestBuilder createIndexRequest = client().admin()
.indices()
.prepareCreate("test")
.addMapping(MapperService.SINGLE_MAPPING_NAME);
IndexService indexService = createIndex("test", createIndexRequest);

MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class);
ClusterService clusterService = getInstanceFromNode(ClusterService.class);

PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("other_type")
.indices(new Index[] { indexService.index() })
.source(Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("other_type").endObject().endObject()));
ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result = mappingService.putMappingExecutor.execute(
clusterService.state(),
Collections.singletonList(request)
);
assertThat(result.executionResults.size(), equalTo(1));

ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next();
assertFalse(taskResult.isSuccess());
assertThat(
taskResult.getFailure().getMessage(),
containsString("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")
);
}

/**
* This test checks that the multi-type validation is done before we do any other kind of validation
* on the mapping that's added, see https://github.com/elastic/elasticsearch/issues/29313
*/
public void testForbidMultipleTypesWithConflictingMappings() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
.startObject(MapperService.SINGLE_MAPPING_NAME)
.startObject("properties")
.startObject("field1")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject();

CreateIndexRequestBuilder createIndexRequest = client().admin()
.indices()
.prepareCreate("test")
.addMapping(MapperService.SINGLE_MAPPING_NAME, mapping);
IndexService indexService = createIndex("test", createIndexRequest);

MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class);
ClusterService clusterService = getInstanceFromNode(ClusterService.class);

String conflictingMapping = Strings.toString(
XContentFactory.jsonBuilder()
.startObject()
.startObject("other_type")
.startObject("properties")
.startObject("field1")
.field("type", "keyword")
.endObject()
.endObject()
.endObject()
.endObject()
);

PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("other_type")
.indices(new Index[] { indexService.index() })
.source(conflictingMapping);
ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result = mappingService.putMappingExecutor.execute(
clusterService.state(),
Collections.singletonList(request)
);
assertThat(result.executionResults.size(), equalTo(1));

ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next();
assertFalse(taskResult.isSuccess());
assertThat(
taskResult.getFailure().getMessage(),
containsString("Rejecting mapping update to [test] as the final mapping would have more than 1 type: ")
);
}
}
|
|
|
@ -70,7 +70,6 @@ import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;

public class MapperServiceTests extends OpenSearchSingleNodeTestCase {

@ -330,58 +329,6 @@ public class MapperServiceTests extends OpenSearchSingleNodeTestCase {
assertEquals("Limit of total fields [" + numberOfNonAliasFields + "] has been exceeded", e.getMessage());
}

public void testForbidMultipleTypes() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject());
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)
);
assertThat(e.getMessage(), startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: "));
}

/**
* This test checks that the multi-type validation is done before we do any other kind of validation on the mapping that's added,
* see https://github.com/elastic/elasticsearch/issues/29313
*/
public void testForbidMultipleTypesWithConflictingMappings() throws IOException {
String mapping = Strings.toString(
XContentFactory.jsonBuilder()
.startObject()
.startObject("type")
.startObject("properties")
.startObject("field1")
.field("type", "integer_range")
.endObject()
.endObject()
.endObject()
.endObject()
);
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);

String mapping2 = Strings.toString(
XContentFactory.jsonBuilder()
.startObject()
.startObject("type2")
.startObject("properties")
.startObject("field1")
.field("type", "integer")
.endObject()
.endObject()
.endObject()
.endObject()
);
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> mapperService.merge("type2", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)
);
assertThat(e.getMessage(), startsWith("Rejecting mapping update to [test] as the final mapping would have more than 1 type: "));
}

public void testFieldNameLengthLimit() throws Throwable {
int maxFieldNameLength = randomIntBetween(25, 30);
String testString = new String(new char[maxFieldNameLength + 1]).replace("\0", "a");
|
|
|
@ -41,7 +41,6 @@ import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.document.DocumentField;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.text.Text;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentParser;

@ -83,7 +82,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
public static SearchHit createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization) {
int internalId = randomInt();
String uid = randomAlphaOfLength(10);
Text type = new Text(randomAlphaOfLengthBetween(5, 10));
NestedIdentity nestedIdentity = null;
if (randomBoolean()) {
nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2));

@ -97,7 +95,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
}
}

SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, documentFields, metaFields);
SearchHit hit = new SearchHit(internalId, uid, nestedIdentity, documentFields, metaFields);
if (frequently()) {
if (rarely()) {
hit.score(Float.NaN);

@ -234,16 +232,15 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
}
assertEquals("my_index", parsed.getIndex());
assertEquals(1, parsed.getScore(), Float.MIN_VALUE);
assertNull(parsed.getType());
assertNull(parsed.getId());
}

public void testToXContent() throws IOException {
SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap());
SearchHit searchHit = new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap());
searchHit.score(1.5f);
XContentBuilder builder = JsonXContent.contentBuilder();
searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder));
assertEquals("{\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder));
}

public void testSerializeShardTarget() throws Exception {

@ -256,25 +253,25 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
);

Map<String, SearchHits> innerHits = new HashMap<>();
SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null, null);
SearchHit innerHit1 = new SearchHit(0, "_id", null, null);
innerHit1.shard(target);
SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
SearchHit innerInnerHit2 = new SearchHit(0, "_id", null, null);
innerInnerHit2.shard(target);
innerHits.put("1", new SearchHits(new SearchHit[] { innerInnerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
innerHit1.setInnerHits(innerHits);
SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
SearchHit innerHit2 = new SearchHit(0, "_id", null, null);
innerHit2.shard(target);
SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null, null);
SearchHit innerHit3 = new SearchHit(0, "_id", null, null);
innerHit3.shard(target);

innerHits = new HashMap<>();
SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null, null);
SearchHit hit1 = new SearchHit(0, "_id", null, null);
innerHits.put("1", new SearchHits(new SearchHit[] { innerHit1, innerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
innerHits.put("2", new SearchHits(new SearchHit[] { innerHit3 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f));
hit1.shard(target);
hit1.setInnerHits(innerHits);

SearchHit hit2 = new SearchHit(0, "_id", new Text("_type"), null, null);
SearchHit hit2 = new SearchHit(0, "_id", null, null);
hit2.shard(target);

SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f);

@ -301,7 +298,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
}

public void testNullSource() {
SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null, null);
SearchHit searchHit = new SearchHit(0, "_id", null, null);

assertThat(searchHit.getSourceAsMap(), nullValue());
assertThat(searchHit.getSourceRef(), nullValue());

@ -325,7 +322,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
XContentType.JSON.xContent(),
"{\n"
+ " \"_index\": \"twitter\",\n"
+ " \"_type\": \"tweet\",\n"
+ " \"_id\": \"1\",\n"
+ " \"_score\": 1.0,\n"
+ " \"fields\": {\n"

@ -346,7 +342,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
XContentType.JSON.xContent(),
"{\n"
+ " \"_index\": \"twitter\",\n"
+ " \"_type\": \"tweet\",\n"
+ " \"_id\": \"1\",\n"
+ " \"_score\": 1.0,\n"
+ " \"fields\": {\n"

@ -371,7 +366,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
JsonXContent.jsonXContent,
"{\n"
+ " \"_index\": \"twitter\",\n"
+ " \"_type\": \"tweet\",\n"
+ " \"_id\": \"1\",\n"
+ " \"_score\": 1.0,\n"
+ " \"fields\": {\n"
|
|
|
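
Taken together, these assertions pin down the new wire format: a hit serializes with only `_id` and `_score`, and the `_type` key is gone. A standalone sketch (not code from this commit) built on the 4-argument constructor and the builder utilities visible in the diff above:

```java
import java.util.Collections;

import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.search.SearchHit;

public class TypelessSearchHitSketch {
    public static void main(String[] args) throws Exception {
        // The removed 5-argument constructor took a Text type between the id
        // and the field maps; the 4-argument form drops it entirely.
        SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap());
        hit.score(1.5f);

        XContentBuilder builder = JsonXContent.contentBuilder();
        hit.toXContent(builder, ToXContent.EMPTY_PARAMS);

        System.out.println(Strings.toString(builder)); // {"_id":"id1","_score":1.5}
    }
}
```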

@@ -40,7 +40,6 @@ import org.opensearch.common.Strings;
 import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.io.stream.Writeable;
 import org.opensearch.common.lucene.LuceneTests;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.LoggingDeprecationHandler;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
@@ -248,8 +247,8 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {

     public void testToXContent() throws IOException {
         SearchHit[] hits = new SearchHit[] {
-            new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
-            new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) };
+            new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()),
+            new SearchHit(2, "id2", Collections.emptyMap(), Collections.emptyMap()) };

         long totalHits = 1000;
         float maxScore = 1.5f;
@@ -260,8 +259,8 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
         builder.endObject();
         assertEquals(
             "{\"hits\":{\"total\":{\"value\":1000,\"relation\":\"eq\"},\"max_score\":1.5,"
-                + "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":null},"
-                + "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":null}]}}",
+                + "\"hits\":[{\"_id\":\"id1\",\"_score\":null},"
+                + "{\"_id\":\"id2\",\"_score\":null}]}}",
             Strings.toString(builder)
         );
     }
@@ -269,9 +268,9 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
     public void testFromXContentWithShards() throws IOException {
         for (boolean withExplanation : new boolean[] { true, false }) {
             final SearchHit[] hits = new SearchHit[] {
-                new SearchHit(1, "id1", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
-                new SearchHit(2, "id2", new Text("type"), Collections.emptyMap(), Collections.emptyMap()),
-                new SearchHit(10, "id10", new Text("type"), Collections.emptyMap(), Collections.emptyMap()) };
+                new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()),
+                new SearchHit(2, "id2", Collections.emptyMap(), Collections.emptyMap()),
+                new SearchHit(10, "id10", Collections.emptyMap(), Collections.emptyMap()) };

             for (SearchHit hit : hits) {
                 String index = randomAlphaOfLengthBetween(5, 10);
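
The collection type follows suit: `SearchHits` renders each element without `_type`. A minimal sketch mirroring `testToXContent`, assuming Lucene's `TotalHits` and the outer object wrapping the test uses:

```java
import java.util.Collections;

import org.apache.lucene.search.TotalHits;
import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.ToXContent;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchHits;

public class TypelessSearchHitsSketch {
    public static void main(String[] args) throws Exception {
        SearchHit[] hits = new SearchHit[] {
            new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()),
            new SearchHit(2, "id2", Collections.emptyMap(), Collections.emptyMap()) };
        SearchHits searchHits = new SearchHits(hits, new TotalHits(1000, TotalHits.Relation.EQUAL_TO), 1.5f);

        // SearchHits writes a "hits" field, so it is wrapped in an enclosing object.
        XContentBuilder builder = JsonXContent.contentBuilder();
        builder.startObject();
        searchHits.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();

        // {"hits":{"total":{"value":1000,"relation":"eq"},"max_score":1.5,
        //  "hits":[{"_id":"id1","_score":null},{"_id":"id2","_score":null}]}}
        System.out.println(Strings.toString(builder));
    }
}
```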

@@ -269,7 +269,6 @@ public abstract class AbstractGeoTestCase extends OpenSearchIntegTestCase {
         for (int i = 0; i < totalHits; i++) {
             SearchHit searchHit = response.getHits().getAt(i);
             assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx"));
-            assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getType(), equalTo("type"));
             DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME);

             assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1));

@@ -45,7 +45,6 @@ import org.opensearch.common.bytes.BytesReference;
 import org.opensearch.common.collect.Tuple;
 import org.opensearch.common.document.DocumentField;
 import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
-import org.opensearch.common.text.Text;
 import org.opensearch.common.xcontent.ToXContent;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentHelper;
@@ -174,7 +173,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTo

             Map<String, DocumentField> searchHitFields = new HashMap<>();
             scoreDocs[i] = docBuilder.apply(docId, score);
-            hits[i] = new SearchHit(docId, Integer.toString(i), new Text("_docs"), searchHitFields, Collections.emptyMap());
+            hits[i] = new SearchHit(docId, Integer.toString(i), searchHitFields, Collections.emptyMap());
             hits[i].score(score);
         }
         int totalHits = between(actualSize, 500000);
@@ -224,7 +223,6 @@ public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTo
             SearchHit actual = actualHits.get(i);

             assertEquals(expected.getIndex(), actual.getIndex());
-            assertEquals(expected.getType(), actual.getType());
             assertEquals(expected.getId(), actual.getId());
             assertEquals(expected.getVersion(), actual.getVersion());
             assertEquals(expected.getScore(), actual.getScore(), 0.0f);

@@ -98,11 +98,8 @@ public class TopHitsAggregatorTests extends AggregatorTestCase {
         SearchHits searchHits = ((TopHits) result).getHits();
         assertEquals(3L, searchHits.getTotalHits().value);
         assertEquals("3", searchHits.getAt(0).getId());
-        assertEquals("type", searchHits.getAt(0).getType());
         assertEquals("2", searchHits.getAt(1).getId());
-        assertEquals("type", searchHits.getAt(1).getType());
         assertEquals("1", searchHits.getAt(2).getId());
-        assertEquals("type", searchHits.getAt(2).getType());
         assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) result)));
     }

@@ -190,7 +190,7 @@ public class FetchSourcePhaseTests extends OpenSearchTestCase {
         when(fetchContext.fetchSourceContext()).thenReturn(fetchSourceContext);
         when(fetchContext.getIndexName()).thenReturn("index");

-        final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null, null);
+        final SearchHit searchHit = new SearchHit(1, null, nestedIdentity, null, null);

         // We don't need a real index, just a LeafReaderContext which cannot be mocked.
         MemoryIndex index = new MemoryIndex();

@@ -872,13 +872,7 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase {
                 sb.append(value).append("] results. expected [").append(expectedResults).append("]");
                 String failMsg = sb.toString();
                 for (SearchHit hit : searchResponse.getHits().getHits()) {
-                    sb.append("\n-> _index: [")
-                        .append(hit.getIndex())
-                        .append("] type [")
-                        .append(hit.getType())
-                        .append("] id [")
-                        .append(hit.getId())
-                        .append("]");
+                    sb.append("\n-> _index: [").append(hit.getIndex()).append("] id [").append(hit.getId()).append("]");
                 }
                 logger.warn("{}", sb);
                 fail(failMsg);

@@ -269,14 +269,7 @@ public class OpenSearchAssertions {
         Set<String> idsSet = new HashSet<>(Arrays.asList(ids));
         for (SearchHit hit : searchResponse.getHits()) {
             assertThat(
-                "id ["
-                    + hit.getId()
-                    + "] was found in search results but wasn't expected (type ["
-                    + hit.getType()
-                    + "], index ["
-                    + hit.getIndex()
-                    + "])"
-                    + shardStatus,
+                "id [" + hit.getId() + "] was found in search results but wasn't expected (index [" + hit.getIndex() + "])" + shardStatus,
                 idsSet.remove(hit.getId()),
                 equalTo(true)
             );
@@ -547,10 +540,6 @@ public class OpenSearchAssertions {
         return new OpenSearchMatchers.SearchHitHasIdMatcher(id);
     }

-    public static Matcher<SearchHit> hasType(final String type) {
-        return new OpenSearchMatchers.SearchHitHasTypeMatcher(type);
-    }
-
     public static Matcher<SearchHit> hasIndex(final String index) {
         return new OpenSearchMatchers.SearchHitHasIndexMatcher(index);
     }

@@ -65,29 +65,6 @@ public class OpenSearchMatchers {
         }
     }

-    public static class SearchHitHasTypeMatcher extends TypeSafeMatcher<SearchHit> {
-        private String type;
-
-        public SearchHitHasTypeMatcher(String type) {
-            this.type = type;
-        }
-
-        @Override
-        public boolean matchesSafely(final SearchHit searchHit) {
-            return searchHit.getType().equals(type);
-        }
-
-        @Override
-        public void describeMismatchSafely(final SearchHit searchHit, final Description mismatchDescription) {
-            mismatchDescription.appendText(" was ").appendValue(searchHit.getType());
-        }
-
-        @Override
-        public void describeTo(final Description description) {
-            description.appendText("searchHit type should be ").appendValue(type);
-        }
-    }
-
     public static class SearchHitHasIndexMatcher extends TypeSafeMatcher<SearchHit> {
         private String index;
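
With `SearchHitHasTypeMatcher` and the `hasType(...)` factory gone, a hit can only be matched on its id and index. A short sketch of the surviving matchers in use (the static-import package and the `assertHit` helper are assumptions for illustration):

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.allOf;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasIndex;

import org.opensearch.search.SearchHit;

public class SearchHitMatcherSketch {
    // Before this commit a test could also chain hasType("tweet") here;
    // id plus index is now the whole identity visible to assertions.
    static void assertHit(SearchHit hit) {
        assertThat(hit, allOf(hasId("1"), hasIndex("twitter")));
    }
}
```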

@@ -108,10 +108,6 @@ public abstract class AbstractFullClusterRestartTestCase extends OpenSearchRestT
     }

     protected static int extractTotalHits(Map<?, ?> response) {
-        if (isRunningAgainstOldCluster() && getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) {
-            return (Integer) XContentMapValues.extractValue("hits.total", response);
-        } else {
-            return (Integer) XContentMapValues.extractValue("hits.total.value", response);
-        }
+        return (Integer) XContentMapValues.extractValue("hits.total.value", response);
     }
 }
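
Both branches of the old helper reduce to the second one: once upgrades from pre-7.0 clusters are out of scope, `hits.total` always has the object shape with `value` and `relation`. A minimal sketch of the surviving code path, using a hypothetical hand-built response map:

```java
import java.util.Map;

import org.opensearch.common.xcontent.support.XContentMapValues;

public class ExtractTotalHitsSketch {
    public static void main(String[] args) {
        // "hits.total" is always an object now ({"value": N, "relation": "eq"});
        // the legacy bare-integer form handled by the removed branch is gone.
        Map<String, Object> total = Map.of("value", 1000, "relation", "eq");
        Map<String, Object> response = Map.of("hits", Map.of("total", total));

        int totalHits = (Integer) XContentMapValues.extractValue("hits.total.value", response);
        System.out.println(totalHits); // 1000
    }
}
```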