Fix NPE in token_count datatype with null value (#25046)
Fixes a NullPointerException thrown when handling an explicit null value for the token_count data type with no null_value configured in the mapping. Closes #24928
commit dc5aa993e0 (parent 8250aa4267)
@@ -134,6 +134,10 @@ public class TokenCountFieldMapper extends FieldMapper {

            value = context.parser().textOrNull();
        }

        if (value == null && fieldType().nullValue() == null) {
            return;
        }

        final int tokenCount;
        if (value == null) {
            tokenCount = (Integer) fieldType().nullValue();
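The added guard matters because of the line that follows it: assigning (Integer) fieldType().nullValue() to an int auto-unboxes the Integer, and auto-unboxing null is presumably the NullPointerException this commit addresses. A minimal standalone sketch of that failure mode (plain Java, independent of the Elasticsearch classes; names are illustrative):

    // Sketch of the failure mode: unboxing a null Integer throws NullPointerException.
    public class UnboxingNpeDemo {
        public static void main(String[] args) {
            Integer configuredNullValue = null;         // stands in for fieldType().nullValue() with no null_value mapped
            final int tokenCount = configuredNullValue; // auto-unboxing null -> NullPointerException
            System.out.println(tokenCount);             // never reached
        }
    }

With the early return in place, the unboxing is only reached when either the parsed value or a configured null_value is non-null, so it can no longer dereference null.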
@@ -24,8 +24,10 @@ import org.apache.lucene.analysis.CannedTokenStream;

import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -144,4 +146,55 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {

        );
        assertThat(e.getMessage(), containsString("name cannot be empty string"));
    }

    public void testParseNullValue() throws Exception {
        DocumentMapper mapper = createIndexWithTokenCountField();
        ParseContext.Document doc = parseDocument(mapper, createDocument(null));
        assertNull(doc.getField("test.tc"));
    }

    public void testParseEmptyValue() throws Exception {
        DocumentMapper mapper = createIndexWithTokenCountField();
        ParseContext.Document doc = parseDocument(mapper, createDocument(""));
        assertEquals(0, doc.getField("test.tc").numericValue());
    }

    public void testParseNotNullValue() throws Exception {
        DocumentMapper mapper = createIndexWithTokenCountField();
        ParseContext.Document doc = parseDocument(mapper, createDocument("three tokens string"));
        assertEquals(3, doc.getField("test.tc").numericValue());
    }

    private DocumentMapper createIndexWithTokenCountField() throws IOException {
        final String content = XContentFactory.jsonBuilder().startObject()
            .startObject("person")
                .startObject("properties")
                    .startObject("test")
                        .field("type", "text")
                        .startObject("fields")
                            .startObject("tc")
                                .field("type", "token_count")
                                .field("analyzer", "standard")
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
            .endObject().endObject().string();

        return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content));
    }

    private SourceToParse createDocument(String fieldValue) throws Exception {
        BytesReference request = XContentFactory.jsonBuilder()
            .startObject()
                .field("test", fieldValue)
            .endObject().bytes();

        return SourceToParse.source("test", "person", "1", request, XContentType.JSON);
    }

    private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) {
        return mapper.parse(request)
            .docs().stream().findFirst().orElseThrow(() -> new IllegalStateException("Test object not parsed"));
    }
}