Mirror of https://github.com/apache/lucene.git (synced 2025-02-20 17:07:09 +00:00)

SOLR-14270: fix incorrect date format

parent: 449a7ef7b5
commit: 4e70711c95
ExportTool.java
@@ -27,7 +27,11 @@ import java.io.PrintStream;
 import java.io.Writer;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
+import java.time.Instant;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -279,6 +283,15 @@ public class ExportTool extends SolrCLI.ToolBase {
             field = ((List) field).get(0);
           }
         }
+        field = constructDateStr(field);
+        if (field instanceof List) {
+          List list = (List) field;
+          if (hasdate(list)) {
+            ArrayList<Object> listCopy = new ArrayList<>(list.size());
+            for (Object o : list) listCopy.add(constructDateStr(o));
+            field = listCopy;
+          }
+        }
         m.put(s, field);
       });
       jsonWriter.write(m);
@@ -286,6 +299,24 @@ public class ExportTool extends SolrCLI.ToolBase {
       writer.append('\n');
       super.accept(doc);
     }
+
+    private boolean hasdate(List list) {
+      boolean hasDate = false;
+      for (Object o : list) {
+        if (o instanceof Date) {
+          hasDate = true;
+          break;
+        }
+      }
+      return hasDate;
+    }
+
+    private Object constructDateStr(Object field) {
+      if (field instanceof Date) {
+        field = DateTimeFormatter.ISO_INSTANT.format(Instant.ofEpochMilli(((Date) field).getTime()));
+      }
+      return field;
+    }
   }

   static class JavabinSink extends DocsSink {
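Note: a minimal standalone sketch (not part of the commit) of the conversion constructDateStr applies above — a java.util.Date is rendered as an ISO-8601 UTC instant via DateTimeFormatter.ISO_INSTANT, and any non-Date value passes through unchanged:

import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Date;

public class DateStrSketch {
  public static void main(String[] args) {
    Date date = new Date(1569823083000L); // epoch millis for 2019-09-30T05:58:03Z
    // Same expression the patch uses in constructDateStr:
    String iso = DateTimeFormatter.ISO_INSTANT.format(Instant.ofEpochMilli(date.getTime()));
    System.out.println(iso); // prints 2019-09-30T05:58:03Z
    // Presumably (an assumption, not shown in this diff) the sink previously
    // handed the raw Date to the JSON writer and got a non-ISO rendering such
    // as Date.toString(), e.g. "Mon Sep 30 05:58:03 UTC 2019", which Solr
    // cannot reimport as a date.
  }
}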
TestExportTool.java
@@ -26,6 +26,7 @@ import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.function.Predicate;

 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
@@ -49,7 +50,7 @@ public class TestExportTool extends SolrCloudTestCase {
   public void testBasic() throws Exception {
     String COLLECTION_NAME = "globalLoaderColl";
     configureCluster(4)
-        .addConfig("conf", configset("cloud-minimal"))
+        .addConfig("conf", configset("cloud-dynamic"))
         .configure();

     try {
@@ -67,7 +68,9 @@ public class TestExportTool extends SolrCloudTestCase {
       int docCount = 1000;

       for (int i = 0; i < docCount; i++) {
-        ur.add("id", String.valueOf(i), "desc_s", TestUtil.randomSimpleString(random(), 10, 50));
+        ur.add("id", String.valueOf(i),
+            "desc_s", TestUtil.randomSimpleString(random(), 10, 50),
+            "a_dt", "2019-09-30T05:58:03Z");
       }
       cluster.getSolrClient().request(ur, COLLECTION_NAME);

@@ -81,10 +84,10 @@ public class TestExportTool extends SolrCloudTestCase {
       String absolutePath = tmpFileLoc + COLLECTION_NAME + random().nextInt(100000) + ".json";
       info.setOutFormat(absolutePath, "jsonl");
       info.setLimit("200");
-      info.fields = "id,desc_s";
+      info.fields = "id,desc_s,a_dt";
       info.exportDocs();

-      assertJsonDocsCount(info, 200);
+      assertJsonDocsCount(info, 200, record -> "2019-09-30T05:58:03Z".equals(record.get("a_dt")));

       info = new ExportTool.MultiThreadedRunner(url);
       absolutePath = tmpFileLoc + COLLECTION_NAME + random().nextInt(100000) + ".json";
@@ -93,7 +96,7 @@ public class TestExportTool extends SolrCloudTestCase {
       info.fields = "id,desc_s";
       info.exportDocs();

-      assertJsonDocsCount(info, 1000);
+      assertJsonDocsCount(info, 1000, null);

       info = new ExportTool.MultiThreadedRunner(url);
       absolutePath = tmpFileLoc + COLLECTION_NAME + random().nextInt(100000) + ".javabin";
@@ -188,7 +191,7 @@ public class TestExportTool extends SolrCloudTestCase {
       info.exportDocs();
       long actual = ((ExportTool.JsonSink) info.sink).info.docsWritten.get();
       assertTrue("docs written :" + actual + "docs produced : " + info.docsWritten.get(), actual >= docCount);
-      assertJsonDocsCount(info, docCount);
+      assertJsonDocsCount(info, docCount, null);
     } finally {
       cluster.shutdown();

@@ -213,7 +216,7 @@ public class TestExportTool extends SolrCloudTestCase {
     }
   }

-  private void assertJsonDocsCount(ExportTool.Info info, int expected) throws IOException {
+  private void assertJsonDocsCount(ExportTool.Info info, int expected, Predicate<Map<String, Object>> predicate) throws IOException {
     assertTrue("" + info.docsWritten.get() + " expected " + expected, info.docsWritten.get() >= expected);

     JsonRecordReader jsonReader;
@@ -222,7 +225,12 @@ public class TestExportTool extends SolrCloudTestCase {
     rdr = new InputStreamReader(new FileInputStream(info.out), StandardCharsets.UTF_8);
     try {
       int[] count = new int[]{0};
-      jsonReader.streamRecords(rdr, (record, path) -> count[0]++);
+      jsonReader.streamRecords(rdr, (record, path) -> {
+        if (predicate != null) {
+          assertTrue(predicate.test(record));
+        }
+        count[0]++;
+      });
       assertTrue(count[0] >= expected);
     } finally {
       rdr.close();
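Note on the int[] count = new int[]{0} idiom kept above: a Java lambda can only capture effectively-final locals, so a one-element array stands in for a mutable counter. A self-contained sketch of the same count-and-verify pattern (class and variable names here are illustrative, not from the commit):

import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

public class PredicateCountSketch {
  public static void main(String[] args) {
    List<Map<String, Object>> records = List.of(
        Map.of("id", "1", "a_dt", "2019-09-30T05:58:03Z"),
        Map.of("id", "2", "a_dt", "2019-09-30T05:58:03Z"));
    Predicate<Map<String, Object>> predicate =
        record -> "2019-09-30T05:58:03Z".equals(record.get("a_dt"));

    // Captured locals must be effectively final, so mutate through the array.
    int[] count = new int[]{0};
    records.forEach(record -> {
      if (predicate != null && !predicate.test(record)) {
        throw new AssertionError("unexpected record: " + record);
      }
      count[0]++;
    });
    System.out.println(count[0] + " records checked"); // prints "2 records checked"
  }
}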