close DV producers on exception

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1619609 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2014-08-21 22:15:01 +00:00
parent 75ed12e0c7
commit 999f7ab30d
1 changed file with 63 additions and 47 deletions

View File

@ -48,58 +48,74 @@ class SegmentDocValuesProducer extends DocValuesProducer {
// Generations of doc-values files this producer has ref'd via segDocValues;
// also the list handed to segDocValues.decRef(...) for cleanup.
final List<Long> dvGens = new ArrayList<>();

/**
 * Opens the doc-values producers for all doc-values fields of a segment.
 *
 * <p>For segments written with Lucene 4.9+ one "base" producer is opened for
 * gen=-1 (unupdated) fields, plus one producer per doc-values update
 * generation. For pre-4.9 segments, fields are grouped by generation first,
 * because multiple FieldInfos may share the same gen there.
 *
 * <p>If any producer fails to open, every producer acquired so far is
 * released via {@code segDocValues.decRef(dvGens)} before the original
 * exception propagates, so no files are leaked.
 *
 * @param si          segment commit info whose doc values are opened
 * @param dir         directory holding the segment's files
 * @param fieldInfos  all fields of the segment; only doc-values fields are used
 * @param segDocValues shared, ref-counted registry of per-gen producers
 * @param dvFormat    codec format used to open each producer
 * @throws IOException if opening any underlying doc-values producer fails
 */
SegmentDocValuesProducer(SegmentCommitInfo si, Directory dir, FieldInfos fieldInfos, SegmentDocValues segDocValues, DocValuesFormat dvFormat) throws IOException {
  boolean success = false;
  try {
    Version ver = si.info.getVersion();
    if (ver != null && ver.onOrAfter(Version.LUCENE_4_9_0)) {
      DocValuesProducer baseProducer = null;
      for (FieldInfo fi : fieldInfos) {
        if (!fi.hasDocValues()) {
          continue;
        }
        long docValuesGen = fi.getDocValuesGen();
        if (docValuesGen == -1) {
          if (baseProducer == null) {
            // the base producer gets all the fields, so the Codec can validate properly
            baseProducer = segDocValues.getDocValuesProducer(docValuesGen, si, IOContext.READ, dir, dvFormat, fieldInfos);
            dvGens.add(docValuesGen);
            dvProducers.add(baseProducer);
          }
          dvProducersByField.put(fi.name, baseProducer);
        } else {
          // 4.9+ writes exactly one updated field per gen, so each gen is seen once
          assert !dvGens.contains(docValuesGen);
          final DocValuesProducer dvp = segDocValues.getDocValuesProducer(docValuesGen, si, IOContext.READ, dir, dvFormat, new FieldInfos(new FieldInfo[] { fi }));
          dvGens.add(docValuesGen);
          dvProducers.add(dvp);
          dvProducersByField.put(fi.name, dvp);
        }
      }
    } else {
      // For pre-4.9 indexes, especially with doc-values updates, multiple
      // FieldInfos could belong to the same dvGen. Therefore need to make sure
      // we initialize each DocValuesProducer once per gen.
      Map<Long,List<FieldInfo>> genInfos = new HashMap<>();
      for (FieldInfo fi : fieldInfos) {
        if (!fi.hasDocValues()) {
          continue;
        }
        List<FieldInfo> genFieldInfos = genInfos.get(fi.getDocValuesGen());
        if (genFieldInfos == null) {
          genFieldInfos = new ArrayList<>();
          genInfos.put(fi.getDocValuesGen(), genFieldInfos);
        }
        genFieldInfos.add(fi);
      }
      for (Map.Entry<Long,List<FieldInfo>> e : genInfos.entrySet()) {
        long docValuesGen = e.getKey();
        List<FieldInfo> infos = e.getValue();
        final DocValuesProducer dvp;
        if (docValuesGen == -1) {
          // we need to send all FieldInfos to gen=-1, but later we need to
          // record the DVP only for the "true" gen=-1 fields (not updated)
          dvp = segDocValues.getDocValuesProducer(docValuesGen, si, IOContext.READ, dir, dvFormat, fieldInfos);
        } else {
          dvp = segDocValues.getDocValuesProducer(docValuesGen, si, IOContext.READ, dir, dvFormat, new FieldInfos(infos.toArray(new FieldInfo[infos.size()])));
        }
        dvGens.add(docValuesGen);
        dvProducers.add(dvp);
        for (FieldInfo fi : infos) {
          dvProducersByField.put(fi.name, dvp);
        }
      }
    }
    success = true;
  } finally {
    if (success == false) {
      // Release every producer we managed to open before the failure.
      try {
        segDocValues.decRef(dvGens);
      } catch (Throwable t) {
        // Ignore so we keep throwing first exception
      }
    }
  }
}