mirror of https://github.com/apache/lucene.git
merge trunk
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5969@1632275 13f79535-47bb-0310-9956-ffa450edef68
commit 1d3e228646
@@ -160,6 +160,10 @@ API Changes
   their jflex impl directly.
   (Ryan Ernst)
 
+* LUCENE-6006: Removed FieldInfo.normType since it's redundant: it
+  will be DocValuesType.NUMERIC if the field is indexed and does not
+  omit norms, else null. (Robert Muir, Mike McCandless)
+
 Bug Fixes
 
 * LUCENE-5650: Enforce read-only access to any path outside the temporary
@@ -0,0 +1,113 @@
+package org.apache.lucene.codecs;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.util.Accountable;
+
+/**
+ * Used only for a backwards compatibility corner case, to provide
+ * re-animated norms when all fields are undead.
+ *
+ * @lucene.internal */
+public class UndeadNormsProducer extends NormsProducer {
+
+  /** Used to bring undead norms back to life. */
+  public final static String LEGACY_UNDEAD_NORMS_KEY = UndeadNormsProducer.class.getSimpleName() + ".undeadnorms";
+
+  /** Use this instance */
+  public final static NormsProducer INSTANCE = new UndeadNormsProducer();
+
+  private UndeadNormsProducer() {
+  }
+
+  /** Returns true if all indexed fields have undead norms. */
+  public static boolean isUndeadArmy(FieldInfos fieldInfos) {
+
+    boolean everythingIsUndead = true;
+    for(FieldInfo fieldInfo : fieldInfos) {
+      if (fieldInfo.hasNorms()) {
+        String isUndead = fieldInfo.getAttribute(LEGACY_UNDEAD_NORMS_KEY);
+        if (isUndead != null) {
+          assert "true".equals(isUndead);
+        } else {
+          everythingIsUndead = false;
+        }
+      }
+    }
+
+    return everythingIsUndead;
+  }
+
+  /** Returns true if this field has undead norms. */
+  public static boolean isUndead(FieldInfo fieldInfo) {
+    String isUndead = fieldInfo.getAttribute(LEGACY_UNDEAD_NORMS_KEY);
+    if (isUndead != null) {
+      // Bring undead norms back to life; this is set in Lucene40FieldInfosFormat, to emulate pre-5.0 undead norms
+      assert "true".equals(isUndead);
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  /** Call this to note that the field with these attributes has undead norms. */
+  public static void setUndead(Map<String,String> attributes) {
+    attributes.put(LEGACY_UNDEAD_NORMS_KEY, "true");
+  }
+
+  @Override
+  public NumericDocValues getNorms(FieldInfo field) throws IOException {
+    return DocValues.emptyNumeric();
+  }
+
+  @Override
+  public void close() {
+  }
+
+  @Override
+  public long ramBytesUsed() {
+    return 0;
+  }
+
+  @Override
+  public Iterable<? extends Accountable> getChildResources() {
+    return Collections.emptyList();
+  }
+
+  @Override
+  public void checkIntegrity() throws IOException {
+  }
+
+  @Override
+  public NormsProducer getMergeInstance() throws IOException {
+    return this;
+  }
+
+  @Override
+  public String toString() {
+    return getClass().getSimpleName();
+  }
+}
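The new producer is driven entirely by one string attribute stored per field. A minimal standalone sketch of that marker protocol, written over a plain Map with hypothetical demo names rather than Lucene's FieldInfo API:

import java.util.HashMap;
import java.util.Map;

class UndeadMarkerDemo {
  // Same key shape as LEGACY_UNDEAD_NORMS_KEY above; the value is always "true".
  static final String KEY = "UndeadNormsProducer.undeadnorms";

  static void setUndead(Map<String,String> attributes) {
    attributes.put(KEY, "true");
  }

  static boolean isUndead(Map<String,String> attributes) {
    String isUndead = attributes.get(KEY);
    assert isUndead == null || "true".equals(isUndead);
    return isUndead != null;
  }

  public static void main(String[] args) {
    Map<String,String> attrs = new HashMap<>();
    System.out.println(isUndead(attrs)); // false: an ordinary field
    setUndead(attrs);                    // the 4.x field-infos readers mark the corner case at open time
    System.out.println(isUndead(attrs)); // true: norms for this field are served as all zeros
  }
}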
@@ -23,13 +23,14 @@ import java.util.Map;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.codecs.UndeadNormsProducer;
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.FieldInfo.DocValuesType;
+import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentInfo;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
@@ -91,7 +92,7 @@ public class Lucene40FieldInfosFormat extends FieldInfosFormat {
         byte val = input.readByte();
         final LegacyDocValuesType oldValuesType = getDocValuesType((byte) (val & 0x0F));
         final LegacyDocValuesType oldNormsType = getDocValuesType((byte) ((val >>> 4) & 0x0F));
-        final Map<String,String> attributes = input.readStringStringMap();;
+        final Map<String,String> attributes = input.readStringStringMap();
         if (oldValuesType.mapping != null) {
           attributes.put(LEGACY_DV_TYPE_KEY, oldValuesType.name());
         }
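Both type codes live in the two nibbles of the single byte read above, which is why the reader masks with 0x0F and shifts by 4. A self-contained round-trip of that encoding (demo class and sample codes are illustrative, not from the commit):

public class NibblePackDemo {
  public static void main(String[] args) {
    byte dv  = 0x02; // low nibble: a doc-values type code
    byte nrm = 0x01; // high nibble: a norms type code
    byte val = (byte) (0xff & ((nrm << 4) | dv));   // encode, as the 4.x writers do
    byte decodedDv  = (byte) (val & 0x0F);          // decode low nibble
    byte decodedNrm = (byte) ((val >>> 4) & 0x0F);  // decode high nibble
    System.out.println(decodedDv == dv && decodedNrm == nrm); // prints: true
  }
}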
@@ -101,8 +102,12 @@ public class Lucene40FieldInfosFormat extends FieldInfosFormat {
           }
           attributes.put(LEGACY_NORM_TYPE_KEY, oldNormsType.name());
         }
+        if (isIndexed && omitNorms == false && oldNormsType.mapping == null) {
+          // Undead norms!  Lucene40NormsReader will check this and bring norms back from the dead:
+          UndeadNormsProducer.setUndead(attributes);
+        }
         infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
-          omitNorms, storePayloads, indexOptions, oldValuesType.mapping, oldNormsType.mapping, -1, Collections.unmodifiableMap(attributes));
+          omitNorms, storePayloads, indexOptions, oldValuesType.mapping, -1, Collections.unmodifiableMap(attributes));
       }
 
       CodecUtil.checkEOF(input);
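The condition added in this hunk is the heart of the back-compat fix: a field that was indexed and did not omit norms, yet has no norms type recorded in the segment, can only be the pre-5.0 corner case where every document for that field aborted before a norm was written. The same test, isolated as a hypothetical predicate:

class UndeadCaseDemo {
  // Norms were expected but never written; re-animate them at read time.
  static boolean isUndeadCase(boolean isIndexed, boolean omitNorms, Object normsTypeOnDisk) {
    return isIndexed && omitNorms == false && normsTypeOnDisk == null;
  }

  public static void main(String[] args) {
    System.out.println(isUndeadCase(true, false, null));      // true: undead norms
    System.out.println(isUndeadCase(true, false, "NUMERIC")); // false: real norms exist
    System.out.println(isUndeadCase(false, false, null));     // false: field was never indexed
  }
}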
@@ -22,6 +22,7 @@ import java.io.IOException;
 import org.apache.lucene.codecs.NormsConsumer;
 import org.apache.lucene.codecs.NormsFormat;
 import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.UndeadNormsProducer;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
@@ -46,6 +47,10 @@ public class Lucene40NormsFormat extends NormsFormat {
     String filename = IndexFileNames.segmentFileName(state.segmentInfo.name,
                                                      "nrm",
                                                      Lucene40CompoundFormat.COMPOUND_FILE_EXTENSION);
-    return new Lucene40NormsReader(state, filename);
+    if (UndeadNormsProducer.isUndeadArmy(state.fieldInfos)) {
+      return UndeadNormsProducer.INSTANCE;
+    } else {
+      return new Lucene40NormsReader(state, filename);
+    }
   }
 }
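The all-or-nothing isUndeadArmy() check matters because a 4.x segment whose every normed field is undead never wrote a norms file at all, so constructing the real reader would fail on the missing file; mixed segments still open the real reader and fall back per field inside getNorms() (next hunks). A sketch of that format-level dispatch, with an enum standing in for the two producer instances:

class NormsDispatchDemo {
  enum Producer { UNDEAD_SINGLETON, REAL_READER }

  // Format level: only when no norms file can exist is the singleton served.
  static Producer normsProducer(boolean everyNormedFieldIsUndead) {
    return everyNormedFieldIsUndead ? Producer.UNDEAD_SINGLETON : Producer.REAL_READER;
  }

  public static void main(String[] args) {
    System.out.println(normsProducer(true));  // UNDEAD_SINGLETON: no norms file on disk
    System.out.println(normsProducer(false)); // REAL_READER: at least one field wrote norms
  }
}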
@@ -21,6 +21,8 @@ import java.io.IOException;
 
 import org.apache.lucene.codecs.DocValuesProducer;
 import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.UndeadNormsProducer;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SegmentReadState;
@@ -45,6 +47,10 @@ final class Lucene40NormsReader extends NormsProducer {
 
   @Override
   public NumericDocValues getNorms(FieldInfo field) throws IOException {
+    if (UndeadNormsProducer.isUndead(field)) {
+      // Bring undead norms back to life; this is set in Lucene40FieldInfosFormat, to emulate pre-5.0 undead norms
+      return DocValues.emptyNumeric();
+    }
     return impl.getNumeric(field);
   }
 
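What DocValues.emptyNumeric() hands back is simply a zero for every document, so a re-animated field scores as if it carried no length normalization. In spirit (a sketch assuming the NumericDocValues API of this era, where values are fetched by docID):

import org.apache.lucene.index.NumericDocValues;

class EmptyNormsSketch {
  // Roughly what DocValues.emptyNumeric() returns to getNorms() above.
  static NumericDocValues allZeros() {
    return new NumericDocValues() {
      @Override
      public long get(int docID) {
        return 0; // every undead field norms out to 0 for every document
      }
    };
  }
}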
@@ -23,13 +23,14 @@ import java.util.Map;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.codecs.UndeadNormsProducer;
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.FieldInfo.DocValuesType;
+import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentInfo;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
@@ -86,8 +87,14 @@ public class Lucene42FieldInfosFormat extends FieldInfosFormat {
         final DocValuesType docValuesType = getDocValuesType(input, (byte) (val & 0x0F));
         final DocValuesType normsType = getDocValuesType(input, (byte) ((val >>> 4) & 0x0F));
         final Map<String,String> attributes = input.readStringStringMap();
+
+        if (isIndexed && omitNorms == false && normsType == null) {
+          // Undead norms!  Lucene42NormsProducer will check this and bring norms back from the dead:
+          UndeadNormsProducer.setUndead(attributes);
+        }
+
         infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
-          omitNorms, storePayloads, indexOptions, docValuesType, normsType, -1, Collections.unmodifiableMap(attributes));
+          omitNorms, storePayloads, indexOptions, docValuesType, -1, Collections.unmodifiableMap(attributes));
       }
 
       CodecUtil.checkEOF(input);
@@ -22,6 +22,7 @@ import java.io.IOException;
 import org.apache.lucene.codecs.NormsConsumer;
 import org.apache.lucene.codecs.NormsFormat;
 import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.UndeadNormsProducer;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.util.packed.PackedInts;
@@ -62,8 +63,12 @@ public class Lucene42NormsFormat extends NormsFormat {
 
   @Override
   public final NormsProducer normsProducer(SegmentReadState state) throws IOException {
-    return new Lucene42NormsProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION);
+    if (UndeadNormsProducer.isUndeadArmy(state.fieldInfos)) {
+      return UndeadNormsProducer.INSTANCE;
+    } else {
+      return new Lucene42NormsProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION);
+    }
   }
 
   static final String DATA_CODEC = "Lucene41NormsData";
   static final String DATA_EXTENSION = "nvd";
@@ -21,6 +21,8 @@ import java.io.IOException;
 
 import org.apache.lucene.codecs.DocValuesProducer;
 import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.UndeadNormsProducer;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SegmentReadState;
@@ -45,6 +47,10 @@ final class Lucene42NormsProducer extends NormsProducer {
 
   @Override
   public NumericDocValues getNorms(FieldInfo field) throws IOException {
+    if (UndeadNormsProducer.isUndead(field)) {
+      // Bring undead norms back to life; this is set in Lucene42FieldInfosFormat, to emulate pre-5.0 undead norms
+      return DocValues.emptyNumeric();
+    }
     return impl.getNumeric(field);
   }
 
@@ -23,13 +23,14 @@ import java.util.Map;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.codecs.UndeadNormsProducer;
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.FieldInfo.DocValuesType;
+import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentInfo;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -88,8 +89,14 @@ public final class Lucene46FieldInfosFormat extends FieldInfosFormat {
         final DocValuesType normsType = getDocValuesType(input, (byte) ((val >>> 4) & 0x0F));
         final long dvGen = input.readLong();
         final Map<String,String> attributes = input.readStringStringMap();
+
+        if (isIndexed && omitNorms == false && normsType == null) {
+          // Undead norms!  Lucene42NormsProducer will check this and bring norms back from the dead:
+          UndeadNormsProducer.setUndead(attributes);
+        }
+
         infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
-          omitNorms, storePayloads, indexOptions, docValuesType, normsType, dvGen, Collections.unmodifiableMap(attributes));
+          omitNorms, storePayloads, indexOptions, docValuesType, dvGen, Collections.unmodifiableMap(attributes));
       }
 
       if (codecVersion >= Lucene46FieldInfosFormat.FORMAT_CHECKSUM) {
@@ -148,7 +155,7 @@ public final class Lucene46FieldInfosFormat extends FieldInfosFormat {
 
       // pack the DV types in one byte
       final byte dv = docValuesByte(fi.getDocValuesType());
-      final byte nrm = docValuesByte(fi.getNormType());
+      final byte nrm = docValuesByte(fi.hasNorms() ? DocValuesType.NUMERIC : null);
       assert (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0;
       byte val = (byte) (0xff & ((nrm << 4) | dv));
       output.writeByte(val);
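The write-side substitution here is the equivalence the whole commit leans on: the removed FieldInfo.normType carried no information beyond hasNorms(), because an indexed field that does not omit norms always had NUMERIC norms and every other field had none. Recomputed as a hypothetical helper (assuming the 4.x-era FieldInfo.DocValuesType nested enum):

import org.apache.lucene.index.FieldInfo;

class LegacyNormTypeSketch {
  // What fi.getNormType() used to return, derived from surviving state.
  static FieldInfo.DocValuesType legacyNormType(FieldInfo fi) {
    return fi.hasNorms() ? FieldInfo.DocValuesType.NUMERIC : null;
  }
}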
@@ -22,6 +22,7 @@ import java.io.IOException;
 import org.apache.lucene.codecs.NormsConsumer;
 import org.apache.lucene.codecs.NormsFormat;
 import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.UndeadNormsProducer;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
 
@@ -42,8 +43,12 @@ public class Lucene49NormsFormat extends NormsFormat {
 
   @Override
   public final NormsProducer normsProducer(SegmentReadState state) throws IOException {
-    return new Lucene49NormsProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION);
+    if (UndeadNormsProducer.isUndeadArmy(state.fieldInfos)) {
+      return UndeadNormsProducer.INSTANCE;
+    } else {
+      return new Lucene49NormsProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION);
+    }
   }
 
   static final String DATA_CODEC = "Lucene49NormsData";
   static final String DATA_EXTENSION = "nvd";
@@ -25,7 +25,9 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.UndeadNormsProducer;
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
@@ -40,8 +42,8 @@ import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.packed.BlockPackedReader;
 import org.apache.lucene.util.packed.PackedInts;
 
-import static org.apache.lucene.codecs.lucene49.Lucene49NormsFormat.VERSION_START;
 import static org.apache.lucene.codecs.lucene49.Lucene49NormsFormat.VERSION_CURRENT;
+import static org.apache.lucene.codecs.lucene49.Lucene49NormsFormat.VERSION_START;
 
 /**
  * Reader for 4.9 norms
@@ -153,6 +155,10 @@ final class Lucene49NormsProducer extends NormsProducer {
 
   @Override
   public synchronized NumericDocValues getNorms(FieldInfo field) throws IOException {
+    if (UndeadNormsProducer.isUndead(field)) {
+      // Bring undead norms back to life; this is set in Lucene46FieldInfosFormat, to emulate pre-5.0 undead norms
+      return DocValues.emptyNumeric();
+    }
     NumericDocValues instance = instances.get(field.name);
     if (instance == null) {
       instance = loadNorms(field);
@@ -0,0 +1,25 @@
+<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+</head>
+<body>
+Common APIs for use by backwards compatibility codecs.
+</body>
+</html>
@@ -75,7 +75,7 @@ public final class Lucene40RWFieldInfosFormat extends Lucene40FieldInfosFormat {
 
       // pack the DV types in one byte
       final byte dv = docValuesByte(fi.getDocValuesType(), fi.getAttribute(LEGACY_DV_TYPE_KEY));
-      final byte nrm = docValuesByte(fi.getNormType(), fi.getAttribute(LEGACY_NORM_TYPE_KEY));
+      final byte nrm = docValuesByte(fi.hasNorms() ? DocValuesType.NUMERIC : null, fi.getAttribute(LEGACY_NORM_TYPE_KEY));
       assert (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0;
       byte val = (byte) (0xff & ((nrm << 4) | dv));
       output.writeByte(val);
@@ -64,7 +64,7 @@ public class TestLucene40FieldInfoFormat extends BaseFieldInfoFormatTestCase {
       }
     }
 
-    if (fi.getNormType() != null) {
+    if (fi.hasNorms()) {
       fi.putAttribute(Lucene40FieldInfosFormat.LEGACY_NORM_TYPE_KEY, LegacyDocValuesType.FIXED_INTS_8.name());
     }
   }
@@ -17,8 +17,18 @@ package org.apache.lucene.codecs.lucene40;
  * limitations under the License.
  */
 
+import java.io.InputStream;
+import java.nio.file.Path;
+
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.BaseNormsFormatTestCase;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.TestUtil;
 
 /** Tests Lucene40's norms format */
 public class TestLucene40NormsFormat extends BaseNormsFormatTestCase {
@@ -28,4 +38,101 @@ public class TestLucene40NormsFormat extends BaseNormsFormatTestCase {
   protected Codec getCodec() {
     return codec;
   }
+
+  /** Copy this back to /l/400/lucene/CreateUndeadNorms.java, then:
+   *   - ant clean
+   *   - pushd analysis/common; ant jar; popd
+   *   - pushd core; ant jar; popd
+   *   - javac -cp build/analysis/common/lucene-analyzers-common-4.0-SNAPSHOT.jar:build/core/lucene-core-4.0-SNAPSHOT.jar CreateUndeadNorms.java
+   *   - java -cp .:build/analysis/common/lucene-analyzers-common-4.0-SNAPSHOT.jar:build/core/lucene-core-4.0-SNAPSHOT.jar CreateUndeadNorms
+   *   - cd /tmp/undeadnorms ; zip index.40.undeadnorms.zip *
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Version;
+
+public class CreateUndeadNorms {
+  public static void main(String[] args) throws Exception {
+    File file = new File("/tmp/undeadnorms");
+    if (file.exists()) {
+      throw new RuntimeException("please remove /tmp/undeadnorms first");
+    }
+    Directory dir = FSDirectory.open(new File("/tmp/undeadnorms"));
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_40, new WhitespaceAnalyzer(Version.LUCENE_40)));
+    Document doc = new Document();
+    doc.add(new StringField("id", "0", Field.Store.NO));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.NO));
+    Field content = new TextField("content", "some content", Field.Store.NO);
+    content.setTokenStream(new TokenStream() {
+        @Override
+        public boolean incrementToken() throws IOException {
+          throw new IOException("brains brains!");
+        }
+      });
+
+    doc.add(content);
+    try {
+      w.addDocument(doc);
+      throw new RuntimeException("didn't hit exception");
+    } catch (IOException ioe) {
+      // perfect
+    }
+    w.close();
+    dir.close();
+  }
+}
+*/
+
+  /**
+   * LUCENE-6006: Test undead norms.
+   *                                 .....
+   *                             C C  /
+   *                            /<   /
+   *             ___ __________/_#__=o
+   *            /(- /(\_\________   \
+   *            \ ) \ )_      \o     \
+   *            /|\ /|\       |'     |
+   *                          |     _|
+   *                          /o   __\
+   *                         / '     |
+   *                        / /      |
+   *                       /_/\______|
+   *                      (   _(    <
+   *                       \    \    \
+   *                        \    \    |
+   *                         \____\____\
+   *                         ____\_\__\_\
+   *                       /`   /`     o\
+   *                       |___ |_______|
+   *
+   */
+  public void testReadUndeadNorms() throws Exception {
+    InputStream resource = TestLucene40NormsFormat.class.getResourceAsStream("index.40.undeadnorms.zip");
+    assertNotNull(resource);
+    Path path = createTempDir("undeadnorms");
+    TestUtil.unzip(resource, path);
+    Directory dir = FSDirectory.open(path);
+    IndexReader r = DirectoryReader.open(dir);
+    NumericDocValues undeadNorms = MultiDocValues.getNormValues(r, "content");
+    assertNotNull(undeadNorms);
+    assertEquals(2, r.maxDoc());
+    assertEquals(0, undeadNorms.get(0));
+    assertEquals(0, undeadNorms.get(1));
+    r.close();
+    dir.close();
+  }
 }
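The commented-out CreateUndeadNorms tools above (and their 4.2/4.6/4.9 variants below) all rely on the same trick: a TokenStream that throws on its first increment makes IndexWriter register the field as indexed with norms, then abort the document before any norm value is written. Isolated against the 4.x analysis API (the class name here is illustrative; the field and message come from the tool above):

import java.io.IOException;
import org.apache.lucene.analysis.TokenStream;

class BrainsTokenStream extends TokenStream {
  @Override
  public boolean incrementToken() throws IOException {
    // Fail before a single token is produced: the document is aborted, but
    // the "content" field has already been registered as having norms.
    throw new IOException("brains brains!");
  }
}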
Binary file not shown. (new test index fixture: index.40.undeadnorms.zip)
@@ -76,7 +76,7 @@ public final class Lucene42RWFieldInfosFormat extends Lucene42FieldInfosFormat {
 
       // pack the DV types in one byte
       final byte dv = docValuesByte(fi.getDocValuesType());
-      final byte nrm = docValuesByte(fi.getNormType());
+      final byte nrm = docValuesByte(fi.hasNorms() ? DocValuesType.NUMERIC : null);
       assert (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0;
       byte val = (byte) (0xff & ((nrm << 4) | dv));
       output.writeByte(val);
@@ -17,10 +17,18 @@ package org.apache.lucene.codecs.lucene42;
  * limitations under the License.
  */
 
+import java.io.InputStream;
+import java.nio.file.Path;
+
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.BaseNormsFormatTestCase;
-import org.junit.BeforeClass;
-
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.TestUtil;
 
 /** Tests Lucene42's norms format */
 public class TestLucene42NormsFormat extends BaseNormsFormatTestCase {
@@ -30,4 +38,100 @@ public class TestLucene42NormsFormat extends BaseNormsFormatTestCase {
   protected Codec getCodec() {
     return codec;
   }
+
+  /** Copy this back to /l/421/lucene/CreateUndeadNorms.java, then:
+   *   - ant clean
+   *   - pushd analysis/common; ant jar; popd
+   *   - pushd core; ant jar; popd
+   *   - javac -cp build/analysis/common/lucene-analyzers-common-4.2.1-SNAPSHOT.jar:build/core/lucene-core-4.2.1-SNAPSHOT.jar CreateUndeadNorms.java
+   *   - java -cp .:build/analysis/common/lucene-analyzers-common-4.2.1-SNAPSHOT.jar:build/core/lucene-core-4.2.1-SNAPSHOT.jar CreateUndeadNorms
+   *   - cd /tmp/undeadnorms ; zip index.42.undeadnorms.zip *
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Version;
+
+public class CreateUndeadNorms {
+  public static void main(String[] args) throws Exception {
+    File file = new File("/tmp/undeadnorms");
+    if (file.exists()) {
+      throw new RuntimeException("please remove /tmp/undeadnorms first");
+    }
+    Directory dir = FSDirectory.open(file);
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_42, new WhitespaceAnalyzer(Version.LUCENE_42)));
+    Document doc = new Document();
+    doc.add(new StringField("id", "0", Field.Store.NO));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.NO));
+    Field content = new TextField("content", "some content", Field.Store.NO);
+    content.setTokenStream(new TokenStream() {
+        @Override
+        public boolean incrementToken() throws IOException {
+          throw new IOException("brains brains!");
+        }
+      });
+
+    doc.add(content);
+    try {
+      w.addDocument(doc);
+      throw new RuntimeException("didn't hit exception");
+    } catch (IOException ioe) {
+      // perfect
+    }
+    w.close();
+    dir.close();
+  }
+}
+*/
+  /**
+   * LUCENE-6006: Test undead norms.
+   *                                 .....
+   *                             C C  /
+   *                            /<   /
+   *             ___ __________/_#__=o
+   *            /(- /(\_\________   \
+   *            \ ) \ )_      \o     \
+   *            /|\ /|\       |'     |
+   *                          |     _|
+   *                          /o   __\
+   *                         / '     |
+   *                        / /      |
+   *                       /_/\______|
+   *                      (   _(    <
+   *                       \    \    \
+   *                        \    \    |
+   *                         \____\____\
+   *                         ____\_\__\_\
+   *                       /`   /`     o\
+   *                       |___ |_______|
+   *
+   */
+  public void testReadUndeadNorms() throws Exception {
+    InputStream resource = TestLucene42NormsFormat.class.getResourceAsStream("index.42.undeadnorms.zip");
+    assertNotNull(resource);
+    Path path = createTempDir("undeadnorms");
+    TestUtil.unzip(resource, path);
+    Directory dir = FSDirectory.open(path);
+    IndexReader r = DirectoryReader.open(dir);
+    NumericDocValues undeadNorms = MultiDocValues.getNormValues(r, "content");
+    assertNotNull(undeadNorms);
+    assertEquals(2, r.maxDoc());
+    assertEquals(0, undeadNorms.get(0));
+    assertEquals(0, undeadNorms.get(1));
+    r.close();
+    dir.close();
+  }
 }
Binary file not shown. (new test index fixture: index.42.undeadnorms.zip)
@@ -0,0 +1,129 @@
+package org.apache.lucene.codecs.lucene46;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.InputStream;
+import java.nio.file.Path;
+
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
+
+public class TestLucene46UndeadNorms extends LuceneTestCase {
+
+  /** Copy this back to /l/461/lucene/CreateUndeadNorms.java, then:
+   *   - ant clean
+   *   - pushd analysis/common; ant jar; popd
+   *   - pushd core; ant jar; popd
+   *   - javac -cp build/analysis/common/lucene-analyzers-common-4.6-SNAPSHOT.jar:build/core/lucene-core-4.6-SNAPSHOT.jar CreateUndeadNorms.java
+   *   - java -cp .:build/analysis/common/lucene-analyzers-common-4.6-SNAPSHOT.jar:build/core/lucene-core-4.6-SNAPSHOT.jar CreateUndeadNorms
+   *   - cd /tmp/undeadnorms ; zip index.46.undeadnorms.zip *
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Version;
+
+public class CreateUndeadNorms {
+  public static void main(String[] args) throws Exception {
+    File file = new File("/tmp/undeadnorms");
+    if (file.exists()) {
+      throw new RuntimeException("please remove /tmp/undeadnorms first");
+    }
+    Directory dir = FSDirectory.open(file);
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_46, new WhitespaceAnalyzer(Version.LUCENE_46)));
+    Document doc = new Document();
+    doc.add(new StringField("id", "0", Field.Store.NO));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.NO));
+    Field content = new TextField("content", "some content", Field.Store.NO);
+    content.setTokenStream(new TokenStream() {
+        @Override
+        public boolean incrementToken() throws IOException {
+          throw new IOException("brains brains!");
+        }
+      });
+
+    doc.add(content);
+    try {
+      w.addDocument(doc);
+      throw new RuntimeException("didn't hit exception");
+    } catch (IOException ioe) {
+      // perfect
+    }
+    w.close();
+    dir.close();
+  }
+}
+*/
+  /**
+   * LUCENE-6006: Test undead norms.
+   *                                 .....
+   *                             C C  /
+   *                            /<   /
+   *             ___ __________/_#__=o
+   *            /(- /(\_\________   \
+   *            \ ) \ )_      \o     \
+   *            /|\ /|\       |'     |
+   *                          |     _|
+   *                          /o   __\
+   *                         / '     |
+   *                        / /      |
+   *                       /_/\______|
+   *                      (   _(    <
+   *                       \    \    \
+   *                        \    \    |
+   *                         \____\____\
+   *                         ____\_\__\_\
+   *                       /`   /`     o\
+   *                       |___ |_______|
+   *
+   */
+  public void testReadUndeadNorms() throws Exception {
+    InputStream resource = TestLucene46UndeadNorms.class.getResourceAsStream("index.46.undeadnorms.zip");
+    assertNotNull(resource);
+    Path path = createTempDir("undeadnorms");
+    TestUtil.unzip(resource, path);
+    Directory dir = FSDirectory.open(path);
+    IndexReader r = DirectoryReader.open(dir);
+    NumericDocValues undeadNorms = MultiDocValues.getNormValues(r, "content");
+    assertNotNull(undeadNorms);
+    assertEquals(2, r.maxDoc());
+    assertEquals(0, undeadNorms.get(0));
+    assertEquals(0, undeadNorms.get(1));
+    r.close();
+    dir.close();
+  }
+}
Binary file not shown. (new test index fixture: index.46.undeadnorms.zip)
@@ -17,8 +17,18 @@ package org.apache.lucene.codecs.lucene49;
  * limitations under the License.
  */
 
+import java.io.InputStream;
+import java.nio.file.Path;
+
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.BaseNormsFormatTestCase;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.TestUtil;
 
 /**
  * Tests Lucene49NormsFormat
@@ -30,4 +40,101 @@ public class TestLucene49NormsFormat extends BaseNormsFormatTestCase {
   protected Codec getCodec() {
     return codec;
   }
+
+  /** Copy this back to /l/491/lucene/CreateUndeadNorms.java, then:
+   *   - ant clean
+   *   - pushd analysis/common; ant jar; popd
+   *   - pushd core; ant jar; popd
+   *   - javac -cp build/analysis/common/lucene-analyzers-common-4.9-SNAPSHOT.jar:build/core/lucene-core-4.9-SNAPSHOT.jar CreateUndeadNorms.java
+   *   - java -cp .:build/analysis/common/lucene-analyzers-common-4.9-SNAPSHOT.jar:build/core/lucene-core-4.9-SNAPSHOT.jar CreateUndeadNorms
+   *   - cd /tmp/undeadnorms ; zip index.49.undeadnorms.zip *
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Version;
+
+public class CreateUndeadNorms {
+  public static void main(String[] args) throws Exception {
+    File file = new File("/tmp/undeadnorms");
+    if (file.exists()) {
+      throw new RuntimeException("please remove /tmp/undeadnorms first");
+    }
+    Directory dir = FSDirectory.open(file);
+    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_4_9, new WhitespaceAnalyzer(Version.LUCENE_4_9)));
+    Document doc = new Document();
+    doc.add(new StringField("id", "0", Field.Store.NO));
+    w.addDocument(doc);
+    doc = new Document();
+    doc.add(new StringField("id", "1", Field.Store.NO));
+    Field content = new TextField("content", "some content", Field.Store.NO);
+    content.setTokenStream(new TokenStream() {
+        @Override
+        public boolean incrementToken() throws IOException {
+          throw new IOException("brains brains!");
+        }
+      });
+
+    doc.add(content);
+    try {
+      w.addDocument(doc);
+      throw new RuntimeException("didn't hit exception");
+    } catch (IOException ioe) {
+      // perfect
+    }
+    w.close();
+    dir.close();
+  }
+}
+*/
+
+  /**
+   * LUCENE-6006: Test undead norms.
+   *                                 .....
+   *                             C C  /
+   *                            /<   /
+   *             ___ __________/_#__=o
+   *            /(- /(\_\________   \
+   *            \ ) \ )_      \o     \
+   *            /|\ /|\       |'     |
+   *                          |     _|
+   *                          /o   __\
+   *                         / '     |
+   *                        / /      |
+   *                       /_/\______|
+   *                      (   _(    <
+   *                       \    \    \
+   *                        \    \    |
+   *                         \____\____\
+   *                         ____\_\__\_\
+   *                       /`   /`     o\
+   *                       |___ |_______|
+   *
+   */
+  public void testReadUndeadNorms() throws Exception {
+    InputStream resource = TestLucene49NormsFormat.class.getResourceAsStream("index.49.undeadnorms.zip");
+    assertNotNull(resource);
+    Path path = createTempDir("undeadnorms");
+    TestUtil.unzip(resource, path);
+    Directory dir = FSDirectory.open(path);
+    IndexReader r = DirectoryReader.open(dir);
+    NumericDocValues undeadNorms = MultiDocValues.getNormValues(r, "content");
+    assertNotNull(undeadNorms);
+    assertEquals(2, r.maxDoc());
+    assertEquals(0, undeadNorms.get(0));
+    assertEquals(0, undeadNorms.get(1));
+    r.close();
+    dir.close();
+  }
 }
Binary file not shown. (new test index fixture: index.49.undeadnorms.zip)
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.lang.reflect.Modifier;
-import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -121,7 +120,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
   private Path getIndexDir() {
     String path = System.getProperty("tests.bwcdir");
-    assumeTrue("backcompat creation tests must be run with -Dtests,bwcdir=/path/to/write/indexes", path != null);
+    assumeTrue("backcompat creation tests must be run with -Dtests.bwcdir=/path/to/write/indexes", path != null);
     return Paths.get(path);
   }
 
@@ -71,8 +71,7 @@ class SimpleTextDocValuesWriter extends DocValuesConsumer {
   @Override
   public void addNumericField(FieldInfo field, Iterable<Number> values) throws IOException {
     assert fieldSeen(field.name);
-    assert (field.getDocValuesType() == FieldInfo.DocValuesType.NUMERIC ||
-            field.getNormType() == FieldInfo.DocValuesType.NUMERIC);
+    assert field.getDocValuesType() == FieldInfo.DocValuesType.NUMERIC || field.hasNorms();
     writeFieldEntry(field, FieldInfo.DocValuesType.NUMERIC);
 
     // first pass to find min/max
@@ -59,7 +59,6 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
   static final BytesRef STORETVOFF = new BytesRef(" term vector offsets ");
   static final BytesRef PAYLOADS = new BytesRef(" payloads ");
   static final BytesRef NORMS = new BytesRef(" norms ");
-  static final BytesRef NORMS_TYPE = new BytesRef(" norms type ");
   static final BytesRef DOCVALUES = new BytesRef(" doc values ");
   static final BytesRef DOCVALUES_GEN = new BytesRef(" doc values gen ");
   static final BytesRef INDEXOPTIONS = new BytesRef(" index options ");
@@ -115,11 +114,6 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
       assert StringHelper.startsWith(scratch.get(), NORMS);
       boolean omitNorms = !Boolean.parseBoolean(readString(NORMS.length, scratch));
 
-      SimpleTextUtil.readLine(input, scratch);
-      assert StringHelper.startsWith(scratch.get(), NORMS_TYPE);
-      String nrmType = readString(NORMS_TYPE.length, scratch);
-      final DocValuesType normsType = docValuesType(nrmType);
-
       SimpleTextUtil.readLine(input, scratch);
       assert StringHelper.startsWith(scratch.get(), DOCVALUES);
       String dvType = readString(DOCVALUES.length, scratch);
@@ -146,7 +140,7 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
       }
 
       infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
-        omitNorms, storePayloads, indexOptions, docValuesType, normsType, dvGen, Collections.unmodifiableMap(atts));
+        omitNorms, storePayloads, indexOptions, docValuesType, dvGen, Collections.unmodifiableMap(atts));
     }
 
     SimpleTextUtil.checkFooter(input);
@@ -218,10 +212,6 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
       SimpleTextUtil.write(out, Boolean.toString(!fi.omitsNorms()), scratch);
       SimpleTextUtil.writeNewline(out);
 
-      SimpleTextUtil.write(out, NORMS_TYPE);
-      SimpleTextUtil.write(out, getDocValuesType(fi.getNormType()), scratch);
-      SimpleTextUtil.writeNewline(out);
-
       SimpleTextUtil.write(out, DOCVALUES);
       SimpleTextUtil.write(out, getDocValuesType(fi.getDocValuesType()), scratch);
       SimpleTextUtil.writeNewline(out);
@@ -149,12 +149,11 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat {
         // DV Types are packed in one byte
         byte val = input.readByte();
         final DocValuesType docValuesType = getDocValuesType(input, (byte) (val & 0x0F));
-        final DocValuesType normsType = getDocValuesType(input, (byte) ((val >>> 4) & 0x0F));
         final long dvGen = input.readLong();
         final Map<String,String> attributes = input.readStringStringMap();
         try {
           infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads,
-                                   indexOptions, docValuesType, normsType, dvGen, Collections.unmodifiableMap(attributes));
+                                   indexOptions, docValuesType, dvGen, Collections.unmodifiableMap(attributes));
           infos[i].checkConsistency();
         } catch (IllegalStateException e) {
           throw new CorruptIndexException("invalid fieldinfo for field: " + name + ", fieldNumber=" + fieldNumber, input, e);
@ -215,12 +214,10 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat {
|
||||||
output.writeVInt(fi.number);
|
output.writeVInt(fi.number);
|
||||||
output.writeByte(bits);
|
output.writeByte(bits);
|
||||||
|
|
||||||
// pack the DV types in one byte
|
// pack the DV type and hasNorms in one byte
|
||||||
final byte dv = docValuesByte(fi.getDocValuesType());
|
final byte dv = docValuesByte(fi.getDocValuesType());
|
||||||
final byte nrm = docValuesByte(fi.getNormType());
|
assert (dv & (~0xF)) == 0;
|
||||||
assert (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0;
|
output.writeByte(dv);
|
||||||
byte val = (byte) (0xff & ((nrm << 4) | dv));
|
|
||||||
output.writeByte(val);
|
|
||||||
output.writeLong(fi.getDocValuesGen());
|
output.writeLong(fi.getDocValuesGen());
|
||||||
output.writeStringStringMap(fi.attributes());
|
output.writeStringStringMap(fi.attributes());
|
||||||
}
|
}
|
||||||
|
|
|
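For reference, the scheme these two hunks retire stored two 4-bit values in a single byte: the field's doc-values type in the low nibble and its norms type in the high nibble. Below is a minimal standalone sketch of that nibble arithmetic, using hypothetical ordinal values rather than the actual codec classes:

// Sketch of the nibble packing removed above (illustrative only).
public class NibblePackDemo {
  public static void main(String[] args) {
    byte dv = 2;   // hypothetical DocValuesType ordinal for the field's doc values
    byte nrm = 1;  // hypothetical ordinal for the norms type

    // Old format: both 4-bit values share one byte.
    byte packed = (byte) (0xff & ((nrm << 4) | dv));

    // Old read side: split the byte back into two nibbles.
    byte dvRead  = (byte) (packed & 0x0F);
    byte nrmRead = (byte) ((packed >>> 4) & 0x0F);
    System.out.println(dvRead + " " + nrmRead); // prints: 2 1

    // New format: the norms type is implied by hasNorms(), so only the
    // doc-values nibble is written and the high nibble stays zero.
    byte packedNew = dv;
    System.out.println(packedNew & 0x0F); // prints: 2
  }
}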
@@ -1790,12 +1790,8 @@ public class CheckIndex implements Closeable {
  }

  private static void checkNorms(FieldInfo fi, LeafReader reader, PrintStream infoStream) throws IOException {
-   switch(fi.getNormType()) {
-     case NUMERIC:
+   if (fi.hasNorms()) {
      checkNumericDocValues(fi.name, reader, reader.getNormValues(fi.name), new Bits.MatchAllBits(reader.maxDoc()));
-       break;
-     default:
-       throw new AssertionError("wtf: " + fi.getNormType());
    }
  }

@@ -183,14 +183,10 @@ final class DefaultIndexingChain extends DocConsumer {

      // we must check the final value of omitNorms for the fieldinfo: it could have
      // changed for this field since the first time we added it.
-     if (fi.omitsNorms() == false) {
-       if (perField.norms != null) {
+     if (fi.omitsNorms() == false && fi.isIndexed()) {
+       assert perField.norms != null: "field=" + fi.name;
        perField.norms.finish(state.segmentInfo.getDocCount());
        perField.norms.flush(state, normsConsumer);
-         assert fi.getNormType() == DocValuesType.NUMERIC;
-       } else if (fi.isIndexed()) {
-         assert fi.getNormType() == null: "got " + fi.getNormType() + "; field=" + fi.name;
-       }
      }
    }
  }
@@ -535,6 +531,11 @@ final class DefaultIndexingChain extends DocConsumer {
    void setInvertState() {
      invertState = new FieldInvertState(fieldInfo.name);
      termsHashPerField = termsHash.addField(invertState, fieldInfo);
+     if (fieldInfo.omitsNorms() == false) {
+       assert norms == null;
+       // Even if no documents actually succeed in setting a norm, we still write norms for this segment:
+       norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed);
+     }
    }

    @Override
@@ -543,15 +544,9 @@ final class DefaultIndexingChain extends DocConsumer {
    }

    public void finish() throws IOException {
-     if (fieldInfo.omitsNorms() == false) {
-       if (norms == null) {
-         fieldInfo.setNormValueType(FieldInfo.DocValuesType.NUMERIC);
-         norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed);
-       }
-       if (invertState.length != 0) {
+     if (fieldInfo.omitsNorms() == false && invertState.length != 0) {
        norms.addValue(docState.docID, similarity.computeNorm(invertState));
      }
-     }

      termsHashPerField.finish();
    }
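The indexing-chain hunks above move creation of the norms writer from finish() to setInvertState(), so a segment that declares norms always writes them, even when every document's norm stays zero; this is what prevents the "undead norms" corner case for newly written segments. A rough illustrative sketch of the lazy-versus-eager distinction, with simplified stand-ins rather than Lucene's classes:

// Illustrative sketch (not Lucene code). With lazy creation, a field whose
// only occurrences end up deleted or zero-length could declare a norm type
// yet never write values; eager creation guarantees values exist.
class NormsLifecycleSketch {
  static boolean omitsNorms = false;
  static Object norms = null;

  static void setInvertState() {
    if (!omitsNorms) {
      norms = new long[8]; // stand-in for NormValuesWriter: allocated up front
    }
  }

  public static void main(String[] args) {
    setInvertState();
    // Even if finish() never adds a value, the segment still flushes an
    // all-zero norms producer instead of leaving the field "undead".
    System.out.println("norms allocated eagerly: " + (norms != null));
  }
}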
@@ -40,8 +40,8 @@ public final class FieldInfo {
  // True if any document indexed term vectors
  private boolean storeTermVector;

- private DocValuesType normType;
  private boolean omitNorms; // omit norms associated with indexed fields

  private IndexOptions indexOptions;
  private boolean storePayloads; // whether this field stores payloads together with term positions

@@ -120,12 +120,12 @@ public final class FieldInfo {
  }

  /**
-  * Sole Constructor.
+  * Sole constructor.
   *
   * @lucene.experimental
   */
  public FieldInfo(String name, boolean indexed, int number, boolean storeTermVector, boolean omitNorms,
-                  boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues, DocValuesType normsType,
+                  boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues,
                   long dvGen, Map<String,String> attributes) {
    this.name = name;
    this.indexed = indexed;
@@ -136,13 +136,11 @@ public final class FieldInfo {
      this.storePayloads = storePayloads;
      this.omitNorms = omitNorms;
      this.indexOptions = indexOptions;
-     this.normType = !omitNorms ? normsType : null;
    } else { // for non-indexed fields, leave defaults
      this.storeTermVector = false;
      this.storePayloads = false;
      this.omitNorms = false;
      this.indexOptions = null;
-     this.normType = null;
    }
    this.dvGen = dvGen;
    this.attributes = attributes;
@@ -158,11 +156,6 @@ public final class FieldInfo {
      if (indexOptions == null) {
        throw new IllegalStateException("indexed field '" + name + "' must have index options");
      }
-     if (omitNorms) {
-       if (normType != null) {
-         throw new IllegalStateException("indexed field '" + name + "' cannot both omit norms and have norms");
-       }
-     }
      // Cannot store payloads unless positions are indexed:
      if (indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0 && storePayloads) {
        throw new IllegalStateException("indexed field '" + name + "' cannot have payloads without positions");
@@ -177,12 +170,8 @@ public final class FieldInfo {
      if (omitNorms) {
        throw new IllegalStateException("non-indexed field '" + name + "' cannot omit norms");
      }
-     if (normType != null) {
-       throw new IllegalStateException("non-indexed field '" + name + "' cannot have norms");
-     }
      if (indexOptions != null) {
        throw new IllegalStateException("non-indexed field '" + name + "' cannot have index options");

      }
    }

@@ -206,7 +195,6 @@ public final class FieldInfo {
    this.storePayloads |= storePayloads;
    if (this.omitNorms != omitNorms) {
      this.omitNorms = true; // if one require omitNorms at least once, it remains off for life
-     this.normType = null;
    }
    if (this.indexOptions != indexOptions) {
      if (this.indexOptions == null) {
@@ -265,13 +253,6 @@ public final class FieldInfo {
    return dvGen;
  }

- /**
-  * Returns {@link DocValuesType} of the norm. this may be null if the field has no norms.
-  */
- public DocValuesType getNormType() {
-   return normType;
- }
-
  void setStoreTermVectors() {
    storeTermVector = true;
    assert checkConsistency();
@@ -284,14 +265,6 @@ public final class FieldInfo {
    assert checkConsistency();
  }

- void setNormValueType(DocValuesType type) {
-   if (normType != null && normType != type) {
-     throw new IllegalArgumentException("cannot change Norm type from " + normType + " to " + type + " for field \"" + name + "\"");
-   }
-   normType = type;
-   assert checkConsistency();
- }
-
  /**
   * Returns true if norms are explicitly omitted for this field
   */
@@ -303,7 +276,7 @@ public final class FieldInfo {
   * Returns true if this field actually has any norms.
   */
  public boolean hasNorms() {
-   return normType != null;
+   return indexed && omitNorms == false;
  }

  /**
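The rewritten hasNorms() makes the removed normType field's redundancy explicit: an indexed field that does not omit norms always has NUMERIC norms, and every other field has none. A small sketch of that derivation, using a simplified stand-in enum rather than Lucene's FieldInfo:

// Illustrative sketch: deriving the norms doc-values type that used to be
// stored in FieldInfo.normType. Assumes a simplified stand-in enum.
enum DocValuesType { NUMERIC, BINARY, SORTED }

class NormTypeDemo {
  // null means "no norms", mirroring the removed getNormType() contract.
  static DocValuesType normType(boolean indexed, boolean omitNorms) {
    return (indexed && omitNorms == false) ? DocValuesType.NUMERIC : null;
  }

  public static void main(String[] args) {
    System.out.println(normType(true, false));  // NUMERIC: indexed field with norms
    System.out.println(normType(true, true));   // null: norms omitted
    System.out.println(normType(false, false)); // null: field not indexed
  }
}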
@@ -276,10 +276,8 @@ public class FieldInfos implements Iterable<FieldInfo> {
  }

  /** NOTE: this method does not carry over termVector
-  *  booleans nor docValuesType; the indexer chain
-  *  (TermVectorsConsumerPerField, DocFieldProcessor) must
-  *  set these fields when they succeed in consuming
-  *  the document */
+  *  the indexer chain must set these fields when they
+  *  succeed in consuming the document */
  public FieldInfo addOrUpdate(String name, IndexableFieldType fieldType) {
    // TODO: really, indexer shouldn't even call this
    // method (it's only called from DocFieldProcessor);
@@ -288,12 +286,12 @@ public class FieldInfos implements Iterable<FieldInfo> {
    // be updated by maybe FreqProxTermsWriterPerField:
    return addOrUpdateInternal(name, -1, fieldType.indexed(), false,
                               fieldType.omitNorms(), false,
-                              fieldType.indexOptions(), fieldType.docValueType(), null);
+                              fieldType.indexOptions(), fieldType.docValueType());
  }

  private FieldInfo addOrUpdateInternal(String name, int preferredFieldNumber, boolean isIndexed,
                                        boolean storeTermVector,
-                                       boolean omitNorms, boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues, DocValuesType normType) {
+                                       boolean omitNorms, boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues) {
    FieldInfo fi = fieldInfo(name);
    if (fi == null) {
      // This field wasn't yet added to this in-RAM
@@ -302,7 +300,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
      // before then we'll get the same name and number,
      // else we'll allocate a new one:
      final int fieldNumber = globalFieldNumbers.addOrGet(name, preferredFieldNumber, docValues);
-     fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, normType, -1, null);
+     fi = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, -1, null);
      assert !byName.containsKey(fi.name);
      assert globalFieldNumbers.containsConsistent(Integer.valueOf(fi.number), fi.name, fi.getDocValuesType());
      byName.put(fi.name, fi);
@@ -319,10 +317,6 @@ public class FieldInfos implements Iterable<FieldInfo> {
          globalFieldNumbers.setDocValuesType(fi.number, name, docValues);
        }
      }
-
-     if (!fi.omitsNorms() && normType != null) {
-       fi.setNormValueType(normType);
-     }
    }
    return fi;
  }
@@ -331,14 +325,14 @@ public class FieldInfos implements Iterable<FieldInfo> {
    // IMPORTANT - reuse the field number if possible for consistent field numbers across segments
    return addOrUpdateInternal(fi.name, fi.number, fi.isIndexed(), fi.hasVectors(),
                               fi.omitsNorms(), fi.hasPayloads(),
-                              fi.getIndexOptions(), fi.getDocValuesType(), fi.getNormType());
+                              fi.getIndexOptions(), fi.getDocValuesType());
  }

  public FieldInfo fieldInfo(String fieldName) {
    return byName.get(fieldName);
  }

- final FieldInfos finish() {
+ FieldInfos finish() {
    return new FieldInfos(byName.values().toArray(new FieldInfo[byName.size()]));
  }
}
@@ -445,7 +445,7 @@ public class MemoryIndex {

      if (!fieldInfos.containsKey(fieldName)) {
        fieldInfos.put(fieldName,
-           new FieldInfo(fieldName, true, fieldInfos.size(), false, false, false, this.storeOffsets ? IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS , null, null, -1, null));
+           new FieldInfo(fieldName, true, fieldInfos.size(), false, false, false, this.storeOffsets ? IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS , null, -1, null));
      }
      TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
      PositionIncrementAttribute posIncrAttribute = stream.addAttribute(PositionIncrementAttribute.class);
@@ -21,22 +21,22 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Map;

+import org.apache.lucene.document.BinaryDocValuesField; // javadocs
+import org.apache.lucene.document.DoubleField; // javadocs
+import org.apache.lucene.document.FloatField; // javadocs
 import org.apache.lucene.document.IntField; // javadocs
 import org.apache.lucene.document.LongField; // javadocs
-import org.apache.lucene.document.FloatField; // javadocs
-import org.apache.lucene.document.DoubleField; // javadocs
-import org.apache.lucene.document.BinaryDocValuesField; // javadocs
 import org.apache.lucene.document.NumericDocValuesField; // javadocs
 import org.apache.lucene.document.SortedDocValuesField; // javadocs
 import org.apache.lucene.document.SortedSetDocValuesField; // javadocs
 import org.apache.lucene.document.StringField; // javadocs
-import org.apache.lucene.index.FilterLeafReader;
-import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.FilterDirectoryReader;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
@@ -213,7 +213,7 @@ public class UninvertingReader extends FilterLeafReader {
        }
      }
      filteredInfos.add(new FieldInfo(fi.name, fi.isIndexed(), fi.number, fi.hasVectors(), fi.omitsNorms(),
-                                     fi.hasPayloads(), fi.getIndexOptions(), type, fi.getNormType(), -1, null));
+                                     fi.hasPayloads(), fi.getIndexOptions(), type, -1, null));
    }
    fieldInfos = new FieldInfos(filteredInfos.toArray(new FieldInfo[filteredInfos.size()]));
  }
@@ -93,7 +93,7 @@ public class AssertingNormsFormat extends NormsFormat {

    @Override
    public NumericDocValues getNorms(FieldInfo field) throws IOException {
-     assert field.getNormType() == FieldInfo.DocValuesType.NUMERIC;
+     assert field.hasNorms();
      NumericDocValues values = in.getNorms(field);
      assert values != null;
      return new AssertingLeafReader.AssertingNumericDocValues(values, maxDoc);
@@ -89,11 +89,6 @@ public abstract class BaseFieldInfoFormatTestCase extends BaseIndexFileFormatTes
        fi.setStorePayloads();
      }
    }
-   if (fi.isIndexed() && !fi.omitsNorms()) {
-     if (random().nextBoolean()) {
-       fi.setNormValueType(DocValuesType.NUMERIC);
-     }
-   }
    addAttributes(fi);
  }
  FieldInfos infos = builder.finish();
@@ -165,7 +160,6 @@ public abstract class BaseFieldInfoFormatTestCase extends BaseIndexFileFormatTes
    assertEquals(expected.name, actual.name);
    assertEquals(expected.getDocValuesType(), actual.getDocValuesType());
    assertEquals(expected.getIndexOptions(), actual.getIndexOptions());
-   assertEquals(expected.getNormType(), actual.getNormType());
    assertEquals(expected.hasDocValues(), actual.hasDocValues());
    assertEquals(expected.hasNorms(), actual.hasNorms());
    assertEquals(expected.hasPayloads(), actual.hasPayloads());
@@ -18,6 +18,8 @@ package org.apache.lucene.index;
  */

 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Random;

 import org.apache.lucene.analysis.Analyzer;
@@ -128,7 +130,6 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas

  public void testAllZeros() throws Exception {
    int iterations = atLeast(1);
-   final Random r = random();
    for (int i = 0; i < iterations; i++) {
      doTestNormsVersusStoredFields(new LongProducer() {
        @Override
@@ -264,4 +265,60 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
  }

  // TODO: test thread safety (e.g. across different fields) explicitly here

+ /**
+  * LUCENE-6006: Tests undead norms.
+  *                          .....
+  *                      C C  /
+  *                     /<   /
+  *      ___ __________/_#__=o
+  *     /(- /(\_\________   \
+  *     \ ) \ )_      \o     \
+  *     /|\ /|\       |'     |
+  *                   |     _|
+  *                   /o   __\
+  *                  / '     |
+  *                 / /      |
+  *                /_/\______|
+  *               (   _(    <
+  *                \    \    \
+  *                 \    \    |
+  *                  \____\____\
+  *                  ____\_\__\_\
+  *                /`   /`     o\
+  *                |___ |_______|
+  *
+  */
+ public void testUndeadNorms() throws Exception {
+   Directory dir = newDirectory();
+   RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+   int numDocs = atLeast(1000);
+   List<Integer> toDelete = new ArrayList<>();
+   for(int i=0;i<numDocs;i++) {
+     Document doc = new Document();
+     doc.add(new StringField("id", ""+i, Field.Store.NO));
+     if (random().nextInt(5) == 1) {
+       toDelete.add(i);
+       doc.add(new TextField("content", "some content", Field.Store.NO));
+     }
+     w.addDocument(doc);
+   }
+   for(Integer id : toDelete) {
+     w.deleteDocuments(new Term("id", ""+id));
+   }
+   w.forceMerge(1);
+   IndexReader r = w.getReader();
+
+   // Confusingly, norms should exist, and should all be 0, even though we deleted all docs that had the field "content". They should not
+   // be undead:
+   NumericDocValues norms = MultiDocValues.getNormValues(r, "content");
+   assertNotNull(norms);
+   for(int i=0;i<r.maxDoc();i++) {
+     assertEquals(0, norms.get(i));
+   }
+
+   r.close();
+   w.close();
+   dir.close();
+ }
}
@@ -45,7 +45,6 @@ import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
-import org.apache.lucene.index.FieldInfo.DocValuesType;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.TermsEnum.SeekStatus;
 import org.apache.lucene.store.Directory;
@@ -373,7 +372,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest

      fieldInfoArray[fieldUpto] = new FieldInfo(field, true, fieldUpto, false, false, true,
                                                IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS,
-                                               null, DocValuesType.NUMERIC, -1, null);
+                                               null, -1, null);
      fieldUpto++;

      SortedMap<BytesRef,SeedAndOrd> postings = new TreeMap<>();
@@ -702,7 +701,6 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
                                            doPayloads,
                                            indexOptions,
                                            null,
-                                           DocValuesType.NUMERIC,
                                            -1,
                                            null);
    }
@@ -167,7 +167,11 @@ New Features
 * SOLR-6585: RequestHandlers can optionaly handle sub paths as well (Noble Paul)

 * SOLR-6617: /update/json/docs path will use fully qualified node names by default
-  (NOble Paul)
+  (Noble Paul)

+* SOLR-4715: Add CloudSolrServer constructors which accept a HttpClient instance.
+  (Hardik Upadhyay, Shawn Heisey, shalin)
+
 Bug Fixes
 ----------------------
@@ -230,6 +234,9 @@ Bug Fixes

 * SOLR-6624 Spelling mistakes in the Java source (Hrishikesh Gadre)

+* SOLR-6307: Atomic update remove does not work for int array or date array
+  (Anurag Sharma , noble)
+
 Optimizations
 ----------------------

@@ -966,6 +966,12 @@ public abstract class FieldType extends FieldProperties {
    return analyzerProps;
  }

+ /**Converts any Object to a java Object native to this field type
+  */
+ public Object toNativeType(Object val) {
+   return val;
+ }
+
  /**
   * Convert a value used by the FieldComparator for this FieldType's SortField
   * into a marshalable value for distributed sorting.
@@ -344,4 +344,14 @@ public class TrieDateField extends TrieField implements DateValueFieldType {
                          max == null ? null : max.getTime(),
                          minInclusive, maxInclusive);
  }
+
+ @Override
+ public Object toNativeType(Object val) {
+   if(val==null) return null;
+   if (val instanceof Date) return val;
+
+   if (val instanceof String) return parseMath(null,(String)val);
+
+   return super.toNativeType(val);
+ }
}
@@ -37,4 +37,12 @@ public class TrieFloatField extends TrieField implements FloatValueFieldType {
  {
    type=TrieTypes.FLOAT;
  }
+
+ @Override
+ public Object toNativeType(Object val) {
+   if(val==null) return null;
+   if (val instanceof Number) return ((Number) val).floatValue();
+   if (val instanceof String) return Float.parseFloat((String) val);
+   return super.toNativeType(val);
+ }
}
@@ -31,4 +31,17 @@ public class TrieIntField extends TrieField implements IntValueFieldType {
  {
    type=TrieTypes.INTEGER;
  }
+
+ @Override
+ public Object toNativeType(Object val) {
+   if(val==null) return null;
+   if (val instanceof Number) return ((Number) val).intValue();
+   try {
+     if (val instanceof String) return Integer.parseInt((String) val);
+   } catch (NumberFormatException e) {
+     Float v = Float.parseFloat((String) val);
+     return v.intValue();
+   }
+   return super.toNativeType(val);
+ }
}
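These three overrides funnel differently-typed client values to the field's native Java type, so the atomic-update remove can match stored values whether the request carried Strings, Longs, or Floats. A standalone sketch of the integer coercion path, extracted into a plain helper purely for illustration (not the Solr classes themselves):

// Minimal sketch of the coercion performed by TrieIntField.toNativeType above.
public class ToNativeTypeDemo {
  static Object toNativeInt(Object val) {
    if (val == null) return null;
    if (val instanceof Number) return ((Number) val).intValue();
    try {
      if (val instanceof String) return Integer.parseInt((String) val);
    } catch (NumberFormatException e) {
      // "222.0" style input: fall back to parsing as a float, then truncate.
      return (int) Float.parseFloat((String) val);
    }
    return val; // anything else passes through, like the FieldType base method
  }

  public static void main(String[] args) {
    System.out.println(toNativeInt("222"));   // 222 (String -> Integer)
    System.out.println(toNativeInt(222L));    // 222 (Long -> Integer)
    System.out.println(toNativeInt("222.0")); // 222 (via the float fallback)
  }
}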
@@ -65,7 +65,7 @@ public class Insanity {
      for (FieldInfo fi : in.getFieldInfos()) {
        if (fi.name.equals(insaneField)) {
          filteredInfos.add(new FieldInfo(fi.name, fi.isIndexed(), fi.number, fi.hasVectors(), fi.omitsNorms(),
-                                         fi.hasPayloads(), fi.getIndexOptions(), null, fi.getNormType(), -1, null));
+                                         fi.hasPayloads(), fi.getIndexOptions(), null, -1, null));
        } else {
          filteredInfos.add(fi);
        }
@@ -37,7 +37,6 @@ import java.util.concurrent.locks.ReentrantLock;

 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.cloud.CloudDescriptor;
@@ -75,6 +74,7 @@ import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
+import org.apache.solr.schema.TrieDateField;
 import org.apache.solr.update.AddUpdateCommand;
 import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.DeleteUpdateCommand;
@@ -1144,7 +1144,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
          break;
        case "remove":
          updateField = true;
-         doRemove(oldDoc, sif, fieldVal);
+         doRemove(oldDoc, sif, fieldVal, schema);
          break;
        case "inc":
          updateField = true;
@@ -1201,20 +1201,28 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
    }
  }

- private void doRemove(SolrInputDocument oldDoc, SolrInputField sif, Object fieldVal) {
+ private boolean doRemove(SolrInputDocument oldDoc, SolrInputField sif, Object fieldVal, IndexSchema schema) {
    final String name = sif.getName();
    SolrInputField existingField = oldDoc.get(name);
-   if (existingField != null) {
+   if(existingField == null) return false;
+   SchemaField sf = schema.getField(name);
+   int oldSize = existingField.getValueCount();
+
+   if (sf != null) {
      final Collection<Object> original = existingField.getValues();
      if (fieldVal instanceof Collection) {
-       original.removeAll((Collection) fieldVal);
+       for (Object object : (Collection)fieldVal){
+         original.remove(sf.getType().toNativeType(object));
+       }
      } else {
-       original.remove(fieldVal);
+       original.remove(sf.getType().toNativeType(fieldVal));
      }

      oldDoc.setField(name, original);
    }

+   return oldSize > existingField.getValueCount();
  }

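With doRemove now schema-aware, each requested value is coerced through FieldType.toNativeType before comparison, so for example Long values can remove entries from an int field. A hedged SolrJ-style sketch of building such a request (the field names mirror the test schema added below; sending the document to a live server is elided):

import java.util.Arrays;
import java.util.Collections;
import org.apache.solr.common.SolrInputDocument;

// Sketch: an atomic-update document whose "remove" values are Longs even
// though "intRemove" is an int field; the schema-aware coercion in doRemove
// is what makes the match succeed.
public class AtomicRemoveSketch {
  public static void main(String[] args) {
    SolrInputDocument doc = new SolrInputDocument();
    doc.setField("id", "1001");
    // A map value {"remove": [...]} marks this as an atomic remove operation.
    doc.setField("intRemove", Collections.singletonMap("remove", Arrays.asList(222L, 333L)));
    System.out.println(doc);
    // A real client would then add the doc and commit, as the tests below do.
  }
}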
@@ -518,6 +518,9 @@
  <field name="timestamp" type="date" indexed="true" stored="true" docValues="true" default="NOW" multiValued="false"/>
  <field name="multiDefault" type="string" indexed="true" stored="true" default="muLti-Default" multiValued="true"/>
  <field name="intDefault" type="int" indexed="true" stored="true" default="42" multiValued="false"/>
+ <field name="intRemove" type="int" indexed="true" stored="true" multiValued="true"/>
+ <field name="dateRemove" type="date" indexed="true" stored="true" multiValued="true"/>
+ <field name="floatRemove" type="float" indexed="true" stored="true" multiValued="true"/>

  <field name="nopositionstext" type="nopositions" indexed="true" stored="true"/>

@@ -1,14 +1,18 @@
 package org.apache.solr.update.processor;

-import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.schema.TrieDateField;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;

-import java.util.ArrayList;
-import java.util.List;
+import com.google.common.collect.ImmutableMap;

 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@ -107,6 +111,710 @@ public class AtomicUpdatesTest extends SolrTestCaseJ4 {
|
||||||
assertQ(req("q", "cat:aaa", "indent", "true"), "//result[@numFound = '3']");
|
assertQ(req("q", "cat:aaa", "indent", "true"), "//result[@numFound = '3']");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRemoveInteger() throws Exception {
|
||||||
|
SolrInputDocument doc;
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "333", "333", "444"});
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1002");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "222", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1020");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
List<Long> removeList = new ArrayList<Long>();
|
||||||
|
removeList.add(new Long(222));
|
||||||
|
removeList.add(new Long(333));
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
removeList = new ArrayList<Long>();
|
||||||
|
removeList.add(new Long(222));
|
||||||
|
removeList.add(new Long(333));
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '1']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", 111)); //behavior when hitting Solr directly
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRemoveIntegerInDocSavedWithInteger() throws Exception {
|
||||||
|
SolrInputDocument doc;
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", new Integer[]{111, 222, 333, 333, 444});
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1002");
|
||||||
|
doc.setField("intRemove", new Integer[]{111, 222, 222, 333, 444});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1020");
|
||||||
|
doc.setField("intRemove", new Integer[]{111, 333, 444});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
doc.setField("intRemove", new Integer[]{111, 222, 444});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
List<Long> removeList = new ArrayList<Long>();
|
||||||
|
removeList.add(new Long(222));
|
||||||
|
removeList.add(new Long(333));
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
removeList = new ArrayList<Long>();
|
||||||
|
removeList.add(new Long(222));
|
||||||
|
removeList.add(new Long(333));
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '1']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", 111)); //behavior when hitting Solr directly
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRemoveIntegerUsingStringType() throws Exception {
|
||||||
|
SolrInputDocument doc;
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "333", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1002");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "222", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1020");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
List<String> removeList = new ArrayList<String>();
|
||||||
|
removeList.add("222");
|
||||||
|
removeList.add("333");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
removeList = new ArrayList<String>();
|
||||||
|
removeList.add("222");
|
||||||
|
removeList.add("333");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '1']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", "111")); //behavior when hitting Solr directly
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRemoveIntegerUsingLongType() throws Exception {
|
||||||
|
SolrInputDocument doc;
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", new Long[]{111L, 222L, 333L, 333L, 444L});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1002");
|
||||||
|
doc.setField("intRemove", new Long[]{111L, 222L, 222L, 333L, 444L});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1020");
|
||||||
|
doc.setField("intRemove", new Long[]{111L, 333L, 444L});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
doc.setField("intRemove", new Long[]{111L, 222L, 444L});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
List<Long> removeList = new ArrayList<Long>();
|
||||||
|
removeList.add(222L);
|
||||||
|
removeList.add(333L);
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
removeList = new ArrayList<Long>();
|
||||||
|
removeList.add(222L);
|
||||||
|
removeList.add(333L);
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '1']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", 111L)); //behavior when hitting Solr directly
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRemoveIntegerUsingFloatType() throws Exception {
|
||||||
|
SolrInputDocument doc;
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
// add with float in integer field
|
||||||
|
// doc.setField("id", "1001");
|
||||||
|
// doc.setField("intRemove", new Float[]{111.10F, 222.20F, 333.30F, 333.30F, 444.40F});
|
||||||
|
// assertU(adoc(doc));
|
||||||
|
//
|
||||||
|
// doc = new SolrInputDocument();
|
||||||
|
// doc.setField("id", "1002");
|
||||||
|
// doc.setField("intRemove", new Float[]{111.10F, 222.20F, 222.20F, 333.30F, 444.40F});
|
||||||
|
// assertU(adoc(doc));
|
||||||
|
//
|
||||||
|
// doc = new SolrInputDocument();
|
||||||
|
// doc.setField("id", "1020");
|
||||||
|
// doc.setField("intRemove", new Float[]{111.10F, 333.30F, 444.40F});
|
||||||
|
// assertU(adoc(doc));
|
||||||
|
//
|
||||||
|
// doc = new SolrInputDocument();
|
||||||
|
// doc.setField("id", "1021");
|
||||||
|
// doc.setField("intRemove", new Float[]{111.10F, 222.20F, 444.40F});
|
||||||
|
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "333", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1002");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "222", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1020");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "333", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
doc.setField("intRemove", new String[]{"111", "222", "444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
List<Float> removeList = new ArrayList<Float>();
|
||||||
|
removeList.add(222.20F);
|
||||||
|
removeList.add(333.30F);
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
removeList = new ArrayList<Float>();
|
||||||
|
removeList.add(222.20F);
|
||||||
|
removeList.add(333.30F);
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '1']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", 111L)); //behavior when hitting Solr directly
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testRemoveIntegerUsingDoubleType() throws Exception {
|
||||||
|
SolrInputDocument doc;
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", new String[]{"11111111", "22222222", "33333333", "33333333", "44444444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1002");
|
||||||
|
doc.setField("intRemove", new String[]{"11111111", "22222222", "22222222", "33333333", "44444444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1020");
|
||||||
|
doc.setField("intRemove", new String[]{"11111111", "33333333", "44444444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
doc.setField("intRemove", new String[]{"11111111", "22222222", "44444444"});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
List<Double> removeList = new ArrayList<Double>();
|
||||||
|
removeList.add(22222222D);
|
||||||
|
removeList.add(33333333D);
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '2']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1021");
|
||||||
|
removeList = new ArrayList<Double>();
|
||||||
|
removeList.add(22222222D);
|
||||||
|
removeList.add(33333333D);
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '1']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "1001");
|
||||||
|
doc.setField("intRemove", ImmutableMap.of("remove", 11111111D)); //behavior when hitting Solr directly
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "intRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "intRemove:11111111", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
}
|
||||||
|
|
||||||
|
  @Test
  public void testRemoveDateUsingStringType() throws Exception {
    SolrInputDocument doc;

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-04T12:00:00Z"});
    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10002");
    doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-04T12:00:00Z"});
    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10020");
    doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-04T12:00:00Z"});
    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10021");
    doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-04T12:00:00Z"});
    assertU(adoc(doc));

    assertU(commit());

    assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    List<String> removeList = new ArrayList<String>();
    removeList.add("2014-09-02T12:00:00Z");
    removeList.add("2014-09-03T12:00:00Z");

    doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '2']");

    doc = new SolrInputDocument();
    doc.setField("id", "10021");
    removeList = new ArrayList<String>();
    removeList.add("2014-09-02T12:00:00Z");
    removeList.add("2014-09-03T12:00:00Z");
    doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '1']");

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    doc.setField("dateRemove", ImmutableMap.of("remove", "2014-09-01T12:00:00Z")); //behavior when hitting Solr directly

    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "dateRemove:\"2014-09-01T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");
  }
@Ignore("Remove Date is not supported in other formats than UTC")
|
||||||
|
@Test
|
||||||
|
public void testRemoveDateUsingDateType() throws Exception {
|
||||||
|
SolrInputDocument doc;
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "10001");
|
||||||
|
TrieDateField trieDF = new TrieDateField();
|
||||||
|
Date tempDate = trieDF.parseMath(null, "2014-02-01T12:00:00Z");
|
||||||
|
doc.setField("dateRemove", new Date[]{trieDF.parseMath(null, "2014-02-01T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-07-02T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-03T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-03T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-04T12:00:00Z")
|
||||||
|
});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "10002");
|
||||||
|
doc.setField("dateRemove", new Date[]{trieDF.parseMath(null, "2014-02-01T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-07-02T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-02T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-03T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-04T12:00:00Z")
|
||||||
|
});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "10020");
|
||||||
|
doc.setField("dateRemove", new Date[]{trieDF.parseMath(null, "2014-02-01T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-03T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-04T12:00:00Z")
|
||||||
|
});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "10021");
|
||||||
|
doc.setField("dateRemove", new Date[]{trieDF.parseMath(null, "2014-02-01T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-02T12:00:00Z"),
|
||||||
|
trieDF.parseMath(null, "2014-02-04T12:00:00Z")
|
||||||
|
});
|
||||||
|
assertU(adoc(doc));
|
||||||
|
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
String dateString = trieDF.parseMath(null, "2014-02-02T12:00:00Z").toString();
|
||||||
|
// assertQ(req("q", "dateRemove:"+URLEncoder.encode(dateString, "UTF-8"), "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
// assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
// assertQ(req("q", "dateRemove:"+dateString, "indent", "true"), "//result[@numFound = '3']"); //Sun Feb 02 10:00:00 FNT 2014
|
||||||
|
assertQ(req("q", "dateRemove:\"Sun Feb 02 10:00:00 FNT 2014\"", "indent", "true"), "//result[@numFound = '3']"); //Sun Feb 02 10:00:00 FNT 2014
|
||||||
|
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "10001");
|
||||||
|
List<Date> removeList = new ArrayList<Date>();
|
||||||
|
removeList.add(trieDF.parseMath(null, "2014-09-02T12:00:00Z"));
|
||||||
|
removeList.add(trieDF.parseMath(null, "2014-09-03T12:00:00Z"));
|
||||||
|
|
||||||
|
doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '2']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "10021");
|
||||||
|
removeList = new ArrayList<Date>();
|
||||||
|
removeList.add(trieDF.parseMath(null, "2014-09-02T12:00:00Z"));
|
||||||
|
removeList.add(trieDF.parseMath(null, "2014-09-03T12:00:00Z"));
|
||||||
|
doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '1']");
|
||||||
|
|
||||||
|
doc = new SolrInputDocument();
|
||||||
|
doc.setField("id", "10001");
|
||||||
|
doc.setField("dateRemove", ImmutableMap.of("remove", trieDF.parseMath(null, "2014-09-01T12:00:00Z"))); //behavior when hitting Solr directly
|
||||||
|
|
||||||
|
assertU(adoc(doc));
|
||||||
|
assertU(commit());
|
||||||
|
|
||||||
|
assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
|
||||||
|
assertQ(req("q", "dateRemove:\"2014-09-01T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");
|
||||||
|
}
|
||||||
|
|
||||||
|
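A possible reading of the @Ignore above: the commented-out assertions and the hard-coded "Sun Feb 02 10:00:00 FNT 2014" literal suggest the test relies on Date.toString(), which renders in the JVM's default timezone and locale rather than the UTC form Solr indexes, so the query only matches on machines configured for UTC. A small sketch, using only standard JDK APIs and not taken from the patch, of formatting a Date as the portable ISO-8601 UTC literal instead:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

class SolrDates {
  // Render a Date as the ISO-8601 UTC literal Solr expects in query strings,
  // independent of the JVM's default timezone and locale.
  static String toSolrQueryLiteral(Date date) {
    SimpleDateFormat iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT);
    iso8601.setTimeZone(TimeZone.getTimeZone("UTC"));
    return iso8601.format(date); // e.g. "2014-02-02T12:00:00Z"
  }
}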
  @Test
  public void testRemoveFloatUsingFloatType() throws Exception {
    SolrInputDocument doc;

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    doc.setField("floatRemove", new Float[]{111.111F, 222.222F, 333.333F, 333.333F, 444.444F});

    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10002");
    doc.setField("floatRemove", new Float[]{111.111F, 222.222F, 222.222F, 333.333F, 444.444F});
    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10020");
    doc.setField("floatRemove", new Float[]{111.111F, 333.333F, 444.444F});
    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10021");
    doc.setField("floatRemove", new Float[]{111.111F, 222.222F, 444.444F});
    assertU(adoc(doc));

    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '3']");

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    List<Float> removeList = new ArrayList<Float>();
    removeList.add(222.222F);
    removeList.add(333.333F);

    doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '2']");

    doc = new SolrInputDocument();
    doc.setField("id", "10021");
    removeList = new ArrayList<Float>();
    removeList.add(222.222F);
    removeList.add(333.333F);
    doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '1']");

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    doc.setField("floatRemove", ImmutableMap.of("remove", "111.111")); //behavior when hitting Solr directly

    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"111.111\"", "indent", "true"), "//result[@numFound = '3']");
  }
  @Test
  public void testRemoveFloatUsingStringType() throws Exception {
    SolrInputDocument doc;

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    doc.setField("floatRemove", new String[]{"111.111", "222.222", "333.333", "333.333", "444.444"});

    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10002");
    doc.setField("floatRemove", new String[]{"111.111", "222.222", "222.222", "333.333", "444.444"});
    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10020");
    doc.setField("floatRemove", new String[]{"111.111", "333.333", "444.444"});
    assertU(adoc(doc));

    doc = new SolrInputDocument();
    doc.setField("id", "10021");
    doc.setField("floatRemove", new String[]{"111.111", "222.222", "444.444"});
    assertU(adoc(doc));

    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '3']");

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    List<String> removeList = new ArrayList<String>();
    removeList.add("222.222");
    removeList.add("333.333");

    doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '2']");

    doc = new SolrInputDocument();
    doc.setField("id", "10021");
    removeList = new ArrayList<String>();
    removeList.add("222.222");
    removeList.add("333.333");
    doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK
    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '1']");

    doc = new SolrInputDocument();
    doc.setField("id", "10001");
    doc.setField("floatRemove", ImmutableMap.of("remove", "111.111")); //behavior when hitting Solr directly

    assertU(adoc(doc));
    assertU(commit());

    assertQ(req("q", "floatRemove:*", "indent", "true"), "//result[@numFound = '4']");
    assertQ(req("q", "floatRemove:\"111.111\"", "indent", "true"), "//result[@numFound = '3']");
  }
  @Test
  public void testAdd() throws Exception {
    SolrInputDocument doc = new SolrInputDocument();
@@ -96,6 +96,7 @@ public class CloudSolrServer extends SolrServer {
   private final LBHttpSolrServer lbServer;
   private final boolean shutdownLBHttpSolrServer;
   private HttpClient myClient;
+  private final boolean clientIsInternal;
   //no of times collection state to be reloaded if stale state error is received
   private static final int MAX_STALE_RETRIES = 5;
   Random rand = new Random();
@@ -177,6 +178,7 @@ public class CloudSolrServer extends SolrServer {
    */
   public CloudSolrServer(String zkHost) {
     this.zkHost = zkHost;
+    this.clientIsInternal = true;
     this.myClient = HttpClientUtil.createClient(null);
     this.lbServer = new LBHttpSolrServer(myClient);
     this.lbServer.setRequestWriter(new BinaryRequestWriter());
@@ -184,7 +186,41 @@ public class CloudSolrServer extends SolrServer {
     this.updatesToLeaders = true;
     shutdownLBHttpSolrServer = true;
     lbServer.addQueryParams(STATE_VERSION);
+  }
+
+  /**
+   * Create a new client object that connects to Zookeeper and is always aware
+   * of the SolrCloud state. If there is a fully redundant Zookeeper quorum and
+   * SolrCloud has enough replicas for every shard in a collection, there is no
+   * single point of failure. Updates will be sent to shard leaders by default.
+   *
+   * @param zkHost
+   *          The client endpoint of the zookeeper quorum containing the cloud
+   *          state. The full specification for this string is one or more comma
+   *          separated HOST:PORT values, followed by an optional chroot value
+   *          that starts with a forward slash. Using a chroot allows multiple
+   *          applications to coexist in one ensemble. For full details, see the
+   *          Zookeeper documentation. Some examples:
+   *          <p/>
+   *          "host1:2181"
+   *          <p/>
+   *          "host1:2181,host2:2181,host3:2181/mysolrchroot"
+   *          <p/>
+   *          "zoo1.example.com:2181,zoo2.example.com:2181,zoo3.example.com:2181"
+   * @param httpClient
+   *          the {@link HttpClient} instance to be used for all requests. The
+   *          provided httpClient should use a multi-threaded connection manager.
+   */
+  public CloudSolrServer(String zkHost, HttpClient httpClient) {
+    this.zkHost = zkHost;
+    this.clientIsInternal = httpClient == null;
+    this.myClient = httpClient == null ? HttpClientUtil.createClient(null) : httpClient;
+    this.lbServer = new LBHttpSolrServer(myClient);
+    this.lbServer.setRequestWriter(new BinaryRequestWriter());
+    this.lbServer.setParser(new BinaryResponseParser());
+    this.updatesToLeaders = true;
+    shutdownLBHttpSolrServer = true;
+    lbServer.addQueryParams(STATE_VERSION);
   }
 
   /**
@@ -206,7 +242,31 @@ public class CloudSolrServer extends SolrServer {
    * @see #CloudSolrServer(String)
    */
   public CloudSolrServer(Collection<String> zkHosts, String chroot) {
+    this(zkHosts, chroot, null);
+  }
+
+  /**
+   * Create a new client object using multiple string values in a Collection
+   * instead of a standard zkHost connection string. Note that this method will
+   * not be used if there is only one String argument - that will use
+   * {@link #CloudSolrServer(String)} instead.
+   *
+   * @param zkHosts
+   *          A Java Collection (List, Set, etc) of HOST:PORT strings, one for
+   *          each host in the zookeeper ensemble. Note that with certain
+   *          Collection types like HashSet, the order of hosts in the final
+   *          connect string may not be in the same order you added them.
+   * @param chroot
+   *          A chroot value for zookeeper, starting with a forward slash. If no
+   *          chroot is required, use null.
+   * @param httpClient
+   *          the {@link HttpClient} instance to be used for all requests. The provided httpClient should use a
+   *          multi-threaded connection manager.
+   * @throws IllegalArgumentException
+   *           if the chroot value does not start with a forward slash.
+   * @see #CloudSolrServer(String)
+   */
+  public CloudSolrServer(Collection<String> zkHosts, String chroot, HttpClient httpClient) {
     StringBuilder zkBuilder = new StringBuilder();
     int lastIndexValue = zkHosts.size() - 1;
     int i = 0;
@@ -230,7 +290,8 @@ public class CloudSolrServer extends SolrServer {
     log.info("Final constructed zkHost string: " + zkBuilder.toString());
 
     this.zkHost = zkBuilder.toString();
-    this.myClient = HttpClientUtil.createClient(null);
+    this.clientIsInternal = httpClient == null;
+    this.myClient = httpClient == null ? HttpClientUtil.createClient(null) : httpClient;
     this.lbServer = new LBHttpSolrServer(myClient);
     this.lbServer.setRequestWriter(new BinaryRequestWriter());
     this.lbServer.setParser(new BinaryResponseParser());
@@ -246,8 +307,23 @@ public class CloudSolrServer extends SolrServer {
    * @see #CloudSolrServer(String) for full description and details on zkHost
    */
   public CloudSolrServer(String zkHost, boolean updatesToLeaders) {
+    this(zkHost, updatesToLeaders, null);
+  }
+
+  /**
+   * @param zkHost
+   *          A zookeeper client endpoint.
+   * @param updatesToLeaders
+   *          If true, sends updates only to shard leaders.
+   * @param httpClient
+   *          the {@link HttpClient} instance to be used for all requests. The provided httpClient should use a
+   *          multi-threaded connection manager.
+   * @see #CloudSolrServer(String) for full description and details on zkHost
+   */
+  public CloudSolrServer(String zkHost, boolean updatesToLeaders, HttpClient httpClient) {
     this.zkHost = zkHost;
-    this.myClient = HttpClientUtil.createClient(null);
+    this.clientIsInternal = httpClient == null;
+    this.myClient = httpClient == null ? HttpClientUtil.createClient(null) : httpClient;
     this.lbServer = new LBHttpSolrServer(myClient);
     this.lbServer.setRequestWriter(new BinaryRequestWriter());
     this.lbServer.setParser(new BinaryResponseParser());
@@ -289,8 +365,8 @@ public class CloudSolrServer extends SolrServer {
     this.lbServer = lbServer;
     this.updatesToLeaders = updatesToLeaders;
     shutdownLBHttpSolrServer = false;
+    this.clientIsInternal = false;
     lbServer.addQueryParams(STATE_VERSION);
 
   }
 
   public ResponseParser getParser() {
@@ -978,7 +1054,7 @@ public class CloudSolrServer extends SolrServer {
       lbServer.shutdown();
     }
 
-    if (myClient!=null) {
+    if (clientIsInternal && myClient!=null) {
       myClient.getConnectionManager().shutdown();
     }
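Taken together, the CloudSolrServer hunks above let a caller supply a shared HttpClient, and the new clientIsInternal flag keeps shutdown() from tearing down a client the caller owns. A minimal usage sketch under that reading; the zkHost string, timeout value, and class name are placeholders, not from the patch:

import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.impl.CloudSolrServer;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.common.params.ModifiableSolrParams;

public class CustomClientExample {
  public static void main(String[] args) throws Exception {
    // Build an HttpClient with custom settings; CloudSolrServer will reuse it
    // instead of creating its own, so clientIsInternal stays false.
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(HttpClientUtil.PROP_SO_TIMEOUT, 1000); // socket timeout, ms
    HttpClient client = HttpClientUtil.createClient(params);

    CloudSolrServer server = new CloudSolrServer("zk1:2181,zk2:2181,zk3:2181", client);
    try {
      // ... issue requests through 'server' ...
    } finally {
      server.shutdown();                        // leaves the shared client alive
      client.getConnectionManager().shutdown(); // caller owns the client's lifecycle
    }
  }
}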
@@ -45,8 +45,9 @@ import java.util.*;
  * Do <b>NOT</b> use this class for indexing in master/slave scenarios since documents must be sent to the
  * correct master; no inter-node routing is done.
  *
- * In SolrCloud (leader/replica) scenarios, this class may be used for updates since updates will be forwarded
- * to the appropriate leader.
+ * In SolrCloud (leader/replica) scenarios, it is usually better to use
+ * {@link org.apache.solr.client.solrj.impl.CloudSolrServer}, but this class may be used
+ * for updates because the server will forward them to the appropriate leader.
  *
  * Also see the <a href="http://wiki.apache.org/solr/LBHttpSolrServer">wiki</a> page.
  *
@@ -631,6 +632,9 @@ public class LBHttpSolrServer extends SolrServer {
     };
   }
 
+  /**
+   * Return the HttpClient this instance uses.
+   */
   public HttpClient getHttpClient() {
     return httpClient;
   }
@@ -639,10 +643,24 @@ public class LBHttpSolrServer extends SolrServer {
     return parser;
   }
 
+  /**
+   * Changes the {@link ResponseParser} that will be used for the internal
+   * SolrServer objects.
+   *
+   * @param parser Default Response Parser chosen to parse the response if the parser
+   *               were not specified as part of the request.
+   * @see org.apache.solr.client.solrj.SolrRequest#getResponseParser()
+   */
   public void setParser(ResponseParser parser) {
     this.parser = parser;
   }
 
+  /**
+   * Changes the {@link RequestWriter} that will be used for the internal
+   * SolrServer objects.
+   *
+   * @param requestWriter Default RequestWriter, used to encode requests sent to the server.
+   */
   public void setRequestWriter(RequestWriter requestWriter) {
     this.requestWriter = requestWriter;
   }
@@ -32,6 +32,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
+import org.apache.http.client.HttpClient;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -122,6 +123,7 @@ public class CloudSolrServerTest extends AbstractFullDistribZkTestBase {
   public void doTest() throws Exception {
     allTests();
     stateVersionParamTest();
+    customHttpClientTest();
   }
 
   private void allTests() throws Exception {
@@ -439,4 +441,20 @@ public class CloudSolrServerTest extends AbstractFullDistribZkTestBase {
     // see SOLR-6146 - this test will fail by virtue of the zkClient tracking performed
     // in the afterClass method of the base class
   }
+
+  public void customHttpClientTest() {
+    CloudSolrServer server = null;
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.set(HttpClientUtil.PROP_SO_TIMEOUT, 1000);
+    HttpClient client = null;
+
+    try {
+      client = HttpClientUtil.createClient(params);
+      server = new CloudSolrServer(zkServer.getZkAddress(), client);
+      assertTrue(server.getLbServer().getHttpClient() == client);
+    } finally {
+      server.shutdown();
+      client.getConnectionManager().shutdown();
+    }
+  }
 }
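One caveat with the test's cleanup: if HttpClientUtil.createClient threw, both server and client would still be null in the finally block, so the test would die with a NullPointerException instead of surfacing the real failure. A null-guarded variant, purely illustrative and not part of the commit; it assumes the same test scaffolding (zkServer, assertTrue) as customHttpClientTest above:

  // Hypothetical null-guarded variant of the test method above.
  public void customHttpClientTestGuarded() {
    CloudSolrServer server = null;
    HttpClient client = null;
    try {
      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set(HttpClientUtil.PROP_SO_TIMEOUT, 1000);
      client = HttpClientUtil.createClient(params);
      server = new CloudSolrServer(zkServer.getZkAddress(), client);
      assertTrue(server.getLbServer().getHttpClient() == client);
    } finally {
      if (server != null) server.shutdown();                        // guard: createClient may have thrown
      if (client != null) client.getConnectionManager().shutdown(); // guard: client may never have been built
    }
  }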