mirror of https://github.com/apache/lucene.git
Merge branch 'master' into feature/autoscaling
# Conflicts:
#	solr/CHANGES.txt
commit b8c86d24d4
@@ -4,6 +4,16 @@ For more information on past and future Lucene versions, please see:
 http://s.apache.org/luceneversions
 
+======================= Lucene 8.0.0 =======================
+
+Changes in Runtime Behavior
+
+* LUCENE-7837: Indices that were created before the previous major version
+  will now fail to open even if they have been merged with the previous major
+  version. (Adrien Grand)
+
+======================= Lucene 7.1.0 =======================
+
+(No Changes)
+
 ======================= Lucene 7.0.0 =======================
 
@@ -31,6 +41,10 @@ New Features
   updates concurrently, giving sizable speedups in update-heavy
   indexing use cases (Simon Willnauer, Mike McCandless)
 
+* LUCENE-7823: Pure query based naive bayes classifier using BM25 scores (Tommaso Teofili)
+
+* LUCENE-7838: Knn classifier based on fuzzified term queries (Tommaso Teofili)
+
 API Changes
 
 * LUCENE-2605: Classic QueryParser no longer splits on whitespace by default.

@@ -104,6 +118,9 @@ API Changes
   name contains "slow" in order to cleary indicate that they would usually be a
   bad choice. (Adrien Grand)
 
+* LUCENE-7899: FieldValueQuery is renamed to DocValuesFieldExistsQuery
+  (Adrien Grand, Mike McCandless)
+
 Bug Fixes
 
 * LUCENE-7626: IndexWriter will no longer accept broken token offsets

@@ -179,6 +196,9 @@ Other
 
 * LUCENE-5822: Convert README to Markdown (Jason Gerlowski via Mike Drob)
 
+* LUCENE-7773: Remove unused/deprecated token types from StandardTokenizer.
+  (Ahmet Arslan via Steve Rowe)
+
 ======================= Lucene 6.7.0 =======================
 
 New Features
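To make the LUCENE-7837 entry above concrete: opening a too-old index now fails up front, even if it was merged (rewritten) with the previous major version. A minimal, hedged sketch of the caller-visible behavior; the index path is hypothetical, and DirectoryReader/FSDirectory are the standard entry points:

    import java.nio.file.Paths;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexFormatTooOldException;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class OpenOldIndex {
      public static void main(String[] args) throws Exception {
        // "/old-index" is a hypothetical path to an index created two major versions ago.
        try (Directory dir = FSDirectory.open(Paths.get("/old-index"))) {
          DirectoryReader reader = DirectoryReader.open(dir);  // fails fast on too-old indices
          reader.close();
        } catch (IndexFormatTooOldException e) {
          // Since LUCENE-7837 this is thrown even for indices that were
          // merged with the previous major version.
          System.err.println("Index too old: " + e.getMessage());
        }
      }
    }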
@@ -19,7 +19,7 @@ import re
 # for inclusion in HTMLStripCharFilter.jflex.
 
 def main():
-  print get_apache_license()
+  print(get_apache_license())
   codes = {}
   regex = re.compile(r'\s*<!ENTITY\s+(\S+)\s+"&(?:#38;)?#(\d+);"')
   for line in get_entity_text().split('\n'):

@@ -38,51 +38,51 @@ def main():
     new_entry = ('"%s"' if first_entry else ' | "%s"') % key
     first_entry = False
     if len(output_line) + len(new_entry) >= 80:
-      print output_line
+      print(output_line)
       output_line = ' '
     output_line += new_entry
     if key in ('quot','copy','gt','lt','reg','amp'):
       new_entry = ' | "%s"' % key.upper()
       if len(output_line) + len(new_entry) >= 80:
-        print output_line
+        print(output_line)
        output_line = ' '
      output_line += new_entry
-  print output_line, ')'
+  print(output_line, ')')
 
-  print '%{'
-  print ' private static final Map<String,String> upperCaseVariantsAccepted'
-  print ' = new HashMap<>();'
-  print ' static {'
-  print ' upperCaseVariantsAccepted.put("quot", "QUOT");'
-  print ' upperCaseVariantsAccepted.put("copy", "COPY");'
-  print ' upperCaseVariantsAccepted.put("gt", "GT");'
-  print ' upperCaseVariantsAccepted.put("lt", "LT");'
-  print ' upperCaseVariantsAccepted.put("reg", "REG");'
-  print ' upperCaseVariantsAccepted.put("amp", "AMP");'
-  print ' }'
-  print ' private static final CharArrayMap<Character> entityValues'
-  print ' = new CharArrayMap<>(%i, false);' % len(keys)
-  print ' static {'
-  print ' String[] entities = {'
+  print('%{')
+  print(' private static final Map<String,String> upperCaseVariantsAccepted')
+  print(' = new HashMap<>();')
+  print(' static {')
+  print(' upperCaseVariantsAccepted.put("quot", "QUOT");')
+  print(' upperCaseVariantsAccepted.put("copy", "COPY");')
+  print(' upperCaseVariantsAccepted.put("gt", "GT");')
+  print(' upperCaseVariantsAccepted.put("lt", "LT");')
+  print(' upperCaseVariantsAccepted.put("reg", "REG");')
+  print(' upperCaseVariantsAccepted.put("amp", "AMP");')
+  print(' }')
+  print(' private static final CharArrayMap<Character> entityValues')
+  print(' = new CharArrayMap<>(%i, false);' % len(keys))
+  print(' static {')
+  print(' String[] entities = {')
   output_line = ' '
   for key in keys:
     new_entry = ' "%s", "%s",' % (key, codes[key])
     if len(output_line) + len(new_entry) >= 80:
-      print output_line
+      print(output_line)
       output_line = ' '
     output_line += new_entry
-  print output_line[:-1]
-  print ' };'
-  print ' for (int i = 0 ; i < entities.length ; i += 2) {'
-  print ' Character value = entities[i + 1].charAt(0);'
-  print ' entityValues.put(entities[i], value);'
-  print ' String upperCaseVariant = upperCaseVariantsAccepted.get(entities[i]);'
-  print ' if (upperCaseVariant != null) {'
-  print ' entityValues.put(upperCaseVariant, value);'
-  print ' }'
-  print ' }'
-  print " }"
-  print "%}"
+  print(output_line[:-1])
+  print(' };')
+  print(' for (int i = 0 ; i < entities.length ; i += 2) {')
+  print(' Character value = entities[i + 1].charAt(0);')
+  print(' entityValues.put(entities[i], value);')
+  print(' String upperCaseVariant = upperCaseVariantsAccepted.get(entities[i]);')
+  print(' if (upperCaseVariant != null) {')
+  print(' entityValues.put(upperCaseVariant, value);')
+  print(' }')
+  print(' }')
+  print(" }")
+  print("%}")
 
 def get_entity_text():
   # The text below is taken verbatim from
@@ -16,6 +16,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 package org.apache.lucene.analysis.standard;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

@@ -342,17 +343,17 @@ class ClassicTokenizerImpl {
 
   /* user code: */
 
-  public static final int ALPHANUM = StandardTokenizer.ALPHANUM;
-  public static final int APOSTROPHE = StandardTokenizer.APOSTROPHE;
-  public static final int ACRONYM = StandardTokenizer.ACRONYM;
-  public static final int COMPANY = StandardTokenizer.COMPANY;
-  public static final int EMAIL = StandardTokenizer.EMAIL;
-  public static final int HOST = StandardTokenizer.HOST;
-  public static final int NUM = StandardTokenizer.NUM;
-  public static final int CJ = StandardTokenizer.CJ;
-  public static final int ACRONYM_DEP = StandardTokenizer.ACRONYM_DEP;
+  public static final int ALPHANUM = ClassicTokenizer.ALPHANUM;
+  public static final int APOSTROPHE = ClassicTokenizer.APOSTROPHE;
+  public static final int ACRONYM = ClassicTokenizer.ACRONYM;
+  public static final int COMPANY = ClassicTokenizer.COMPANY;
+  public static final int EMAIL = ClassicTokenizer.EMAIL;
+  public static final int HOST = ClassicTokenizer.HOST;
+  public static final int NUM = ClassicTokenizer.NUM;
+  public static final int CJ = ClassicTokenizer.CJ;
+  public static final int ACRONYM_DEP = ClassicTokenizer.ACRONYM_DEP;
 
-  public static final String [] TOKEN_TYPES = StandardTokenizer.TOKEN_TYPES;
+  public static final String [] TOKEN_TYPES = ClassicTokenizer.TOKEN_TYPES;
 
   public final int yychar()
   {
@@ -17,7 +17,6 @@
 
 package org.apache.lucene.analysis.standard;
 
-import java.io.Reader;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 
 /**

@@ -36,17 +35,17 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 
 %{
 
-  public static final int ALPHANUM = StandardTokenizer.ALPHANUM;
-  public static final int APOSTROPHE = StandardTokenizer.APOSTROPHE;
-  public static final int ACRONYM = StandardTokenizer.ACRONYM;
-  public static final int COMPANY = StandardTokenizer.COMPANY;
-  public static final int EMAIL = StandardTokenizer.EMAIL;
-  public static final int HOST = StandardTokenizer.HOST;
-  public static final int NUM = StandardTokenizer.NUM;
-  public static final int CJ = StandardTokenizer.CJ;
-  public static final int ACRONYM_DEP = StandardTokenizer.ACRONYM_DEP;
+  public static final int ALPHANUM = ClassicTokenizer.ALPHANUM;
+  public static final int APOSTROPHE = ClassicTokenizer.APOSTROPHE;
+  public static final int ACRONYM = ClassicTokenizer.ACRONYM;
+  public static final int COMPANY = ClassicTokenizer.COMPANY;
+  public static final int EMAIL = ClassicTokenizer.EMAIL;
+  public static final int HOST = ClassicTokenizer.HOST;
+  public static final int NUM = ClassicTokenizer.NUM;
+  public static final int CJ = ClassicTokenizer.CJ;
+  public static final int ACRONYM_DEP = ClassicTokenizer.ACRONYM_DEP;
 
-  public static final String [] TOKEN_TYPES = StandardTokenizer.TOKEN_TYPES;
+  public static final String [] TOKEN_TYPES = ClassicTokenizer.TOKEN_TYPES;
 
   public final int yychar()
   {
@@ -121,13 +121,12 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   @SuppressWarnings("unused")
   public void _testStandardConstants() {
     int x = StandardTokenizer.ALPHANUM;
     x = StandardTokenizer.APOSTROPHE;
     x = StandardTokenizer.ACRONYM;
     x = StandardTokenizer.COMPANY;
     x = StandardTokenizer.EMAIL;
     x = StandardTokenizer.HOST;
     x = StandardTokenizer.NUM;
     x = StandardTokenizer.CJ;
     x = StandardTokenizer.SOUTHEAST_ASIAN;
     x = StandardTokenizer.IDEOGRAPHIC;
     x = StandardTokenizer.HIRAGANA;
     x = StandardTokenizer.KATAKANA;
     x = StandardTokenizer.HANGUL;
     String[] y = StandardTokenizer.TOKEN_TYPES;
   }
@@ -40,56 +40,25 @@ public final class StandardTokenizer extends Tokenizer {
   /** A private instance of the JFlex-constructed scanner */
   private StandardTokenizerImpl scanner;
 
-  // TODO: how can we remove these old types?!
   /** Alpha/numeric token type */
-  public static final int ALPHANUM = 0;
-  /** @deprecated (3.1) */
-  @Deprecated
-  public static final int APOSTROPHE = 1;
-  /** @deprecated (3.1) */
-  @Deprecated
-  public static final int ACRONYM = 2;
-  /** @deprecated (3.1) */
-  @Deprecated
-  public static final int COMPANY = 3;
-  /** Email token type */
-  public static final int EMAIL = 4;
-  /** @deprecated (3.1) */
-  @Deprecated
-  public static final int HOST = 5;
+  public static final int ALPHANUM = 0;
   /** Numeric token type */
-  public static final int NUM = 6;
-  /** @deprecated (3.1) */
-  @Deprecated
-  public static final int CJ = 7;
-
-  /** @deprecated (3.1) */
-  @Deprecated
-  public static final int ACRONYM_DEP = 8;
-
+  public static final int NUM = 1;
   /** Southeast Asian token type */
-  public static final int SOUTHEAST_ASIAN = 9;
-  /** Idiographic token type */
-  public static final int IDEOGRAPHIC = 10;
+  public static final int SOUTHEAST_ASIAN = 2;
+  /** Ideographic token type */
+  public static final int IDEOGRAPHIC = 3;
   /** Hiragana token type */
-  public static final int HIRAGANA = 11;
+  public static final int HIRAGANA = 4;
   /** Katakana token type */
-  public static final int KATAKANA = 12;
-
+  public static final int KATAKANA = 5;
   /** Hangul token type */
-  public static final int HANGUL = 13;
+  public static final int HANGUL = 6;
 
   /** String token types that correspond to token type int constants */
   public static final String [] TOKEN_TYPES = new String [] {
     "<ALPHANUM>",
     "<APOSTROPHE>",
     "<ACRONYM>",
     "<COMPANY>",
     "<EMAIL>",
     "<HOST>",
     "<NUM>",
     "<CJ>",
     "<ACRONYM_DEP>",
     "<SOUTHEAST_ASIAN>",
     "<IDEOGRAPHIC>",
     "<HIRAGANA>",
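Since the int constants above are indexes into TOKEN_TYPES and were renumbered here, callers are better off comparing the type strings than hard-coding ints. A minimal sketch of reading token types off a StandardTokenizer (standard analysis APIs; the sample text is made up):

    import java.io.StringReader;
    import org.apache.lucene.analysis.standard.StandardTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.TypeAttribute;

    public class PrintTokenTypes {
      public static void main(String[] args) throws Exception {
        StandardTokenizer tokenizer = new StandardTokenizer();
        tokenizer.setReader(new StringReader("lucene 8 カタカナ"));
        CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
        TypeAttribute type = tokenizer.addAttribute(TypeAttribute.class);
        tokenizer.reset();
        while (tokenizer.incrementToken()) {
          // type.type() returns one of StandardTokenizer.TOKEN_TYPES,
          // e.g. "<ALPHANUM>", "<NUM>", "<KATAKANA>"
          System.out.println(term + " -> " + type.type());
        }
        tokenizer.end();
        tokenizer.close();
      }
    }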
@@ -16,6 +16,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 package org.apache.lucene.analysis.standard;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

@@ -435,7 +436,7 @@ public final class StandardTokenizerImpl {
    */
   public static final int SOUTH_EAST_ASIAN_TYPE = StandardTokenizer.SOUTHEAST_ASIAN;
 
-  /** Idiographic token type */
+  /** Ideographic token type */
   public static final int IDEOGRAPHIC_TYPE = StandardTokenizer.IDEOGRAPHIC;
 
   /** Hiragana token type */
@@ -82,7 +82,7 @@ ComplexContextEx = \p{LB:Complex_Context}
    */
   public static final int SOUTH_EAST_ASIAN_TYPE = StandardTokenizer.SOUTHEAST_ASIAN;
 
-  /** Idiographic token type */
+  /** Ideographic token type */
   public static final int IDEOGRAPHIC_TYPE = StandardTokenizer.IDEOGRAPHIC;
 
   /** Hiragana token type */
@@ -27,7 +27,7 @@ import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.search.ConstantScoreScorer;
 import org.apache.lucene.search.ConstantScoreWeight;
-import org.apache.lucene.search.FieldValueQuery;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;

@@ -84,7 +84,7 @@ abstract class SortedNumericDocValuesRangeQuery extends Query {
   @Override
   public Query rewrite(IndexReader reader) throws IOException {
     if (lowerValue == Long.MIN_VALUE && upperValue == Long.MAX_VALUE) {
-      return new FieldValueQuery(field);
+      return new DocValuesFieldExistsQuery(field);
     }
     return super.rewrite(reader);
   }
@@ -27,7 +27,7 @@ import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.search.ConstantScoreScorer;
 import org.apache.lucene.search.ConstantScoreWeight;
-import org.apache.lucene.search.FieldValueQuery;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;

@@ -95,7 +95,7 @@ abstract class SortedSetDocValuesRangeQuery extends Query {
   @Override
   public Query rewrite(IndexReader reader) throws IOException {
     if (lowerValue == null && upperValue == null) {
-      return new FieldValueQuery(field);
+      return new DocValuesFieldExistsQuery(field);
     }
     return super.rewrite(reader);
   }
@@ -3758,7 +3758,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     // and re-resolve against the newly merged segment:
 
     Map<String,List<DocValuesFieldUpdates>> mergingDVUpdates = rld.getMergingDVUpdates();
-
     for (Map.Entry<String,List<DocValuesFieldUpdates>> ent : mergingDVUpdates.entrySet()) {
 
       String field = ent.getKey();

@@ -4356,6 +4355,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
       // Hold onto the "live" reader; we will use this to
       // commit merged deletes
       final ReadersAndUpdates rld = readerPool.get(info, true);
+      rld.setIsMerging();
 
       SegmentReader reader = rld.getReaderForMerge(context);
       int delCount = reader.numDeletedDocs();
@@ -811,9 +811,10 @@ class ReadersAndUpdates {
   synchronized public void setIsMerging() {
     // This ensures any newly resolved doc value updates while we are merging are
     // saved for re-applying after this segment is done merging:
-    isMerging = true;
-
-    assert mergingDVUpdates.isEmpty();
+    if (isMerging == false) {
+      isMerging = true;
+      assert mergingDVUpdates.isEmpty();
+    }
   }
 
   /** Returns a reader for merge, with the latest doc values updates and deletions. */
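The guard above makes setIsMerging() safe to reach more than once per merged segment: only the first call may assert that the update buffer is empty, and later calls are no-ops. A stripped-down, hypothetical model of the pattern (illustrative only, not the actual class):

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical, simplified model of the guarded state transition.
    class MergeState {
      private boolean isMerging;
      private final List<Object> mergingUpdates = new ArrayList<>();

      synchronized void setIsMerging() {
        if (isMerging == false) {
          isMerging = true;
          // Only valid on the *first* transition: nothing buffered yet.
          assert mergingUpdates.isEmpty();
        }
        // Subsequent calls are no-ops: updates may already be buffered.
      }
    }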
@@ -65,7 +65,7 @@ import org.apache.lucene.util.Version;
  * Files:
  * <ul>
  * <li><tt>segments_N</tt>: Header, LuceneVersion, Version, NameCounter, SegCount, MinSegmentLuceneVersion, <SegName,
- *   HasSegID, SegID, SegCodec, DelGen, DeletionCount, FieldInfosGen, DocValuesGen,
+ *   SegID, SegCodec, DelGen, DeletionCount, FieldInfosGen, DocValuesGen,
  *   UpdatesFiles><sup>SegCount</sup>, CommitUserData, Footer
  * </ul>
  * Data types:

@@ -78,7 +78,6 @@ import org.apache.lucene.util.Version;
  *   {@link DataOutput#writeInt Int32}</li>
  * <li>Generation, Version, DelGen, Checksum, FieldInfosGen, DocValuesGen -->
  *   {@link DataOutput#writeLong Int64}</li>
- * <li>HasSegID --> {@link DataOutput#writeByte Int8}</li>
  * <li>SegID --> {@link DataOutput#writeByte Int8<sup>ID_LENGTH</sup>}</li>
  * <li>SegName, SegCodec --> {@link DataOutput#writeString String}</li>
  * <li>CommitUserData --> {@link DataOutput#writeMapOfStrings

@@ -100,9 +99,6 @@ import org.apache.lucene.util.Version;
  * <li>DeletionCount records the number of deleted documents in this segment.</li>
  * <li>SegCodec is the {@link Codec#getName() name} of the Codec that encoded
  *   this segment.</li>
- * <li>HasSegID is nonzero if the segment has an identifier. Otherwise, when it is 0
- *   the identifier is {@code null} and no SegID is written. Null only happens for Lucene
- *   4.x segments referenced in commits.</li>
  * <li>SegID is the identifier of the Codec that encoded this segment. </li>
  * <li>CommitUserData stores an optional user-supplied opaque
  *   Map<String,String> that was passed to

@@ -122,8 +118,6 @@ import org.apache.lucene.util.Version;
  */
 public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo> {
 
-  /** Adds the {@link Version} that committed this segments_N file, as well as the {@link Version} of the oldest segment, since 5.3+ */
-  public static final int VERSION_53 = 6;
   /** The version that added information about the Lucene version at the time when the index has been created. */
   public static final int VERSION_70 = 7;
 

@@ -306,20 +300,22 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
     if (magic != CodecUtil.CODEC_MAGIC) {
       throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
     }
-    int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_53, VERSION_CURRENT);
+    int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_70, VERSION_CURRENT);
     byte id[] = new byte[StringHelper.ID_LENGTH];
     input.readBytes(id, 0, id.length);
     CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
 
     Version luceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-    if (luceneVersion.onOrAfter(Version.LUCENE_7_0_0) == false) {
-      // TODO: should we check indexCreatedVersion instead?
-      throw new IndexFormatTooOldException(input, "this index is too old (version: " + luceneVersion + ")");
-    }
+    int indexCreatedVersion = input.readVInt();
+    if (luceneVersion.major < indexCreatedVersion) {
+      throw new CorruptIndexException("Creation version [" + indexCreatedVersion
+          + ".x] can't be greater than the version that wrote the segment infos: [" + luceneVersion + "]" , input);
+    }
 
-    int indexCreatedVersion = 6;
-    if (format >= VERSION_70) {
-      indexCreatedVersion = input.readVInt();
     if (indexCreatedVersion < Version.LATEST.major - 1) {
       throw new IndexFormatTooOldException(input, "This index was initially created with Lucene "
           + indexCreatedVersion + ".x while the current version is " + Version.LATEST
          + " and Lucene only supports reading the current and previous major versions.");
     }
 
     SegmentInfos infos = new SegmentInfos(indexCreatedVersion);

@@ -345,17 +341,9 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
     long totalDocs = 0;
     for (int seg = 0; seg < numSegments; seg++) {
       String segName = input.readString();
-      final byte segmentID[];
-      byte hasID = input.readByte();
-      if (hasID == 1) {
-        segmentID = new byte[StringHelper.ID_LENGTH];
-        input.readBytes(segmentID, 0, segmentID.length);
-      } else if (hasID == 0) {
-        throw new IndexFormatTooOldException(input, "Segment is from Lucene 4.x");
-      } else {
-        throw new CorruptIndexException("invalid hasID byte, got: " + hasID, input);
-      }
-      Codec codec = readCodec(input, format < VERSION_53);
+      byte[] segmentID = new byte[StringHelper.ID_LENGTH];
+      input.readBytes(segmentID, 0, segmentID.length);
+      Codec codec = readCodec(input);
       SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
       info.setCodec(codec);
       totalDocs += info.maxDoc();

@@ -409,24 +397,12 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
     return infos;
   }
 
-  private static final List<String> unsupportedCodecs = Arrays.asList(
-      "Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45", "Lucene46", "Lucene49", "Lucene410"
-  );
-
-  private static Codec readCodec(DataInput input, boolean unsupportedAllowed) throws IOException {
+  private static Codec readCodec(DataInput input) throws IOException {
     final String name = input.readString();
     try {
       return Codec.forName(name);
     } catch (IllegalArgumentException e) {
-      // give better error messages if we can, first check if this is a legacy codec
-      if (unsupportedCodecs.contains(name)) {
-        // We should only get here on pre-5.3 indices, but we can't test this until 7.0 when 5.x indices become too old:
-        assert unsupportedAllowed;
-        IOException newExc = new IndexFormatTooOldException(input, "Codec '" + name + "' is too old");
-        newExc.initCause(e);
-        throw newExc;
-      }
-      // or maybe it's an old default codec that moved
+      // maybe it's an old default codec that moved
       if (name.startsWith("Lucene")) {
         throw new IllegalArgumentException("Could not load codec '" + name + "'. Did you forget to add lucene-backward-codecs.jar?", e);
       }

@@ -523,16 +499,10 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
       }
       out.writeString(si.name);
       byte segmentID[] = si.getId();
-      // TODO: remove this in lucene 6, we don't need to include 4.x segments in commits anymore
-      if (segmentID == null) {
-        out.writeByte((byte)0);
-      } else {
-        if (segmentID.length != StringHelper.ID_LENGTH) {
-          throw new IllegalStateException("cannot write segment: invalid id segment=" + si.name + "id=" + StringHelper.idToString(segmentID));
-        }
-        out.writeByte((byte)1);
-        out.writeBytes(segmentID, segmentID.length);
+      if (segmentID.length != StringHelper.ID_LENGTH) {
+        throw new IllegalStateException("cannot write segment: invalid id segment=" + si.name + "id=" + StringHelper.idToString(segmentID));
       }
+      out.writeBytes(segmentID, segmentID.length);
       out.writeString(si.getCodec().getName());
       out.writeLong(siPerCommit.getDelGen());
       int delCount = siPerCommit.getDelCount();
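Restating the two new segments_N invariants in isolation: the recorded creation version must not be newer than the version that wrote the file, and must not be older than the previous major release. A self-contained, hypothetical sketch of just that validation (plain exceptions stand in for Lucene's CorruptIndexException/IndexFormatTooOldException to keep it runnable on its own):

    // Hypothetical standalone check mirroring the guards added above.
    class CreatedVersionCheck {
      static void validate(int writerMajor, int indexCreatedMajor, int latestMajor) {
        if (writerMajor < indexCreatedMajor) {
          // segments_N claims a creation version newer than the version that wrote it
          throw new IllegalStateException("Creation version [" + indexCreatedMajor
              + ".x] can't be greater than the version that wrote the segment infos: ["
              + writerMajor + ".x]");
        }
        if (indexCreatedMajor < latestMajor - 1) {
          // only the current and previous major versions remain readable
          throw new IllegalStateException("This index was initially created with Lucene "
              + indexCreatedMajor + ".x; only the current and previous major versions can be read");
        }
      }
    }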
@@ -30,13 +30,13 @@ import org.apache.lucene.index.LeafReaderContext;
  * A {@link Query} that matches documents that have a value for a given field
  * as reported by doc values iterators.
  */
-public final class FieldValueQuery extends Query {
+public final class DocValuesFieldExistsQuery extends Query {
 
   private final String field;
 
   /** Create a query that will match that have a value for the given
    *  {@code field}. */
-  public FieldValueQuery(String field) {
+  public DocValuesFieldExistsQuery(String field) {
     this.field = Objects.requireNonNull(field);
   }
 

@@ -47,7 +47,7 @@ public final class FieldValueQuery extends Query {
   @Override
   public boolean equals(Object other) {
     return sameClassAs(other) &&
-           field.equals(((FieldValueQuery) other).field);
+           field.equals(((DocValuesFieldExistsQuery) other).field);
   }
 
   @Override

@@ -57,7 +57,7 @@ public final class FieldValueQuery extends Query {
 
   @Override
   public String toString(String field) {
-    return "FieldValueQuery [field=" + this.field + "]";
+    return "DocValuesFieldExistsQuery [field=" + this.field + "]";
   }
 
   @Override
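A hedged usage sketch of the renamed query, matching the LUCENE-7899 entry above. It assumes an already-built IndexSearcher over documents that may carry a doc-values field; the field name "price" is illustrative:

    import java.io.IOException;
    import org.apache.lucene.search.DocValuesFieldExistsQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TopDocs;

    public class ExistsQueryExample {
      // Counts documents that have a doc value for the given field.
      static long countDocsWithField(IndexSearcher searcher, String field) throws IOException {
        Query q = new DocValuesFieldExistsQuery(field);  // was: new FieldValueQuery(field)
        TopDocs top = searcher.search(q, 1);
        return top.totalHits;
      }
    }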
@@ -39,6 +39,13 @@ public final class Version {
   @Deprecated
   public static final Version LUCENE_7_0_0 = new Version(7, 0, 0);
 
+  /**
+   * Match settings and bugs in Lucene's 7.1.0 release.
+   * @deprecated Use latest
+   */
+  @Deprecated
+  public static final Version LUCENE_7_1_0 = new Version(7, 1, 0);
+
   /**
    * Match settings and bugs in Lucene's 8.0.0 release.
    * <p>
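For context, a small sketch of how these version constants are typically consumed, parsing a version string and comparing it against a known release (parseLeniently and onOrAfter are the standard helpers; the printed values are illustrative):

    import org.apache.lucene.util.Version;

    public class VersionCheck {
      public static void main(String[] args) throws Exception {
        Version v = Version.parseLeniently("7.1.0");
        // true: 7.1.0 is on or after 7.0.0 (and LUCENE_7_1_0 now exists, per the hunk above)
        System.out.println(v.onOrAfter(Version.LUCENE_7_0_0));
        System.out.println("latest major: " + Version.LATEST.major);
      }
    }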
@@ -59,8 +59,8 @@ public class TestFieldValueQuery extends LuceneTestCase {
     final IndexSearcher searcher = newSearcher(reader);
     iw.close();
 
-    assertSameMatches(searcher, new TermQuery(new Term("has_value", "yes")), new FieldValueQuery("dv1"), false);
-    assertSameMatches(searcher, new TermQuery(new Term("has_value", "yes")), new FieldValueQuery("dv2"), false);
+    assertSameMatches(searcher, new TermQuery(new Term("has_value", "yes")), new DocValuesFieldExistsQuery("dv1"), false);
+    assertSameMatches(searcher, new TermQuery(new Term("has_value", "yes")), new DocValuesFieldExistsQuery("dv2"), false);
 
     reader.close();
     dir.close();

@@ -99,12 +99,12 @@ public class TestFieldValueQuery extends LuceneTestCase {
 
     BooleanQuery.Builder bq1 = new BooleanQuery.Builder();
     bq1.add(new TermQuery(new Term("f", "yes")), Occur.MUST);
-    bq1.add(new FieldValueQuery("dv1"), Occur.FILTER);
+    bq1.add(new DocValuesFieldExistsQuery("dv1"), Occur.FILTER);
     assertSameMatches(searcher, ref.build(), bq1.build(), true);
 
     BooleanQuery.Builder bq2 = new BooleanQuery.Builder();
     bq2.add(new TermQuery(new Term("f", "yes")), Occur.MUST);
-    bq2.add(new FieldValueQuery("dv2"), Occur.FILTER);
+    bq2.add(new DocValuesFieldExistsQuery("dv2"), Occur.FILTER);
     assertSameMatches(searcher, ref.build(), bq2.build(), true);
 
     reader.close();

@@ -141,10 +141,10 @@ public class TestFieldValueQuery extends LuceneTestCase {
     final float boost = random().nextFloat() * 10;
     final Query ref = new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("has_value", "yes"))), boost);
 
-    final Query q1 = new BoostQuery(new FieldValueQuery("dv1"), boost);
+    final Query q1 = new BoostQuery(new DocValuesFieldExistsQuery("dv1"), boost);
     assertSameMatches(searcher, ref, q1, true);
 
-    final Query q2 = new BoostQuery(new FieldValueQuery("dv2"), boost);
+    final Query q2 = new BoostQuery(new DocValuesFieldExistsQuery("dv2"), boost);
     assertSameMatches(searcher, ref, q2, true);
 
     reader.close();

@@ -160,7 +160,7 @@ public class TestFieldValueQuery extends LuceneTestCase {
     final IndexReader reader = iw.getReader();
     final IndexSearcher searcher = newSearcher(reader);
     iw.close();
-    assertEquals(0, searcher.search(new FieldValueQuery("f"), 1).totalHits);
+    assertEquals(0, searcher.search(new DocValuesFieldExistsQuery("f"), 1).totalHits);
     reader.close();
     dir.close();
   }

@@ -175,7 +175,7 @@ public class TestFieldValueQuery extends LuceneTestCase {
     final IndexReader reader = iw.getReader();
     final IndexSearcher searcher = newSearcher(reader);
     iw.close();
-    assertEquals(1, searcher.search(new FieldValueQuery("f"), 1).totalHits);
+    assertEquals(1, searcher.search(new DocValuesFieldExistsQuery("f"), 1).totalHits);
     reader.close();
     dir.close();
   }

@@ -193,7 +193,7 @@ public class TestFieldValueQuery extends LuceneTestCase {
     final IndexReader reader = iw.getReader();
     final IndexSearcher searcher = newSearcher(reader);
     iw.close();
-    assertEquals(1, searcher.search(new FieldValueQuery("f"), 1).totalHits);
+    assertEquals(1, searcher.search(new DocValuesFieldExistsQuery("f"), 1).totalHits);
     reader.close();
     dir.close();
   }
@@ -5,7 +5,7 @@
 /antlr/antlr = 2.7.7
 /com.adobe.xmp/xmpcore = 5.1.2
 
-com.carrotsearch.randomizedtesting.version = 2.5.0
+com.carrotsearch.randomizedtesting.version = 2.5.2
 /com.carrotsearch.randomizedtesting/junit4-ant = ${com.carrotsearch.randomizedtesting.version}
 /com.carrotsearch.randomizedtesting/randomizedtesting-runner = ${com.carrotsearch.randomizedtesting.version}
 
@@ -70,8 +70,8 @@ import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.FieldValueQuery;
 import org.apache.lucene.search.FilterScorer;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;

@@ -532,7 +532,7 @@ public class TestJoinUtil extends LuceneTestCase {
   static Query numericDocValuesScoreQuery(final String field) {
     return new Query() {
 
-      private final Query fieldQuery = new FieldValueQuery(field);
+      private final Query fieldQuery = new DocValuesFieldExistsQuery(field);
 
       @Override
       public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
@@ -1 +0,0 @@
-2d00ff1042ae258f33830f26f9b30fc3a43d37e1

@@ -0,0 +1 @@
+91f3284993b44dcb2f003b5f28617abba13971d2
@@ -57,12 +57,12 @@ Upgrade Notes
 New Features
 ----------------------
 
-(No Changes)
+* SOLR-11019: Add addAll Stream Evaluator (Joel Bernstein)
 
 Bug Fixes
 ----------------------
 
-(No Changes)
+* SOLR-10668: fix NPE at sort=childfield(..) .. on absent values (Mikhail Khludnev)
 
 Optimizations
 ----------------------
 

@@ -77,6 +77,8 @@ Other Changes
 * SOLR-10957: Changed SolrCoreParser.init to use the resource loader from getSchema()
   instead of the resource loader from getCore(). (Christine Poerschke)
 
+* SOLR-10748: Make stream.body configurable and disabled by default (janhoy)
+
 ==================  7.0.0 ==================
 
 Versions of Major Components

@@ -181,11 +183,15 @@ Upgrading from Solr 6.x
 * The unused 'valType' option has been removed from ExternalFileField, if you have this in your schema you
   can safely remove it. see SOLR-10929 for more details.
 
-* SOLR-10574: basic_configs and data_driven_schema_configs have now been merged into _default. It has data driven nature
+* Config sets basic_configs and data_driven_schema_configs have now been merged into _default. It has data driven nature
   enabled by default, and can be turned off (after creating a collection) with:
   curl http://host:8983/solr/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'
   Please see SOLR-10574 for details.
 
+* The data driven config (now _default) for auto-creating fields earlier defaulted to "string" for text input.
+  Now the default is to use "text_general" and to add a copyField to the schema, copying to a "*_str" dynamic field,
+  with a cutoff at 256 characters. This enables full text search as well as faceting. See SOLR-9526 for more.
+
 * SOLR-10123: The Analytics Component has been upgraded to support distributed collections, expressions over multivalued
   fields, a new JSON request language, and more. DocValues are now required for any field used in the analytics expression
   whereas previously docValues was not required. Please see SOLR-10123 for details.

@@ -262,10 +268,16 @@ New Features
 * SOLR-10406: v2 API error messages list the URL request path as /solr/____v2/... when the original path was /v2/... (Cao Manh Dat, noble)
 
 * SOLR-10574: New _default config set replacing basic_configs and data_driven_schema_configs.
-  (Ishan Chattopadhyaya, noble, shalin, hossman, David Smiley, Jan Hoydahl, Alexandre Rafalovich)
+  (Ishan Chattopadhyaya, noble, shalin, hossman, David Smiley, janhoy, Alexandre Rafalovich)
 
 * SOLR-10272: Use _default config set if no collection.configName is specified with CREATE (Ishan Chattopadhyaya)
 
+* SOLR-9526: Data driven schema now indexes text field "foo" as both "foo" (text_general) and as "foo_str" (string)
+  to facilitate both search and faceting. AddSchemaFieldsUpdateProcessor now has the ability to add a "copyField" to
+  the type mappings, with an optional maxChars limitation. You can also define one typeMappings as default.
+  This also solves issues SOLR-8495, SOLR-6966, and SOLR-7058
+  (janhoy, Steve Rowe, hossman, Alexandre Rafalovich, Shawn Heisey, Cao Manh Dat)
+
 * SOLR-10123: Upgraded the Analytics Component to version 2.0 which now supports distributed collections, expressions over
   multivalued fields, a new JSON request language, and more. DocValues are now required for any field used in the analytics
   expression whereas previously docValues was not required. Please see SOLR-10123 for details. (Houston Putman)

@@ -330,6 +342,10 @@ Bug Fixes
 * SOLR-10879: DELETEREPLICA and DELETENODE commands should prevent data loss when
   replicationFactor is 1. (ab)
 
+* SOLR-10983: Fix DOWNNODE -> queue-work explosion (Scott Blum, Joshua Humphries)
+
+* SOLR-10826: Fix CloudSolrClient to expand the collection parameter correctly (Tim Owen via Varun Thacker)
+
 Optimizations
 ----------------------
 

@@ -474,6 +490,13 @@ Other Changes
 * SOLR-10456: Deprecate timeout related setters from SolrClients, and replace with Builder based implementation
   (Jason Gerlowski, Anshum Gupta)
 
+* SOLR-11004: Consolidate SolrClient builder code in an abstract base class (Jason Gerlowski, Anshum Gupta)
+
+* SOLR-10967: Cleanup the default configset. The changes involve some documentation corrections, removing the currency
+  field from the examples and a few dynamic fields (Varun Thacker)
+
+* SOLR-11016: Fix TestCloudJSONFacetJoinDomain test-only bug (hossman)
+
 ==================  6.7.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -449,7 +449,7 @@ public class ExpressionFactory {
       }
     } else if (fieldType instanceof TrieIntField) {
       if (multivalued) {
-        aField = new IntMultiField(fieldName);
+        aField = new IntMultiTrieField(fieldName);
       } else {
         aField = new IntField(fieldName);
       }

@@ -461,7 +461,7 @@ public class ExpressionFactory {
       }
     } else if (fieldType instanceof TrieLongField) {
       if (multivalued) {
-        aField = new LongMultiField(fieldName);
+        aField = new LongMultiTrieField(fieldName);
      } else {
        aField = new LongField(fieldName);
      }

@@ -473,7 +473,7 @@ public class ExpressionFactory {
       }
     } else if (fieldType instanceof TrieFloatField) {
       if (multivalued) {
-        aField = new FloatMultiField(fieldName);
+        aField = new FloatMultiTrieField(fieldName);
       } else {
         aField = new FloatField(fieldName);
       }

@@ -485,7 +485,7 @@ public class ExpressionFactory {
       }
     } else if (fieldType instanceof TrieDoubleField) {
       if (multivalued) {
-        aField = new DoubleMultiField(fieldName);
+        aField = new DoubleMultiTrieField(fieldName);
       } else {
         aField = new DoubleField(fieldName);
       }

@@ -497,7 +497,7 @@ public class ExpressionFactory {
       }
     } else if (fieldType instanceof TrieDateField) {
       if (multivalued) {
-        aField = new DateMultiField(fieldName);
+        aField = new DateMultiTrieField(fieldName);
       } else {
         aField = new DateField(fieldName);
       }
@@ -26,9 +26,9 @@ import org.apache.solr.schema.TrieDateField;
 /**
  * An analytics wrapper for a multi-valued {@link TrieDateField} with DocValues enabled.
  */
-public class DateMultiField extends LongMultiField implements CastingDateValueStream {
+public class DateMultiTrieField extends LongMultiTrieField implements CastingDateValueStream {
 
-  public DateMultiField(String fieldName) {
+  public DateMultiTrieField(String fieldName) {
     super(fieldName);
   }
@@ -31,12 +31,12 @@ import org.apache.solr.schema.TrieDoubleField;
 /**
  * An analytics wrapper for a multi-valued {@link TrieDoubleField} with DocValues enabled.
  */
-public class DoubleMultiField extends AnalyticsField implements CastingDoubleValueStream {
+public class DoubleMultiTrieField extends AnalyticsField implements CastingDoubleValueStream {
   private SortedSetDocValues docValues;
   private int count;
   private double[] values;
 
-  public DoubleMultiField(String fieldName) {
+  public DoubleMultiTrieField(String fieldName) {
     super(fieldName);
     count = 0;
     values = new double[initialArrayLength];
@@ -32,12 +32,12 @@ import org.apache.solr.schema.TrieFloatField;
 /**
  * An analytics wrapper for a multi-valued {@link TrieFloatField} with DocValues enabled.
  */
-public class FloatMultiField extends AnalyticsField implements CastingFloatValueStream {
+public class FloatMultiTrieField extends AnalyticsField implements CastingFloatValueStream {
   private SortedSetDocValues docValues;
   private int count;
   private float[] values;
 
-  public FloatMultiField(String fieldName) {
+  public FloatMultiTrieField(String fieldName) {
     super(fieldName);
     count = 0;
     values = new float[initialArrayLength];
@@ -33,12 +33,12 @@ import org.apache.solr.schema.TrieIntField;
 /**
  * An analytics wrapper for a multi-valued {@link TrieIntField} with DocValues enabled.
  */
-public class IntMultiField extends AnalyticsField implements CastingIntValueStream {
+public class IntMultiTrieField extends AnalyticsField implements CastingIntValueStream {
   private SortedSetDocValues docValues;
   private int count;
   private int[] values;
 
-  public IntMultiField(String fieldName) {
+  public IntMultiTrieField(String fieldName) {
     super(fieldName);
     count = 0;
     values = new int[initialArrayLength];
@@ -31,12 +31,12 @@ import org.apache.solr.schema.TrieLongField;
 /**
  * An analytics wrapper for a multi-valued {@link TrieLongField} with DocValues enabled.
  */
-public class LongMultiField extends AnalyticsField implements CastingLongValueStream {
+public class LongMultiTrieField extends AnalyticsField implements CastingLongValueStream {
   private SortedSetDocValues docValues;
   private int count;
   private long[] values;
 
-  public LongMultiField(String fieldName) {
+  public LongMultiTrieField(String fieldName) {
     super(fieldName);
     count = 0;
     values = new long[initialArrayLength];
@@ -27,9 +27,8 @@ import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
 import org.apache.solr.common.params.FacetParams.FacetRangeOther;
 import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.NumericFieldType;
 import org.apache.solr.schema.SchemaField;
-import org.apache.solr.schema.TrieDateField;
-import org.apache.solr.schema.TrieField;
 import org.apache.solr.util.DateMathParser;
 
 

@@ -249,7 +248,7 @@ public abstract class FacetRangeGenerator<T extends Comparable<T>> {
     final SchemaField sf = rangeFacet.getField();
     final FieldType ft = sf.getType();
     final FacetRangeGenerator<?> calc;
-    if (ft instanceof TrieField) {
+    if (ft instanceof NumericFieldType) {
       switch (ft.getNumberType()) {
         case FLOAT:
           calc = new FloatFacetRangeGenerator(rangeFacet);

@@ -267,10 +266,10 @@ public abstract class FacetRangeGenerator<T extends Comparable<T>> {
           calc = new DateFacetRangeGenerator(rangeFacet, null);
           break;
         default:
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on tried field of unexpected type:" + sf.getName());
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on numeric field of unexpected type: " + sf.getName());
       }
     } else {
-      throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on field:" + sf);
+      throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on non-numeric field: " + sf);
     }
     return calc;
   }

@@ -330,9 +329,6 @@ class DateFacetRangeGenerator extends FacetRangeGenerator<Date> {
   public DateFacetRangeGenerator(final RangeFacet rangeFacet, final Date now) {
     super(rangeFacet);
     this.now = now;
-    if (! (field.getType() instanceof TrieDateField) ) {
-      throw new IllegalArgumentException("SchemaField must use field type extending TrieDateField");
-    }
   }
 
   @Override
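The dispatch above now keys off the generic numeric type rather than the concrete Trie classes, so point-based fields take the same path. A hedged sketch of the dispatch shape; only the FLOAT arm and the default arm mirror the diff, the other enum arms are elided, and this is not the full factory method:

    import org.apache.solr.common.SolrException;
    import org.apache.solr.schema.FieldType;
    import org.apache.solr.schema.NumericFieldType;
    import org.apache.solr.schema.SchemaField;

    // Illustrative shape of the type dispatch after the change.
    class RangeCalcDispatch {
      static void dispatch(SchemaField sf) {
        FieldType ft = sf.getType();
        if (ft instanceof NumericFieldType) {   // covers both Trie* and Point* numeric types
          switch (ft.getNumberType()) {
            case FLOAT:
              // calc = new FloatFacetRangeGenerator(rangeFacet);  // as in the diff
              break;
            default:
              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                  "Unable to range facet on numeric field of unexpected type: " + sf.getName());
          }
        } else {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
              "Unable to range facet on non-numeric field: " + sf);
        }
      }
    }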
@@ -39,14 +39,14 @@
     These are provided more for backward compatability, allowing one
     to create a schema that matches an existing lucene index.
   -->
-  <fieldType name="int" class="${solr.tests.IntegerFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
-  <fieldType name="float" class="${solr.tests.FloatFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
-  <fieldType name="long" class="${solr.tests.LongFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
-  <fieldType name="double" class="${solr.tests.DoubleFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="int" class="${solr.tests.IntegerFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="float" class="${solr.tests.FloatFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="long" class="${solr.tests.LongFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="double" class="${solr.tests.DoubleFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
   <!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
        seconds part (.999) is optional.
     -->
-  <fieldType name="date" class="${solr.tests.DateFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="date" class="${solr.tests.DateFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
 
   <fieldType name="boolean" class="solr.BoolField"/>
   <fieldType name="string" class="solr.StrField"/>
@@ -20,21 +20,16 @@
 
 <schema name="cloud-analytics" version="1.0">
 
-  <fieldType name="int" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
-  <fieldType name="float" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
-  <fieldType name="long" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
-  <fieldType name="double" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
   <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
   <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
 
-  <!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
-       seconds part (.999) is optional.
-    -->
-  <fieldType name="date" class="solr.TrieDateField" sortMissingLast="true" precisionStep="6"/>
-
-
+  <fieldType name="int" class="${solr.tests.IntegerFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0" />
+  <fieldType name="float" class="${solr.tests.FloatFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0" />
+  <fieldType name="long" class="${solr.tests.LongFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="double" class="${solr.tests.DoubleFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+  <fieldType name="date" class="${solr.tests.DateFieldType}" precisionStep="0" omitNorms="true" positionIncrementGap="0" />
 
   <field name="id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>
   <field name="signatureField" type="string" indexed="true" stored="false"/>
 
   <!-- for versioning -->
   <field name="_version_" type="long" indexed="true" stored="true" multiValued="false"/>
@@ -28,7 +28,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 
-public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
+public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest {
   public static final int INT = 71;
   public static final int LONG = 36;
   public static final int LONGM = 50;

@@ -132,6 +132,8 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
     multiStringTestMissing = new ArrayList<>();
     multiDateTestStart = new ArrayList<>();
     multiDateTestMissing = new ArrayList<>();
+
+    boolean multiCanHaveDuplicates = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP);
 
     UpdateRequest req = new UpdateRequest();
     for (int j = 0; j < NUM_LOOPS; ++j) {

@@ -148,8 +150,6 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
       List<String> fields = new ArrayList<>();
       fields.add("id"); fields.add("1000"+j);
 
-      if (dt != 0) {
-      }
       if( i != 0 ) {
         fields.add("int_id"); fields.add("" + i);
       }

@@ -180,16 +180,13 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
       if ( sm != 0 ) {
         fields.add("string_sdm"); fields.add("str" + sm);
       }
-      if ( dtm != 0 ) {
-        fields.add("date_dtdm"); fields.add((1800+dtm) + "-12-31T23:59:59Z");
-      }
       req.add(fields.toArray(new String[0]));
 
-      if( dt != 0 ){
+      if( dt != 0 ) {
         //Dates
-        if (j-DATE<0) {
+        if ( j-DATE < 0 ) {
           ArrayList<Integer> list1 = new ArrayList<>();
-          if( i != 0 ){
+          if( i != 0 ) {
             list1.add(i);
             intDateTestMissing.add(0l);
           } else {

@@ -197,7 +194,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         intDateTestStart.add(list1);
         ArrayList<Long> list2 = new ArrayList<>();
-        if( l != 0l ){
+        if( l != 0l ) {
           list2.add(l);
           longDateTestMissing.add(0l);
         } else {

@@ -205,7 +202,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         longDateTestStart.add(list2);
         ArrayList<Float> list3 = new ArrayList<>();
-        if ( f != 0.0f ){
+        if ( f != 0.0f ) {
           list3.add(f);
           floatDateTestMissing.add(0l);
         } else {

@@ -214,7 +211,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         floatDateTestStart.add(list3);
         ArrayList<Double> list4 = new ArrayList<>();
-        if( d != 0.0d ){
+        if( d != 0.0d ) {
           list4.add(d);
           doubleDateTestMissing.add(0l);
         } else {

@@ -222,7 +219,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         doubleDateTestStart.add(list4);
         ArrayList<Integer> list5 = new ArrayList<>();
-        if( i != 0 ){
+        if( i != 0 ) {
           list5.add(i);
           multiDateTestMissing.add(0l);
         } else {

@@ -239,24 +236,26 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
       }
 
-      if (j-DATEM<0 && dtm!=dt && dtm!=0) {
-        ArrayList<Integer> list1 = new ArrayList<>();
-        if( i != 0 ){
-          list1.add(i);
-          multiDateTestMissing.add(0l);
-        } else {
-          multiDateTestMissing.add(1l);
-        }
-        multiDateTestStart.add(list1);
-      } else if (dtm!=dt && dtm!=0) {
-        if( i != 0 ) multiDateTestStart.get(dtm-1).add(i);
-      }
+      if ( dtm != 0 ) {
+        if ( j-DATEM < 0 && dtm != dt ) {
+          ArrayList<Integer> list1 = new ArrayList<>();
+          if( i != 0 ) {
+            list1.add(i);
+            multiDateTestMissing.add(0l);
+          } else {
+            multiDateTestMissing.add(1l);
+          }
+          multiDateTestStart.add(list1);
+        } else if ( dtm != dt || multiCanHaveDuplicates ) {
+          if( i != 0 ) multiDateTestStart.get(dtm-1).add(i); else increment(multiDateTestMissing,dtm-1);
+        }
+      }
 
       if( s != 0 ){
         //Strings
-        if (j-STRING<0) {
+        if ( j-STRING < 0 ) {
           ArrayList<Integer> list1 = new ArrayList<>();
-          if( i != 0 ){
+          if( i != 0 ) {
             list1.add(i);
             intStringTestMissing.add(0l);
           } else {

@@ -264,7 +263,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         intStringTestStart.add(list1);
         ArrayList<Long> list2 = new ArrayList<>();
-        if( l != 0l ){
+        if( l != 0l ) {
           list2.add(l);
           longStringTestMissing.add(0l);
         } else {

@@ -280,7 +279,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         floatStringTestStart.add(list3);
         ArrayList<Double> list4 = new ArrayList<>();
-        if( d != 0.0d ){
+        if( d != 0.0d ) {
           list4.add(d);
           doubleStringTestMissing.add(0l);
         } else {

@@ -288,7 +287,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         doubleStringTestStart.add(list4);
         ArrayList<Integer> list5 = new ArrayList<>();
-        if( i != 0 ){
+        if( i != 0 ) {
           list5.add(i);
           multiStringTestMissing.add(0l);
         } else {

@@ -306,7 +305,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
 
       //Strings
       if( sm != 0 ){
-        if (j-STRINGM<0&&sm!=s) {
+        if ( j-STRINGM < 0 && sm != s ) {
           ArrayList<Integer> list1 = new ArrayList<>();
           if( i != 0 ){
             list1.add(i);

@@ -315,14 +314,14 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
           multiStringTestMissing.add(1l);
         }
         multiStringTestStart.add(list1);
-      } else if (sm!=s) {
+      } else if ( sm != s ) {
         if( i != 0 ) multiStringTestStart.get(sm-1).add(i); else increment(multiStringTestMissing,sm-1);
       }
     }
 
     //Int
-    if( i != 0 ){
-      if (j-INT<0) {
+    if( i != 0 ) {
+      if ( j-INT < 0 ) {
        ArrayList<String> list1 = new ArrayList<>();
        if( dt != 0 ){
          list1.add((1800+dt) + "-12-31T23:59:59Z");

@@ -332,7 +331,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         dateIntTestStart.add(list1);
         ArrayList<String> list2 = new ArrayList<>();
-        if( s != 0 ){
+        if( s != 0 ) {
           list2.add("str"+s);
           stringIntTestMissing.add(0l);
         } else {

@@ -346,8 +345,8 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
       }
 
       //Long
-      if( l != 0 ){
-        if (j-LONG<0) {
+      if( l != 0 ) {
+        if ( j-LONG < 0 ) {
           ArrayList<String> list1 = new ArrayList<>();
           if( dt != 0 ){
             list1.add((1800+dt) + "-12-31T23:59:59Z");

@@ -357,7 +356,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         dateLongTestStart.add(list1);
         ArrayList<String> list2 = new ArrayList<>();
-        if( s != 0 ){
+        if( s != 0 ) {
           list2.add("str"+s);
           stringLongTestMissing.add(0l);
         } else {

@@ -365,7 +364,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
         }
         stringLongTestStart.add(list2);
         ArrayList<Integer> list3 = new ArrayList<>();
-        if( i != 0 ){
+        if( i != 0 ) {
           list3.add(i);
           multiLongTestMissing.add(0l);
         } else {

@@ -380,17 +379,17 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
       }
 
       //Long
-      if( lm != 0 ){
-        if (j-LONGM<0&&lm!=l) {
+      if( lm != 0 ) {
+        if ( j-LONGM < 0 && lm != l ) {
           ArrayList<Integer> list1 = new ArrayList<>();
-          if( i != 0 ){
+          if( i != 0 ) {
             list1.add(i);
             multiLongTestMissing.add(0l);
           } else {
             multiLongTestMissing.add(1l);
           }
           multiLongTestStart.add(list1);
-        } else if (lm!=l) {
+        } else if ( lm != l || multiCanHaveDuplicates ) {
           if( i != 0 ) multiLongTestStart.get((int)lm-1).add(i); else increment( multiLongTestMissing,(int)lm-1);
         }
       }

@@ -848,7 +847,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
     //Int Date
     Collection<Integer> intDate = getValueList(response, "maxn", "fieldFacets", "date_dtd", "int", false);
     ArrayList<Integer> intDateTest = (ArrayList<Integer>)calculateStat(intDateTestStart, "max");
-    //assertEquals(responseStr,intDate,intDateTest);
+    assertEquals(responseStr,intDate,intDateTest);
 
     //Int String
     Collection<Integer> intString = getValueList(response, "maxn", "fieldFacets", "string_sd", "int", false);

@@ -1061,7 +1060,6 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
     assertEquals(responseStr,stringLong,stringLongTest);
   }
 
-  @SuppressWarnings("unchecked")
   @Test
   public void missingTest() throws Exception {
     String[] params = new String[] {

@@ -1165,7 +1163,6 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
   @Test
   public void missingFacetTest() throws Exception {
     String[] params = new String[] {
-        "o.func.facet_show_missing(a)", "fillmissing(a,\"(MISSING)\")",
         "o.missingf.s.mean", "mean(int_id)",
         "o.missingf.ff", "date_dtd",
         "o.missingf.ff", "string_sd",

@@ -1206,6 +1203,7 @@ public class FieldFacetCloudTest extends AbstractAnalyticsFacetCloudTest{
     }
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public static void assertEquals(String mes, Object actual, Object expected) {
     Collections.sort((List<Comparable>) actual);
     Collections.sort((List<Comparable>) expected);
|
|
@@ -141,6 +141,8 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 multiStringTestMissing = new ArrayList<>();
 multiDateTestStart = new ArrayList<>();
 multiDateTestMissing = new ArrayList<>();
+
+boolean multiCanHaveDuplicates = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP);
 
 for (int j = 0; j < NUM_LOOPS; ++j) {
 int i = j%INT;

@@ -152,32 +154,48 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 int dtm = j%DATEM;
 int s = j%STRING;
 int sm = j%STRINGM;
-if (dt==0 && dtm == 0) {
-assertU(adoc(filter("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
-"double_dd", "" + d, "date_dtd", (1800+dt) + "-12-31T23:59:59Z", "string_sd", "str" + s,
-"long_ldm", "" + l, "long_ldm", ""+lm, "string_sdm", "str" + s, "string_sdm", "str"+sm)));
-} else if (dt == 0) {
-assertU(adoc(filter("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
-"double_dd", "" + d, "date_dtd", (1800+dt) + "-12-31T23:59:59Z", "string_sd", "str" + s,
-"long_ldm", "" + l, "long_ldm", ""+lm, "string_sdm", "str" + s, "string_sdm", "str"+sm,
-"date_dtdm", (1800+dtm) + "-12-31T23:59:59Z")));
-} else if (dtm == 0) {
-assertU(adoc(filter("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
-"double_dd", "" + d, "date_dtd", (1800+dt) + "-12-31T23:59:59Z", "string_sd", "str" + s,
-"long_ldm", "" + l, "long_ldm", ""+lm, "string_sdm", "str" + s, "string_sdm", "str"+sm,
-"date_dtdm", (1800+dt) + "-12-31T23:59:59Z")));
-} else {
-assertU(adoc(filter("id", "1000" + j, "int_id", "" + i, "long_ld", "" + l, "float_fd", "" + f,
-"double_dd", "" + d, "date_dtd", (1800+dt) + "-12-31T23:59:59Z", "string_sd", "str" + s,
-"long_ldm", "" + l, "long_ldm", ""+lm, "string_sdm", "str" + s, "string_sdm", "str"+sm,
-"date_dtdm", (1800+dt) + "-12-31T23:59:59Z", "date_dtdm", (1800+dtm) + "-12-31T23:59:59Z")));
-}
-
-if( dt != 0 ){
+
+List<String> fields = new ArrayList<>();
+fields.add("id"); fields.add("1000"+j);
+
+if( i != 0 ) {
+fields.add("int_id"); fields.add("" + i);
+}
+if( l != 0l ) {
+fields.add("long_ld"); fields.add("" + l);
+fields.add("long_ldm"); fields.add("" + l);
+}
+if( lm != 0l ) {
+fields.add("long_ldm"); fields.add("" + lm);
+}
+if( f != 0.0f ) {
+fields.add("float_fd"); fields.add("" + f);
+}
+if( d != 0.0d ) {
+fields.add("double_dd"); fields.add("" + d);
+}
+if( dt != 0 ) {
+fields.add("date_dtd"); fields.add((1800+dt) + "-12-31T23:59:59Z");
+fields.add("date_dtdm"); fields.add((1800+dt) + "-12-31T23:59:59Z");
+}
+if ( dtm != 0 ) {
+fields.add("date_dtdm"); fields.add((1800+dtm) + "-12-31T23:59:59Z");
+}
+if ( s != 0 ) {
+fields.add("string_sd"); fields.add("str" + s);
+fields.add("string_sdm"); fields.add("str" + s);
+}
+if ( sm != 0 ) {
+fields.add("string_sdm"); fields.add("str" + sm);
+}
+assertU(adoc(fields.toArray(new String[0])));
+
+if( dt != 0 ) {
 //Dates
-if (j-DATE<0) {
+if ( j-DATE < 0 ) {
 ArrayList<Integer> list1 = new ArrayList<>();
-if( i != 0 ){
+if( i != 0 ) {
 list1.add(i);
 intDateTestMissing.add(0l);
 } else {

@@ -185,7 +203,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 intDateTestStart.add(list1);
 ArrayList<Long> list2 = new ArrayList<>();
-if( l != 0l ){
+if( l != 0l ) {
 list2.add(l);
 longDateTestMissing.add(0l);
 } else {

@@ -193,7 +211,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 longDateTestStart.add(list2);
 ArrayList<Float> list3 = new ArrayList<>();
-if ( f != 0.0f ){
+if ( f != 0.0f ) {
 list3.add(f);
 floatDateTestMissing.add(0l);
 } else {

@@ -202,7 +220,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 floatDateTestStart.add(list3);
 ArrayList<Double> list4 = new ArrayList<>();
-if( d != 0.0d ){
+if( d != 0.0d ) {
 list4.add(d);
 doubleDateTestMissing.add(0l);
 } else {

@@ -210,7 +228,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 doubleDateTestStart.add(list4);
 ArrayList<Integer> list5 = new ArrayList<>();
-if( i != 0 ){
+if( i != 0 ) {
 list5.add(i);
 multiDateTestMissing.add(0l);
 } else {

@@ -227,24 +245,26 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 }
 
-if (j-DATEM<0 && dtm!=dt && dtm!=0) {
-ArrayList<Integer> list1 = new ArrayList<>();
-if( i != 0 ){
-list1.add(i);
-multiDateTestMissing.add(0l);
-} else {
-multiDateTestMissing.add(1l);
-}
-multiDateTestStart.add(list1);
-} else if (dtm!=dt && dtm!=0) {
-if( i != 0 ) multiDateTestStart.get(dtm-1).add(i);
-}
+if ( dtm != 0 ) {
+if ( j-DATEM < 0 && dtm != dt ) {
+ArrayList<Integer> list1 = new ArrayList<>();
+if( i != 0 ) {
+list1.add(i);
+multiDateTestMissing.add(0l);
+} else {
+multiDateTestMissing.add(1l);
+}
+multiDateTestStart.add(list1);
+} else if ( dtm != dt || multiCanHaveDuplicates ) {
+if( i != 0 ) multiDateTestStart.get(dtm-1).add(i); else increment(multiDateTestMissing,dtm-1);
+}
+}
 
 if( s != 0 ){
 //Strings
-if (j-STRING<0) {
+if ( j-STRING < 0 ) {
 ArrayList<Integer> list1 = new ArrayList<>();
-if( i != 0 ){
+if( i != 0 ) {
 list1.add(i);
 intStringTestMissing.add(0l);
 } else {

@@ -252,7 +272,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 intStringTestStart.add(list1);
 ArrayList<Long> list2 = new ArrayList<>();
-if( l != 0l ){
+if( l != 0l ) {
 list2.add(l);
 longStringTestMissing.add(0l);
 } else {

@@ -268,7 +288,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 floatStringTestStart.add(list3);
 ArrayList<Double> list4 = new ArrayList<>();
-if( d != 0.0d ){
+if( d != 0.0d ) {
 list4.add(d);
 doubleStringTestMissing.add(0l);
 } else {

@@ -276,7 +296,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 doubleStringTestStart.add(list4);
 ArrayList<Integer> list5 = new ArrayList<>();
-if( i != 0 ){
+if( i != 0 ) {
 list5.add(i);
 multiStringTestMissing.add(0l);
 } else {

@@ -294,7 +314,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 
 //Strings
 if( sm != 0 ){
-if (j-STRINGM<0&&sm!=s) {
+if ( j-STRINGM < 0 && sm != s ) {
 ArrayList<Integer> list1 = new ArrayList<>();
 if( i != 0 ){
 list1.add(i);

@@ -303,14 +323,14 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 multiStringTestMissing.add(1l);
 }
 multiStringTestStart.add(list1);
-} else if (sm!=s) {
+} else if ( sm != s ) {
 if( i != 0 ) multiStringTestStart.get(sm-1).add(i); else increment(multiStringTestMissing,sm-1);
 }
 }
 
 //Int
-if( i != 0 ){
-if (j-INT<0) {
+if( i != 0 ) {
+if ( j-INT < 0 ) {
 ArrayList<String> list1 = new ArrayList<>();
 if( dt != 0 ){
 list1.add((1800+dt) + "-12-31T23:59:59Z");

@@ -320,7 +340,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 dateIntTestStart.add(list1);
 ArrayList<String> list2 = new ArrayList<>();
-if( s != 0 ){
+if( s != 0 ) {
 list2.add("str"+s);
 stringIntTestMissing.add(0l);
 } else {

@@ -334,8 +354,8 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 
 //Long
-if( l != 0 ){
-if (j-LONG<0) {
+if( l != 0 ) {
+if ( j-LONG < 0 ) {
 ArrayList<String> list1 = new ArrayList<>();
 if( dt != 0 ){
 list1.add((1800+dt) + "-12-31T23:59:59Z");

@@ -345,7 +365,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 dateLongTestStart.add(list1);
 ArrayList<String> list2 = new ArrayList<>();
-if( s != 0 ){
+if( s != 0 ) {
 list2.add("str"+s);
 stringLongTestMissing.add(0l);
 } else {

@@ -353,7 +373,7 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 stringLongTestStart.add(list2);
 ArrayList<Integer> list3 = new ArrayList<>();
-if( i != 0 ){
+if( i != 0 ) {
 list3.add(i);
 multiLongTestMissing.add(0l);
 } else {

@@ -368,17 +388,17 @@ public class FieldFacetTest extends AbstractAnalyticsFacetTest{
 }
 
 //Long
-if( lm != 0 ){
-if (j-LONGM<0&&lm!=l) {
+if( lm != 0 ) {
+if ( j-LONGM < 0 && lm != l ) {
 ArrayList<Integer> list1 = new ArrayList<>();
-if( i != 0 ){
+if( i != 0 ) {
 list1.add(i);
 multiLongTestMissing.add(0l);
 } else {
 multiLongTestMissing.add(1l);
 }
 multiLongTestStart.add(list1);
-} else if (lm!=l) {
+} else if ( lm != l || multiCanHaveDuplicates ) {
 if( i != 0 ) multiLongTestStart.get((int)lm-1).add(i); else increment( multiLongTestMissing,(int)lm-1);
 }
 }

@@ -18,6 +18,9 @@
 
 <schemaFactory class="ClassicIndexSchemaFactory" />
 
+<requestDispatcher>
+<requestParsers enableStreamBody="true" />
+</requestDispatcher>
 
 <!-- Query parser used to rerank top docs with a provided model -->
 <queryParser name="ltr"

@@ -18,7 +18,10 @@
 
 <schemaFactory class="ClassicIndexSchemaFactory" />
 
 
+<requestDispatcher>
+<requestParsers enableStreamBody="true" />
+</requestDispatcher>
 
 <!-- Query parser used to rerank top docs with a provided model -->
 <queryParser name="ltr" class="org.apache.solr.ltr.search.LTRQParserPlugin" >
 <int name="threadModule.totalPoolThreads">10</int> <!-- Maximum threads to use for all queries -->

@@ -18,7 +18,10 @@
 
 <schemaFactory class="ClassicIndexSchemaFactory" />
 
 
+<requestDispatcher>
+<requestParsers enableStreamBody="true" />
+</requestDispatcher>
 
 <!-- Query parser used to rerank top docs with a provided model -->
 <queryParser name="ltr" class="org.apache.solr.ltr.search.LTRQParserPlugin" />
 

@@ -228,11 +228,8 @@ public class DistributedQueue {
 }
 
 /**
- * Inserts data into queue. Successfully calling this method does NOT guarantee
- * that the element will be immediately available in the in-memory queue. In particular,
- * calling this method on an empty queue will not necessarily cause {@link #poll()} to
- * return the offered element. Use a blocking method if you must wait for the offered
- * element to become visible.
+ * Inserts data into queue. If there are no other queue consumers, the offered element
+ * will be immediately visible when this method returns.
 */
 public void offer(byte[] data) throws KeeperException, InterruptedException {
 Timer.Context time = stats.time(dir + "_offer");

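
A minimal usage sketch of the narrowed offer() contract above. Everything here is an assumption for illustration: the queue is an initialized org.apache.solr.cloud.DistributedQueue (its constructor needs a live SolrZkClient, omitted here), and this thread is the queue's only consumer.

    import org.apache.solr.cloud.DistributedQueue;

    final class OfferVisibilitySketch {
      // With no competing consumers, the revised contract says an offered element
      // is already visible to poll() by the time offer() returns.
      static void roundTrip(DistributedQueue queue) throws Exception {
        byte[] payload = "state-update".getBytes(java.nio.charset.StandardCharsets.UTF_8);
        queue.offer(payload);        // enqueue via ZooKeeper
        byte[] head = queue.poll();  // immediately visible under the single-consumer assumption
        assert head != null;
      }
    }
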
@@ -197,7 +197,9 @@ public class Overseer implements Closeable {
 log.error("Exception in Overseer main queue loop", e);
 }
 try {
+boolean[] itemWasMoved = new boolean[1];
 while (head != null) {
+itemWasMoved[0] = false;
 byte[] data = head;
 final ZkNodeProps message = ZkNodeProps.load(data);
 log.debug("processMessage: queueSize: {}, message = {} current state version: {}", stateUpdateQueue.getStats().getQueueLength(), message, clusterState.getZkClusterStateVersion());

@@ -205,7 +207,11 @@ public class Overseer implements Closeable {
 clusterState = processQueueItem(message, clusterState, zkStateWriter, true, new ZkStateWriter.ZkWriteCallback() {
 @Override
 public void onEnqueue() throws Exception {
-workQueue.offer(data);
+if (!itemWasMoved[0]) {
+stateUpdateQueue.poll();
+itemWasMoved[0] = true;
+workQueue.offer(data);
+}
 }
 
 @Override

@@ -215,9 +221,10 @@ public class Overseer implements Closeable {
 }
 });
 
-// it is safer to keep this poll here because an invalid message might never be queued
-// and therefore we can't rely on the ZkWriteCallback to remove the item
-stateUpdateQueue.poll();
+// If the ZkWriteCallback never fired, just dump the item, it might be an invalid message.
+if (!itemWasMoved[0]) {
+stateUpdateQueue.poll();
+}
 
 if (isClosed) break;
 // if an event comes in the next 100ms batch it together

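
The three Overseer hunks above share one idea: the head of stateUpdateQueue must be removed exactly once, whether the removal happens inside the ZkWriteCallback (the normal path) or afterwards (the callback never fires for an invalid message). A self-contained sketch of that single-removal idiom, using plain in-memory deques as stand-ins for the real ZooKeeper-backed queues:

    import java.util.ArrayDeque;

    public class SingleMoveSketch {
      public static void main(String[] args) {
        ArrayDeque<String> stateUpdateQueue = new ArrayDeque<>();
        ArrayDeque<String> workQueue = new ArrayDeque<>();
        stateUpdateQueue.offer("message");

        final boolean[] itemWasMoved = {false};  // one-element array: a mutable flag the callback can capture
        final String data = stateUpdateQueue.peek();

        Runnable onEnqueue = () -> {
          if (!itemWasMoved[0]) {      // normal path: move the head exactly once
            stateUpdateQueue.poll();
            itemWasMoved[0] = true;
            workQueue.offer(data);
          }
        };
        onEnqueue.run();               // in Overseer this may or may not be invoked

        if (!itemWasMoved[0]) {        // callback never fired (e.g. invalid message): drop the head here
          stateUpdateQueue.poll();
        }
      }
    }
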
@@ -123,6 +123,7 @@ public class SolrConfig extends Config implements MapSerializable {
 private int formUploadLimitKB;
 
 private boolean enableRemoteStreams;
+private boolean enableStreamBody;
 
 private boolean handleSelect;
 

@@ -308,6 +309,9 @@ public class SolrConfig extends Config implements MapSerializable {
 enableRemoteStreams = getBool(
 "requestDispatcher/requestParsers/@enableRemoteStreaming", false);
 
+enableStreamBody = getBool(
+"requestDispatcher/requestParsers/@enableStreamBody", false);
+
 handleSelect = getBool(
 "requestDispatcher/@handleSelect", !luceneMatchVersion.onOrAfter(Version.LUCENE_7_0_0));
 

@@ -784,6 +788,10 @@ public class SolrConfig extends Config implements MapSerializable {
 return enableRemoteStreams;
 }
 
+public boolean isEnableStreamBody() {
+return enableStreamBody;
+}
+
 @Override
 public int getInt(String path) {
 return getInt(path, 0);

@@ -216,6 +216,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
 .withFunctionName("rev", ReverseEvaluator.class)
 .withFunctionName("scale", ScaleEvaluator.class)
 .withFunctionName("sequence", SequenceEvaluator.class)
+.withFunctionName("addAll", AddAllEvaluator.class)
 
 
 // Boolean Stream Evaluators

@@ -27,24 +27,24 @@ import java.util.Map;
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.document.StoredField;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.FieldValueQuery;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SortField;
-import org.apache.solr.uninverting.UninvertingReader.Type;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.SolrException;
 import org.apache.solr.response.TextResponseWriter;
 import org.apache.solr.search.Filter;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.QueryWrapperFilter;
 import org.apache.solr.search.SolrConstantScoreQuery;
 import org.apache.solr.search.function.ValueSourceRangeFilter;
+import org.apache.solr.uninverting.UninvertingReader.Type;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -331,7 +331,7 @@ public class CurrencyFieldType extends FieldType implements SchemaAware, ResourceLoaderAware {
 (p2 != null) ? p2.getCurrencyCode() : defaultCurrency;
 
 // ValueSourceRangeFilter doesn't check exists(), so we have to
-final Filter docsWithValues = new QueryWrapperFilter(new FieldValueQuery(getAmountField(field).getName()));
+final Filter docsWithValues = new QueryWrapperFilter(new DocValuesFieldExistsQuery(getAmountField(field).getName()));
 final Filter vsRangeFilter = new ValueSourceRangeFilter
 (new RawCurrencyValueSource(field, currencyCode, parser),
 p1 == null ? null : p1.getAmount() + "",

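
The change above swaps FieldValueQuery for DocValuesFieldExistsQuery, which matches every document that has doc values for the named field. A self-contained sketch (the field name amount_raw is made up for the example):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.NumericDocValuesField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.DocValuesFieldExistsQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.store.RAMDirectory;

    public class ExistsQuerySketch {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
          Document withValue = new Document();
          withValue.add(new NumericDocValuesField("amount_raw", 42L));
          w.addDocument(withValue);
          w.addDocument(new Document());  // second doc has no doc values for the field
        }
        try (DirectoryReader r = DirectoryReader.open(dir)) {
          IndexSearcher s = new IndexSearcher(r);
          System.out.println(s.count(new DocValuesFieldExistsQuery("amount_raw")));  // prints 1
        }
      }
    }
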
@@ -109,8 +109,11 @@ public final class ManagedIndexSchema extends IndexSchema {
 }
 
 
-/** Persist the schema to local storage or to ZooKeeper */
-boolean persistManagedSchema(boolean createOnly) {
+/**
+ * Persist the schema to local storage or to ZooKeeper
+ * @param createOnly set to false to allow update of existing schema
+ */
+public boolean persistManagedSchema(boolean createOnly) {
 if (loader instanceof ZkSolrResourceLoader) {
 return persistManagedSchemaToZooKeeper(createOnly);
 }

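
Widening persistManagedSchema to public lets code outside the package persist a modified schema, as the update processor change later in this commit does. A sketch only; newSchema here stands for an IndexSchema just built via addFields/addCopyFields, and the cast assumes the core runs in managed-schema mode:

    // createOnly=false allows updating a schema that already exists in storage.
    boolean persisted = ((ManagedIndexSchema) newSchema).persistManagedSchema(false);
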
@@ -65,7 +65,8 @@ public class ChildFieldValueSourceParser extends ValueSourceParser {
 
 @Override
 public String value(int slot) {
-return byteRefs.value(slot).utf8ToString();
+final BytesRef value = byteRefs.value(slot);
+return value!=null ? value.utf8ToString() : null;
 }
 
 @Override

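
The fix above acknowledges that value(slot) can legitimately return null (a document with no value for the sorted field) and forwards the null instead of throwing a NullPointerException from utf8ToString(). The guard in isolation:

    import org.apache.lucene.util.BytesRef;

    final class BytesRefUtil {
      // Null-safe unwrap of a possibly-missing sort value.
      static String toStringOrNull(BytesRef value) {
        return value != null ? value.utf8ToString() : null;
      }
    }
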
@@ -84,6 +84,7 @@ public class SolrRequestParsers
 private final HashMap<String, SolrRequestParser> parsers =
 new HashMap<>();
 private final boolean enableRemoteStreams;
+private final boolean enableStreamBody;
 private StandardRequestParser standard;
 private boolean handleSelect = true;
 private boolean addHttpRequestToContext;

@@ -101,8 +102,9 @@ public class SolrRequestParsers
 final int multipartUploadLimitKB, formUploadLimitKB;
 if( globalConfig == null ) {
 multipartUploadLimitKB = formUploadLimitKB = Integer.MAX_VALUE;
-enableRemoteStreams = true;
-handleSelect = true;
+enableRemoteStreams = false;
+enableStreamBody = false;
+handleSelect = false;
 addHttpRequestToContext = false;
 } else {
 multipartUploadLimitKB = globalConfig.getMultipartUploadLimitKB();

@@ -110,6 +112,7 @@ public class SolrRequestParsers
 formUploadLimitKB = globalConfig.getFormUploadLimitKB();
 
 enableRemoteStreams = globalConfig.isEnableRemoteStreams();
+enableStreamBody = globalConfig.isEnableStreamBody();
 
 // Let this filter take care of /select?xxx format
 handleSelect = globalConfig.isHandleSelect();

@@ -121,9 +124,10 @@ public class SolrRequestParsers
 
 private SolrRequestParsers() {
 enableRemoteStreams = false;
+enableStreamBody = false;
 handleSelect = false;
 addHttpRequestToContext = false;
-init(2048, 2048);
+init(Integer.MAX_VALUE, Integer.MAX_VALUE);
 }
 
 private void init( int multipartUploadLimitKB, int formUploadLimitKB) {

@@ -202,7 +206,7 @@ public class SolrRequestParsers
 strs = params.getParams( CommonParams.STREAM_FILE );
 if( strs != null ) {
 if( !enableRemoteStreams ) {
-throw new SolrException( ErrorCode.BAD_REQUEST, "Remote Streaming is disabled." );
+throw new SolrException( ErrorCode.BAD_REQUEST, "Remote Streaming is disabled. See http://lucene.apache.org/solr/guide/requestdispatcher-in-solrconfig.html for help" );
 }
 for( final String file : strs ) {
 ContentStreamBase stream = new ContentStreamBase.FileStream( new File(file) );

@@ -216,6 +220,9 @@ public class SolrRequestParsers
 // Check for streams in the request parameters
 strs = params.getParams( CommonParams.STREAM_BODY );
 if( strs != null ) {
+if( !enableStreamBody ) {
+throw new SolrException( ErrorCode.BAD_REQUEST, "Stream Body is disabled. See http://lucene.apache.org/solr/guide/requestdispatcher-in-solrconfig.html for help" );
+}
 for( final String body : strs ) {
 ContentStreamBase stream = new ContentStreamBase.StringStream( body );
 if( contentType != null ) {

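
Taken together, these hunks flip the parser to default-deny: remote streaming and stream.body are both refused unless explicitly enabled in configuration, and even the no-config constructor path now starts closed. A self-contained sketch of the pattern, with a hypothetical StreamGate type standing in for SolrRequestParsers:

    // Default-deny gating: absence of configuration means "disabled", not "enabled".
    final class StreamGate {
      private final boolean enableRemoteStreams;
      private final boolean enableStreamBody;

      StreamGate(Boolean remote, Boolean body) {
        this.enableRemoteStreams = remote != null && remote;
        this.enableStreamBody = body != null && body;
      }

      void checkStreamBody() {
        if (!enableStreamBody) {
          // SolrRequestParsers raises ErrorCode.BAD_REQUEST (HTTP 400) at this point.
          throw new IllegalStateException("Stream Body is disabled.");
        }
      }
    }
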
@@ -26,6 +26,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;

@@ -73,52 +74,61 @@ import static org.apache.solr.core.ConfigSetProperties.IMMUTABLE_CONFIGSET_ARG;
 * </p>
 * <p>
 * This processor takes as configuration a sequence of zero or more "typeMapping"-s from
-* one or more "valueClass"-s, specified as either an <arr> of <str>, or
-* multiple <str> with the same name, to an existing schema "fieldType".
+* one or more "valueClass"-s, specified as either an <code><arr></code> of
+* <code><str></code>, or multiple <code><str></code> with the same name,
+* to an existing schema "fieldType".
 * </p>
 * <p>
 * If more than one "valueClass" is specified in a "typeMapping", field values with any
 * of the specified "valueClass"-s will be mapped to the specified target "fieldType".
 * The "typeMapping"-s are attempted in the specified order; if a field value's class
 * is not specified in a "valueClass", the next "typeMapping" is attempted. If no
-* "typeMapping" succeeds, then the specified "defaultFieldType" is used.
+* "typeMapping" succeeds, then either the "typeMapping" configured with
+* <code><bool name="default">true</bool></code> is used, or if none is so
+* configured, the <code><str name="defaultFieldType">...</str></code> is
+* used.
 * </p>
 * <p>
+* Zero or more "copyField" directives may be included with each "typeMapping", using a
+* <code><lst></code>. The copy field source is automatically set to the new field
+* name; "dest" must specify the destination field or dynamic field in a
+* <code><str></code>; and "maxChars" may optionally be specified in an
+* <code><int></code>.
+* </p>
+* <p>
 * Example configuration:
 * </p>
 *
 * <pre class="prettyprint">
-* <processor class="solr.AddSchemaFieldsUpdateProcessorFactory">
-* <str name="defaultFieldType">text_general</str>
+* <updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
 * <lst name="typeMapping">
-* <str name="valueClass">Boolean</str>
+* <str name="valueClass">java.lang.String</str>
 * <str name="fieldType">text_general</str>
+* <lst name="copyField">
+* <str name="dest">*_str</str>
+* <int name="maxChars">256</int>
+* </lst>
+* <!-- Use as default mapping instead of defaultFieldType -->
+* <bool name="default">true</bool>
 * </lst>
 * <lst name="typeMapping">
+* <str name="valueClass">java.lang.Boolean</str>
+* <str name="fieldType">booleans</str>
+* </lst>
+* <lst name="typeMapping">
-* <str name="valueClass">Integer</str>
-* <str name="fieldType">pints</str>
-* </lst>
-* <lst name="typeMapping">
-* <str name="valueClass">Float</str>
-* <str name="fieldType">pfloats</str>
-* </lst>
-* <lst name="typeMapping">
-* <str name="valueClass">Date</str>
+* <str name="valueClass">java.util.Date</str>
 * <str name="fieldType">pdates</str>
 * </lst>
 * <lst name="typeMapping">
-* <str name="valueClass">Long</str>
-* <str name="valueClass">Integer</str>
+* <str name="valueClass">java.lang.Long</str>
+* <str name="valueClass">java.lang.Integer</str>
 * <str name="fieldType">plongs</str>
 * </lst>
 * <lst name="typeMapping">
-* <arr name="valueClass">
-* <str>Double</str>
-* <str>Float</str>
-* </arr>
+* <str name="valueClass">java.lang.Number</str>
 * <str name="fieldType">pdoubles</str>
 * </lst>
-* </processor></pre>
+* </updateProcessor></pre>
 */
 public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 implements SolrCoreAware, UpdateRequestProcessorFactory.RunAlways {

@@ -128,7 +138,11 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 private static final String VALUE_CLASS_PARAM = "valueClass";
 private static final String FIELD_TYPE_PARAM = "fieldType";
 private static final String DEFAULT_FIELD_TYPE_PARAM = "defaultFieldType";
+private static final String COPY_FIELD_PARAM = "copyField";
+private static final String DEST_PARAM = "dest";
+private static final String MAX_CHARS_PARAM = "maxChars";
+private static final String IS_DEFAULT_PARAM = "default";
 
 private List<TypeMapping> typeMappings = Collections.emptyList();
 private SelectorParams inclusions = new SelectorParams();
 private Collection<SelectorParams> exclusions = new ArrayList<>();

@@ -152,16 +166,18 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 validateSelectorParams(exclusion);
 }
 Object defaultFieldTypeParam = args.remove(DEFAULT_FIELD_TYPE_PARAM);
-if (null == defaultFieldTypeParam) {
-throw new SolrException(SERVER_ERROR, "Missing required init param '" + DEFAULT_FIELD_TYPE_PARAM + "'");
-} else {
+if (null != defaultFieldTypeParam) {
 if ( ! (defaultFieldTypeParam instanceof CharSequence)) {
 throw new SolrException(SERVER_ERROR, "Init param '" + DEFAULT_FIELD_TYPE_PARAM + "' must be a <str>");
 }
+defaultFieldType = defaultFieldTypeParam.toString();
 }
-defaultFieldType = defaultFieldTypeParam.toString();
 
 typeMappings = parseTypeMappings(args);
+if (null == defaultFieldType && typeMappings.stream().noneMatch(TypeMapping::isDefault)) {
+throw new SolrException(SERVER_ERROR, "Must specify either '" + DEFAULT_FIELD_TYPE_PARAM +
+"' or declare one typeMapping as default.");
+}
 
 super.init(args);
 }

@@ -207,8 +223,59 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 throw new SolrException(SERVER_ERROR,
 "Each '" + TYPE_MAPPING_PARAM + "' <lst/> must contain at least one '" + VALUE_CLASS_PARAM + "' <str>");
 }
-typeMappings.add(new TypeMapping(fieldType, valueClasses));
+
+// isDefault (optional)
+Boolean isDefault = false;
+Object isDefaultObj = typeMappingNamedList.remove(IS_DEFAULT_PARAM);
+if (null != isDefaultObj) {
+if ( ! (isDefaultObj instanceof Boolean)) {
+throw new SolrException(SERVER_ERROR, "'" + IS_DEFAULT_PARAM + "' init param must be a <bool>");
+}
+if (null != typeMappingNamedList.get(IS_DEFAULT_PARAM)) {
+throw new SolrException(SERVER_ERROR,
+"Each '" + COPY_FIELD_PARAM + "' <lst/> may contain only one '" + IS_DEFAULT_PARAM + "' <bool>");
+}
+isDefault = Boolean.parseBoolean(isDefaultObj.toString());
+}
+
+Collection<CopyFieldDef> copyFieldDefs = new ArrayList<>();
+while (typeMappingNamedList.get(COPY_FIELD_PARAM) != null) {
+Object copyFieldObj = typeMappingNamedList.remove(COPY_FIELD_PARAM);
+if ( ! (copyFieldObj instanceof NamedList)) {
+throw new SolrException(SERVER_ERROR, "'" + COPY_FIELD_PARAM + "' init param must be a <lst>");
+}
+NamedList copyFieldNamedList = (NamedList)copyFieldObj;
+// dest
+Object destObj = copyFieldNamedList.remove(DEST_PARAM);
+if (null == destObj) {
+throw new SolrException(SERVER_ERROR,
+"Each '" + COPY_FIELD_PARAM + "' <lst/> must contain a '" + DEST_PARAM + "' <str>");
+}
+if ( ! (destObj instanceof CharSequence)) {
+throw new SolrException(SERVER_ERROR, "'" + COPY_FIELD_PARAM + "' init param must be a <str>");
+}
+if (null != copyFieldNamedList.get(COPY_FIELD_PARAM)) {
+throw new SolrException(SERVER_ERROR,
+"Each '" + COPY_FIELD_PARAM + "' <lst/> may contain only one '" + COPY_FIELD_PARAM + "' <str>");
+}
+String dest = destObj.toString();
+// maxChars (optional)
+Integer maxChars = 0;
+Object maxCharsObj = copyFieldNamedList.remove(MAX_CHARS_PARAM);
+if (null != maxCharsObj) {
+if ( ! (maxCharsObj instanceof Integer)) {
+throw new SolrException(SERVER_ERROR, "'" + MAX_CHARS_PARAM + "' init param must be a <int>");
+}
+if (null != copyFieldNamedList.get(MAX_CHARS_PARAM)) {
+throw new SolrException(SERVER_ERROR,
+"Each '" + COPY_FIELD_PARAM + "' <lst/> may contain only one '" + MAX_CHARS_PARAM + "' <str>");
+}
+maxChars = Integer.parseInt(maxCharsObj.toString());
+}
+copyFieldDefs.add(new CopyFieldDef(dest, maxChars));
+}
+typeMappings.add(new TypeMapping(fieldType, valueClasses, isDefault, copyFieldDefs));
 
 if (0 != typeMappingNamedList.size()) {
 throw new SolrException(SERVER_ERROR,
 "Unexpected '" + TYPE_MAPPING_PARAM + "' init sub-param(s): '" + typeMappingNamedList.toString() + "'");

@@ -233,11 +300,16 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 private static class TypeMapping {
 public String fieldTypeName;
 public Collection<String> valueClassNames;
+public Collection<CopyFieldDef> copyFieldDefs;
 public Set<Class<?>> valueClasses;
+public Boolean isDefault;
 
-public TypeMapping(String fieldTypeName, Collection<String> valueClassNames) {
+public TypeMapping(String fieldTypeName, Collection<String> valueClassNames, boolean isDefault,
+Collection<CopyFieldDef> copyFieldDefs) {
 this.fieldTypeName = fieldTypeName;
 this.valueClassNames = valueClassNames;
+this.isDefault = isDefault;
+this.copyFieldDefs = copyFieldDefs;
 // this.valueClasses population is delayed until the schema is available
 }
 

@@ -257,6 +329,38 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 }
 }
 }
+
+public boolean isDefault() {
+return isDefault;
+}
 }
 
+private static class CopyFieldDef {
+private final String destGlob;
+private final Integer maxChars;
+
+public CopyFieldDef(String destGlob, Integer maxChars) {
+this.destGlob = destGlob;
+this.maxChars = maxChars;
+if (destGlob.contains("*") && (!destGlob.startsWith("*") && !destGlob.endsWith("*"))) {
+throw new SolrException(SERVER_ERROR, "dest '" + destGlob +
+"' is invalid. Must either be a plain field name or start or end with '*'");
+}
+}
+
+public Integer getMaxChars() {
+return maxChars;
+}
+
+public String getDest(String srcFieldName) {
+if (!destGlob.contains("*")) {
+return destGlob;
+} else if (destGlob.startsWith("*")) {
+return srcFieldName + destGlob.substring(1);
+} else {
+return destGlob.substring(0,destGlob.length()-1) + srcFieldName;
+}
+}
+}
+
 private class AddSchemaFieldsUpdateProcessor extends UpdateRequestProcessor {

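
CopyFieldDef above resolves a one-sided glob against the source field name and rejects interior wildcards at construction time. A runnable re-implementation of getDest() for illustration:

    final class DestGlobSketch {
      // Mirrors CopyFieldDef.getDest(): a "*" may appear only at the start or the end.
      static String dest(String destGlob, String srcFieldName) {
        if (!destGlob.contains("*")) {
          return destGlob;                              // plain name: used verbatim
        } else if (destGlob.startsWith("*")) {
          return srcFieldName + destGlob.substring(1);  // "*_str" + "title" -> "title_str"
        } else {
          return destGlob.substring(0, destGlob.length() - 1) + srcFieldName;  // "str_*" -> "str_title"
        }
      }

      public static void main(String[] args) {
        System.out.println(dest("*_str", "title"));  // title_str
        System.out.println(dest("str_*", "title"));  // str_title
        System.out.println(dest("plain", "title"));  // plain
      }
    }
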
@@ -278,6 +382,8 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 IndexSchema oldSchema = cmd.getReq().getSchema();
 for (;;) {
 List<SchemaField> newFields = new ArrayList<>();
+// Group copyField defs per field and then per maxChar, to adapt to IndexSchema API
+Map<String,Map<Integer,List<CopyFieldDef>>> newCopyFields = new HashMap<>();
 // build a selector each time through the loop b/c the schema we are
 // processing may have changed
 FieldNameSelector selector = buildSelector(oldSchema);

@@ -285,12 +391,20 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 getUnknownFields(selector, doc, unknownFields);
 for (final Map.Entry<String,List<SolrInputField>> entry : unknownFields.entrySet()) {
 String fieldName = entry.getKey();
-String fieldTypeName = mapValueClassesToFieldType(entry.getValue());
+String fieldTypeName = defaultFieldType;
+TypeMapping typeMapping = mapValueClassesToFieldType(entry.getValue());
+if (typeMapping != null) {
+fieldTypeName = typeMapping.fieldTypeName;
+if (!typeMapping.copyFieldDefs.isEmpty()) {
+newCopyFields.put(fieldName,
+typeMapping.copyFieldDefs.stream().collect(Collectors.groupingBy(CopyFieldDef::getMaxChars)));
+}
+}
 newFields.add(oldSchema.newField(fieldName, fieldTypeName, Collections.<String,Object>emptyMap()));
 }
-if (newFields.isEmpty()) {
+if (newFields.isEmpty() && newCopyFields.isEmpty()) {
 // nothing to do - no fields will be added - exit from the retry loop
-log.debug("No fields to add to the schema.");
+log.debug("No fields or copyFields to add to the schema.");
 break;
 } else if ( isImmutableConfigSet(core) ) {
 final String message = "This ConfigSet is immutable.";

@@ -298,7 +412,7 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 }
 if (log.isDebugEnabled()) {
 StringBuilder builder = new StringBuilder();
-builder.append("Fields to be added to the schema: [");
+builder.append("\nFields to be added to the schema: [");
 boolean isFirst = true;
 for (SchemaField field : newFields) {
 builder.append(isFirst ? "" : ",");

@@ -307,20 +421,44 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 builder.append("{type=").append(field.getType().getTypeName()).append("}");
 }
 builder.append("]");
+builder.append("\nCopyFields to be added to the schema: [");
+isFirst = true;
+for (String fieldName : newCopyFields.keySet()) {
+builder.append(isFirst ? "" : ",");
+isFirst = false;
+builder.append("source=").append(fieldName).append("{");
+for (List<CopyFieldDef> copyFieldDefList : newCopyFields.get(fieldName).values()) {
+for (CopyFieldDef copyFieldDef : copyFieldDefList) {
+builder.append("{dest=").append(copyFieldDef.getDest(fieldName));
+builder.append(", maxChars=").append(copyFieldDef.getMaxChars()).append("}");
+}
+}
+builder.append("}");
+}
+builder.append("]");
 log.debug(builder.toString());
 }
 // Need to hold the lock during the entire attempt to ensure that
 // the schema on the request is the latest
 synchronized (oldSchema.getSchemaUpdateLock()) {
 try {
-IndexSchema newSchema = oldSchema.addFields(newFields);
+IndexSchema newSchema = oldSchema.addFields(newFields, Collections.emptyMap(), false);
+// Add copyFields
+for (String srcField : newCopyFields.keySet()) {
+for (Integer maxChars : newCopyFields.get(srcField).keySet()) {
+newSchema = newSchema.addCopyFields(srcField,
+newCopyFields.get(srcField).get(maxChars).stream().map(f -> f.getDest(srcField)).collect(Collectors.toList()),
+maxChars);
+}
+}
 if (null != newSchema) {
 ((ManagedIndexSchema)newSchema).persistManagedSchema(false);
 core.setLatestSchema(newSchema);
 cmd.getReq().updateSchemaToLatest();
-log.debug("Successfully added field(s) to the schema.");
+log.debug("Successfully added field(s) and copyField(s) to the schema.");
 break; // success - exit from the retry loop
 } else {
-throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed to add fields.");
+throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed to add fields and/or copyFields.");
 }
 } catch (ManagedIndexSchema.FieldExistsException e) {
 log.error("At least one field to be added already exists in the schema - retrying.");

@@ -360,11 +498,11 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 }
 
 /**
-* Maps all given field values' classes to a field type using the configured type mapping rules.
+* Maps all given field values' classes to a typeMapping object
 *
 * @param fields one or more (same-named) field values from one or more documents
 */
-private String mapValueClassesToFieldType(List<SolrInputField> fields) {
+private TypeMapping mapValueClassesToFieldType(List<SolrInputField> fields) {
 NEXT_TYPE_MAPPING: for (TypeMapping typeMapping : typeMappings) {
 for (SolrInputField field : fields) {
 NEXT_FIELD_VALUE: for (Object fieldValue : field.getValues()) {

@@ -379,10 +517,18 @@ public class AddSchemaFieldsUpdateProcessorFactory extends UpdateRequestProcessorFactory
 }
 }
 // Success! Each of this field's values is an instance of a mapped valueClass
-return typeMapping.fieldTypeName;
+return typeMapping;
 }
-// At least one of this field's values is not an instance of any of the mapped valueClass-s
-return defaultFieldType;
+// Return the typeMapping marked as default, if we have one, else return null to use fallback type
+List<TypeMapping> defaultMappings = typeMappings.stream().filter(TypeMapping::isDefault).collect(Collectors.toList());
+if (defaultMappings.size() > 1) {
+throw new SolrException(SERVER_ERROR, "Only one typeMapping can be default");
+} else if (defaultMappings.size() == 1) {
+return defaultMappings.get(0);
+} else {
+return null;
+}
 }
 
 private FieldNameSelector buildSelector(IndexSchema schema) {

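
The new tail of mapValueClassesToFieldType() changes the fallback order: when no typeMapping matches, the single mapping flagged default wins, more than one default is an error, and null tells the caller to fall back to defaultFieldType. The same logic in a runnable, stripped-down form:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    final class DefaultMappingSketch {
      static final class Mapping {
        final String fieldType;
        final boolean isDefault;
        Mapping(String fieldType, boolean isDefault) { this.fieldType = fieldType; this.isDefault = isDefault; }
      }

      // Pick the one default mapping; fail on duplicates; null means "use defaultFieldType".
      static Mapping resolveDefault(List<Mapping> mappings) {
        List<Mapping> defaults = mappings.stream().filter(m -> m.isDefault).collect(Collectors.toList());
        if (defaults.size() > 1) throw new IllegalStateException("Only one typeMapping can be default");
        return defaults.size() == 1 ? defaults.get(0) : null;
      }

      public static void main(String[] args) {
        Mapping m = resolveDefault(Arrays.asList(new Mapping("text_general", true), new Mapping("plongs", false)));
        System.out.println(m.fieldType);  // text_general
      }
    }
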
@@ -65,7 +65,8 @@
 "requestParsers":{
 "multipartUploadLimitInKB":0,
 "formdataUploadLimitInKB":0,
-"enableRemoteStreaming":0,
+"enableRemoteStreaming":10,
+"enableStreamBody":10,
 "addHttpRequestToContext":0}},
 "peerSync":{
 "useRangeVersions":11

@@ -48,6 +48,7 @@
 <field name="_version_" type="long" indexed="true" stored="true"/>
 <field name="_root_" type="string" indexed="true" stored="true" multiValued="false"/>
 
+<dynamicField name="*_str" type="string" stored="false" multiValued="true" docValues="true" useDocValuesAsStored="false"/>
 <dynamicField name="*_t" type="text" indexed="true" stored="true"/>
 <dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
 <dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>

@@ -68,6 +68,80 @@
 <updateRequestProcessorChain name="add-fields">
 <processor class="solr.AddSchemaFieldsUpdateProcessorFactory">
 <str name="defaultFieldType">text</str>
+<lst name="typeMapping">
+<str name="valueClass">java.lang.String</str>
+<str name="fieldType">text</str>
+<lst name="copyField">
+<str name="dest">*_str</str>
+</lst>
+</lst>
+<lst name="typeMapping">
+<str name="valueClass">java.lang.Boolean</str>
+<str name="fieldType">boolean</str>
+</lst>
+<lst name="typeMapping">
+<str name="valueClass">java.lang.Integer</str>
+<str name="fieldType">pints</str>
+</lst>
+<lst name="typeMapping">
+<str name="valueClass">java.lang.Float</str>
+<str name="fieldType">pfloats</str>
+</lst>
+<lst name="typeMapping">
+<str name="valueClass">java.util.Date</str>
+<str name="fieldType">pdates</str>
+</lst>
+<lst name="typeMapping">
+<str name="valueClass">java.lang.Long</str>
+<str name="valueClass">java.lang.Integer</str>
+<str name="fieldType">plongs</str>
+</lst>
+<lst name="typeMapping">
+<str name="valueClass">java.lang.Number</str>
+<str name="fieldType">pdoubles</str>
+</lst>
+</processor>
+<processor class="solr.RunUpdateProcessorFactory" />
+</updateRequestProcessorChain>
+
+<updateRequestProcessorChain name="add-fields-maxchars">
+<processor class="solr.AddSchemaFieldsUpdateProcessorFactory">
+<str name="defaultFieldType">text</str>
+<lst name="typeMapping">
+<str name="valueClass">java.lang.String</str>
+<str name="fieldType">text</str>
+<lst name="copyField">
+<str name="dest">*_str</str>
+<int name="maxChars">10</int>
+</lst>
+<lst name="copyField">
+<str name="dest">*_t</str>
+<int name="maxChars">20</int>
+</lst>
+<lst name="copyField">
+<str name="dest">*2_t</str>
+<int name="maxChars">20</int>
+</lst>
+</lst>
+</processor>
+<processor class="solr.LogUpdateProcessorFactory" />
+<processor class="solr.DistributedUpdateProcessorFactory" />
+<processor class="solr.RunUpdateProcessorFactory" />
+</updateRequestProcessorChain>
+
+<!-- This chain has one of the typeMappings set as default=true, instead of falling back to the defaultFieldType -->
+<updateRequestProcessorChain name="add-fields-default-mapping">
+<processor class="solr.AddSchemaFieldsUpdateProcessorFactory">
+<lst name="typeMapping">
+<str name="valueClass">java.lang.String</str>
+<str name="fieldType">text</str>
+<lst name="copyField">
+<str name="dest">*_str</str>
+<int name="maxChars">10</int>
+</lst>
+<!-- Use as default mapping instead of defaultFieldType -->
+<bool name="default">true</bool>
+</lst>
 <lst name="typeMapping">
 <str name="valueClass">java.lang.Boolean</str>
 <str name="fieldType">boolean</str>

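
With the add-fields-maxchars chain declared above, the first document that carries an unknown string field creates both the guessed field and its truncated copies. A hedged SolrJ sketch; the URL and collection name are hypothetical, and the chain is selected per request via update.chain:

    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.request.UpdateRequest;
    import org.apache.solr.common.SolrInputDocument;

    public class GuessingSketch {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/test").build()) {
          SolrInputDocument doc = new SolrInputDocument();
          doc.addField("id", "1");
          doc.addField("newStringField", "a string longer than ten characters");
          UpdateRequest req = new UpdateRequest();
          req.setParam("update.chain", "add-fields-maxchars");  // route through the chain above
          req.add(doc);
          req.process(client);
          client.commit();  // newStringField_str should hold at most the first 10 chars
        }
      }
    }
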
@@ -29,6 +29,11 @@
 
 <codecFactory class="solr.SchemaCodecFactory"/>
 
+<requestDispatcher>
+<!-- Tests rely on stream.body -->
+<requestParsers enableStreamBody="true" />
+</requestDispatcher>
+
 <query>
 <filterCache
 enabled="${filterCache.enabled:false}"

@@ -35,6 +35,10 @@
 </updateLog>
 </updateHandler>
 
+<requestDispatcher>
+<requestParsers enableStreamBody="true" />
+</requestDispatcher>
+
 <requestHandler name="/select" class="solr.SearchHandler">
 <bool name="httpCaching">true</bool>
 </requestHandler>

@@ -31,6 +31,9 @@
 <str name="solr.hdfs.blockcache.global">${solr.hdfs.blockcache.global:true}</str>
 </directoryFactory>
 <schemaFactory class="ClassicIndexSchemaFactory"/>
+<requestDispatcher>
+<requestParsers enableStreamBody="true" />
+</requestDispatcher>
 
 <dataDir>${solr.data.dir:}</dataDir>
 

@@ -457,7 +457,7 @@
 </searchComponent>
 
 <requestDispatcher>
-<requestParsers enableRemoteStreaming="true" multipartUploadLimitInKB="-1" />
+<requestParsers enableRemoteStreaming="true" enableStreamBody="true" multipartUploadLimitInKB="-1" />
 <httpCaching lastModifiedFrom="openTime" etagSeed="Solr" never304="false">
 <cacheControl>max-age=30, public</cacheControl>
 </httpCaching>

@@ -1,67 +0,0 @@
-<?xml version="1.0" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!-- Example exchange rates file for CurrencyFieldType named "currency" in example schema -->
-
-<currencyConfig version="1.0">
-<rates>
-<!-- Updated from http://www.exchangerate.com/ at 2011-09-27 -->
-<rate from="USD" to="ARS" rate="4.333871" comment="ARGENTINA Peso" />
-<rate from="USD" to="AUD" rate="1.025768" comment="AUSTRALIA Dollar" />
-<rate from="USD" to="EUR" rate="0.743676" comment="European Euro" />
-<rate from="USD" to="BRL" rate="1.881093" comment="BRAZIL Real" />
-<rate from="USD" to="CAD" rate="1.030815" comment="CANADA Dollar" />
-<rate from="USD" to="CLP" rate="519.0996" comment="CHILE Peso" />
-<rate from="USD" to="CNY" rate="6.387310" comment="CHINA Yuan" />
-<rate from="USD" to="CZK" rate="18.47134" comment="CZECH REP. Koruna" />
-<rate from="USD" to="DKK" rate="5.515436" comment="DENMARK Krone" />
-<rate from="USD" to="HKD" rate="7.801922" comment="HONG KONG Dollar" />
-<rate from="USD" to="HUF" rate="215.6169" comment="HUNGARY Forint" />
-<rate from="USD" to="ISK" rate="118.1280" comment="ICELAND Krona" />
-<rate from="USD" to="INR" rate="49.49088" comment="INDIA Rupee" />
-<rate from="USD" to="XDR" rate="0.641358" comment="INTNL MON. FUND SDR" />
-<rate from="USD" to="ILS" rate="3.709739" comment="ISRAEL Sheqel" />
-<rate from="USD" to="JPY" rate="76.32419" comment="JAPAN Yen" />
-<rate from="USD" to="KRW" rate="1169.173" comment="KOREA (SOUTH) Won" />
-<rate from="USD" to="KWD" rate="0.275142" comment="KUWAIT Dinar" />
-<rate from="USD" to="MXN" rate="13.85895" comment="MEXICO Peso" />
-<rate from="USD" to="NZD" rate="1.285159" comment="NEW ZEALAND Dollar" />
-<rate from="USD" to="NOK" rate="5.859035" comment="NORWAY Krone" />
-<rate from="USD" to="PKR" rate="87.57007" comment="PAKISTAN Rupee" />
-<rate from="USD" to="PEN" rate="2.730683" comment="PERU Sol" />
-<rate from="USD" to="PHP" rate="43.62039" comment="PHILIPPINES Peso" />
-<rate from="USD" to="PLN" rate="3.310139" comment="POLAND Zloty" />
-<rate from="USD" to="RON" rate="3.100932" comment="ROMANIA Leu" />
-<rate from="USD" to="RUB" rate="32.14663" comment="RUSSIA Ruble" />
-<rate from="USD" to="SAR" rate="3.750465" comment="SAUDI ARABIA Riyal" />
-<rate from="USD" to="SGD" rate="1.299352" comment="SINGAPORE Dollar" />
-<rate from="USD" to="ZAR" rate="8.329761" comment="SOUTH AFRICA Rand" />
-<rate from="USD" to="SEK" rate="6.883442" comment="SWEDEN Krona" />
-<rate from="USD" to="CHF" rate="0.906035" comment="SWITZERLAND Franc" />
-<rate from="USD" to="TWD" rate="30.40283" comment="TAIWAN Dollar" />
-<rate from="USD" to="THB" rate="30.89487" comment="THAILAND Baht" />
-<rate from="USD" to="AED" rate="3.672955" comment="U.A.E. Dirham" />
-<rate from="USD" to="UAH" rate="7.988582" comment="UKRAINE Hryvnia" />
-<rate from="USD" to="GBP" rate="0.647910" comment="UNITED KINGDOM Pound" />
-
-<!-- Cross-rates for some common currencies -->
-<rate from="EUR" to="GBP" rate="0.869914" />
-<rate from="EUR" to="NOK" rate="7.800095" />
-<rate from="GBP" to="NOK" rate="8.966508" />
-</rates>
-</currencyConfig>

@ -16,17 +16,14 @@
|
|||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!--
|
||||
This is the Solr schema file. This file should be named "schema.xml" and
|
||||
should be in the conf directory under the solr home
|
||||
(i.e. ./solr/conf/schema.xml by default)
|
||||
or located where the classloader for the Solr webapp can find it.
|
||||
<!--
|
||||
|
||||
This example schema is the recommended starting point for users.
|
||||
It should be kept correct and concise, usable out-of-the-box.
|
||||
|
||||
|
||||
For more information, on how to customize this file, please see
|
||||
http://wiki.apache.org/solr/SchemaXml
|
||||
http://lucene.apache.org/solr/guide/documents-fields-and-schema-design.html
|
||||
|
||||
PERFORMANCE NOTE: this schema includes many optional features and should not
|
||||
be used for benchmarking. To improve performance one could
|
||||
|
@ -39,10 +36,6 @@
|
|||
- for best index size and searching performance, set "index" to false
|
||||
for all general text fields, use copyField to copy them to the
|
||||
catchall "text" field, and use that for searching.
|
||||
- For maximum indexing performance, use the ConcurrentUpdateSolrServer
|
||||
java client.
|
||||
- Remember to run the JVM in server mode, and use a higher logging level
|
||||
that avoids logging every request
|
||||
-->
|
||||
|
||||
<schema name="default-config" version="1.6">
|
||||
|
@ -70,14 +63,12 @@
|
|||
fieldTypes section
|
||||
indexed: true if this field should be indexed (searchable or sortable)
|
||||
stored: true if this field should be retrievable
|
||||
docValues: true if this field should have doc values. Doc values are
|
||||
useful (required, if you are using *Point fields) for faceting,
|
||||
grouping, sorting and function queries. Doc values will make the index
|
||||
docValues: true if this field should have doc values. Doc Values is
|
||||
recommended (required, if you are using *Point fields) for faceting,
|
||||
grouping, sorting and function queries. Doc Values will make the index
|
||||
faster to load, more NRT-friendly and more memory-efficient.
|
||||
They however come with some limitations: they are currently only
|
||||
supported by StrField, UUIDField, all Trie*Fields and *PointFields,
|
||||
and depending on the field type, they might require the field to be
|
||||
single-valued, be required or have a default value (check the
|
||||
They are currently only supported by StrField, UUIDField, all Trie*Fields and *PointFields.
|
||||
Some field types may have limitations on using Doc Values (check the
|
||||
documentation of the field type you're interested in for more information)
|
||||
multiValued: true if this field may contain multiple values per document
|
||||
omitNorms: (expert) set to true to omit the norms associated with
|
||||
|
@ -106,10 +97,9 @@
|
|||
trailing underscores (e.g. _version_) are reserved.
|
||||
-->
|
||||
|
||||
<!-- In this _default configset, only three fields are pre-declared:
|
||||
id, _version_, and _text_. All other fields will be type guessed and added via the
|
||||
"add-unknown-fields-to-the-schema" update request processor chain declared
|
||||
in solrconfig.xml.
|
||||
<!-- In this _default configset, only four fields are pre-declared:
|
||||
id, _version_, and _text_ and _root_. All other fields will be type guessed and added via the
|
||||
"add-unknown-fields-to-the-schema" update request processor chain declared in solrconfig.xml.
|
||||
|
||||
Note that many dynamic fields are also defined - you can use them to specify a
|
||||
field's type via field naming conventions - see below.
|
||||
|
@ -117,8 +107,9 @@
|
|||
WARNING: The _text_ catch-all field will significantly increase your index size.
|
||||
If you don't need it, consider removing it and the corresponding copyField directive.
|
||||
-->
|
||||
|
||||
<field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false" />
|
||||
<!-- doc values are enabled by default for primitive types such as long so we don't index the version field -->
|
||||
<!-- docValues are enabled by default for long type so we don't need to index the version field -->
|
||||
<field name="_version_" type="long" indexed="false" stored="false"/>
|
||||
<field name="_root_" type="string" indexed="true" stored="false" docValues="false" />
|
||||
<field name="_text_" type="text_general" indexed="true" stored="false" multiValued="true"/>
|
||||
|
@ -128,20 +119,16 @@
|
|||
<!-- <copyField source="*" dest="_text_"/> -->
|
||||
|
||||
<!-- Dynamic field definitions allow using convention over configuration
|
||||
for fields via the specification of patterns to match field names.
|
||||
for fields via the specification of patterns to match field names.
|
||||
EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i)
|
||||
RESTRICTION: the glob-like pattern in the name attribute must have
|
||||
a "*" only at the start or the end. -->
|
||||
RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end. -->
|
||||
|
||||
<dynamicField name="*_i" type="int" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_is" type="ints" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_s" type="string" indexed="true" stored="true" />
|
||||
<dynamicField name="*_s_ns" type="string" indexed="true" stored="false" />
|
||||
<dynamicField name="*_ss" type="strings" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_l" type="long" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_l_ns" type="long" indexed="true" stored="false"/>
|
||||
<dynamicField name="*_ls" type="longs" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_t" type="text_general" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_txt" type="text_general" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_bs" type="booleans" indexed="true" stored="true"/>
|
||||
|
@ -150,6 +137,9 @@
|
|||
<dynamicField name="*_d" type="double" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_ds" type="doubles" indexed="true" stored="true"/>
|
||||
|
||||
<!-- Type used for data-driven schema, to add a string copy for each text field -->
|
||||
<dynamicField name="*_str" type="strings" stored="false" docValues="true" indexed="false" />
|
||||
|
||||
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
|
||||
<dynamicField name="*_p" type="location" indexed="true" stored="true"/>
|
||||
|
@@ -184,22 +174,8 @@
<dynamicField name="*_dpi" type="delimited_payloads_int" indexed="true" stored="true"/>
<dynamicField name="*_dps" type="delimited_payloads_string" indexed="true" stored="true"/>

<dynamicField name="*_c" type="currency" indexed="true" stored="true"/>

<dynamicField name="ignored_*" type="ignored" multiValued="true"/>
<dynamicField name="attr_*" type="text_general" indexed="true" stored="true" multiValued="true"/>

<dynamicField name="random_*" type="random" />

<!-- uncomment the following to ignore any fields that don't already match an existing
field name or dynamic field, rather than reporting them as an error.
alternately, change the type="ignored" to some other type e.g. "text" if you want
unknown fields indexed and/or stored by default

NB: use of "*" dynamic fields will disable field type guessing and adding
unknown fields to the schema. -->
<!--dynamicField name="*" type="ignored" multiValued="true" /-->

<!-- Field to use to determine and enforce document uniqueness.
Unless this field is marked with required="false", it will be a required field
-->

@@ -220,18 +196,6 @@
standard package such as org.apache.solr.analysis
-->

<!-- The StrField type is not analyzed, but indexed/stored verbatim.
It supports doc values but in that case the field needs to be
single-valued and either required or have a default value.
-->
<fieldType name="string" class="solr.StrField" sortMissingLast="true" docValues="true" />
<fieldType name="strings" class="solr.StrField" sortMissingLast="true" multiValued="true" docValues="true" />

<!-- boolean type: "true" or "false" -->
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>

<fieldType name="booleans" class="solr.BoolField" sortMissingLast="true" multiValued="true"/>

<!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
currently supported on types that are sorted internally as strings
and on numeric types.

@@ -246,7 +210,18 @@
- If sortMissingLast="false" and sortMissingFirst="false" (the default),
then default lucene sorting will be used which places docs without the
field first in an ascending sort and last in a descending sort.
-->
-->
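To make the sortMissingLast/sortMissingFirst semantics concrete: with both flags false (the default restated above), documents lacking the sort field come first in an ascending sort and last in a descending one. A hedged SolrJ fragment subject to these rules (the field name price_i is a made-up example):

import org.apache.solr.client.solrj.SolrQuery;

SolrQuery q = new SolrQuery("*:*");
// docs missing price_i sort first here; with sortMissingLast="true" on the
// field's type they would sort last in both directions
q.setSort("price_i", SolrQuery.ORDER.asc);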

<!-- The StrField type is not analyzed, but indexed/stored verbatim.
It supports doc values but in that case the field needs to be
single-valued and either required or have a default value.
-->
<fieldType name="string" class="solr.StrField" sortMissingLast="true" docValues="true" />
<fieldType name="strings" class="solr.StrField" sortMissingLast="true" multiValued="true" docValues="true" />

<!-- boolean type: "true" or "false" -->
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
<fieldType name="booleans" class="solr.BoolField" sortMissingLast="true" multiValued="true"/>

<!--
Numeric field types that index values using KD-trees. *Point fields are faster and more efficient than Trie* fields both, at

@@ -332,19 +307,6 @@
<!--Binary data type. The data should be sent/retrieved in as Base64 encoded Strings -->
<fieldType name="binary" class="solr.BinaryField"/>

<!-- The "RandomSortField" is not used to store or search any
data. You can declare fields of this type it in your schema
to generate pseudo-random orderings of your docs for sorting
or function purposes. The ordering is generated based on the field
name and the version of the index. As long as the index version
remains unchanged, and the same field name is reused,
the ordering of the docs will be consistent.
If you want different psuedo-random orderings of documents,
for the same version of the index, use a dynamicField and
change the field name in the request.
-->
<fieldType name="random" class="solr.RandomSortField" indexed="true" />
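Because random_* is a dynamicField of this type, any field name with that prefix works and the name acts as the seed. A small SolrJ sketch of the comment's advice:

import org.apache.solr.client.solrj.SolrQuery;

SolrQuery q1 = new SolrQuery("*:*");
q1.setSort("random_1234", SolrQuery.ORDER.asc); // stable ordering while the index version is unchanged
SolrQuery q2 = new SolrQuery("*:*");
q2.setSort("random_5678", SolrQuery.ORDER.asc); // a different field name yields a different pseudo-random ordering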

<!-- solr.TextField allows the specification of custom text analyzers
specified as a tokenizer and a list of token filters. Different
analyzers may be specified for indexing and querying.

@@ -354,7 +316,7 @@
matching across fields.

For more info on customizing your analyzer chain, please see
http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters
http://lucene.apache.org/solr/guide/understanding-analyzers-tokenizers-and-filters.html#understanding-analyzers-tokenizers-and-filters
-->

<!-- One can also specify an existing Analyzer class that has a

@@ -397,11 +359,9 @@
</analyzer>
</fieldType>

<!-- A text field with defaults appropriate for English: it
tokenizes with StandardTokenizer, removes English stop words
(lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
finally applies Porter's stemming. The query time analyzer
also applies synonyms from synonyms.txt. -->
<!-- A text field with defaults appropriate for English: it tokenizes with StandardTokenizer,
removes English stop words (lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
finally applies Porter's stemming. The query time analyzer also applies synonyms from synonyms.txt. -->
<dynamicField name="*_txt_en" type="text_en" indexed="true" stored="true"/>
<fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">

@@ -579,10 +539,6 @@
</analyzer>
</fieldType>

<!-- since fields of this type are by default not stored or indexed,
any data added to them will be ignored outright. -->
<fieldType name="ignored" stored="false" indexed="false" docValues="false" multiValued="true" class="solr.StrField" />

<!-- This point type indexes the coordinates as separate fields (subFields)
If subFieldType is defined, it references a type, and a dynamic field
definition is created matching *___<typename>. Alternately, if

@@ -600,9 +556,9 @@
<!-- A specialized field for geospatial search filters and distance sorting. -->
<fieldType name="location" class="solr.LatLonPointSpatialField" docValues="true"/>

<!-- An alternative geospatial field type new to Solr 4. It supports multiValued and polygon shapes.
For more information about this and other Spatial fields new to Solr 4, see:
http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
<!-- A geospatial field type that supports multiValued and polygon shapes.
For more information about this and other spatial fields see:
http://lucene.apache.org/solr/guide/spatial-search.html
-->
<fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />

@@ -627,26 +583,6 @@
</analyzer>
</fieldType>

<!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
Parameters:
amountLongSuffix: Required. Refers to a dynamic field for the raw amount sub-field.
The dynamic field must have a field type that extends LongValueFieldType.
Note: If you expect to use Atomic Updates, this dynamic field may not be stored.
codeStrSuffix: Required. Refers to a dynamic field for the currency code sub-field.
The dynamic field must have a field type that extends StrField.
Note: If you expect to use Atomic Updates, this dynamic field may not be stored.
defaultCurrency: Specifies the default currency if none specified. Defaults to "USD"
providerClass: Lets you plug in other exchange provider backend:
solr.FileExchangeRateProvider is the default and takes one parameter:
currencyConfig: name of an xml file holding exchange rates
solr.OpenExchangeRatesOrgProvider uses rates from openexchangerates.org:
ratesFileLocation: URL or path to rates JSON file (default latest.json on the web)
refreshInterval: Number of minutes between each rates fetch (default: 1440, min: 60)
-->
<fieldType name="currency" class="solr.CurrencyFieldType" amountLongSuffix="_l_ns" codeStrSuffix="_s_ns"
defaultCurrency="USD" currencyConfig="currency.xml" />
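CurrencyFieldType values are "amount,code" strings; per the suffix parameters above, the raw amount and currency code land in companion dynamic fields ending in _l_ns and _s_ns. A hedged SolrJ sketch of indexing against the *_c dynamicField (field and id names are made up for illustration):

import org.apache.solr.common.SolrInputDocument;

SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", "p1");
doc.addField("price_c", "10.00,USD"); // "*_c" matches the currency dynamicField declared earlier
// a later range filter could then look like: fq=price_c:[5.00,USD TO 15.00,USD]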

<!-- some examples for different languages (generally ordered by ISO code) -->

<!-- Arabic -->

@@ -936,8 +872,6 @@
See lang/userdict_ja.txt for a sample user dictionary file.

Punctuation characters are discarded by default. Use discardPunctuation="false" to keep them.

See http://wiki.apache.org/solr/JapaneseLanguageSupport for more on Japanese language support.
-->
<tokenizer class="solr.JapaneseTokenizerFactory" mode="search"/>
<!--<tokenizer class="solr.JapaneseTokenizerFactory" mode="search" userDictionary="lang/userdict_ja.txt"/>-->

@@ -1065,7 +999,7 @@
<!-- Similarity is the scoring routine for each document vs. a query.
A custom Similarity or SimilarityFactory may be specified here, but
the default is fine for most applications.
For more info: http://wiki.apache.org/solr/SchemaXml#Similarity
For more info: http://lucene.apache.org/solr/guide/other-schema-elements.html#OtherSchemaElements-Similarity
-->
<!--
<similarity class="com.example.solr.CustomSimilarityFactory">

@@ -1138,7 +1138,8 @@
Field type guessing update processors that will
attempt to parse string-typed field values as Booleans, Longs,
Doubles, or Dates, and then add schema fields with the guessed
field types.
field types. Text content will be indexed as "text_general" as
well as a copy to a plain string version in *_str.

These require that the schema is both managed and mutable, by
declaring schemaFactory as ManagedIndexSchemaFactory, with

@@ -1177,7 +1178,16 @@
</arr>
</updateProcessor>
<updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
  <str name="defaultFieldType">strings</str>
  <lst name="typeMapping">
    <str name="valueClass">java.lang.String</str>
    <str name="fieldType">text_general</str>
    <lst name="copyField">
      <str name="dest">*_str</str>
      <int name="maxChars">256</int>
    </lst>
    <!-- Use as default mapping instead of defaultFieldType -->
    <bool name="default">true</bool>
  </lst>
  <lst name="typeMapping">
    <str name="valueClass">java.lang.Boolean</str>
    <str name="fieldType">booleans</str>
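Read together, the typeMapping above says: a String value maps to text_general, and a copyField to *_str (capped at 256 characters) is created alongside it. A hedged sketch of the effect through SolrJ, assuming an existing SolrClient and a hypothetical collection name:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrInputDocument;

void indexWithGuessing(SolrClient client) throws Exception {
  SolrInputDocument doc = new SolrInputDocument();
  doc.addField("id", "1");
  doc.addField("subject", "Some free-form text"); // not declared anywhere in the schema
  client.add("gettingstarted", doc);              // hypothetical collection name
  client.commit("gettingstarted");
  // afterwards the schema holds subject (text_general) plus a
  // copyField subject -> subject_str truncated at 256 chars
}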
@@ -38,7 +38,9 @@
<boolTofilterOptimizer enabled="true" cacheSize="32" threshold=".05"/>
</query>

<requestDispatcher/>
<requestDispatcher>
  <requestParsers enableStreamBody="true" />
</requestDispatcher>

<requestHandler name="/select" class="solr.SearchHandler" />
<requestHandler name="/crazy_custom_qt" class="solr.SearchHandler">
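With stream.body now disabled unless enableStreamBody is set, deployments that rely on it can flip the switch at runtime through the Config API; the test configs above simply bake it into solrconfig.xml. A sketch using only the JDK, mirroring the set-property payload used by the tests later in this commit (the URL and collection name "collection1" are assumptions):

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class EnableStreamBody {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8983/solr/collection1/config");
    HttpURLConnection con = (HttpURLConnection) url.openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("Content-Type", "application/json");
    con.setDoOutput(true);
    String payload = "{ \"set-property\": { \"requestDispatcher.requestParsers.enableStreamBody\": true } }";
    try (OutputStream os = con.getOutputStream()) {
      os.write(payload.getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("Config API responded: " + con.getResponseCode());
  }
}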
@@ -102,7 +102,11 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase

@Override
protected CloudSolrClient createCloudClient(String defaultCollection) {
CloudSolrClient client = getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, clientSoTimeout);
return this.createCloudClient(defaultCollection, this.clientSoTimeout);
}

protected CloudSolrClient createCloudClient(String defaultCollection, int socketTimeout) {
CloudSolrClient client = getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, socketTimeout);
client.setParallelUpdates(random().nextBoolean());
if (defaultCollection != null) client.setDefaultCollection(defaultCollection);
return client;

@@ -250,7 +254,7 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase
restartZk(1000 * (5 + random().nextInt(4)));
}

try (CloudSolrClient client = createCloudClient("collection1")) {
try (CloudSolrClient client = createCloudClient("collection1", 30000)) {
createCollection(null, "testcollection",
1, 1, 1, client, null, "conf1");

@@ -125,7 +125,11 @@ public class ChaosMonkeyNothingIsSafeWithPullReplicasTest extends AbstractFullDistribZkTestBase

@Override
protected CloudSolrClient createCloudClient(String defaultCollection) {
CloudSolrClient client = getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, clientSoTimeout);
return this.createCloudClient(defaultCollection, this.clientSoTimeout);
}

protected CloudSolrClient createCloudClient(String defaultCollection, int socketTimeout) {
CloudSolrClient client = getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, socketTimeout);
client.setParallelUpdates(random().nextBoolean());
if (defaultCollection != null) client.setDefaultCollection(defaultCollection);
return client;

@@ -292,7 +296,7 @@ public class ChaosMonkeyNothingIsSafeWithPullReplicasTest extends AbstractFullDistribZkTestBase
restartZk(1000 * (5 + random().nextInt(4)));
}

try (CloudSolrClient client = createCloudClient("collection1")) {
try (CloudSolrClient client = createCloudClient("collection1", 30000)) {
// We don't really know how many live nodes we have at this point, so "maxShardsPerNode" needs to be > 1
createCollection(null, "testcollection",
1, 1, 10, client, null, "conf1");

@@ -38,6 +38,7 @@ public class MoveReplicaHDFSTest extends MoveReplicaTest {

@BeforeClass
public static void setupClass() throws Exception {
System.setProperty("solr.hdfs.blockcache.enabled", "false");
dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());

ZkConfigManager configManager = new ZkConfigManager(zkClient());

@@ -23,6 +23,7 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

@@ -39,6 +40,7 @@ import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;

@@ -243,7 +245,6 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {
}
}

/**
* Test some small, hand crafted, but non-trivial queries that are
* easier to trace/debug then a pure random monstrosity.

@@ -251,6 +252,30 @@
*/
public void testBespoke() throws Exception {

{ // sanity check our test methods can handle a query matching no docs
Map<String,TermFacet> facets = new LinkedHashMap<>();
TermFacet top = new TermFacet(strfield(9), new JoinDomain(strfield(5), strfield(9), strfield(9)+":[* TO *]"));
top.subFacets.put("sub", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null)));
facets.put("empty_top", top);
assertFacetCountsAreCorrect(facets, strfield(7) + ":bogus");
}

{ // sanity check our test methods can handle a query where a facet filter prevents any doc from having terms
Map<String,TermFacet> facets = new LinkedHashMap<>();
TermFacet top = new TermFacet(strfield(9), new JoinDomain(null, null, "-*:*"));
top.subFacets.put("sub", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null)));
facets.put("filtered_top", top);
assertFacetCountsAreCorrect(facets, "*:*");
}

{ // sanity check our test methods can handle a query where a facet filter prevents any doc from having sub-terms
Map<String,TermFacet> facets = new LinkedHashMap<>();
TermFacet top = new TermFacet(strfield(9), new JoinDomain(strfield(8), strfield(8), null));
top.subFacets.put("sub", new TermFacet(strfield(11), new JoinDomain(null, null, "-*:*")));
facets.put("filtered_top", top);
assertFacetCountsAreCorrect(facets, "*:*");
}

{ // strings
Map<String,TermFacet> facets = new LinkedHashMap<>();
TermFacet top = new TermFacet(strfield(9), new JoinDomain(strfield(5), strfield(9), strfield(9)+":[* TO *]"));

@@ -274,11 +299,8 @@
assertFacetCountsAreCorrect(facets, "("+strfield(7)+":6 OR "+strfield(9)+":6 OR "+strfield(6)+":19 OR "+strfield(0)+":11)");

}

}

public void testRandom() throws Exception {

final int numIters = atLeast(3);

@@ -320,21 +342,31 @@
final SolrParams initParams = SolrParams.wrapAppended(facetParams, baseParams);

log.info("Doing full run: {}", initParams);

NamedList topResponse = null;

QueryResponse rsp = null;
// JSON Facets not (currently) available from QueryResponse...
NamedList topNamedList = null;
try {
topResponse = getRandClient(random()).request(new QueryRequest(initParams));
assertNotNull(initParams + " is null response?", topResponse);
rsp = (new QueryRequest(initParams)).process(getRandClient(random()));
assertNotNull(initParams + " is null rsp?", rsp);
topNamedList = rsp.getResponse();
assertNotNull(initParams + " is null topNamedList?", topNamedList);
} catch (Exception e) {
throw new RuntimeException("init query failed: " + initParams + ": " +
e.getMessage(), e);
}
try {
final NamedList facetResponse = (NamedList) topResponse.get("facets");
final NamedList facetResponse = (NamedList) topNamedList.get("facets");
assertNotNull("null facet results?", facetResponse);
assertEquals("numFound mismatch with top count?",
rsp.getResults().getNumFound(), ((Number)facetResponse.get("count")).longValue());
if (0 == rsp.getResults().getNumFound()) {
// when the query matches nothing, we should expect no top level facets
expected = Collections.emptyMap();
}
assertFacetCountsAreCorrect(expected, baseParams, facetResponse);
} catch (AssertionError e) {
throw new AssertionError(initParams + " ===> " + topResponse + " --> " + e.getMessage(), e);
throw new AssertionError(initParams + " ===> " + topNamedList + " --> " + e.getMessage(), e);
} finally {
log.info("Ending full run");
}
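The switch from SolrClient.request() to QueryRequest.process() is what makes both the document results and the raw facets available: process() returns a QueryResponse, and since JSON Facets have no typed accessor there, the test digs them out of getResponse(). The same pattern outside a test, sketched assuming an existing SolrClient and SolrParams:

QueryResponse rsp = new QueryRequest(params).process(client);
NamedList<Object> top = rsp.getResponse();
NamedList<?> facets = (NamedList<?>) top.get("facets"); // untyped: no dedicated JSON Facet accessor
long facetCount = ((Number) facets.get("count")).longValue(); // matches rsp.getResults().getNumFound()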

@@ -373,7 +405,7 @@
}
}
}
assertTrue("facets have unexpeted keys left over: " + actualFacetResponse,
assertTrue("facets have unexpected keys left over: " + actualFacetResponse,
// should alwasy be a count, maybe a 'val' if we're a subfacet
(actualFacetResponse.size() == expected.size() + 1) ||
(actualFacetResponse.size() == expected.size() + 2));

@@ -37,6 +37,7 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Arrays;
import java.util.Collection;

@@ -47,10 +48,10 @@ import java.util.LinkedList;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.nio.file.SimpleFileVisitor;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import com.google.common.collect.ImmutableMap;
import org.apache.commons.io.FileUtils;
import org.apache.http.HttpEntity;
import org.apache.http.client.HttpClient;

@@ -61,27 +62,22 @@ import org.apache.http.util.EntityUtils;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.ConfigSetAdminRequest;
import org.apache.solr.client.solrj.request.ConfigSetAdminRequest.Create;
import org.apache.solr.client.solrj.request.ConfigSetAdminRequest.Delete;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.client.solrj.response.ConfigSetAdminResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkConfigManager;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CollectionParams.CollectionAction;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ConfigSetParams;
import org.apache.solr.common.params.ConfigSetParams.ConfigSetAction;
import org.apache.solr.common.params.ModifiableSolrParams;

@@ -104,8 +100,6 @@ import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.ImmutableMap;

/**
* Simple ConfigSets API tests on user errors and simple success cases.
*/

@@ -331,7 +325,6 @@ public class TestConfigSetsAPI extends SolrTestCaseJ4 {
uploadConfigSet("regular", suffix, null, null);
// try to create a collection with the uploaded configset
createCollection("newcollection", "regular" + suffix, 1, 1, solrCluster.getSolrClient());
xsltRequest("newcollection");
}

@Test

@@ -506,35 +499,6 @@
zout.close();
}
}

private void xsltRequest(String collection) throws SolrServerException, IOException {
String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString();
try (HttpSolrClient client = getHttpSolrClient(baseUrl + "/" + collection)) {
String xml =
"<random>" +
" <document>" +
" <node name=\"id\" value=\"12345\"/>" +
" <node name=\"name\" value=\"kitten\"/>" +
" <node name=\"text\" enhance=\"3\" value=\"some other day\"/>" +
" <node name=\"title\" enhance=\"4\" value=\"A story\"/>" +
" <node name=\"timestamp\" enhance=\"5\" value=\"2011-07-01T10:31:57.140Z\"/>" +
" </document>" +
"</random>";

SolrQuery query = new SolrQuery();
query.setQuery( "*:*" );//for anything
query.add("qt","/update");
query.add(CommonParams.TR, "xsl-update-handler-test.xsl");
query.add("stream.body", xml);
query.add("commit", "true");
try {
client.query(query);
fail("This should've returned a 401.");
} catch (SolrException ex) {
assertEquals(ErrorCode.UNAUTHORIZED.code, ex.code());
}
}
}

public void scriptRequest(String collection) throws SolrServerException, IOException {
SolrClient client = solrCluster.getSolrClient();

@@ -53,6 +53,7 @@ public class TestConfigOverlay extends LuceneTestCase {
assertTrue(isEditableProp("requestDispatcher.requestParsers.multipartUploadLimitInKB", false, null));
assertTrue(isEditableProp("requestDispatcher.requestParsers.formdataUploadLimitInKB", false, null));
assertTrue(isEditableProp("requestDispatcher.requestParsers.enableRemoteStreaming", false, null));
assertTrue(isEditableProp("requestDispatcher.requestParsers.enableStreamBody", false, null));
assertTrue(isEditableProp("requestDispatcher.requestParsers.addHttpRequestToContext", false, null));

assertTrue(isEditableProp("requestDispatcher.handleSelect", false, null));

@@ -182,7 +182,8 @@ public class TestSolrConfigHandler extends RestTestBase {
log.info("going to send config command. path {} , payload: {}", uri, payload);
String response = harness.post(uri, json);
Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
assertNull(response, map.get("errors"));
assertNull(response, map.get("errorMessages"));
assertNull(response, map.get("errors")); // Will this ever be returned?
}

@@ -20,15 +20,14 @@ import org.apache.commons.io.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrJettyTestBase;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.SolrInputDocument;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

@@ -120,30 +119,7 @@ public class TestRemoteStreaming extends SolrJettyTestBase {
assertSame(ErrorCode.BAD_REQUEST, ErrorCode.getErrorCode(se.code()));
}
}

/** SOLR-3161
* Technically stream.body isn't remote streaming, but there wasn't a better place for this test method. */
@Test(expected = SolrException.class)
public void testQtUpdateFails() throws SolrServerException, IOException {
SolrQuery query = new SolrQuery();
query.setQuery( "*:*" );//for anything
query.add("echoHandler","true");
//sneaky sneaky
query.add("qt","/update");
query.add("stream.body","<delete><query>*:*</query></delete>");

QueryRequest queryRequest = new QueryRequest(query) {
@Override
public String getPath() { //don't let superclass substitute qt for the path
return "/select";
}
};
QueryResponse rsp = queryRequest.process(getSolrClient());
//!! should *fail* above for security purposes
String handler = (String) rsp.getHeader().get("handler");
System.out.println(handler);
}

/** Compose a url that if you get it, it will delete all the data. */
private String makeDeleteAllUrl() throws UnsupportedEncodingException {
HttpSolrClient client = (HttpSolrClient) getSolrClient();

@@ -0,0 +1,138 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.request;

import java.io.File;
import java.lang.invoke.MethodHandles;
import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.commons.io.FileUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.util.RestTestBase;
import org.apache.solr.util.RestTestHarness;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.restlet.ext.servlet.ServerServlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.core.TestSolrConfigHandler.runConfigCommand;

public class TestStreamBody extends RestTestBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

private static final String collection = "collection1";
private static final String confDir = collection + "/conf";

@Before
public void before() throws Exception {
File tmpSolrHome = createTempDir().toFile();
FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());

final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>();
final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'

System.setProperty("managed.schema.mutable", "true");
System.setProperty("enable.update.log", "false");

createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-minimal.xml", "schema-rest.xml",
"/solr", true, extraServlets);
if (random().nextBoolean()) {
log.info("These tests are run with V2 API");
restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME);
}
}

@After
public void after() throws Exception {
if (jetty != null) {
jetty.stop();
jetty = null;
}
if (client != null) {
client.close();
client = null;
}
if (restTestHarness != null) {
restTestHarness.close();
restTestHarness = null;
}
}

// SOLR-3161
@Test
public void testQtUpdateFails() throws Exception {
enableStreamBody(true);
SolrQuery query = new SolrQuery();
query.setQuery( "*:*" );//for anything
query.add("echoHandler","true");
//sneaky sneaky
query.add("qt","/update");
query.add(CommonParams.STREAM_BODY,"<delete><query>*:*</query></delete>");

QueryRequest queryRequest = new QueryRequest(query) {
@Override
public String getPath() { //don't let superclass substitute qt for the path
return "/select";
}
};
try {
queryRequest.process(getSolrClient());
fail();
} catch (SolrException se) {
assertTrue(se.getMessage(), se.getMessage().contains("Bad contentType for search handler :text/xml"));
}
}

// Tests that stream.body is disabled by default, and can be edited through Config API
@Test
public void testStreamBodyDefaultAndConfigApi() throws Exception {
SolrQuery query = new SolrQuery();
query.add(CommonParams.STREAM_BODY,"<delete><query>*:*</query></delete>");
query.add("commit","true");

QueryRequest queryRequest = new QueryRequest(query) {
@Override
public String getPath() { //don't let superclass substitute qt for the path
return "/update";
}
};
try {
queryRequest.process(getSolrClient());
fail();
} catch (SolrException se) {
assertTrue(se.getMessage(), se.getMessage().contains("Stream Body is disabled"));
}
enableStreamBody(true);
queryRequest.process(getSolrClient());
}

// Enables/disables stream.body through Config API
private void enableStreamBody(boolean enable) throws Exception {
RestTestHarness harness = restTestHarness;
String payload = "{ 'set-property' : { 'requestDispatcher.requestParsers.enableStreamBody':" + enable + "} }";
runConfigCommand(harness, "/config?wt=json", payload);
}
}

@@ -39,8 +39,6 @@ import org.apache.solr.common.cloud.ZkStateReader;
import org.junit.BeforeClass;
import org.junit.Test;

import com.carrotsearch.randomizedtesting.annotations.Repeat;

public class TestCloudNestedDocsSort extends SolrCloudTestCase {

private static ArrayList<String> vals = new ArrayList<>();

@@ -78,7 +76,8 @@ public class TestCloudNestedDocsSort extends SolrCloudTestCase {

{
List<SolrInputDocument> docs = new ArrayList<>();
int parentsNum = 10+random().nextInt(20);
int parentsNum = 10 +random().nextInt(20);
for (int i=0; i<parentsNum || (matchingParent==null ||matchingChild==null); i++) {
final String parentTieVal = "" + random().nextInt(5);
final String parentId = ""+random().nextInt();

@@ -93,9 +92,11 @@ public class TestCloudNestedDocsSort extends SolrCloudTestCase {
SolrInputDocument child = new SolrInputDocument("id", ""+random().nextInt(),
"type_s", "child",
"parentTie_s1", parentTieVal,
"val_s1", Integer.toString(random().nextInt(1000), Character.MAX_RADIX)+"" ,
"parent_id_s1", parentId);
child.addField("parentFilter_s", parentFilter);
if (usually()) {
child.addField( "val_s1", Integer.toString(random().nextInt(1000), Character.MAX_RADIX)+"" );
}
final List<String> chVals = addValsField(child, "childFilter_s");
parent.addChildDocument(child );

@@ -115,7 +116,7 @@ public class TestCloudNestedDocsSort extends SolrCloudTestCase {
}
}

@Test @Repeat(iterations=2)
@Test
public void test() throws SolrServerException, IOException {
final boolean asc = random().nextBoolean();
final String dir = asc ? "asc": "desc";

@@ -134,10 +135,17 @@ public class TestCloudNestedDocsSort extends SolrCloudTestCase {

final QueryResponse children = client.query(q);

final SolrQuery bjq = new SolrQuery("q", "{!parent which=type_s:parent}(+type_s:child^=0 "+parentFilter+" "+
childFilter+")",
"sort", sortClause.replace("val_s1 ", "childfield(val_s1)"),
"rows", ""+maxDocs, "fl", fl);
final SolrQuery bjq = random().nextBoolean() ?
new SolrQuery(// top level bjq
"q", "{!parent which=type_s:parent}(+type_s:child^=0 "+parentFilter+" "+ childFilter+")",
"sort", sortClause.replace("val_s1", "childfield(val_s1)"),
"rows", ""+maxDocs, "fl", fl)
:
new SolrQuery(// same bjq as a subordinate clause
"q", "+type_s:parent "+parentFilter+" +{!v=$parentcaluse}",
"parentcaluse","{!parent which=type_s:parent v='"+(childFilter).replace("+", "")+"'}",
"sort", sortClause.replace("val_s1", "childfield(val_s1,$parentcaluse)"),
"rows", ""+maxDocs, "fl", fl);

final QueryResponse parents = client.query(bjq);

@@ -153,11 +161,11 @@ public class TestCloudNestedDocsSort extends SolrCloudTestCase {
final String actParentId = ""+ parent.get("id");
if (!actParentId.equals(parentId)) {
final String chDump = children.toString().replace("SolrDocument","\nSolrDocument");
System.out.println("\n\n"+chDump.substring(0,5000)+"\n\n");
System.out.println("\n\n"+chDump+"\n\n");
System.out.println("\n\n"+parents.toString().replace("SolrDocument","\nSolrDocument")
+"\n\n");
}
assertEquals(actParentId, parentId);
assertEquals(""+child+"\n"+parent,actParentId, parentId);
}
}
}

@@ -85,6 +85,11 @@ public class TestNestedDocsSort extends SolrTestCaseJ4 {
parse("childfield(name_s1,$q)");
}

@Test
public void testOmitSpaceinFrontOfOrd(){
parseAssertEq("childfield(name_s1,$q)asc", "childfield(name_s1,$q) asc");
}

private void parseAssertEq(String sortField, String sortField2) {
assertEq(parse(sortField), parse(sortField2));
}

@@ -20,6 +20,7 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.JSONTestUtil;
import org.apache.solr.SolrTestCaseHS;

import org.apache.solr.common.params.CommonParams;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

@@ -181,33 +182,33 @@ public class TestJsonRequest extends SolrTestCaseHS {
//
// with body
//
client.testJQ(params("stream.body", "{query:'cat_s:A'}", "stream.contentType", "application/json")
client.testJQ(params(CommonParams.STREAM_BODY, "{query:'cat_s:A'}", "stream.contentType", "application/json")
, "response/numFound==2"
);

// test body in conjunction with query params
client.testJQ(params("stream.body", "{query:'cat_s:A'}", "stream.contentType", "application/json", "json.filter", "'where_s:NY'")
client.testJQ(params(CommonParams.STREAM_BODY, "{query:'cat_s:A'}", "stream.contentType", "application/json", "json.filter", "'where_s:NY'")
, "response/numFound==1"
);

// test that json body in params come "after" (will overwrite)
client.testJQ(params("stream.body", "{query:'*:*', filter:'where_s:NY'}", "stream.contentType", "application/json", "json","{query:'cat_s:A'}")
client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', filter:'where_s:NY'}", "stream.contentType", "application/json", "json","{query:'cat_s:A'}")
, "response/numFound==1"
);

// test that json.x params come after body
client.testJQ(params("stream.body", "{query:'*:*', filter:'where_s:NY'}", "stream.contentType", "application/json", "json.query","'cat_s:A'")
client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', filter:'where_s:NY'}", "stream.contentType", "application/json", "json.query","'cat_s:A'")
, "response/numFound==1"
);

// test facet with json body
client.testJQ(params("stream.body", "{query:'*:*', facet:{x:'unique(where_s)'}}", "stream.contentType", "application/json")
client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', facet:{x:'unique(where_s)'}}", "stream.contentType", "application/json")
, "facets=={count:6,x:2}"
);

// test facet with json body, insert additional facets via query parameter
client.testJQ(params("stream.body", "{query:'*:*', facet:{x:'unique(where_s)'}}", "stream.contentType", "application/json", "json.facet.y","{terms:{field:where_s}}", "json.facet.z","'unique(where_s)'")
client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', facet:{x:'unique(where_s)'}}", "stream.contentType", "application/json", "json.facet.y","{terms:{field:where_s}}", "json.facet.z","'unique(where_s)'")
, "facets=={count:6,x:2, y:{buckets:[{val:NJ,count:3},{val:NY,count:2}]}, z:2}"
);

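These assertions encode the merge order of the JSON request API: the stream body is read first, then json/json.* request parameters are appended, so the parameters win on conflict and can also add facets on top of the body. The equivalent call through plain SolrJ params, as a sketch (client construction omitted; the client variable is assumed to be a SolrClient):

import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;

ModifiableSolrParams p = new ModifiableSolrParams();
p.add(CommonParams.STREAM_BODY, "{query:'*:*', facet:{x:'unique(where_s)'}}");
p.add("stream.contentType", "application/json");
p.add("json.facet.y", "{terms:{field:where_s}}"); // appended after the body, adds a second facet
QueryResponse rsp = new QueryRequest(p).process(client);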
@@ -17,10 +17,12 @@
package org.apache.solr.update.processor;

import java.io.File;
import java.util.Collections;
import java.util.Date;

import org.apache.commons.io.FileUtils;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.schema.IndexSchema;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;

@@ -115,6 +117,29 @@ public class AddSchemaFieldsUpdateProcessorFactoryTest extends UpdateProcessorTe
schema = h.getCore().getLatestSchema();
assertNotNull(schema.getFieldOrNull(fieldName));
assertEquals("text", schema.getFieldType(fieldName).getTypeName());
assertEquals(0, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size());
assertU(commit());
assertQ(req("id:4")
,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue1.toString() + "']"
,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue2.toString() + "']"
,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue3.toString() + "']"
);
}

public void testSingleFieldDefaultTypeMappingRoundTrip() throws Exception {
IndexSchema schema = h.getCore().getLatestSchema();
final String fieldName = "newfield4";
assertNull(schema.getFieldOrNull(fieldName));
Float fieldValue1 = -13258.0f;
Double fieldValue2 = 8.4828800808E10;
String fieldValue3 = "blah blah";
SolrInputDocument d = processAdd
("add-fields-default-mapping", doc(f("id", "4"), f(fieldName, fieldValue1, fieldValue2, fieldValue3)));
assertNotNull(d);
schema = h.getCore().getLatestSchema();
assertNotNull(schema.getFieldOrNull(fieldName));
assertEquals("text", schema.getFieldType(fieldName).getTypeName());
assertEquals(1, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size());
assertU(commit());
assertQ(req("id:4")
,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue1.toString() + "']"

@@ -209,6 +234,60 @@
,"//arr[@name='" + fieldName3 + "']/str[.='" + field3String2 + "']"
,"//arr[@name='" + fieldName4 + "']/date[.='" + field4Value1String + "']");
}

public void testStringWithCopyField() throws Exception {
IndexSchema schema = h.getCore().getLatestSchema();
final String fieldName = "stringField";
final String strFieldName = fieldName+"_str";
assertNull(schema.getFieldOrNull(fieldName));
String content = "This is a text that should be copied to a string field but not be cutoff";
SolrInputDocument d = processAdd("add-fields", doc(f("id", "1"), f(fieldName, content)));
assertNotNull(d);
schema = h.getCore().getLatestSchema();
assertNotNull(schema.getFieldOrNull(fieldName));
assertNotNull(schema.getFieldOrNull(strFieldName));
assertEquals("text", schema.getFieldType(fieldName).getTypeName());
assertEquals(1, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(strFieldName)).size());
}

public void testStringWithCopyFieldAndMaxChars() throws Exception {
IndexSchema schema = h.getCore().getLatestSchema();
final String fieldName = "stringField";
final String strFieldName = fieldName+"_str";
assertNull(schema.getFieldOrNull(fieldName));
String content = "This is a text that should be copied to a string field and cutoff at 10 characters";
SolrInputDocument d = processAdd("add-fields-maxchars", doc(f("id", "1"), f(fieldName, content)));
assertNotNull(d);
System.out.println("Document is "+d);
schema = h.getCore().getLatestSchema();
assertNotNull(schema.getFieldOrNull(fieldName));
assertNotNull(schema.getFieldOrNull(strFieldName));
assertEquals("text", schema.getFieldType(fieldName).getTypeName());
// We have three copyFields, one with maxChars 10 and two with maxChars 20
assertEquals(3, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size());
assertEquals("The configured maxChars cutoff does not exist on the copyField", 10,
schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(strFieldName))
.get(0).get("maxChars"));
assertEquals("The configured maxChars cutoff does not exist on the copyField", 20,
schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(fieldName+"_t"))
.get(0).get("maxChars"));
assertEquals("The configured maxChars cutoff does not exist on the copyField", 20,
schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(fieldName+"2_t"))
.get(0).get("maxChars"));
}

public void testCopyFieldByIndexing() throws Exception {
String content = "This is a text that should be copied to a string field and cutoff at 10 characters";
SolrInputDocument d = processAdd("add-fields-default-mapping", doc(f("id", "1"), f("mynewfield", content)));
assertU(commit());

ModifiableSolrParams params = new ModifiableSolrParams();
params.add("q", "*:*").add("facet", "true").add("facet.field", "mynewfield_str");
assertQ(req(params)
, "*[count(//doc)=1]"
,"//lst[@name='mynewfield_str']/int[@name='This is a '][.='1']"
);
}

@After
private void deleteCoreAndTempSolrHomeDirectory() throws Exception {

@@ -1 +0,0 @@
a001f32ba5b330bb9b9b82c601771b9ad2b94eb0

@@ -0,0 +1 @@
b8f91682cfeb8f9196aad56ace9c9a13330acef6

@@ -1 +0,0 @@
2d00ff1042ae258f33830f26f9b30fc3a43d37e1

@@ -0,0 +1 @@
91f3284993b44dcb2f003b5f28617abba13971d2

@@ -1,67 +0,0 @@
<?xml version="1.0" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->

<!-- Example exchange rates file for CurrencyFieldType named "currency" in example schema -->

<currencyConfig version="1.0">
  <rates>
    <!-- Updated from http://www.exchangerate.com/ at 2011-09-27 -->
    <rate from="USD" to="ARS" rate="4.333871" comment="ARGENTINA Peso" />
    <rate from="USD" to="AUD" rate="1.025768" comment="AUSTRALIA Dollar" />
    <rate from="USD" to="EUR" rate="0.743676" comment="European Euro" />
    <rate from="USD" to="BRL" rate="1.881093" comment="BRAZIL Real" />
    <rate from="USD" to="CAD" rate="1.030815" comment="CANADA Dollar" />
    <rate from="USD" to="CLP" rate="519.0996" comment="CHILE Peso" />
    <rate from="USD" to="CNY" rate="6.387310" comment="CHINA Yuan" />
    <rate from="USD" to="CZK" rate="18.47134" comment="CZECH REP. Koruna" />
    <rate from="USD" to="DKK" rate="5.515436" comment="DENMARK Krone" />
    <rate from="USD" to="HKD" rate="7.801922" comment="HONG KONG Dollar" />
    <rate from="USD" to="HUF" rate="215.6169" comment="HUNGARY Forint" />
    <rate from="USD" to="ISK" rate="118.1280" comment="ICELAND Krona" />
    <rate from="USD" to="INR" rate="49.49088" comment="INDIA Rupee" />
    <rate from="USD" to="XDR" rate="0.641358" comment="INTNL MON. FUND SDR" />
    <rate from="USD" to="ILS" rate="3.709739" comment="ISRAEL Sheqel" />
    <rate from="USD" to="JPY" rate="76.32419" comment="JAPAN Yen" />
    <rate from="USD" to="KRW" rate="1169.173" comment="KOREA (SOUTH) Won" />
    <rate from="USD" to="KWD" rate="0.275142" comment="KUWAIT Dinar" />
    <rate from="USD" to="MXN" rate="13.85895" comment="MEXICO Peso" />
    <rate from="USD" to="NZD" rate="1.285159" comment="NEW ZEALAND Dollar" />
    <rate from="USD" to="NOK" rate="5.859035" comment="NORWAY Krone" />
    <rate from="USD" to="PKR" rate="87.57007" comment="PAKISTAN Rupee" />
    <rate from="USD" to="PEN" rate="2.730683" comment="PERU Sol" />
    <rate from="USD" to="PHP" rate="43.62039" comment="PHILIPPINES Peso" />
    <rate from="USD" to="PLN" rate="3.310139" comment="POLAND Zloty" />
    <rate from="USD" to="RON" rate="3.100932" comment="ROMANIA Leu" />
    <rate from="USD" to="RUB" rate="32.14663" comment="RUSSIA Ruble" />
    <rate from="USD" to="SAR" rate="3.750465" comment="SAUDI ARABIA Riyal" />
    <rate from="USD" to="SGD" rate="1.299352" comment="SINGAPORE Dollar" />
    <rate from="USD" to="ZAR" rate="8.329761" comment="SOUTH AFRICA Rand" />
    <rate from="USD" to="SEK" rate="6.883442" comment="SWEDEN Krona" />
    <rate from="USD" to="CHF" rate="0.906035" comment="SWITZERLAND Franc" />
    <rate from="USD" to="TWD" rate="30.40283" comment="TAIWAN Dollar" />
    <rate from="USD" to="THB" rate="30.89487" comment="THAILAND Baht" />
    <rate from="USD" to="AED" rate="3.672955" comment="U.A.E. Dirham" />
    <rate from="USD" to="UAH" rate="7.988582" comment="UKRAINE Hryvnia" />
    <rate from="USD" to="GBP" rate="0.647910" comment="UNITED KINGDOM Pound" />

    <!-- Cross-rates for some common currencies -->
    <rate from="EUR" to="GBP" rate="0.869914" />
    <rate from="EUR" to="NOK" rate="7.800095" />
    <rate from="GBP" to="NOK" rate="8.966508" />
  </rates>
</currencyConfig>

@@ -16,17 +16,14 @@
limitations under the License.
-->

<!--
This is the Solr schema file. This file should be named "schema.xml" and
should be in the conf directory under the solr home
(i.e. ./solr/conf/schema.xml by default)
or located where the classloader for the Solr webapp can find it.
<!--

This example schema is the recommended starting point for users.
It should be kept correct and concise, usable out-of-the-box.

For more information, on how to customize this file, please see
http://wiki.apache.org/solr/SchemaXml
http://lucene.apache.org/solr/guide/documents-fields-and-schema-design.html

PERFORMANCE NOTE: this schema includes many optional features and should not
be used for benchmarking. To improve performance one could

@@ -39,10 +36,6 @@
- for best index size and searching performance, set "index" to false
for all general text fields, use copyField to copy them to the
catchall "text" field, and use that for searching.
- For maximum indexing performance, use the ConcurrentUpdateSolrServer
java client.
- Remember to run the JVM in server mode, and use a higher logging level
that avoids logging every request
-->

<schema name="default-config" version="1.6">

@@ -70,14 +63,12 @@
fieldTypes section
indexed: true if this field should be indexed (searchable or sortable)
stored: true if this field should be retrievable
docValues: true if this field should have doc values. Doc values are
useful (required, if you are using *Point fields) for faceting,
grouping, sorting and function queries. Doc values will make the index
docValues: true if this field should have doc values. Doc Values is
recommended (required, if you are using *Point fields) for faceting,
grouping, sorting and function queries. Doc Values will make the index
faster to load, more NRT-friendly and more memory-efficient.
They however come with some limitations: they are currently only
supported by StrField, UUIDField, all Trie*Fields and *PointFields,
and depending on the field type, they might require the field to be
single-valued, be required or have a default value (check the
They are currently only supported by StrField, UUIDField, all Trie*Fields and *PointFields.
Some field types may have limitations on using Doc Values (check the
documentation of the field type you're interested in for more information)
multiValued: true if this field may contain multiple values per document
omitNorms: (expert) set to true to omit the norms associated with

@@ -106,10 +97,9 @@
trailing underscores (e.g. _version_) are reserved.
-->

<!-- In this _default configset, only three fields are pre-declared:
id, _version_, and _text_. All other fields will be type guessed and added via the
"add-unknown-fields-to-the-schema" update request processor chain declared
in solrconfig.xml.
<!-- In this _default configset, only four fields are pre-declared:
id, _version_, and _text_ and _root_. All other fields will be type guessed and added via the
"add-unknown-fields-to-the-schema" update request processor chain declared in solrconfig.xml.

Note that many dynamic fields are also defined - you can use them to specify a
field's type via field naming conventions - see below.

@@ -117,8 +107,9 @@
WARNING: The _text_ catch-all field will significantly increase your index size.
If you don't need it, consider removing it and the corresponding copyField directive.
-->

<field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false" />
<!-- doc values are enabled by default for primitive types such as long so we don't index the version field -->
<!-- docValues are enabled by default for long type so we don't need to index the version field -->
<field name="_version_" type="long" indexed="false" stored="false"/>
<field name="_root_" type="string" indexed="true" stored="false" docValues="false" />
<field name="_text_" type="text_general" indexed="true" stored="false" multiValued="true"/>

@@ -128,20 +119,16 @@
<!-- <copyField source="*" dest="_text_"/> -->

<!-- Dynamic field definitions allow using convention over configuration
for fields via the specification of patterns to match field names.
for fields via the specification of patterns to match field names.
EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i)
RESTRICTION: the glob-like pattern in the name attribute must have
a "*" only at the start or the end. -->
RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end. -->

<dynamicField name="*_i" type="int" indexed="true" stored="true"/>
<dynamicField name="*_is" type="ints" indexed="true" stored="true"/>
<dynamicField name="*_s" type="string" indexed="true" stored="true" />
<dynamicField name="*_s_ns" type="string" indexed="true" stored="false" />
<dynamicField name="*_ss" type="strings" indexed="true" stored="true"/>
<dynamicField name="*_l" type="long" indexed="true" stored="true"/>
<dynamicField name="*_l_ns" type="long" indexed="true" stored="false"/>
<dynamicField name="*_ls" type="longs" indexed="true" stored="true"/>
<dynamicField name="*_t" type="text_general" indexed="true" stored="true"/>
<dynamicField name="*_txt" type="text_general" indexed="true" stored="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
<dynamicField name="*_bs" type="booleans" indexed="true" stored="true"/>

@@ -150,6 +137,9 @@
<dynamicField name="*_d" type="double" indexed="true" stored="true"/>
<dynamicField name="*_ds" type="doubles" indexed="true" stored="true"/>

<!-- Type used for data-driven schema, to add a string copy for each text field -->
<dynamicField name="*_str" type="strings" stored="false" docValues="true" indexed="false" />

<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
<dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_p" type="location" indexed="true" stored="true"/>

@@ -184,22 +174,8 @@
<dynamicField name="*_dpi" type="delimited_payloads_int" indexed="true" stored="true"/>
<dynamicField name="*_dps" type="delimited_payloads_string" indexed="true" stored="true"/>

<dynamicField name="*_c" type="currency" indexed="true" stored="true"/>

<dynamicField name="ignored_*" type="ignored" multiValued="true"/>
<dynamicField name="attr_*" type="text_general" indexed="true" stored="true" multiValued="true"/>

<dynamicField name="random_*" type="random" />

<!-- uncomment the following to ignore any fields that don't already match an existing
field name or dynamic field, rather than reporting them as an error.
alternately, change the type="ignored" to some other type e.g. "text" if you want
unknown fields indexed and/or stored by default

NB: use of "*" dynamic fields will disable field type guessing and adding
unknown fields to the schema. -->
<!--dynamicField name="*" type="ignored" multiValued="true" /-->

<!-- Field to use to determine and enforce document uniqueness.
Unless this field is marked with required="false", it will be a required field
-->

@@ -220,18 +196,6 @@
standard package such as org.apache.solr.analysis
-->

<!-- The StrField type is not analyzed, but indexed/stored verbatim.
It supports doc values but in that case the field needs to be
single-valued and either required or have a default value.
-->
<fieldType name="string" class="solr.StrField" sortMissingLast="true" docValues="true" />
<fieldType name="strings" class="solr.StrField" sortMissingLast="true" multiValued="true" docValues="true" />

<!-- boolean type: "true" or "false" -->
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>

<fieldType name="booleans" class="solr.BoolField" sortMissingLast="true" multiValued="true"/>

<!-- sortMissingLast and sortMissingFirst attributes are optional attributes are
currently supported on types that are sorted internally as strings
and on numeric types.

@@ -246,7 +210,18 @@
- If sortMissingLast="false" and sortMissingFirst="false" (the default),
then default lucene sorting will be used which places docs without the
field first in an ascending sort and last in a descending sort.
-->
-->

<!-- The StrField type is not analyzed, but indexed/stored verbatim.
It supports doc values but in that case the field needs to be
single-valued and either required or have a default value.
-->
<fieldType name="string" class="solr.StrField" sortMissingLast="true" docValues="true" />
<fieldType name="strings" class="solr.StrField" sortMissingLast="true" multiValued="true" docValues="true" />

<!-- boolean type: "true" or "false" -->
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
<fieldType name="booleans" class="solr.BoolField" sortMissingLast="true" multiValued="true"/>

<!--
Numeric field types that index values using KD-trees. *Point fields are faster and more efficient than Trie* fields both, at

@@ -332,19 +307,6 @@
<!--Binary data type. The data should be sent/retrieved in as Base64 encoded Strings -->
<fieldType name="binary" class="solr.BinaryField"/>

<!-- The "RandomSortField" is not used to store or search any
data. You can declare fields of this type it in your schema
to generate pseudo-random orderings of your docs for sorting
or function purposes. The ordering is generated based on the field
name and the version of the index. As long as the index version
remains unchanged, and the same field name is reused,
||||
the ordering of the docs will be consistent.
|
||||
If you want different psuedo-random orderings of documents,
|
||||
for the same version of the index, use a dynamicField and
|
||||
change the field name in the request.
|
||||
-->
|
||||
<fieldType name="random" class="solr.RandomSortField" indexed="true" />
|
||||
|
||||
<!-- solr.TextField allows the specification of custom text analyzers
|
||||
specified as a tokenizer and a list of token filters. Different
|
||||
analyzers may be specified for indexing and querying.
|
||||
|
@ -354,7 +316,7 @@
|
|||
matching across fields.
|
||||
|
||||
For more info on customizing your analyzer chain, please see
|
||||
http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters
|
||||
http://lucene.apache.org/solr/guide/understanding-analyzers-tokenizers-and-filters.html#understanding-analyzers-tokenizers-and-filters
|
||||
-->
|
||||
|
||||
<!-- One can also specify an existing Analyzer class that has a
|
||||
|
@ -397,11 +359,9 @@
|
|||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- A text field with defaults appropriate for English: it
|
||||
tokenizes with StandardTokenizer, removes English stop words
|
||||
(lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
|
||||
finally applies Porter's stemming. The query time analyzer
|
||||
also applies synonyms from synonyms.txt. -->
|
||||
<!-- A text field with defaults appropriate for English: it tokenizes with StandardTokenizer,
|
||||
removes English stop words (lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
|
||||
finally applies Porter's stemming. The query time analyzer also applies synonyms from synonyms.txt. -->
|
||||
<dynamicField name="*_txt_en" type="text_en" indexed="true" stored="true"/>
|
||||
<fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer type="index">
|
||||
|
@ -579,10 +539,6 @@
|
|||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- since fields of this type are by default not stored or indexed,
|
||||
any data added to them will be ignored outright. -->
|
||||
<fieldType name="ignored" stored="false" indexed="false" docValues="false" multiValued="true" class="solr.StrField" />
|
||||
|
||||
<!-- This point type indexes the coordinates as separate fields (subFields)
|
||||
If subFieldType is defined, it references a type, and a dynamic field
|
||||
definition is created matching *___<typename>. Alternately, if
|
||||
|
@ -600,9 +556,9 @@
|
|||
<!-- A specialized field for geospatial search filters and distance sorting. -->
|
||||
<fieldType name="location" class="solr.LatLonPointSpatialField" docValues="true"/>
|
||||
|
||||
<!-- An alternative geospatial field type new to Solr 4. It supports multiValued and polygon shapes.
|
||||
For more information about this and other Spatial fields new to Solr 4, see:
|
||||
http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
|
||||
<!-- A geospatial field type that supports multiValued and polygon shapes.
|
||||
For more information about this and other spatial fields see:
|
||||
http://lucene.apache.org/solr/guide/spatial-search.html
|
||||
-->
|
||||
<fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
|
||||
geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
|
||||
|
@ -627,26 +583,6 @@
|
|||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
|
||||
Parameters:
|
||||
amountLongSuffix: Required. Refers to a dynamic field for the raw amount sub-field.
|
||||
The dynamic field must have a field type that extends LongValueFieldType.
|
||||
Note: If you expect to use Atomic Updates, this dynamic field may not be stored.
|
||||
codeStrSuffix: Required. Refers to a dynamic field for the currency code sub-field.
|
||||
The dynamic field must have a field type that extends StrField.
|
||||
Note: If you expect to use Atomic Updates, this dynamic field may not be stored.
|
||||
defaultCurrency: Specifies the default currency if none specified. Defaults to "USD"
|
||||
providerClass: Lets you plug in other exchange provider backend:
|
||||
solr.FileExchangeRateProvider is the default and takes one parameter:
|
||||
currencyConfig: name of an xml file holding exchange rates
|
||||
solr.OpenExchangeRatesOrgProvider uses rates from openexchangerates.org:
|
||||
ratesFileLocation: URL or path to rates JSON file (default latest.json on the web)
|
||||
refreshInterval: Number of minutes between each rates fetch (default: 1440, min: 60)
|
||||
-->
|
||||
<fieldType name="currency" class="solr.CurrencyFieldType" amountLongSuffix="_l_ns" codeStrSuffix="_s_ns"
|
||||
defaultCurrency="USD" currencyConfig="currency.xml" />
|
||||
|
||||
|
||||
<!-- some examples for different languages (generally ordered by ISO code) -->
|
||||
|
||||
<!-- Arabic -->
|
||||
|
@ -936,8 +872,6 @@
|
|||
See lang/userdict_ja.txt for a sample user dictionary file.
|
||||
|
||||
Punctuation characters are discarded by default. Use discardPunctuation="false" to keep them.
|
||||
|
||||
See http://wiki.apache.org/solr/JapaneseLanguageSupport for more on Japanese language support.
|
||||
-->
|
||||
<tokenizer class="solr.JapaneseTokenizerFactory" mode="search"/>
|
||||
<!--<tokenizer class="solr.JapaneseTokenizerFactory" mode="search" userDictionary="lang/userdict_ja.txt"/>-->
|
||||
|
@ -1065,7 +999,7 @@
|
|||
<!-- Similarity is the scoring routine for each document vs. a query.
|
||||
A custom Similarity or SimilarityFactory may be specified here, but
|
||||
the default is fine for most applications.
|
||||
For more info: http://wiki.apache.org/solr/SchemaXml#Similarity
|
||||
For more info: http://lucene.apache.org/solr/guide/other-schema-elements.html#OtherSchemaElements-Similarity
|
||||
-->
|
||||
<!--
|
||||
<similarity class="com.example.solr.CustomSimilarityFactory">
|
||||
|
|
|
@ -1138,7 +1138,8 @@
|
|||
Field type guessing update processors that will
|
||||
attempt to parse string-typed field values as Booleans, Longs,
|
||||
Doubles, or Dates, and then add schema fields with the guessed
|
||||
field types.
|
||||
field types. Text content will be indexed as "text_general" as
|
||||
well as a copy to a plain string version in *_str.
|
||||
|
||||
These require that the schema is both managed and mutable, by
|
||||
declaring schemaFactory as ManagedIndexSchemaFactory, with
|
||||
|
@ -1177,7 +1178,16 @@
|
|||
</arr>
|
||||
</updateProcessor>
|
||||
<updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
|
||||
<str name="defaultFieldType">strings</str>
|
||||
<lst name="typeMapping">
|
||||
<str name="valueClass">java.lang.String</str>
|
||||
<str name="fieldType">text_general</str>
|
||||
<lst name="copyField">
|
||||
<str name="dest">*_str</str>
|
||||
<int name="maxChars">256</int>
|
||||
</lst>
|
||||
<!-- Use as default mapping instead of defaultFieldType -->
|
||||
<bool name="default">true</bool>
|
||||
</lst>
|
||||
<lst name="typeMapping">
|
||||
<str name="valueClass">java.lang.Boolean</str>
|
||||
<str name="fieldType">booleans</str>
|
||||
|
|
|
@ -103,7 +103,7 @@ If you are on a Windows machine, simply replace `zkcli.sh` with `zkcli.bat` in the

[source,bash]
----
./server/scripts/cloud-scripts/zkcli.sh -zkhost 127.0.0.1:9983 -cmd upconfig -confname my_new_config -confdir server/solr/configsets/basic_configs/conf
./server/scripts/cloud-scripts/zkcli.sh -zkhost 127.0.0.1:9983 -cmd upconfig -confname my_new_config -confdir server/solr/configsets/_default/conf
----

[[CommandLineUtilities-BootstrapZooKeeperfromexistingSOLR_HOME]]
@ -117,6 +117,7 @@ The properties that are configured with these commands are predefined and listed

* `requestDispatcher.requestParsers.multipartUploadLimitInKB`
* `requestDispatcher.requestParsers.formdataUploadLimitInKB`
* `requestDispatcher.requestParsers.enableRemoteStreaming`
* `requestDispatcher.requestParsers.enableStreamBody`
* `requestDispatcher.requestParsers.addHttpRequestToContext`

[[ConfigAPI-CommandsforCustomHandlersandLocalComponents]]
@ -255,7 +255,7 @@ NOTE: If your operating system does not include cURL, you can download binaries

=== Create a SolrCloud Collection using bin/solr

Create a 2-shard, replicationFactor=1 collection named mycollection using the default configset (data_driven_schema_configs):
Create a 2-shard, replicationFactor=1 collection named mycollection using the default configset (_default):

.*nix command
[source,bash]
@ -89,7 +89,9 @@ Next, the script prompts you for the number of shards to distribute the collecti

Next, the script will prompt you for the number of replicas to create for each shard. <<shards-and-indexing-data-in-solrcloud.adoc#shards-and-indexing-data-in-solrcloud,Replication>> is covered in more detail later in the guide, so if you're unsure, then use the default of 2 so that you can see how replication is handled in SolrCloud.

Lastly, the script will prompt you for the name of a configuration directory for your collection. You can choose *basic_configs*, *data_driven_schema_configs*, or *sample_techproducts_configs*. The configuration directories are pulled from `server/solr/configsets/` so you can review them beforehand if you wish. The *data_driven_schema_configs* configuration (the default) is useful when you're still designing a schema for your documents and need some flexibility as you experiment with Solr.
Lastly, the script will prompt you for the name of a configuration directory for your collection. You can choose *_default* or *sample_techproducts_configs*. The configuration directories are pulled from `server/solr/configsets/` so you can review them beforehand if you wish. The *_default* configuration is useful when you're still designing a schema for your documents and need some flexibility as you experiment with Solr, since it has schemaless functionality. However, after creating your collection, the schemaless functionality can be disabled in order to lock down the schema (so that documents indexed after doing so will not alter the schema) or so that you can configure the schema yourself. This can be done as follows (assuming your collection name is `mycollection`):

`curl http://host:8983/solr/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'`

At this point, you should have a new collection created in your local SolrCloud cluster. To verify this, you can run the status command:
@ -39,5 +39,5 @@ This section helps you get Solr up and running quickly, and introduces you to th

[TIP]
====
Solr includes a Quick Start tutorial which will be helpful if you are just starting out with Solr. You can find it online at http://lucene.apache.org/solr/quickstart.html, or in your Solr installation at `$SOLR_INSTALL_DIR/docs/quickstart.html`.
Solr includes a Quick Start tutorial which will be helpful if you are just starting out with Solr. You can find it online at http://lucene.apache.org/solr/quickstart.html.
====
@ -29,7 +29,7 @@ For example, if you want several of your search handlers to return the same list

The properties and configuration of an `<initParams>` section mirror the properties and configuration of a request handler. It can include sections for defaults, appends, and invariants, the same as any request handler.

For example, here is one of the `<initParams>` sections defined by default in the `data_driven_config` example:
For example, here is one of the `<initParams>` sections defined by default in the `_default` example:

[source,xml]
----
@ -102,7 +102,7 @@ Example of `commit` and `optimize` with optional attributes:

[[NearRealTimeSearching-PassingcommitandcommitWithinparametersaspartoftheURL]]
=== Passing commit and commitWithin Parameters as Part of the URL

Update handlers can also get `commit`-related parameters as part of the update URL. This example adds a small test document and causes an explicit commit to happen immediately afterwards:
Update handlers can also get `commit`-related parameters as part of the update URL, if the `stream.body` feature is enabled. This example adds a small test document and causes an explicit commit to happen immediately afterwards:

[source,text]
----

@ -132,6 +132,8 @@ curl http://localhost:8983/solr/my_collection/update?commitWithin=10000

  -H "Content-Type: text/xml" --data-binary '<add><doc><field name="id">testdoc</field></doc></add>'
----

WARNING: While the `stream.body` feature is great for development and testing, it should normally not be enabled in production systems, as it lets a user with READ permissions post data that may alter the system state. The feature is disabled by default. See <<requestdispatcher-in-solrconfig.adoc#RequestDispatcherinSolrConfig-requestParsersElement,RequestDispatcher in SolrConfig>> for details.

[[NearRealTimeSearching-ChangingdefaultcommitWithinBehavior]]
=== Changing default commitWithin Behavior
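In SolrJ, the same effect is typically achieved with `UpdateRequest.setCommitWithin`, which avoids `stream.body` entirely. A minimal sketch (the base URL, collection name, and document are illustrative):

[source,java]
----
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;

public class CommitWithinExample {
  public static void main(String[] args) throws Exception {
    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", "testdoc");

      UpdateRequest req = new UpdateRequest();
      req.add(doc);
      req.setCommitWithin(10000); // ask Solr to commit within 10 seconds
      req.process(client, "my_collection");
    }
  }
}
----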
@ -48,6 +48,8 @@ The `<requestParsers>` sub-element controls values related to parsing requests.

The attribute `enableRemoteStreaming` controls whether remote streaming of content is allowed. If omitted or set to `false` (the default), streaming will not be allowed. Setting it to `true` lets you specify the location of content to be streamed using `stream.file` or `stream.url` parameters.

The attribute `enableStreamBody` controls whether streaming content from the HTTP parameter `stream.body` is allowed. If omitted or set to `false` (the default), streaming will not be allowed. Setting it to `true` lets you pass data in the `stream.body` parameter.

If you enable remote streaming, be sure that you have authentication enabled. Otherwise, someone could potentially gain access to your content by accessing arbitrary URLs. It's also a good idea to place Solr behind a firewall to prevent it from being accessed from untrusted clients.

The attribute `multipartUploadLimitInKB` sets an upper limit in kilobytes on the size of a document that may be submitted in a multi-part HTTP POST request. The value specified is multiplied by 1024 to determine the size in bytes. A value of `-1` means MAX_INT, which is also the system default if omitted.

@ -59,11 +61,22 @@ The attribute `addHttpRequestToContext` can be used to indicate that the origina

[source,xml]
----
<requestParsers enableRemoteStreaming="false"
                enableStreamBody="false"
                multipartUploadLimitInKB="2048"
                formdataUploadLimitInKB="2048"
                addHttpRequestToContext="false" />
----

The command below is an example of how to enable remote streaming and stream body through the <<config-api.adoc#ConfigAPI-CreatingandUpdatingCommonProperties,Config API>>:

[source,bash]
----
curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d'{
  "set-property" : {"requestDispatcher.requestParsers.enableRemoteStreaming":true},
  "set-property" : {"requestDispatcher.requestParsers.enableStreamBody":true}
}'
----

[[RequestDispatcherinSolrConfig-httpCachingElement]]
== httpCaching Element
@ -29,7 +29,7 @@ These Solr features, all controlled via `solrconfig.xml`, are:

[[SchemalessMode-UsingtheSchemalessExample]]
== Using the Schemaless Example

The three features of schemaless mode are pre-configured in the `data_driven_schema_configs` <<config-sets.adoc#config-sets,config set>> in the Solr distribution. To start an example instance of Solr using these configs, run the following command:
The three features of schemaless mode are pre-configured in the `_default` <<config-sets.adoc#config-sets,config set>> in the Solr distribution. To start an example instance of Solr using these configs, run the following command:

[source,bash]
----
@ -67,15 +67,10 @@ You can use the `/schema/fields` <<schema-api.adoc#schema-api,Schema API>> to co

      "uniqueKey":true}]}
----

[TIP]
====
The `data_driven_schema_configs` configset includes a `copyField` directive that causes all content to be indexed in a predefined "catch-all" `\_text_` field, which is used to enable single-field search that includes all fields' content. This will cause the index to be larger than it would be without this "catch-all" `copyField`. When you nail down your schema, consider removing the `\_text_` field and the corresponding `copyField` directive if you don't need it.
====

[[SchemalessMode-ConfiguringSchemalessMode]]
== Configuring Schemaless Mode

As described above, there are three configuration elements that need to be in place to use Solr in schemaless mode. In the `data_driven_schema_configs` config set included with Solr these are already configured. If, however, you would like to implement schemaless on your own, you should make the following changes.
As described above, there are three configuration elements that need to be in place to use Solr in schemaless mode. In the `_default` config set included with Solr these are already configured. If, however, you would like to implement schemaless on your own, you should make the following changes.

[[SchemalessMode-EnableManagedSchema]]
=== Enable Managed Schema
@ -99,18 +94,16 @@ The UpdateRequestProcessorChain allows Solr to guess field types, and you can de

[source,xml]
----
<updateRequestProcessorChain name="add-unknown-fields-to-the-schema">
  <!-- UUIDUpdateProcessorFactory will generate an id if none is present in the incoming document -->
  <processor class="solr.UUIDUpdateProcessorFactory" />
  <processor class="solr.RemoveBlankFieldUpdateProcessorFactory"/>
  <processor class="solr.FieldNameMutatingUpdateProcessorFactory">
<updateProcessor class="solr.UUIDUpdateProcessorFactory" name="uuid"/>
<updateProcessor class="solr.RemoveBlankFieldUpdateProcessorFactory" name="remove-blank"/>
<updateProcessor class="solr.FieldNameMutatingUpdateProcessorFactory" name="field-name-mutating">
  <str name="pattern">[^\w-\.]</str>
  <str name="replacement">_</str>
  </processor>
  <processor class="solr.ParseBooleanFieldUpdateProcessorFactory"/>
  <processor class="solr.ParseLongFieldUpdateProcessorFactory"/>
  <processor class="solr.ParseDoubleFieldUpdateProcessorFactory"/>
  <processor class="solr.ParseDateFieldUpdateProcessorFactory">
</updateProcessor>
<updateProcessor class="solr.ParseBooleanFieldUpdateProcessorFactory" name="parse-boolean"/>
<updateProcessor class="solr.ParseLongFieldUpdateProcessorFactory" name="parse-long"/>
<updateProcessor class="solr.ParseDoubleFieldUpdateProcessorFactory" name="parse-double"/>
<updateProcessor class="solr.ParseDateFieldUpdateProcessorFactory" name="parse-date">
  <arr name="format">
    <str>yyyy-MM-dd'T'HH:mm:ss.SSSZ</str>
    <str>yyyy-MM-dd'T'HH:mm:ss,SSSZ</str>

@ -130,9 +123,18 @@ The UpdateRequestProcessorChain allows Solr to guess field types, and you can de

    <str>yyyy-MM-dd HH:mm</str>
    <str>yyyy-MM-dd</str>
  </arr>
  </processor>
  <processor class="solr.AddSchemaFieldsUpdateProcessorFactory">
    <str name="defaultFieldType">strings</str>
</updateProcessor>
<updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
  <lst name="typeMapping">
    <str name="valueClass">java.lang.String</str>
    <str name="fieldType">text_general</str>
    <lst name="copyField">
      <str name="dest">*_str</str>
      <int name="maxChars">256</int>
    </lst>
    <!-- Use as default mapping instead of defaultFieldType -->
    <bool name="default">true</bool>
  </lst>
  <lst name="typeMapping">
    <str name="valueClass">java.lang.Boolean</str>
    <str name="fieldType">booleans</str>

@ -150,11 +152,15 @@ The UpdateRequestProcessorChain allows Solr to guess field types, and you can de

    <str name="valueClass">java.lang.Number</str>
    <str name="fieldType">pdoubles</str>
  </lst>
  </processor>
  <processor class="solr.LogUpdateProcessorFactory"/>
  <processor class="solr.DistributedUpdateProcessorFactory"/>
  <processor class="solr.RunUpdateProcessorFactory"/>
</updateRequestProcessorChain>
</updateProcessor>

<!-- The update.autoCreateFields property can be set to false to disable schemaless mode -->
<updateRequestProcessorChain name="add-unknown-fields-to-the-schema" default="${update.autoCreateFields:true}"
         processor="uuid,remove-blank,field-name-mutating,parse-boolean,parse-long,parse-double,parse-date,add-schema-fields">
  <processor class="solr.LogUpdateProcessorFactory"/>
  <processor class="solr.DistributedUpdateProcessorFactory"/>
  <processor class="solr.RunUpdateProcessorFactory"/>
</updateRequestProcessorChain>
----

Javadocs for update processor factories mentioned above:
@ -171,7 +177,7 @@ Javadocs for update processor factories mentioned above:

[[SchemalessMode-MaketheUpdateRequestProcessorChaintheDefaultfortheUpdateRequestHandler]]
=== Make the UpdateRequestProcessorChain the Default for the UpdateRequestHandler

Once the UpdateRequestProcessorChain has been defined, you must instruct your UpdateRequestHandlers to use it when working with index updates (i.e., adding, removing, replacing documents). Here is an example using <<initparams-in-solrconfig.adoc#initparams-in-solrconfig,InitParams>> to set the defaults on all `/update` request handlers:
Once the UpdateRequestProcessorChain has been defined, you must instruct your UpdateRequestHandlers to use it when working with index updates (i.e., adding, removing, replacing documents). There are two ways to do this. The update chain shown above has a `default=true` attribute which will use it for any update handler. An alternative, more explicit way is to use <<initparams-in-solrconfig.adoc#initparams-in-solrconfig,InitParams>> to set the defaults on all `/update` request handlers:

[source,xml]
----
@ -190,9 +196,9 @@ After each of these changes has been made, Solr should be restarted (or, you ca

[[SchemalessMode-ExamplesofIndexedDocuments]]
== Examples of Indexed Documents

Once schemaless mode has been enabled (whether you configured it manually or are using `data_driven_schema_configs`), documents that include fields that are not defined in your schema should be added to the index, and the new fields added to the schema.
Once schemaless mode has been enabled (whether you configured it manually or are using `_default`), documents that include fields that are not defined in your schema will be indexed, using the guessed field types which are automatically added to the schema.

For example, adding a CSV document will cause its fields that are not in the schema to be added, with fieldTypes based on values:
For example, adding a CSV document will cause unknown fields to be added, with fieldTypes based on values:

[source,bash]
----
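The same round trip can be exercised from SolrJ. A minimal sketch (assuming a schemaless collection named `gettingstarted` on a local server; the field names and values are illustrative) adds a document with previously unknown fields and then asks the Schema API what type was guessed for one of them:

[source,java]
----
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.schema.SchemaRequest;
import org.apache.solr.client.solrj.response.schema.SchemaResponse;
import org.apache.solr.common.SolrInputDocument;

public class SchemalessGuessExample {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", "1");
      doc.addField("Sold", 41);    // should be guessed as a long-based type
      doc.addField("Rating", 3.5); // should be guessed as a double-based type
      client.add("gettingstarted", doc);
      client.commit("gettingstarted");

      // Ask the Schema API which fieldType was guessed for "Sold"
      SchemaResponse.FieldResponse field =
          new SchemaRequest.Field("Sold").process(client, "gettingstarted");
      System.out.println(field.getField()); // e.g. {name=Sold, type=plongs, ...}
    }
  }
}
----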
@ -217,37 +223,51 @@ The fields now in the schema (output from `curl \http://localhost:8983/solr/gett

{
  "responseHeader":{
    "status":0,
    "QTime":1},
    "QTime":2},
  "fields":[{
      "name":"Album",
      "type":"strings"}, // Field value guessed as String -> strings fieldType
      "type":"text_general"},
    {
      "name":"Artist",
      "type":"strings"}, // Field value guessed as String -> strings fieldType
      "type":"text_general"},
    {
      "name":"FromDistributor",
      "type":"tlongs"}, // Field value guessed as Long -> tlongs fieldType
      "type":"plongs"},
    {
      "name":"Rating",
      "type":"tdoubles"}, // Field value guessed as Double -> tdoubles fieldType
      "type":"pdoubles"},
    {
      "name":"Released",
      "type":"tdates"}, // Field value guessed as Date -> tdates fieldType
      "type":"pdates"},
    {
      "name":"Sold",
      "type":"tlongs"}, // Field value guessed as Long -> tlongs fieldType
      "type":"plongs"},
    {
      "name":"_text_",
      ...
    },
      "name":"_root_" ...}
    {
      "name":"_version_",
      ...
    },
      "name":"_text_" ...}
    {
      "name":"id",
      ...
    }]}
      "name":"_version_" ...}
    {
      "name":"id" ...}
----

In addition, string versions of the text fields are indexed, using copyFields to a `*_str` dynamic field (output from `curl \http://localhost:8983/solr/gettingstarted/schema/copyfields`):

[source,json]
----
{
  "responseHeader":{
    "status":0,
    "QTime":0},
  "copyFields":[{
      "source":"Artist",
      "dest":"Artist_str",
      "maxChars":256},
    {
      "source":"Album",
      "dest":"Album_str",
      "maxChars":256}]}
----

.You Can Still Be Explicit

@ -256,9 +276,11 @@ The fields now in the schema (output from `curl \http://localhost:8983/solr/gett

Even if you want to use schemaless mode for most fields, you can still use the <<schema-api.adoc#schema-api,Schema API>> to pre-emptively create some fields, with explicit types, before you index documents that use them.

Internally, the Schema API and the Schemaless Update Processors both use the same <<schema-factory-definition-in-solrconfig.adoc#schema-factory-definition-in-solrconfig,Managed Schema>> functionality.

Also, if you do not need the `*_str` version of a text field, you can simply remove the `copyField` definition from the auto-generated schema and it will not be re-added, since the original field is now defined.
====

Once a field has been added to the schema, its field type is fixed. As a consequence, adding documents with field value(s) that conflict with the previously guessed field type will fail. For example, after adding the above document, the "```Sold```" field has the fieldType `tlongs`, but the document below has a non-integral decimal value in this field:
Once a field has been added to the schema, its field type is fixed. As a consequence, adding documents with field value(s) that conflict with the previously guessed field type will fail. For example, after adding the above document, the "```Sold```" field has the fieldType `plongs`, but the document below has a non-integral decimal value in this field:

[source,bash]
----
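Pre-creating such a field from client code works too. A sketch using the SolrJ Schema API (the field name and attributes are illustrative; `pdoubles` is chosen so that later decimal values won't conflict):

[source,java]
----
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.schema.SchemaRequest;

public class PreCreateFieldExample {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
      Map<String, Object> attrs = new LinkedHashMap<>();
      attrs.put("name", "Sold");
      attrs.put("type", "pdoubles"); // explicit type instead of a guess
      attrs.put("stored", true);
      new SchemaRequest.AddField(attrs).process(client, "gettingstarted");
    }
  }
}
----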
@ -213,7 +213,7 @@ The configset used is customized for DIH, and is found in `$SOLR_HOME/example/ex

For more information about DIH, see the section <<uploading-structured-data-store-data-with-the-data-import-handler.adoc#uploading-structured-data-store-data-with-the-data-import-handler,Uploading Structured Data Store Data with the Data Import Handler>>.
* *schemaless*: This example starts Solr in standalone mode using a managed schema, as described in the section <<schema-factory-definition-in-solrconfig.adoc#schema-factory-definition-in-solrconfig,Schema Factory Definition in SolrConfig>>, and provides a very minimal pre-defined schema. Solr will run in <<schemaless-mode.adoc#schemaless-mode,Schemaless Mode>> with this configuration, where Solr will create fields in the schema on the fly and will guess field types used in incoming documents.
+
The configset used can be found in `$SOLR_HOME/server/solr/configsets/data_driven_schema_configs`.
The configset used can be found in `$SOLR_HOME/server/solr/configsets/_default`.

[IMPORTANT]
====
@ -392,11 +392,11 @@ Name of the core or collection to create (required).

*Example*: `bin/solr create -c mycollection`

`-d <confdir>`::
The configuration directory. This defaults to `data_driven_schema_configs`.
The configuration directory. This defaults to `_default`.
+
See the section <<Configuration Directories and SolrCloud>> below for more details about this option when running in SolrCloud mode.
+
*Example*: `bin/solr create -d basic_configs`
*Example*: `bin/solr create -d _default`

`-n <configName>`::
The configuration name. This defaults to the same name as the core or collection.
@ -431,15 +431,15 @@ Before creating a collection in SolrCloud, the configuration directory used by t

Let's work through a few examples to illustrate how configuration directories work in SolrCloud.

First, if you don't provide the `-d` or `-n` options, then the default configuration (`$SOLR_HOME/server/solr/configsets/data_driven_schema_configs/conf`) is uploaded to ZooKeeper using the same name as the collection.
First, if you don't provide the `-d` or `-n` options, then the default configuration (`$SOLR_HOME/server/solr/configsets/_default/conf`) is uploaded to ZooKeeper using the same name as the collection.

For example, the following command will result in the `data_driven_schema_configs` configuration being uploaded to `/configs/contacts` in ZooKeeper: `bin/solr create -c contacts`.
For example, the following command will result in the `_default` configuration being uploaded to `/configs/contacts` in ZooKeeper: `bin/solr create -c contacts`.

If you create another collection with `bin/solr create -c contacts2`, then another copy of the `data_driven_schema_configs` directory will be uploaded to ZooKeeper under `/configs/contacts2`.
If you create another collection with `bin/solr create -c contacts2`, then another copy of the `_default` directory will be uploaded to ZooKeeper under `/configs/contacts2`.

Any changes you make to the configuration for the contacts collection will not affect the `contacts2` collection. Put simply, the default behavior creates a unique copy of the configuration directory for each collection you create.

You can override the name given to the configuration directory in ZooKeeper by using the `-n` option. For instance, the command `bin/solr create -c logs -d basic_configs -n basic` will upload the `server/solr/configsets/basic_configs/conf` directory to ZooKeeper as `/configs/basic`.
You can override the name given to the configuration directory in ZooKeeper by using the `-n` option. For instance, the command `bin/solr create -c logs -d _default -n basic` will upload the `server/solr/configsets/_default/conf` directory to ZooKeeper as `/configs/basic`.

Notice that we used the `-d` option to specify a different configuration than the default. Solr provides several built-in configurations under `server/solr/configsets`. However, you can also provide the path to your own configuration directory using the `-d` option. For instance, the command `bin/solr create -c mycoll -d /tmp/myconfigs` will upload `/tmp/myconfigs` into ZooKeeper under `/configs/mycoll`.

@ -449,7 +449,9 @@ Other collections can share the same configuration by specifying the name of the

==== Data-driven Schema and Shared Configurations

The `data_driven_schema_configs` schema can mutate as data is indexed. Consequently, we recommend that you do not share data-driven configurations between collections unless you are certain that all collections should inherit the changes made when indexing data into one of the collections.
The `_default` schema can mutate as data is indexed, since it has schemaless functionality (i.e., data-driven changes to the schema). Consequently, we recommend that you do not share data-driven configurations between collections unless you are certain that all collections should inherit the changes made when indexing data into one of the collections. You can turn off schemaless functionality for a collection with the following command (assuming the collection name is `mycollection`):

`curl http://host:8983/solr/mycollection/config -d '{"set-user-property": {"update.autoCreateFields":"false"}}'`

=== Delete Core or Collection
@ -153,7 +153,7 @@ In this example, we simply named the field paths (such as `/exams/test`). Solr w

[TIP]
====
If you are working in <<schemaless-mode.adoc#schemaless-mode,Schemaless Mode>>, fields that don't exist will be created on the fly with Solr's best guess for the field type. Documents WILL get rejected if the fields do not exist in the schema before indexing. So, if you are NOT using schemaless mode, pre-create those fields.
Documents WILL get rejected if the fields do not exist in the schema before indexing. So, if you are NOT using schemaless mode, pre-create those fields. If you are working in <<schemaless-mode.adoc#schemaless-mode,Schemaless Mode>>, fields that don't exist will be created on the fly with Solr's best guess for the field type.
====
@ -336,7 +336,7 @@ With this example, the documents indexed would be as follows:

== Tips for Custom JSON Indexing

1. Schemaless mode: This handles field creation automatically. The field guessing may not be exactly as you expect, but it works. The best thing to do is to set up a local server in schemaless mode, index a few sample docs, and create those fields in your real setup with proper field types before indexing.
2. Pre-created Schema : Post your docs to the `/update/`json`/docs` endpoint with `echo=true`. This gives you the list of field names you need to create. Create the fields before you actually index
2. Pre-created schema: Post your docs to the `/update/json/docs` endpoint with `echo=true`. This gives you the list of field names you need to create (see the sketch after this list). Create the fields before you actually index.
3. No schema, only full-text search: All you need is full-text search on your JSON. Set the configuration as given in the Setting JSON Defaults section.

[[TransformingandIndexingCustomJSON-SettingJSONDefaults]]
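A sketch of tip 2 in plain JDK HTTP code (no SolrJ required): post a sample document with `echo=true` and read back the flattened field names. The collection name and sample JSON are illustrative:

[source,java]
----
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class EchoJsonExample {
  public static void main(String[] args) throws Exception {
    // echo=true asks the endpoint to report the docs it *would* index, without indexing them
    URL url = new URL("http://localhost:8983/solr/my_collection/update/json/docs?echo=true");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setDoOutput(true);
    try (OutputStream out = conn.getOutputStream()) {
      out.write("{\"exams\":{\"test\":\"term1\",\"marks\":86}}".getBytes(StandardCharsets.UTF_8));
    }
    try (InputStream in = conn.getInputStream()) {
      System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
    }
  }
}
----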
@ -168,7 +168,7 @@ For posting XML messages contained in a file, you can use the alternative form:

curl http://localhost:8983/solr/my_collection/update -H "Content-Type: text/xml" --data-binary @myfile.xml
----

Short requests can also be sent using an HTTP GET command, URL-encoding the request, as in the following. Note the escaping of "<" and ">":
Short requests can also be sent using an HTTP GET command, if enabled in the <<requestdispatcher-in-solrconfig.adoc#RequestDispatcherinSolrConfig-requestParsersElement,RequestDispatcher in SolrConfig>> element, by URL-encoding the request, as in the following. Note the escaping of "<" and ">":

[source,bash]
----
@ -172,7 +172,7 @@ Here is the order in which the Solr Cell framework, using the Extracting Request

[[UploadingDatawithSolrCellusingApacheTika-ConfiguringtheSolrExtractingRequestHandler]]
== Configuring the Solr ExtractingRequestHandler

If you are not working with the supplied `sample_techproducts_configs `or` data_driven_schema_configs` <<config-sets.adoc#config-sets,config set>>, you must configure your own `solrconfig.xml` to know about the Jar's containing the `ExtractingRequestHandler` and its dependencies:
If you are not working with the supplied `sample_techproducts_configs` or `_default` <<config-sets.adoc#config-sets,config set>>, you must configure your own `solrconfig.xml` to know about the JARs containing the `ExtractingRequestHandler` and its dependencies:

[source,xml]
----
@ -31,7 +31,7 @@ These files are uploaded in either of the following cases:

When you try SolrCloud for the first time using `bin/solr -e cloud`, the related configset gets uploaded to ZooKeeper automatically and is linked with the newly created collection.

The command below would start SolrCloud with the default collection name (gettingstarted) and default configset (data_driven_schema_configs) uploaded and linked to it.
The command below would start SolrCloud with the default collection name (gettingstarted) and default configset (_default) uploaded and linked to it.

[source,bash]
----
@ -42,10 +42,10 @@ You can also explicitly upload a configuration directory when creating a collect

[source,bash]
----
bin/solr create -c mycollection -d data_driven_schema_configs
bin/solr create -c mycollection -d _default
----

The create command will upload a copy of the `data_driven_schema_configs` configuration directory to ZooKeeper under `/configs/mycollection`. Refer to the <<solr-control-script-reference.adoc#solr-control-script-reference,Solr Control Script Reference>> page for more details about the create command for creating collections.
The create command will upload a copy of the `_default` configuration directory to ZooKeeper under `/configs/mycollection`. Refer to the <<solr-control-script-reference.adoc#solr-control-script-reference,Solr Control Script Reference>> page for more details about the create command for creating collections.

Once a configuration directory has been uploaded to ZooKeeper, you can update it using the <<solr-control-script-reference.adoc#solr-control-script-reference,Solr Control Script>>.
@ -18,7 +18,7 @@

// specific language governing permissions and limitations
// under the License.

The VelocityResponseWriter is an optional plugin available in the `contrib/velocity` directory. It powers the /browse user interfaces when using configurations such as "basic_configs", "techproducts", and "example/files".
The VelocityResponseWriter is an optional plugin available in the `contrib/velocity` directory. It powers the /browse user interfaces when using configurations such as "_default", "techproducts", and "example/files".

Its JAR and dependencies must be added (via `<lib>` or solr/home lib inclusion), and must be registered in `solrconfig.xml` like this:
@ -1129,7 +1129,7 @@ public class CloudSolrClient extends SolrClient {

    // validate collections
    for (String collectionName : rawCollectionsList) {
      if (stateProvider.getState(collectionName) == null) {
        String alias = stateProvider.getAlias(collection);
        String alias = stateProvider.getAlias(collectionName);
        if (alias != null) {
          List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
          collectionNames.addAll(aliasList);
@ -1367,18 +1367,15 @@ public class CloudSolrClient extends SolrClient {

  /**
   * Constructs {@link CloudSolrClient} instances from provided configuration.
   */
  public static class Builder {
  public static class Builder extends SolrClientBuilder<Builder> {
    protected Collection<String> zkHosts;
    protected List<String> solrUrls;
    protected HttpClient httpClient;
    protected String zkChroot;
    protected LBHttpSolrClient loadBalancedSolrClient;
    protected LBHttpSolrClient.Builder lbClientBuilder;
    protected boolean shardLeadersOnly;
    protected boolean directUpdatesToLeadersOnly;
    protected ClusterStateProvider stateProvider;
    protected Integer connectionTimeoutMillis;
    protected Integer socketTimeoutMillis;

    public Builder() {

@ -1436,15 +1433,6 @@ public class CloudSolrClient extends SolrClient {

      return this;
    }

    /**
     * Provides a {@link HttpClient} for the builder to use when creating clients.
     */
    public Builder withHttpClient(HttpClient httpClient) {
      this.httpClient = httpClient;
      return this;
    }

    /**
     * Provide a series of ZooKeeper client endpoints for the builder to use when creating clients.
     *

@ -1515,30 +1503,6 @@ public class CloudSolrClient extends SolrClient {

      return this;
    }

    /**
     * Tells {@link Builder} that created clients should obey the following timeout when connecting to Solr servers.
     */
    public Builder withConnectionTimeout(int connectionTimeoutMillis) {
      if (connectionTimeoutMillis <= 0) {
        throw new IllegalArgumentException("connectionTimeoutMillis must be a positive integer.");
      }

      this.connectionTimeoutMillis = connectionTimeoutMillis;
      return this;
    }

    /**
     * Tells {@link Builder} that created clients should set the following read timeout on all sockets.
     */
    public Builder withSocketTimeout(int socketTimeoutMillis) {
      if (socketTimeoutMillis <= 0) {
        throw new IllegalArgumentException("socketTimeoutMillis must be a positive integer.");
      }

      this.socketTimeoutMillis = socketTimeoutMillis;
      return this;
    }

    /**
     * Create a {@link CloudSolrClient} based on the provided configuration.
     */

@ -1560,5 +1524,10 @@ public class CloudSolrClient extends SolrClient {

      }
      return new CloudSolrClient(this);
    }

    @Override
    public Builder getThis() {
      return this;
    }
  }
}
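Because `Builder` now extends `SolrClientBuilder`, the inherited setters chain together with the cloud-specific ones and still return the concrete builder type. A usage sketch (the ZooKeeper ensemble string and collection name are assumptions for illustration):

[source,java]
----
import org.apache.solr.client.solrj.impl.CloudSolrClient;

public class CloudClientExample {
  public static void main(String[] args) throws Exception {
    try (CloudSolrClient client = new CloudSolrClient.Builder()
        .withZkHost("zk1:2181,zk2:2181,zk3:2181") // assumed ZK ensemble
        .withConnectionTimeout(15000)             // inherited from SolrClientBuilder
        .withSocketTimeout(30000)                 // inherited from SolrClientBuilder
        .build()) {
      client.setDefaultCollection("gettingstarted");
      client.connect();
    }
  }
}
----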
@ -770,15 +770,12 @@ public class ConcurrentUpdateSolrClient extends SolrClient {

  /**
   * Constructs {@link ConcurrentUpdateSolrClient} instances from provided configuration.
   */
  public static class Builder {
  public static class Builder extends SolrClientBuilder<Builder> {
    protected String baseSolrUrl;
    protected HttpClient httpClient;
    protected int queueSize;
    protected int threadCount;
    protected ExecutorService executorService;
    protected boolean streamDeletes;
    protected Integer connectionTimeoutMillis;
    protected Integer socketTimeoutMillis;

    /**
     * Create a Builder object, based on the provided Solr URL.

@ -804,14 +801,6 @@ public class ConcurrentUpdateSolrClient extends SolrClient {

    public Builder(String baseSolrUrl) {
      this.baseSolrUrl = baseSolrUrl;
    }

    /**
     * Provides a {@link HttpClient} for the builder to use when creating clients.
     */
    public Builder withHttpClient(HttpClient httpClient) {
      this.httpClient = httpClient;
      return this;
    }

    /**
     * The number of documents to batch together before sending to Solr.

@ -860,31 +849,6 @@ public class ConcurrentUpdateSolrClient extends SolrClient {

      return this;
    }

    /**
     * Tells {@link Builder} that created clients should obey the following timeout when connecting to Solr servers.
     */
    public Builder withConnectionTimeout(int connectionTimeoutMillis) {
      if (connectionTimeoutMillis <= 0) {
        throw new IllegalArgumentException("connectionTimeoutMillis must be a positive integer.");
      }

      this.connectionTimeoutMillis = connectionTimeoutMillis;
      return this;
    }

    /**
     * Tells {@link Builder} that created clients should set the following read timeout on all sockets.
     */
    public Builder withSocketTimeout(int socketTimeoutMillis) {
      if (socketTimeoutMillis <= 0) {
        throw new IllegalArgumentException("socketTimeoutMillis must be a positive integer.");
      }

      this.socketTimeoutMillis = socketTimeoutMillis;
      return this;
    }

    /**
     * Create a {@link ConcurrentUpdateSolrClient} based on the provided configuration options.
     */

@ -895,5 +859,10 @@ public class ConcurrentUpdateSolrClient extends SolrClient {

      return new ConcurrentUpdateSolrClient(this);
    }

    @Override
    public Builder getThis() {
      return this;
    }
  }
}
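A usage sketch for bulk indexing (the URL and sizing are illustrative; `withQueueSize` and `withThreadCount` are the batching knobs this builder keeps for itself, while the timeout now comes from the shared base class):

[source,java]
----
import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
import org.apache.solr.common.SolrInputDocument;

public class BulkIndexExample {
  public static void main(String[] args) throws Exception {
    try (ConcurrentUpdateSolrClient client =
             new ConcurrentUpdateSolrClient.Builder("http://localhost:8983/solr/techproducts")
        .withQueueSize(100)       // docs buffered before a batch is sent
        .withThreadCount(4)       // concurrent background senders
        .withSocketTimeout(60000) // inherited from SolrClientBuilder
        .build()) {
      for (int i = 0; i < 10_000; i++) {
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "doc-" + i);
        client.add(doc);
      }
      client.commit();
    }
  }
}
----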
@ -827,14 +827,10 @@ * @deprecated since 7.0 Use {@link Builder} methods instead.

  /**
   * Constructs {@link HttpSolrClient} instances from provided configuration.
   */
  public static class Builder {
  public static class Builder extends SolrClientBuilder<Builder> {
    protected String baseSolrUrl;
    protected HttpClient httpClient;
    protected ResponseParser responseParser;
    protected boolean compression;
    protected ModifiableSolrParams invariantParams = new ModifiableSolrParams();
    protected Integer connectionTimeoutMillis;
    protected Integer socketTimeoutMillis;

    public Builder() {
      this.responseParser = new BinaryResponseParser();

@ -894,22 +890,6 @@ * @deprecated since 7.0 Use {@link Builder} methods instead.

      this.responseParser = new BinaryResponseParser();
    }

    /**
     * Provides a {@link HttpClient} for the builder to use when creating clients.
     */
    public Builder withHttpClient(HttpClient httpClient) {
      this.httpClient = httpClient;
      return this;
    }

    /**
     * Provides a {@link ResponseParser} for created clients to use when handling requests.
     */
    public Builder withResponseParser(ResponseParser responseParser) {
      this.responseParser = responseParser;
      return this;
    }

    /**
     * Chooses whether created {@link HttpSolrClient}s use compression by default.
     */

@ -941,30 +921,6 @@ * @deprecated since 7.0 Use {@link Builder} methods instead.

      this.invariantParams.add(params);
      return this;
    }

    /**
     * Tells {@link Builder} that created clients should obey the following timeout when connecting to Solr servers.
     */
    public Builder withConnectionTimeout(int connectionTimeoutMillis) {
      if (connectionTimeoutMillis <= 0) {
        throw new IllegalArgumentException("connectionTimeoutMillis must be a positive integer.");
      }

      this.connectionTimeoutMillis = connectionTimeoutMillis;
      return this;
    }

    /**
     * Tells {@link Builder} that created clients should set the following read timeout on all sockets.
     */
    public Builder withSocketTimeout(int socketTimeoutMillis) {
      if (socketTimeoutMillis <= 0) {
        throw new IllegalArgumentException("socketTimeoutMillis must be a positive integer.");
      }

      this.socketTimeoutMillis = socketTimeoutMillis;
      return this;
    }

    /**
     * Create a {@link HttpSolrClient} based on provided configuration.

@ -980,5 +936,10 @@ * @deprecated since 7.0 Use {@link Builder} methods instead.

      return new DelegationTokenHttpSolrClient(this);
    }
    }

    @Override
    public Builder getThis() {
      return this;
    }
  }
}
@ -881,13 +881,9 @@ public class LBHttpSolrClient extends SolrClient {

  /**
   * Constructs {@link LBHttpSolrClient} instances from provided configuration.
   */
  public static class Builder {
  public static class Builder extends SolrClientBuilder<Builder> {
    protected final List<String> baseSolrUrls;
    protected HttpClient httpClient;
    protected ResponseParser responseParser;
    protected HttpSolrClient.Builder httpSolrClientBuilder;
    protected Integer connectionTimeoutMillis;
    protected Integer socketTimeoutMillis;

    public Builder() {
      this.baseSolrUrls = new ArrayList<>();

@ -955,23 +951,6 @@ public class LBHttpSolrClient extends SolrClient {

      }
      return this;
    }

    /**
     * Provides a {@link HttpClient} for the builder to use when creating clients.
     */
    public Builder withHttpClient(HttpClient httpClient) {
      this.httpClient = httpClient;
      return this;
    }

    /**
     * Provides a {@link ResponseParser} for created clients to use when handling requests.
     */
    public Builder withResponseParser(ResponseParser responseParser) {
      this.responseParser = responseParser;
      return this;
    }

    /**
     * Provides a {@link HttpSolrClient.Builder} to be used for building the internally used clients.

@ -980,30 +959,6 @@ public class LBHttpSolrClient extends SolrClient {

      this.httpSolrClientBuilder = builder;
      return this;
    }

    /**
     * Tells {@link Builder} that created clients should obey the following timeout when connecting to Solr servers.
     */
    public Builder withConnectionTimeout(int connectionTimeoutMillis) {
      if (connectionTimeoutMillis <= 0) {
        throw new IllegalArgumentException("connectionTimeoutMillis must be a positive integer.");
      }

      this.connectionTimeoutMillis = connectionTimeoutMillis;
      return this;
    }

    /**
     * Tells {@link Builder} that created clients should set the following read timeout on all sockets.
     */
    public Builder withSocketTimeout(int socketTimeoutMillis) {
      if (socketTimeoutMillis <= 0) {
        throw new IllegalArgumentException("socketTimeoutMillis must be a positive integer.");
      }

      this.socketTimeoutMillis = socketTimeoutMillis;
      return this;
    }

    /**
     * Create a {@link HttpSolrClient} based on provided configuration.

@ -1011,5 +966,10 @@ public class LBHttpSolrClient extends SolrClient {

    public LBHttpSolrClient build() {
      return new LBHttpSolrClient(this);
    }

    @Override
    public Builder getThis() {
      return this;
    }
  }
}
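A usage sketch that balances queries across two standalone nodes (the URLs are illustrative):

[source,java]
----
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.LBHttpSolrClient;

public class LoadBalancedQueryExample {
  public static void main(String[] args) throws Exception {
    try (LBHttpSolrClient client = new LBHttpSolrClient.Builder()
        .withBaseSolrUrls("http://solr1:8983/solr/techproducts",
                          "http://solr2:8983/solr/techproducts")
        .withConnectionTimeout(10000) // inherited from SolrClientBuilder
        .build()) {
      System.out.println(client.query(new SolrQuery("*:*")).getResults().getNumFound());
    }
  }
}
----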
@ -0,0 +1,72 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.client.solrj.impl;

import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder;

public abstract class SolrClientBuilder<B extends SolrClientBuilder<B>> {

  protected HttpClient httpClient;
  protected ResponseParser responseParser;
  protected Integer connectionTimeoutMillis;
  protected Integer socketTimeoutMillis;

  /** Returns this builder as its concrete type {@code B}, avoiding unchecked casts in fluent chains. */
  public abstract B getThis();

  /**
   * Provides a {@link HttpClient} for the builder to use when creating clients.
   */
  public B withHttpClient(HttpClient httpClient) {
    this.httpClient = httpClient;
    return getThis();
  }

  /**
   * Provides a {@link ResponseParser} for created clients to use when handling requests.
   */
  public B withResponseParser(ResponseParser responseParser) {
    this.responseParser = responseParser;
    return getThis();
  }

  /**
   * Tells {@link Builder} that created clients should obey the provided timeout when connecting to Solr servers.
   */
  public B withConnectionTimeout(int connectionTimeoutMillis) {
    if (connectionTimeoutMillis <= 0) {
      throw new IllegalArgumentException("connectionTimeoutMillis must be a positive integer.");
    }

    this.connectionTimeoutMillis = connectionTimeoutMillis;
    return getThis();
  }

  /**
   * Tells {@link Builder} that created clients should set the provided read timeout on all sockets.
   */
  public B withSocketTimeout(int socketTimeoutMillis) {
    if (socketTimeoutMillis <= 0) {
      throw new IllegalArgumentException("socketTimeoutMillis must be a positive integer.");
    }

    this.socketTimeoutMillis = socketTimeoutMillis;
    return getThis();
  }
}
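SolrClientBuilder uses the self-type idiom: the type parameter B is the concrete builder itself, and getThis() lets the shared setters return that concrete type so fluent chains do not degrade to the abstract base type. A minimal sketch with an invented subclass; MyClient and MyClientBuilder are hypothetical names, not part of this patch:

    // Hypothetical client type, only to make the sketch self-contained.
    class MyClient {
      MyClient(HttpClient http, ResponseParser parser,
               Integer connectMillis, Integer socketMillis) { /* ... */ }
    }

    // Hypothetical concrete builder; getThis() supplies the concrete self type.
    class MyClientBuilder extends SolrClientBuilder<MyClientBuilder> {

      @Override
      public MyClientBuilder getThis() {
        return this;  // no unchecked cast needed
      }

      public MyClient build() {
        return new MyClient(httpClient, responseParser,
            connectionTimeoutMillis, socketTimeoutMillis);
      }
    }

    // Inherited setters return getThis(), so the chain keeps the concrete type:
    MyClient c = new MyClientBuilder().withSocketTimeout(60000).build();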
@ -0,0 +1,69 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.client.solrj.io.eval;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class AddAllEvaluator extends ComplexEvaluator implements Expressible {

  private static final long serialVersionUID = 1;

  public AddAllEvaluator(StreamExpression expression, StreamFactory factory) throws IOException {
    super(expression, factory);

    if(subEvaluators.size() == 0) {
      throw new IOException("addAll evaluator expects at least one array parameter");
    }
  }

  public List<Number> evaluate(Tuple tuple) throws IOException {
    // Evaluate each sub-expression and concatenate the resulting arrays in argument order.
    List<Number> all = new ArrayList<>();
    for(StreamEvaluator subEvaluator : subEvaluators) {
      List<Number> numbers = (List<Number>)subEvaluator.evaluate(tuple);
      all.addAll(numbers);
    }

    return all;
  }

  @Override
  public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
    StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass()));
    return expression;
  }

  @Override
  public Explanation toExplanation(StreamFactory factory) throws IOException {
    return new Explanation(nodeId.toString())
      .withExpressionType(ExpressionType.EVALUATOR)
      .withFunctionName(factory.getFunctionName(getClass()))
      .withImplementingClass(getClass().getName())
      .withExpression(toExpression(factory).toString());
  }
}
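AddAllEvaluator simply concatenates the arrays produced by its sub-evaluators, in argument order. A sketch of driving it directly, outside a running cluster; this assumes the registration and parsing APIs from the existing solrj streaming package (StreamFactory.withFunctionName, StreamExpressionParser.parse, the ArrayEvaluator behind array(), and a Tuple built from an empty map), with the function names addAll/array mirroring the test further below:

    // Sketch: evaluate addAll locally. The literal arrays supply all values,
    // so the Tuple can be empty.
    StreamFactory factory = new StreamFactory()
        .withFunctionName("addAll", AddAllEvaluator.class)
        .withFunctionName("array", ArrayEvaluator.class);  // assumed existing evaluator
    StreamExpression expr = StreamExpressionParser.parse("addAll(array(1, 2), array(3.5))");
    List<Number> out = new AddAllEvaluator(expr, factory).evaluate(new Tuple(new HashMap()));
    // expected: [1, 2, 3.5]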
@ -267,6 +267,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {

        String url = shardUrls.get(w);
        SolrStream solrStream = new SolrStream(url, paramsLoc);
        solrStream.setStreamContext(streamContext);
        solrStreams.add(solrStream);
      }
@ -85,6 +85,7 @@ import org.slf4j.LoggerFactory;
public class CloudSolrClientTest extends SolrCloudTestCase {

  private static final String COLLECTION = "collection1";
  private static final String COLLECTION2 = "2nd_collection";

  private static final String id = "id";

@ -173,19 +174,49 @@ public class CloudSolrClientTest extends SolrCloudTestCase {

  @Test
  public void testAliasHandling() throws Exception {

    CollectionAdminRequest.createCollection(COLLECTION2, "conf", 2, 1).process(cluster.getSolrClient());
    AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION2, cluster.getSolrClient().getZkStateReader(),
        false, true, TIMEOUT);

    CloudSolrClient client = getRandomClient();
    SolrInputDocument doc = new SolrInputDocument("id", "1", "title_s", "my doc");
    client.add(COLLECTION, doc);
    client.commit(COLLECTION);

    CollectionAdminRequest.createAlias("testalias", COLLECTION).process(cluster.getSolrClient());

    // ensure that the alias has been registered
    assertEquals(COLLECTION,
        new CollectionAdminRequest.ListAliases().process(cluster.getSolrClient()).getAliases().get("testalias"));

    SolrInputDocument doc2 = new SolrInputDocument("id", "2", "title_s", "my doc too");
    client.add(COLLECTION2, doc2);
    client.commit(COLLECTION2);
    CollectionAdminRequest.createAlias("testalias2", COLLECTION2).process(cluster.getSolrClient());

    CollectionAdminRequest.createAlias("testaliascombined", COLLECTION + "," + COLLECTION2).process(cluster.getSolrClient());

    // ensure that the aliases have been registered
    Map<String, String> aliases = new CollectionAdminRequest.ListAliases().process(cluster.getSolrClient()).getAliases();
    assertEquals(COLLECTION, aliases.get("testalias"));
    assertEquals(COLLECTION2, aliases.get("testalias2"));
    assertEquals(COLLECTION + "," + COLLECTION2, aliases.get("testaliascombined"));

    assertEquals(1, client.query(COLLECTION, params("q", "*:*")).getResults().getNumFound());
    assertEquals(1, client.query("testalias", params("q", "*:*")).getResults().getNumFound());

    assertEquals(1, client.query(COLLECTION2, params("q", "*:*")).getResults().getNumFound());
    assertEquals(1, client.query("testalias2", params("q", "*:*")).getResults().getNumFound());

    assertEquals(2, client.query("testaliascombined", params("q", "*:*")).getResults().getNumFound());

    ModifiableSolrParams paramsWithBothCollections = params("q", "*:*", "collection", COLLECTION + "," + COLLECTION2);
    assertEquals(2, client.query(null, paramsWithBothCollections).getResults().getNumFound());

    ModifiableSolrParams paramsWithBothAliases = params("q", "*:*", "collection", "testalias,testalias2");
    assertEquals(2, client.query(null, paramsWithBothAliases).getResults().getNumFound());

    ModifiableSolrParams paramsWithCombinedAlias = params("q", "*:*", "collection", "testaliascombined");
    assertEquals(2, client.query(null, paramsWithCombinedAlias).getResults().getNumFound());

    ModifiableSolrParams paramsWithMixedCollectionAndAlias = params("q", "*:*", "collection", "testalias," + COLLECTION2);
    assertEquals(2, client.query(null, paramsWithMixedCollectionAndAlias).getResults().getNumFound());
  }

  @Test
@ -5995,6 +5995,32 @@ public class StreamExpressionTest extends SolrCloudTestCase {
    assertTrue(out.get(5).doubleValue() == 500.23D);
  }


  @Test
  public void testAddAll() throws Exception {
    String cexpr = "addAll(array(1, 2, 3), array(4.5, 5.5, 6.5), array(7,8,9))";
    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
    paramsLoc.set("expr", cexpr);
    paramsLoc.set("qt", "/stream");
    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
    TupleStream solrStream = new SolrStream(url, paramsLoc);
    StreamContext context = new StreamContext();
    solrStream.setStreamContext(context);
    List<Tuple> tuples = getTuples(solrStream);
    assertTrue(tuples.size() == 1);
    List<Number> out = (List<Number>)tuples.get(0).get("return-value");
    assertTrue(out.size() == 9);
    assertTrue(out.get(0).intValue() == 1);
    assertTrue(out.get(1).intValue() == 2);
    assertTrue(out.get(2).intValue() == 3);
    assertTrue(out.get(3).doubleValue() == 4.5D);
    assertTrue(out.get(4).doubleValue() == 5.5D);
    assertTrue(out.get(5).doubleValue() == 6.5D);
    assertTrue(out.get(6).intValue() == 7);
    assertTrue(out.get(7).intValue() == 8);
    assertTrue(out.get(8).intValue() == 9);
  }

  @Test
  public void testAnova() throws Exception {
    String cexpr = "anova(array(1,2,3,5,4,6), array(5,2,3,5,4,6), array(1,2,7,5,4,6))";
@ -1738,7 +1738,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
  protected SolrClient createNewSolrClient(String collection, String baseUrl) {
    try {
      // setup the server...
      HttpSolrClient client = getHttpSolrClient(baseUrl + "/" + collection, DEFAULT_CONNECTION_TIMEOUT);
      HttpSolrClient client = getHttpSolrClient(baseUrl + "/" + collection, DEFAULT_CONNECTION_TIMEOUT, 60000);
      return client;
    }
    catch (Exception ex) {
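The one-line change above (old line, then new) threads a 60-second socket timeout through the test helper alongside the existing connection timeout. Expressed through the builder API this patch introduces, the equivalent construction would look roughly like this sketch, assuming the Builder's single-argument URL constructor and with values mirroring the patched call:

    // Rough builder-based equivalent of the patched helper call.
    HttpSolrClient client = new HttpSolrClient.Builder(baseUrl + "/" + collection)
        .withConnectionTimeout(DEFAULT_CONNECTION_TIMEOUT)
        .withSocketTimeout(60000)  // the read timeout added by this change
        .build();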