Cao Manh Dat 2017-05-12 09:54:34 +07:00
commit 5f51228a01
16 changed files with 785 additions and 265 deletions

View File

@@ -92,6 +92,9 @@ Other
* LUCENE-7753: Make fields static when possible.
(Daniel Jelinski via Adrien Grand)

======================= Lucene 6.7.0 =======================

(No Changes)

======================= Lucene 6.6.0 =======================

New Features

View File

@@ -0,0 +1,243 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.classification;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.util.BytesRef;
/**
* A classifier that approximates a naive Bayes classifier using only BM25-scored queries.
*
* @lucene.experimental
*/
public class BM25NBClassifier implements Classifier<BytesRef> {
/**
* {@link IndexReader} used to access the {@link Classifier}'s
* index
*/
private final IndexReader indexReader;
/**
* names of the fields to be used as input text
*/
private final String[] textFieldNames;
/**
* name of the field to be used as a class / category output
*/
private final String classFieldName;
/**
* {@link Analyzer} to be used for tokenizing unseen input text
*/
private final Analyzer analyzer;
/**
* {@link IndexSearcher} to run searches on the index for retrieving frequencies
*/
private final IndexSearcher indexSearcher;
/**
* optional {@link Query} used to filter the documents considered for classification
*/
private final Query query;
/**
* Creates a new NaiveBayes classifier.
*
* @param indexReader the reader on the index to be used for classification
* @param analyzer an {@link Analyzer} used to analyze unseen text
* @param query a {@link Query} to optionally filter the docs used for training the classifier, or {@code null}
* if all the indexed docs should be used
* @param classFieldName the name of the field used as the output for the classifier. NOTE: it must not be heavily analyzed,
* as the returned class will be a token indexed for this field
* @param textFieldNames the names of the fields used as inputs for the classifier; no per-field boosting is supported
*/
public BM25NBClassifier(IndexReader indexReader, Analyzer analyzer, Query query, String classFieldName, String... textFieldNames) {
this.indexReader = indexReader;
this.indexSearcher = new IndexSearcher(this.indexReader);
this.indexSearcher.setSimilarity(new BM25Similarity());
this.textFieldNames = textFieldNames;
this.classFieldName = classFieldName;
this.analyzer = analyzer;
this.query = query;
}
/**
* {@inheritDoc}
*/
@Override
public ClassificationResult<BytesRef> assignClass(String inputDocument) throws IOException {
return assignClassNormalizedList(inputDocument).get(0);
}
/**
* {@inheritDoc}
*/
@Override
public List<ClassificationResult<BytesRef>> getClasses(String text) throws IOException {
List<ClassificationResult<BytesRef>> assignedClasses = assignClassNormalizedList(text);
Collections.sort(assignedClasses);
return assignedClasses;
}
/**
* {@inheritDoc}
*/
@Override
public List<ClassificationResult<BytesRef>> getClasses(String text, int max) throws IOException {
List<ClassificationResult<BytesRef>> assignedClasses = assignClassNormalizedList(text);
Collections.sort(assignedClasses);
return assignedClasses.subList(0, max);
}
/**
* Calculate probabilities for all classes for a given input text
*
* @param inputDocument the input text as a {@code String}
* @return a {@code List} of {@code ClassificationResult}, one for each existing class
* @throws IOException if assigning probabilities fails
*/
private List<ClassificationResult<BytesRef>> assignClassNormalizedList(String inputDocument) throws IOException {
List<ClassificationResult<BytesRef>> assignedClasses = new ArrayList<>();
Terms classes = MultiFields.getTerms(indexReader, classFieldName);
TermsEnum classesEnum = classes.iterator();
BytesRef next;
String[] tokenizedText = tokenize(inputDocument);
while ((next = classesEnum.next()) != null) {
if (next.length > 0) {
Term term = new Term(this.classFieldName, next);
assignedClasses.add(new ClassificationResult<>(term.bytes(), calculateLogPrior(term) + calculateLogLikelihood(tokenizedText, term)));
}
}
return normClassificationResults(assignedClasses);
}
/**
* Normalize the classification results based on the max score available
*
* @param assignedClasses the list of assigned classes
* @return the normalized results
*/
private ArrayList<ClassificationResult<BytesRef>> normClassificationResults(List<ClassificationResult<BytesRef>> assignedClasses) {
// normalization: map the log scores into the 0-1 range via the log-sum-exp trick
ArrayList<ClassificationResult<BytesRef>> returnList = new ArrayList<>();
if (!assignedClasses.isEmpty()) {
Collections.sort(assignedClasses);
// the highest log score: a negative number closest to 0, used as the shift value a
double smax = assignedClasses.get(0).getScore();
double sumLog = 0;
// accumulate sum(exp(x_n - a))
for (ClassificationResult<BytesRef> cr : assignedClasses) {
// getScore() - smax <= 0 (both are negative; smax has the smallest absolute value)
sumLog += Math.exp(cr.getScore() - smax);
}
// loga = a + log(sum(exp(x_n - a))) = log(sum(exp(x_n)))
double loga = smax;
loga += Math.log(sumLog);
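// worked example (hypothetical scores, not part of the patch): for x = {-1.0, -2.0},
// smax = -1.0, sumLog = exp(0) + exp(-1) ≈ 1.368, loga ≈ -0.687, and the normalized
// scores come out as exp(-0.313) ≈ 0.731 and exp(-1.313) ≈ 0.269, which sum to 1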
// x / sum = exp(log(x)) / sum = exp(log(x) - log(sum))
for (ClassificationResult<BytesRef> cr : assignedClasses) {
double scoreDiff = cr.getScore() - loga;
returnList.add(new ClassificationResult<>(cr.getAssignedClass(), Math.exp(scoreDiff)));
}
}
return returnList;
}
/**
* Tokenize a <code>String</code> using this classifier's analyzer, run once per configured text field
*
* @param text the <code>String</code> representing an input text (to be classified)
* @return a <code>String</code> array of the resulting tokens
* @throws IOException if tokenization fails
*/
private String[] tokenize(String text) throws IOException {
Collection<String> result = new LinkedList<>();
for (String textFieldName : textFieldNames) {
try (TokenStream tokenStream = analyzer.tokenStream(textFieldName, text)) {
CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
tokenStream.reset();
while (tokenStream.incrementToken()) {
result.add(charTermAttribute.toString());
}
tokenStream.end();
}
}
return result.toArray(new String[result.size()]);
}
private double calculateLogLikelihood(String[] tokens, Term term) throws IOException {
double result = 0d;
for (String word : tokens) {
result += Math.log(getTermProbForClass(term, word));
}
return result;
}
private double getTermProbForClass(Term classTerm, String... words) throws IOException {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(classTerm), BooleanClause.Occur.MUST));
for (String textFieldName : textFieldNames) {
for (String word : words) {
builder.add(new BooleanClause(new TermQuery(new Term(textFieldName, word)), BooleanClause.Occur.SHOULD));
}
}
if (query != null) {
builder.add(query, BooleanClause.Occur.MUST);
}
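// the BM25 score of the single best match stands in for the term/class likelihood;
// returning 1 keeps the log contribution at 0 when nothing matches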
TopDocs search = indexSearcher.search(builder.build(), 1);
return search.totalHits > 0 ? search.getMaxScore() : 1;
}
private double calculateLogPrior(Term term) throws IOException {
TermQuery termQuery = new TermQuery(term);
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(termQuery, BooleanClause.Occur.MUST);
if (query != null) {
bq.add(query, BooleanClause.Occur.MUST);
}
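// the best BM25 score for the class term alone stands in for the (unnormalized) class prior;
// fall back to a log-prior of 0 when no docs match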
TopDocs topDocs = indexSearcher.search(bq.build(), 1);
return topDocs.totalHits > 0 ? Math.log(topDocs.getMaxScore()) : 0;
}
}
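A minimal usage sketch for the new classifier (illustrative only: the Directory handle "dir", the analyzer choice, and the field names are assumptions, not part of this commit):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DirectoryReader;

// open a reader over an existing index (hypothetical Directory "dir")
IndexReader reader = DirectoryReader.open(dir);
// classes are tokens indexed in "category"; "body" supplies the input text
BM25NBClassifier classifier = new BM25NBClassifier(reader, new StandardAnalyzer(), null, "category", "body");
ClassificationResult<BytesRef> result = classifier.assignClass("an unseen document to classify");
String assignedClass = result.getAssignedClass().utf8ToString();
double score = result.getScore(); // normalized into the 0-1 range by normClassificationResults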

View File

@@ -0,0 +1,154 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.classification;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.apache.lucene.classification.utils.ConfusionMatrixGenerator;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.junit.Test;
/**
* Tests for {@link BM25NBClassifier}
*/
public class BM25NBClassifierTest extends ClassificationTestBase<BytesRef> {
@Test
public void testBasicUsage() throws Exception {
LeafReader leafReader = null;
try {
MockAnalyzer analyzer = new MockAnalyzer(random());
leafReader = getSampleIndex(analyzer);
BM25NBClassifier classifier = new BM25NBClassifier(leafReader, analyzer, null, categoryFieldName, textFieldName);
checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
} finally {
if (leafReader != null) {
leafReader.close();
}
}
}
@Test
public void testBasicUsageWithQuery() throws Exception {
LeafReader leafReader = null;
try {
MockAnalyzer analyzer = new MockAnalyzer(random());
leafReader = getSampleIndex(analyzer);
TermQuery query = new TermQuery(new Term(textFieldName, "not"));
BM25NBClassifier classifier = new BM25NBClassifier(leafReader, analyzer, query, categoryFieldName, textFieldName);
checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
} finally {
if (leafReader != null) {
leafReader.close();
}
}
}
@Test
public void testNGramUsage() throws Exception {
LeafReader leafReader = null;
try {
Analyzer analyzer = new NGramAnalyzer();
leafReader = getSampleIndex(analyzer);
BM25NBClassifier classifier = new BM25NBClassifier(leafReader, analyzer, null, categoryFieldName, textFieldName);
checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
} finally {
if (leafReader != null) {
leafReader.close();
}
}
}
private class NGramAnalyzer extends Analyzer {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
final Tokenizer tokenizer = new KeywordTokenizer();
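// reverse the input, take edge n-grams of the reversed form (lengths 10-20), then reverse back:
// the net effect is suffix n-grams of the original term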
return new TokenStreamComponents(tokenizer, new ReverseStringFilter(new EdgeNGramTokenFilter(new ReverseStringFilter(tokenizer), 10, 20)));
}
}
@Test
public void testPerformance() throws Exception {
MockAnalyzer analyzer = new MockAnalyzer(random());
LeafReader leafReader = getRandomIndex(analyzer, 100);
try {
long trainStart = System.currentTimeMillis();
BM25NBClassifier classifier = new BM25NBClassifier(leafReader,
analyzer, null, categoryFieldName, textFieldName);
long trainEnd = System.currentTimeMillis();
long trainTime = trainEnd - trainStart;
assertTrue("training took more than 10s: " + trainTime / 1000 + "s", trainTime < 10000);
long evaluationStart = System.currentTimeMillis();
ConfusionMatrixGenerator.ConfusionMatrix confusionMatrix = ConfusionMatrixGenerator.getConfusionMatrix(leafReader,
classifier, categoryFieldName, textFieldName, -1);
assertNotNull(confusionMatrix);
long evaluationEnd = System.currentTimeMillis();
long evaluationTime = evaluationEnd - evaluationStart;
assertTrue("evaluation took more than 2m: " + evaluationTime / 1000 + "s", evaluationTime < 120000);
double avgClassificationTime = confusionMatrix.getAvgClassificationTime();
assertTrue("avg classification time: " + avgClassificationTime, 5000 > avgClassificationTime);
double f1 = confusionMatrix.getF1Measure();
assertTrue(f1 >= 0d);
assertTrue(f1 <= 1d);
double accuracy = confusionMatrix.getAccuracy();
assertTrue(accuracy >= 0d);
assertTrue(accuracy <= 1d);
double recall = confusionMatrix.getRecall();
assertTrue(recall >= 0d);
assertTrue(recall <= 1d);
double precision = confusionMatrix.getPrecision();
assertTrue(precision >= 0d);
assertTrue(precision <= 1d);
Terms terms = MultiFields.getTerms(leafReader, categoryFieldName);
TermsEnum iterator = terms.iterator();
BytesRef term;
while ((term = iterator.next()) != null) {
String s = term.utf8ToString();
recall = confusionMatrix.getRecall(s);
assertTrue(recall >= 0d);
assertTrue(recall <= 1d);
precision = confusionMatrix.getPrecision(s);
assertTrue(precision >= 0d);
assertTrue(precision <= 1d);
double f1Measure = confusionMatrix.getF1Measure(s);
assertTrue(f1Measure >= 0d);
assertTrue(f1Measure <= 1d);
}
} finally {
leafReader.close();
}
}
}

View File

@@ -298,7 +298,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
try {
Query singleton = uniqueQueries.putIfAbsent(query, query);
if (singleton == null) {
onQueryCache(singleton, LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsed(query));
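// fix: singleton is null inside this branch (putIfAbsent found no prior entry),
// so the freshly cached entry must be reported with the query itself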
onQueryCache(query, LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsed(query));
} else {
query = singleton;
}

View File

@@ -115,6 +115,13 @@ public final class Version {
@Deprecated
public static final Version LUCENE_6_6_0 = new Version(6, 6, 0);
/**
* Match settings and bugs in Lucene's 6.7.0 release.
* @deprecated Use latest
*/
@Deprecated
public static final Version LUCENE_6_7_0 = new Version(6, 7, 0);
/**
* Match settings and bugs in Lucene's 7.0.0 release.
* <p>

View File

@@ -660,12 +660,14 @@ public class TestLRUQueryCache extends LuceneTestCase {
@Override
protected void onQueryCache(Query query, long ramBytesUsed) {
super.onQueryCache(query, ramBytesUsed);
assertNotNull("cached query is null", query);
ramBytesUsage.addAndGet(ramBytesUsed);
}
@Override
protected void onQueryEviction(Query query, long ramBytesUsed) {
super.onQueryEviction(query, ramBytesUsed);
assertNotNull("evicted query is null", query);
ramBytesUsage.addAndGet(-ramBytesUsed);
}

View File

@@ -142,6 +142,23 @@ Other Changes
* SOLR-10647: Move the V1 <-> V2 API mapping to SolrJ (noble)

==================  6.7.0 ==================

Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

Versions of Major Components
---------------------
Apache Tika 1.13
Carrot2 3.15.0
Velocity 1.7 and Velocity Tools 2.0
Apache UIMA 2.3.1
Apache ZooKeeper 3.4.10
Jetty 9.3.14.v20161028

(No Changes)

==================  6.6.0 ==================

Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

View File

@@ -173,6 +173,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
.withFunctionName("cov", CovarianceEvaluator.class)
.withFunctionName("conv", ConvolutionEvaluator.class)
.withFunctionName("normalize", NormalizeEvaluator.class)
.withFunctionName("rev", ReverseEvaluator.class)
// metrics
.withFunctionName("min", MinMetric.class)

View File

@@ -31,7 +31,7 @@ import org.apache.solr.api.Api;
import org.apache.solr.api.ApiBag;
import org.apache.solr.api.ApiSupport;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.request.CollectionApiMapping;
import org.apache.solr.client.solrj.request.CollectionApiMapping.CommandMeta;
import org.apache.solr.client.solrj.request.CollectionApiMapping.V2EndPoint;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
@@ -182,35 +182,13 @@ public abstract class BaseHandlerApiSupport implements ApiSupport {
}
protected abstract Collection<ApiCommand> getCommands();
public static Collection<String> getParamNames(CommandOperation op, ApiCommand command) {
List<String> result = new ArrayList<>();
Object o = op.getCommandData();
if (o instanceof Map) {
Map map = (Map) o;
collectKeyNames(map, result, "");
}
return result;
}
public static void collectKeyNames(Map<String, Object> map, List<String> result, String prefix) {
for (Map.Entry<String, Object> e : map.entrySet()) {
if (e.getValue() instanceof Map) {
collectKeyNames((Map) e.getValue(), result, prefix + e.getKey() + ".");
} else {
result.add(prefix + e.getKey());
}
}
}
protected abstract List<ApiCommand> getCommands();
protected abstract List<V2EndPoint> getEndPoints();
protected abstract Collection<V2EndPoint> getEndPoints();
public interface ApiCommand {
CollectionApiMapping.CommandMeta meta();
CommandMeta meta();
void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception;
}

View File

@@ -18,27 +18,70 @@
package org.apache.solr.handler.admin;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import org.apache.solr.client.solrj.request.CollectionApiMapping;
import org.apache.solr.client.solrj.request.CollectionApiMapping.CommandMeta;
import org.apache.solr.client.solrj.request.CollectionApiMapping.Meta;
import org.apache.solr.client.solrj.request.CollectionApiMapping.V2EndPoint;
import org.apache.solr.handler.admin.CollectionsHandler.CollectionOperation;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import static org.apache.solr.handler.admin.CollectionsHandler.CollectionOperation.*;
public class CollectionHandlerApi extends BaseHandlerApiSupport {
final CollectionsHandler handler;
static Collection<ApiCommand> apiCommands = createCollMapping();
private static Collection<ApiCommand> createCollMapping() {
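// build one ApiCommand per v2 command (Meta) by pairing it with the v1 CollectionOperation
// that shares its CollectionAction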
Map<Meta, ApiCommand> result = new EnumMap<>(Meta.class);
for (Meta meta : Meta.values()) {
for (CollectionOperation op : CollectionOperation.values()) {
if (op.action == meta.action) {
result.put(meta, new ApiCommand() {
@Override
public CommandMeta meta() {
return meta;
}
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
((CollectionHandlerApi) apiHandler).handler.invokeAction(req, rsp, ((CollectionHandlerApi) apiHandler).handler.coreContainer, op.action, op);
}
});
}
}
}
result.put(Meta.GET_NODES, new ApiCommand() {
@Override
public CommandMeta meta() {
return Meta.GET_NODES;
}
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
rsp.add("nodes", ((CollectionHandlerApi) apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes());
}
});
for (Meta meta : Meta.values()) {
if(result.get(meta) == null){
throw new RuntimeException("No implementation for "+ meta.name());
}
}
return result.values();
}
public CollectionHandlerApi(CollectionsHandler handler) {
this.handler = handler;
}
@Override
protected List<ApiCommand> getCommands() {
return Arrays.asList(Cmd.values());
protected Collection<ApiCommand> getCommands() {
return apiCommands;
}
@Override
@@ -46,66 +89,4 @@ public class CollectionHandlerApi extends BaseHandlerApiSupport {
return Arrays.asList(CollectionApiMapping.EndPoint.values());
}
enum Cmd implements ApiCommand {
GET_COLLECTIONS(Meta.GET_COLLECTIONS,LIST_OP),
GET_CLUSTER(Meta.GET_CLUSTER,LIST_OP),
GET_CLUSTER_OVERSEER(Meta.GET_CLUSTER_OVERSEER,OVERSEERSTATUS_OP),
GET_CLUSTER_STATUS_CMD(Meta.GET_CLUSTER_STATUS_CMD,REQUESTSTATUS_OP),
DELETE_CLUSTER_STATUS(Meta.DELETE_CLUSTER_STATUS,DELETESTATUS_OP),
GET_A_COLLECTION(Meta.GET_A_COLLECTION,CLUSTERSTATUS_OP),
LIST_ALIASES(Meta.LIST_ALIASES,LISTALIASES_OP),
CREATE_COLLECTION(Meta.CREATE_COLLECTION, CREATE_OP),
DELETE_COLL(Meta.DELETE_COLL, DELETE_OP),
RELOAD_COLL(Meta.RELOAD_COLL, RELOAD_OP),
MODIFYCOLLECTION(Meta.MODIFYCOLLECTION, MODIFYCOLLECTION_OP),
MIGRATE_DOCS(Meta.MIGRATE_DOCS,MIGRATE_OP),
REBALANCELEADERS(Meta.REBALANCELEADERS, REBALANCELEADERS_OP),
CREATE_ALIAS(Meta.CREATE_ALIAS, CREATEALIAS_OP),
DELETE_ALIAS(Meta.DELETE_ALIAS, DELETEALIAS_OP),
CREATE_SHARD(Meta.CREATE_SHARD,CREATESHARD_OP),
SPLIT_SHARD(Meta.SPLIT_SHARD, SPLITSHARD_OP),
DELETE_SHARD(Meta.DELETE_SHARD,DELETESHARD_OP),
CREATE_REPLICA(Meta.CREATE_REPLICA,ADDREPLICA_OP),
DELETE_REPLICA(Meta.DELETE_REPLICA,DELETEREPLICA_OP),
SYNC_SHARD(Meta.SYNC_SHARD, SYNCSHARD_OP),
ADDREPLICAPROP(Meta.ADDREPLICAPROP, ADDREPLICAPROP_OP),
DELETEREPLICAPROP(Meta.DELETEREPLICAPROP, DELETEREPLICAPROP_OP),
ADDROLE(Meta.ADDROLE, ADDROLE_OP),
REMOVEROLE(Meta.REMOVEROLE, REMOVEROLE_OP),
CLUSTERPROP(Meta.CLUSTERPROP,CLUSTERPROP_OP),
BACKUP(Meta.BACKUP, BACKUP_OP),
RESTORE(Meta.RESTORE, RESTORE_OP),
GET_NODES(Meta.GET_NODES, null) {
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
rsp.add("nodes", ((CollectionHandlerApi) apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes());
}
},
FORCELEADER(Meta.FORCELEADER,FORCELEADER_OP),
SYNCSHARD(Meta.SYNCSHARD,SYNCSHARD_OP),
BALANCESHARDUNIQUE(Meta.BALANCESHARDUNIQUE,BALANCESHARDUNIQUE_OP)
;
public final CollectionApiMapping.CommandMeta meta;
public final CollectionOperation target;
Cmd(CollectionApiMapping.CommandMeta meta, CollectionOperation target) {
this.meta = meta;
this.target = target;
}
@Override
public CollectionApiMapping.CommandMeta meta() {
return meta;
}
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler)
throws Exception {
((CollectionHandlerApi) apiHandler).handler.invokeAction(req, rsp, ((CollectionHandlerApi) apiHandler).handler.coreContainer, target.action, target);
}
}
}

View File

@@ -18,7 +18,10 @@
package org.apache.solr.handler.admin;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import org.apache.solr.client.solrj.request.CollectionApiMapping;
import org.apache.solr.client.solrj.request.CollectionApiMapping.ConfigSetMeta;
@@ -26,13 +29,39 @@ import org.apache.solr.handler.admin.ConfigSetsHandler.ConfigSetOperation;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import static org.apache.solr.handler.admin.ConfigSetsHandler.ConfigSetOperation.CREATE_OP;
import static org.apache.solr.handler.admin.ConfigSetsHandler.ConfigSetOperation.DELETE_OP;
import static org.apache.solr.handler.admin.ConfigSetsHandler.ConfigSetOperation.LIST_OP;
public class ConfigSetsHandlerApi extends BaseHandlerApiSupport {
final ConfigSetsHandler configSetHandler;
static Collection<ApiCommand> apiCommands = createMapping();
private static Collection<ApiCommand> createMapping() {
Map<ConfigSetMeta, ApiCommand> result = new EnumMap<>(ConfigSetMeta.class);
for (ConfigSetMeta meta : ConfigSetMeta.values())
for (ConfigSetOperation op : ConfigSetOperation.values()) {
if (op.action == meta.action) {
result.put(meta, new ApiCommand() {
@Override
public CollectionApiMapping.CommandMeta meta() {
return meta;
}
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
((ConfigSetsHandlerApi) apiHandler).configSetHandler.invokeAction(req, rsp, op.action);
}
});
}
}
for (ConfigSetMeta meta : ConfigSetMeta.values()) {
if(result.get(meta) == null){
throw new RuntimeException("No implementation for "+ meta.name());
}
}
return result.values();
}
public ConfigSetsHandlerApi(ConfigSetsHandler configSetHandler) {
this.configSetHandler = configSetHandler;
@@ -40,8 +69,8 @@ public class ConfigSetsHandlerApi extends BaseHandlerApiSupport {
@Override
protected List<ApiCommand> getCommands() {
return Arrays.asList(Cmd.values());
protected Collection<ApiCommand> getCommands() {
return apiCommands;
}
@Override
@@ -49,29 +78,4 @@ public class ConfigSetsHandlerApi extends BaseHandlerApiSupport {
return Arrays.asList(CollectionApiMapping.ConfigSetEndPoint.values());
}
enum Cmd implements ApiCommand {
LIST(ConfigSetMeta.LIST, LIST_OP),
CREATE(ConfigSetMeta.CREATE, CREATE_OP),
DEL(ConfigSetMeta.DEL,DELETE_OP);
public ConfigSetMeta meta;
private final ConfigSetOperation op;
Cmd(ConfigSetMeta meta, ConfigSetOperation op) {
this.meta = meta;
this.op = op;
}
@Override
public CollectionApiMapping.CommandMeta meta() {
return meta;
}
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
((ConfigSetsHandlerApi) apiHandler).configSetHandler.invokeAction(req, rsp, op.action);
}
}
}

View File

@@ -18,94 +18,66 @@
package org.apache.solr.handler.admin;
import java.util.Arrays;
import java.util.List;
import java.util.Collection;
import java.util.EnumMap;
import java.util.Map;
import org.apache.solr.client.solrj.request.CollectionApiMapping;
import org.apache.solr.client.solrj.request.CollectionApiMapping.CommandMeta;
import org.apache.solr.client.solrj.request.CollectionApiMapping.V2EndPoint;
import org.apache.solr.client.solrj.request.CoreApiMapping;
import org.apache.solr.client.solrj.request.CoreApiMapping.Meta;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import static org.apache.solr.handler.admin.CoreAdminOperation.CREATE_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.FORCEPREPAREFORLEADERSHIP_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.INVOKE_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.MERGEINDEXES_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.OVERSEEROP_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.PREPRECOVERY_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.REJOINLEADERELECTION_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.RELOAD_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.RENAME_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.REQUESTAPPLYUPDATES_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.REQUESTBUFFERUPDATES_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.REQUESTRECOVERY_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.REQUESTSTATUS_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.REQUESTSYNCSHARD_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.SPLIT_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.STATUS_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.SWAP_OP;
import static org.apache.solr.handler.admin.CoreAdminOperation.UNLOAD_OP;
public class CoreAdminHandlerApi extends BaseHandlerApiSupport {
private final CoreAdminHandler handler;
static Collection<ApiCommand> apiCommands = createMapping();
private static Collection<ApiCommand> createMapping() {
Map<CoreApiMapping.Meta, ApiCommand> result = new EnumMap<>(CoreApiMapping.Meta.class);
for (CoreApiMapping.Meta meta : CoreApiMapping.Meta.values()) {
for (CoreAdminOperation op : CoreAdminOperation.values()) {
if (op.action == meta.action) {
result.put(meta, new ApiCommand() {
@Override
public CommandMeta meta() {
return meta;
}
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
op.execute(new CoreAdminHandler.CallInfo(((CoreAdminHandlerApi) apiHandler).handler,
req,
rsp,
op));
}
});
}
}
}
for (CoreApiMapping.Meta meta : CoreApiMapping.Meta.values()) {
if (result.get(meta) == null) {
throw new RuntimeException("No implementation for " + meta.name());
}
}
return result.values();
}
public CoreAdminHandlerApi(CoreAdminHandler handler) {
this.handler = handler;
}
enum Cmd implements ApiCommand {
CREATE(Meta.CREATE, CREATE_OP),
UNLOAD(Meta.UNLOAD, UNLOAD_OP),
RELOAD(Meta.RELOAD, RELOAD_OP),
STATUS(Meta.STATUS, STATUS_OP),
SWAP(Meta.SWAP, SWAP_OP),
RENAME(Meta.RENAME, RENAME_OP),
MERGEINDEXES(Meta.MERGEINDEXES, MERGEINDEXES_OP),
SPLIT(Meta.SPLIT, SPLIT_OP),
PREPRECOVERY(Meta.PREPRECOVERY, PREPRECOVERY_OP),
REQUESTRECOVERY(Meta.REQUESTRECOVERY, REQUESTRECOVERY_OP),
REQUESTSYNCSHARD(Meta.REQUESTSYNCSHARD, REQUESTSYNCSHARD_OP),
REQUESTBUFFERUPDATES(Meta.REQUESTBUFFERUPDATES, REQUESTBUFFERUPDATES_OP),
REQUESTAPPLYUPDATES(Meta.REQUESTAPPLYUPDATES, REQUESTAPPLYUPDATES_OP),
REQUESTSTATUS(Meta.REQUESTSTATUS, REQUESTSTATUS_OP),
OVERSEEROP(Meta.OVERSEEROP, OVERSEEROP_OP),
REJOINLEADERELECTION(Meta.REJOINLEADERELECTION, REJOINLEADERELECTION_OP),
INVOKE(Meta.INVOKE, INVOKE_OP),
FORCEPREPAREFORLEADERSHIP(Meta.FORCEPREPAREFORLEADERSHIP, FORCEPREPAREFORLEADERSHIP_OP);
public final Meta meta;
public final CoreAdminOperation target;
Cmd(Meta meta, CoreAdminOperation target) {
this.meta = meta;
this.target = target;
}
@Override
public CollectionApiMapping.CommandMeta meta() {
return meta;
}
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
target.execute(new CoreAdminHandler.CallInfo(((CoreAdminHandlerApi) apiHandler).handler,
req,
rsp,
target));
}
}
@Override
protected List<ApiCommand> getCommands() {
return Arrays.asList(Cmd.values());
protected Collection<ApiCommand> getCommands() {
return apiCommands;
}
@Override
protected List<V2EndPoint> getEndPoints() {
protected Collection<V2EndPoint> getEndPoints() {
return Arrays.asList(CoreApiMapping.EndPoint.values());
}

View File

@@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.client.solrj.io.stream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.eval.ComplexEvaluator;
import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
public class ReverseEvaluator extends ComplexEvaluator implements Expressible {
private static final long serialVersionUID = 1;
public ReverseEvaluator(StreamExpression expression, StreamFactory factory) throws IOException {
super(expression, factory);
}
public List<Number> evaluate(Tuple tuple) throws IOException {
StreamEvaluator colEval1 = subEvaluators.get(0);
List<Number> numbers1 = (List<Number>) colEval1.evaluate(tuple);
// copy the evaluated column into a new list in reverse order
List<Number> rev = new ArrayList<>();
for (int i = numbers1.size() - 1; i >= 0; i--) {
rev.add(numbers1.get(i));
}
return rev;
}
@Override
public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass()));
return expression;
}
@Override
public Explanation toExplanation(StreamFactory factory) throws IOException {
return new Explanation(nodeId.toString())
.withExpressionType(ExpressionType.EVALUATOR)
.withFunctionName(factory.getFunctionName(getClass()))
.withImplementingClass(getClass().getName())
.withExpression(toExpression(factory).toString());
}
}
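A quick illustration of how the new rev() evaluator is invoked from a streaming expression (mirroring the testReverse case added below; the collection and field names are just examples):

let(a=timeseries(collection1, q="*:*", start="2013-01-01T01:00:00.000Z", end="2016-12-01T01:00:00.000Z", gap="+1YEAR", field="test_dt", max(price_f)),
    c=col(a, max(price_f)),
    tuple(reverse=rev(c)))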

View File

@@ -26,12 +26,31 @@ import java.util.Map;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.common.params.CollectionParams.CollectionAction;
import org.apache.solr.common.params.ConfigSetParams.ConfigSetAction;
import org.apache.solr.common.util.CommandOperation;
import org.apache.solr.common.util.Utils;
import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE;
import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET;
import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.ConfigSetEndPoint.CONFIG_COMMANDS;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.ConfigSetEndPoint.CONFIG_DEL;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.ConfigSetEndPoint.LIST_CONFIG;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_ALIASES;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_CMD;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_CMD_STATUS;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_CMD_STATUS_DELETE;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_NODES;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.COLLECTIONS;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.COLLECTIONS_COMMANDS;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.COLLECTION_STATE;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.PER_COLLECTION;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.PER_COLLECTION_PER_SHARD_COMMANDS;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.PER_COLLECTION_PER_SHARD_DELETE;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.PER_COLLECTION_PER_SHARD_PER_REPLICA_DELETE;
import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.PER_COLLECTION_SHARDS_COMMANDS;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.*;
import static org.apache.solr.common.params.CommonParams.NAME;
/** stores the mapping of v1 API parameters to v2 API parameters
@@ -41,16 +60,17 @@ import static org.apache.solr.common.params.CommonParams.NAME;
public class CollectionApiMapping {
public enum Meta implements CommandMeta {
GET_COLLECTIONS(EndPoint.COLLECTIONS, GET),
GET_CLUSTER(EndPoint.CLUSTER, GET, "/cluster", null),
GET_CLUSTER_OVERSEER(EndPoint.CLUSTER, GET, "/cluster/overseer", null),
GET_CLUSTER_STATUS_CMD(EndPoint.CLUSTER_CMD_STATUS, GET ),
DELETE_CLUSTER_STATUS(EndPoint.CLUSTER_CMD_STATUS_DELETE, DELETE),
GET_A_COLLECTION(EndPoint.COLLECTION_STATE, GET),
LIST_ALIASES(EndPoint.CLUSTER_ALIASES, GET),
CREATE_COLLECTION(EndPoint.COLLECTIONS_COMMANDS,
GET_COLLECTIONS(COLLECTIONS, GET, LIST),
GET_CLUSTER(CLUSTER, GET, LIST, "/cluster", null),
GET_CLUSTER_OVERSEER(CLUSTER, GET, OVERSEERSTATUS, "/cluster/overseer", null),
GET_CLUSTER_STATUS_CMD(CLUSTER_CMD_STATUS, GET, REQUESTSTATUS),
DELETE_CLUSTER_STATUS(CLUSTER_CMD_STATUS_DELETE, DELETE, DELETESTATUS),
GET_A_COLLECTION(COLLECTION_STATE, GET, CLUSTERSTATUS),
LIST_ALIASES(CLUSTER_ALIASES, GET, LISTALIASES),
CREATE_COLLECTION(COLLECTIONS_COMMANDS,
POST,
CollectionAction.CREATE.toLower(),
CREATE,
CREATE.toLower(),
Utils.makeMap(
"collection.configName", "config",
"createNodeSet.shuffle", "shuffleNodes",
@@ -60,37 +80,45 @@ public class CollectionApiMapping {
DELETE_COLL(EndPoint.PER_COLLECTION_DELETE,
DELETE,
CollectionAction.DELETE,
CollectionAction.DELETE.toLower(),
Utils.makeMap(NAME, "collection")),
RELOAD_COLL(EndPoint.PER_COLLECTION,
RELOAD_COLL(PER_COLLECTION,
POST,
CollectionAction.RELOAD.toLower(),
RELOAD,
RELOAD.toLower(),
Utils.makeMap(NAME, "collection")),
MODIFYCOLLECTION(EndPoint.PER_COLLECTION,
MODIFY_COLLECTION(PER_COLLECTION,
POST,
MODIFYCOLLECTION,
"modify",null),
MIGRATE_DOCS(EndPoint.PER_COLLECTION,
MIGRATE_DOCS(PER_COLLECTION,
POST,
MIGRATE,
"migrate-docs",
Utils.makeMap("split.key", "splitKey",
"target.collection", "target",
"forward.timeout", "forwardTimeout"
)),
REBALANCELEADERS(EndPoint.PER_COLLECTION,
REBALANCE_LEADERS(PER_COLLECTION,
POST,
REBALANCELEADERS,
"rebalance-leaders", null),
CREATE_ALIAS(EndPoint.COLLECTIONS_COMMANDS,
CREATE_ALIAS(COLLECTIONS_COMMANDS,
POST,
CREATEALIAS,
"create-alias",
null),
DELETE_ALIAS(EndPoint.COLLECTIONS_COMMANDS,
DELETE_ALIAS(COLLECTIONS_COMMANDS,
POST,
DELETEALIAS,
"delete-alias",
null),
CREATE_SHARD(EndPoint.PER_COLLECTION_SHARDS_COMMANDS,
CREATE_SHARD(PER_COLLECTION_SHARDS_COMMANDS,
POST,
CREATESHARD,
"create",
Utils.makeMap("createNodeSet", "nodeSet"),
Utils.makeMap("coreProperties.", "property.")) {
@@ -100,60 +128,69 @@ public class CollectionApiMapping {
}
},
SPLIT_SHARD(EndPoint.PER_COLLECTION_SHARDS_COMMANDS,
SPLIT_SHARD(PER_COLLECTION_SHARDS_COMMANDS,
POST,
SPLITSHARD,
"split",
Utils.makeMap(
"split.key", "splitKey"),
Utils.makeMap("coreProperties.", "property.")),
DELETE_SHARD(EndPoint.PER_COLLECTION_PER_SHARD_DELETE,
DELETE),
DELETE_SHARD(PER_COLLECTION_PER_SHARD_DELETE,
DELETE, DELETESHARD),
CREATE_REPLICA(EndPoint.PER_COLLECTION_SHARDS_COMMANDS,
CREATE_REPLICA(PER_COLLECTION_SHARDS_COMMANDS,
POST,
ADDREPLICA,
"add-replica",
null,
Utils.makeMap("coreProperties.", "property.")),
DELETE_REPLICA(EndPoint.PER_COLLECTION_PER_SHARD_PER_REPLICA_DELETE,
DELETE),
DELETE_REPLICA(PER_COLLECTION_PER_SHARD_PER_REPLICA_DELETE,
DELETE, DELETEREPLICA),
SYNC_SHARD(EndPoint.PER_COLLECTION_PER_SHARD_COMMANDS,
SYNC_SHARD(PER_COLLECTION_PER_SHARD_COMMANDS,
POST,
CollectionAction.SYNCSHARD,
"synch-shard",
null),
ADDREPLICAPROP(EndPoint.PER_COLLECTION,
ADD_REPLICA_PROPERTY(PER_COLLECTION,
POST,
CollectionAction.ADDREPLICAPROP,
"add-replica-property",
Utils.makeMap("property", "name", "property.value", "value")),
DELETEREPLICAPROP(EndPoint.PER_COLLECTION,
DELETE_REPLICA_PROPERTY(PER_COLLECTION,
POST,
DELETEREPLICAPROP,
"delete-replica-property",
null),
ADDROLE(EndPoint.CLUSTER_CMD,
ADD_ROLE(CLUSTER_CMD,
POST,
ADDROLE,
"add-role",null),
REMOVEROLE(EndPoint.CLUSTER_CMD,
REMOVE_ROLE(CLUSTER_CMD,
POST,
REMOVEROLE,
"remove-role",null),
CLUSTERPROP(EndPoint.CLUSTER_CMD,
SET_CLUSTER_PROPERTY(CLUSTER_CMD,
POST,
CLUSTERPROP,
"set-property",null),
BACKUP(EndPoint.COLLECTIONS_COMMANDS,
BACKUP_COLLECTION(COLLECTIONS_COMMANDS,
POST,
BACKUP,
"backup-collection", null
),
RESTORE(EndPoint.COLLECTIONS_COMMANDS,
RESTORE_COLLECTION(COLLECTIONS_COMMANDS,
POST,
RESTORE,
"restore-collection",
null
),
GET_NODES(EndPoint.CLUSTER_NODES, null),
FORCELEADER(EndPoint.PER_COLLECTION_PER_SHARD_COMMANDS,POST, "force-leader",null),
SYNCSHARD(EndPoint.PER_COLLECTION_PER_SHARD_COMMANDS,POST, "sync-shard",null),
BALANCESHARDUNIQUE(EndPoint.PER_COLLECTION, POST, "balance-shard-unique",null)
GET_NODES(CLUSTER_NODES, GET, null),
FORCE_LEADER(PER_COLLECTION_PER_SHARD_COMMANDS, POST, CollectionAction.FORCELEADER, "force-leader", null),
BALANCE_SHARD_UNIQUE(PER_COLLECTION, POST, BALANCESHARDUNIQUE,"balance-shard-unique" , null)
;
public final String commandName;
@@ -163,24 +200,26 @@ public class CollectionApiMapping {
public final Map<String, String> paramstoAttr;
// mapping of an old parameter prefix to a new one; for instance, properties.a=val can be substituted with property:{a:val}
public final Map<String, String> prefixSubstitutes;
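// the v1 CollectionAction equivalent of this v2 command; the handlers use it to pair commands with operations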
public final CollectionAction action;
public SolrRequest.METHOD getMethod() {
return method;
}
Meta(EndPoint endPoint, SolrRequest.METHOD method) {
this(endPoint, method, null, null);
Meta(EndPoint endPoint, SolrRequest.METHOD method, CollectionAction action) {
this(endPoint, method, action, null, null);
}
Meta(EndPoint endPoint, SolrRequest.METHOD method,
Meta(EndPoint endPoint, SolrRequest.METHOD method, CollectionAction action,
String commandName, Map paramstoAttr) {
this(endPoint, method, commandName, paramstoAttr, Collections.EMPTY_MAP);
this(endPoint, method, action, commandName, paramstoAttr, Collections.EMPTY_MAP);
}
Meta(EndPoint endPoint, SolrRequest.METHOD method,
Meta(EndPoint endPoint, SolrRequest.METHOD method, CollectionAction action,
String commandName, Map paramstoAttr, Map prefixSubstitutes) {
this.action = action;
this.commandName = commandName;
this.endPoint = endPoint;
this.method = method;
@@ -273,22 +312,21 @@ public class CollectionApiMapping {
}
public enum ConfigSetMeta implements CommandMeta {
LIST(ConfigSetEndPoint.LIST_CONFIG, GET),
CREATE(ConfigSetEndPoint.CONFIG_COMMANDS, POST, "create"),
DEL(ConfigSetEndPoint.CONFIG_DEL, DELETE)
LIST(LIST_CONFIG, GET,null, ConfigSetAction.LIST),
CREATE(CONFIG_COMMANDS, POST, "create", ConfigSetAction.CREATE),
DEL(CONFIG_DEL, DELETE, null, ConfigSetAction.DELETE)
;
private final ConfigSetEndPoint endPoint;
private final SolrRequest.METHOD method;
private final String cmdName;
public final ConfigSetEndPoint endPoint;
public final SolrRequest.METHOD method;
public final String cmdName;
public final ConfigSetAction action;
ConfigSetMeta(ConfigSetEndPoint endPoint, SolrRequest.METHOD method) {
this(endPoint, method, null);
}
ConfigSetMeta(ConfigSetEndPoint endPoint, SolrRequest.METHOD method, String cmdName) {
ConfigSetMeta(ConfigSetEndPoint endPoint, SolrRequest.METHOD method, String cmdName, ConfigSetAction action) {
this.cmdName = cmdName;
this.endPoint = endPoint;
this.method = method;
this.action = action;
}
@Override

View File

@@ -19,7 +19,6 @@ package org.apache.solr.client.solrj.request;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import org.apache.solr.client.solrj.SolrRequest;
@@ -41,36 +40,38 @@ import static org.apache.solr.client.solrj.request.CoreApiMapping.EndPoint.PER_C
*/
public class CoreApiMapping {
public enum Meta implements CommandMeta {
CREATE(CORES_COMMANDS, POST, CoreAdminAction.CREATE, Utils.makeMap("config", "configSet")),
UNLOAD(PER_CORE_COMMANDS, POST, CoreAdminAction.UNLOAD, null),
RELOAD(PER_CORE_COMMANDS, POST, CoreAdminAction.RELOAD, null),
STATUS(CORES_STATUS, GET, CoreAdminAction.STATUS, null),
SWAP(PER_CORE_COMMANDS, POST, CoreAdminAction.SWAP, Utils.makeMap("other", "with")),
RENAME(PER_CORE_COMMANDS, POST, CoreAdminAction.RENAME, null),
MERGEINDEXES(PER_CORE_COMMANDS, POST, "merge-indexes", null),
SPLIT(PER_CORE_COMMANDS, POST, CoreAdminAction.SPLIT, Utils.makeMap("split.key", "splitKey")),
PREPRECOVERY(PER_CORE_COMMANDS, POST, "prep-recovery", null),
REQUESTRECOVERY(PER_CORE_COMMANDS, POST, CoreAdminAction.REQUESTRECOVERY, null),
REQUESTSYNCSHARD(PER_CORE_COMMANDS, POST, "request-sync-shard", null),
REQUESTBUFFERUPDATES(PER_CORE_COMMANDS, POST, "request-buffer-updates", null),
REQUESTAPPLYUPDATES(PER_CORE_COMMANDS, POST, "request-apply-updates", null),
REQUESTSTATUS(PER_CORE_COMMANDS, POST, CoreAdminAction.REQUESTSTATUS, null),
OVERSEEROP(NODEAPIS, POST, "overseer-op", null),
REJOINLEADERELECTION(NODEAPIS, POST, "rejoin-leader-election", null),
INVOKE(NODEINVOKE, GET, CoreAdminAction.INVOKE, null),
FORCEPREPAREFORLEADERSHIP(PER_CORE_COMMANDS, POST, "force-prepare-for-leadership", null);
CREATE(CORES_COMMANDS, POST, CoreAdminAction.CREATE, "create", Utils.makeMap("config", "configSet")),
UNLOAD(PER_CORE_COMMANDS, POST, CoreAdminAction.UNLOAD, "unload", null),
RELOAD(PER_CORE_COMMANDS, POST, CoreAdminAction.RELOAD, "reload", null),
STATUS(CORES_STATUS, GET, CoreAdminAction.STATUS, "status", null),
SWAP(PER_CORE_COMMANDS, POST, CoreAdminAction.SWAP, "swap", Utils.makeMap("other", "with")),
RENAME(PER_CORE_COMMANDS, POST, CoreAdminAction.RENAME, "rename", null),
MERGEINDEXES(PER_CORE_COMMANDS, POST, CoreAdminAction.MERGEINDEXES, "merge-indexes", null),
SPLIT(PER_CORE_COMMANDS, POST, CoreAdminAction.SPLIT, "split", Utils.makeMap("split.key", "splitKey")),
PREPRECOVERY(PER_CORE_COMMANDS, POST, CoreAdminAction.PREPRECOVERY, "prep-recovery", null),
REQUESTRECOVERY(PER_CORE_COMMANDS, POST, CoreAdminAction.REQUESTRECOVERY, "request-recovery", null),
REQUESTSYNCSHARD(PER_CORE_COMMANDS, POST, CoreAdminAction.REQUESTSYNCSHARD, "request-sync-shard", null),
REQUESTBUFFERUPDATES(PER_CORE_COMMANDS, POST, CoreAdminAction.REQUESTBUFFERUPDATES, "request-buffer-updates", null),
REQUESTAPPLYUPDATES(PER_CORE_COMMANDS, POST, CoreAdminAction.REQUESTAPPLYUPDATES, "request-apply-updates", null),
REQUESTSTATUS(PER_CORE_COMMANDS, GET, CoreAdminAction.REQUESTSTATUS, "request-status", null),/*TODO*/
OVERSEEROP(NODEAPIS, POST, CoreAdminAction.OVERSEEROP, "overseer-op", null),
REJOINLEADERELECTION(NODEAPIS, POST, CoreAdminAction.REJOINLEADERELECTION, "rejoin-leader-election", null),
INVOKE(NODEINVOKE, GET, CoreAdminAction.INVOKE,"invoke", null),
FORCEPREPAREFORLEADERSHIP(PER_CORE_COMMANDS, POST, CoreAdminAction.FORCEPREPAREFORLEADERSHIP, "force-prepare-for-leadership", null);
public final String commandName;
public final EndPoint endPoint;
public final SolrRequest.METHOD method;
public final CoreAdminAction action;
public final Map<String, String> paramstoAttr;
Meta(EndPoint endPoint, SolrRequest.METHOD method, Object commandName,
Meta(EndPoint endPoint, SolrRequest.METHOD method, CoreAdminAction action, String commandName,
Map paramstoAttr) {
this.commandName = commandName.toString().toLowerCase(Locale.ROOT);
this.commandName = commandName;
this.endPoint = endPoint;
this.method = method;
this.paramstoAttr = paramstoAttr == null ? Collections.EMPTY_MAP : Collections.unmodifiableMap(paramstoAttr);
this.action = action;
}
@Override

View File

@@ -5299,6 +5299,58 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertTrue(tuples.get(0).getDouble("cov").equals(-625.0D));
}
@Test
public void testReverse() throws Exception {
UpdateRequest updateRequest = new UpdateRequest();
int i=0;
while(i<50) {
updateRequest.add(id, "id_"+(++i),"test_dt", getDateString("2016", "5", "1"), "price_f", "400.00");
}
while(i<100) {
updateRequest.add(id, "id_"+(++i),"test_dt", getDateString("2015", "5", "1"), "price_f", "300.0");
}
while(i<150) {
updateRequest.add(id, "id_"+(++i),"test_dt", getDateString("2014", "5", "1"), "price_f", "500.0");
}
while(i<250) {
updateRequest.add(id, "id_"+(++i),"test_dt", getDateString("2013", "5", "1"), "price_f", "100.00");
}
updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
String expr = "timeseries("+COLLECTIONORALIAS+", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " +
"end=\"2016-12-01T01:00:00.000Z\", " +
"gap=\"+1YEAR\", " +
"field=\"test_dt\", " +
"count(*), sum(price_f), max(price_f), min(price_f))";
String cexpr = "let(a="+expr+", c=col(a, max(price_f)), tuple(reverse=rev(c)))";
ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
paramsLoc.set("expr", cexpr);
paramsLoc.set("qt", "/stream");
String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
TupleStream solrStream = new SolrStream(url, paramsLoc);
StreamContext context = new StreamContext();
solrStream.setStreamContext(context);
List<Tuple> tuples = getTuples(solrStream);
assertTrue(tuples.size() == 1);
List<Number> reverse = (List<Number>)tuples.get(0).get("reverse");
assertTrue(reverse.size() == 4);
assertTrue(reverse.get(0).doubleValue() == 400D);
assertTrue(reverse.get(1).doubleValue() == 300D);
assertTrue(reverse.get(2).doubleValue() == 500D);
assertTrue(reverse.get(3).doubleValue() == 100D);
}
@Test
public void testConvolution() throws Exception {
UpdateRequest updateRequest = new UpdateRequest();