commit 36da0e4a5b
Author: Karl Wright
Date:   2017-08-25 11:42:55 -04:00
6 changed files with 88 additions and 23 deletions

solr/CHANGES.txt

@@ -105,6 +105,10 @@ Bug Fixes
* SOLR-11289: fix comma handling in terms component. (yonik)
* SOLR-11164, SOLR-11180, SOLR-11220: Fix NullPointerException and always-returns-zero
contrib/ltr OriginalScoreFeature issues in SolrCloud mode.
(Yuki Yano, Jonathan Gonzalez, Ryan Yacyshyn, Christine Poerschke)
Optimizations
----------------------
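Background for the SOLR-11164/SOLR-11180/SOLR-11220 entry above: OriginalScoreFeature logs the score a document received from the original, pre-rerank query, and the NullPointerException / always-zero symptoms showed up when feature vectors were requested in SolrCloud mode. A hedged SolrJ sketch of that kind of request follows; the collection name "techproducts", the feature store name "test", the query, and the client wiring are illustrative and not taken from this commit, and the collection is assumed to already have LTR features configured.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class FeatureLoggingSketch {
  public static void main(String[] args) throws Exception {
    // 7.x-era builder; client construction varies across SolrJ versions
    try (CloudSolrClient client =
             new CloudSolrClient.Builder().withZkHost("localhost:9983").build()) {
      SolrQuery query = new SolrQuery("title:test");
      // ask the LTR [features] transformer to log feature values,
      // which is where OriginalScoreFeature's value shows up
      query.setFields("id", "score", "[features store=test]");
      QueryResponse rsp = client.query("techproducts", query);
      // the logged vector comes back under the "[features]" pseudo-field
      System.out.println(rsp.getResults().get(0).getFirstValue("[features]"));
    }
  }
}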

LTRFeatureLoggerTransformerFactory.java

@@ -225,15 +225,15 @@ public class LTRFeatureLoggerTransformerFactory extends TransformerFactory {
true,
threadManager); // request feature weights to be created for all features
// Local transformer efi if provided
scoringQuery.setOriginalQuery(context.getQuery());
}catch (final Exception e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"retrieving the feature store "+featureStoreName, e);
}
}
if (scoringQuery.getOriginalQuery() == null) {
scoringQuery.setOriginalQuery(context.getQuery());
}
if (scoringQuery.getFeatureLogger() == null){
scoringQuery.setFeatureLogger( SolrQueryRequestContextUtils.getFeatureLogger(req) );
}
@@ -261,7 +261,7 @@ public class LTRFeatureLoggerTransformerFactory extends TransformerFactory {
@Override
public void transform(SolrDocument doc, int docid)
throws IOException {
implTransform(doc, docid, 0.0f);
implTransform(doc, docid, null);
}
private void implTransform(SolrDocument doc, int docid, Float score)
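A note on the 0.0f to null change above: passing a nullable Float lets implTransform tell "no score supplied" apart from a genuine score of 0.0f, which is exactly the always-returns-zero symptom named in CHANGES.txt. A minimal standalone illustration of the sentinel idea (not Solr code; the names are made up):

public class NullableScoreSketch {
  // With a 0.0f default, every caller lacking a score silently reports 0.0;
  // with a null sentinel, the code can fall back to computing the real score.
  static float resolveScore(Float suppliedScore, float computedScore) {
    return (suppliedScore != null) ? suppliedScore : computedScore;
  }

  public static void main(String[] args) {
    System.out.println(resolveScore(null, 7.5f)); // 7.5 -> falls back to the computed score
    System.out.println(resolveScore(0.0f, 7.5f)); // 0.0 -> an explicit zero is preserved
  }
}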

TestLTROnSolrCloud.java

@@ -29,6 +29,7 @@ import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.ltr.feature.OriginalScoreFeature;
import org.apache.solr.ltr.feature.SolrFeature;
import org.apache.solr.ltr.feature.ValueFeature;
import org.apache.solr.ltr.model.LinearModel;
@@ -53,9 +54,9 @@ public class TestLTROnSolrCloud extends TestRerankBase {
int numberOfShards = random().nextInt(4)+1;
int numberOfReplicas = random().nextInt(2)+1;
int maxShardsPerNode = numberOfShards+random().nextInt(4)+1;
int maxShardsPerNode = random().nextInt(4)+1;
int numberOfNodes = numberOfShards * maxShardsPerNode;
int numberOfNodes = (numberOfShards*numberOfReplicas + (maxShardsPerNode-1))/maxShardsPerNode;
setupSolrCluster(numberOfShards, numberOfReplicas, numberOfNodes, maxShardsPerNode);
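The replacement arithmetic above is an integer ceiling division: it provisions just enough nodes to host numberOfShards * numberOfReplicas cores when each node may carry at most maxShardsPerNode of them. A small self-contained restatement (helper and class names are made up):

public class NodeCountSketch {
  // ceil((shards * replicas) / maxShardsPerNode) using integer arithmetic only
  static int nodesNeeded(int numberOfShards, int numberOfReplicas, int maxShardsPerNode) {
    int cores = numberOfShards * numberOfReplicas;
    return (cores + (maxShardsPerNode - 1)) / maxShardsPerNode;
  }

  public static void main(String[] args) {
    System.out.println(nodesNeeded(3, 2, 4)); // 6 cores at 4 per node -> 2 nodes
    System.out.println(nodesNeeded(1, 1, 4)); // 1 core still needs 1 node
  }
}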
@@ -106,21 +107,21 @@ public class TestLTROnSolrCloud extends TestRerankBase {
final Float original_result7_score = (Float)queryResponse.getResults().get(7).get("score");
final String result0_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","64.0", "c3","2.0");
"powpularityS","64.0", "c3","2.0", "original","0.0");
final String result1_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","49.0", "c3","2.0");
"powpularityS","49.0", "c3","2.0", "original","1.0");
final String result2_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","36.0", "c3","2.0");
"powpularityS","36.0", "c3","2.0", "original","2.0");
final String result3_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","25.0", "c3","2.0");
"powpularityS","25.0", "c3","2.0", "original","3.0");
final String result4_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS","16.0", "c3","2.0");
"powpularityS","16.0", "c3","2.0", "original","4.0");
final String result5_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS", "9.0", "c3","2.0");
"powpularityS", "9.0", "c3","2.0", "original","5.0");
final String result6_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS", "4.0", "c3","2.0");
"powpularityS", "4.0", "c3","2.0", "original","6.0");
final String result7_features= FeatureLoggerTestUtils.toFeatureVector(
"powpularityS", "1.0", "c3","2.0");
"powpularityS", "1.0", "c3","2.0", "original","7.0");
// Test feature vectors returned (without re-ranking)
@@ -256,8 +257,8 @@ public class TestLTROnSolrCloud extends TestRerankBase {
private void loadModelsAndFeatures() throws Exception {
final String featureStore = "test";
final String[] featureNames = new String[] {"powpularityS","c3"};
final String jsonModelParams = "{\"weights\":{\"powpularityS\":1.0,\"c3\":1.0}}";
final String[] featureNames = new String[] {"powpularityS","c3", "original"};
final String jsonModelParams = "{\"weights\":{\"powpularityS\":1.0,\"c3\":1.0,\"original\":0.1}}";
loadFeature(
featureNames[0],
@@ -271,6 +272,12 @@ public class TestLTROnSolrCloud extends TestRerankBase {
featureStore,
"{\"value\":2}"
);
loadFeature(
featureNames[2],
OriginalScoreFeature.class.getCanonicalName(),
featureStore,
null
);
loadModel(
"powpularityS-model",

TestExternalFeatures.java

@@ -139,9 +139,9 @@ public class TestExternalFeatures extends TestRerankBase {
final String docs0fvalias_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
"occurrences","0.0",
"originalScore","0.0");
"originalScore","1.0");
final String docs0fvalias_sparse_csv = FeatureLoggerTestUtils.toFeatureVector(
"originalScore","0.0");
"originalScore","1.0");
final String docs0fvalias_default_csv = chooseDefaultFeatureVector(docs0fvalias_dense_csv, docs0fvalias_sparse_csv);
@@ -159,9 +159,9 @@ public class TestExternalFeatures extends TestRerankBase {
final String docs0fvalias_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
"occurrences","0.0",
"originalScore","0.0");
"originalScore","1.0");
final String docs0fvalias_sparse_csv = FeatureLoggerTestUtils.toFeatureVector(
"originalScore","0.0");
"originalScore","1.0");
final String docs0fvalias_default_csv = chooseDefaultFeatureVector(docs0fvalias_dense_csv, docs0fvalias_sparse_csv);

SolrSlf4jReporter.java

@@ -18,11 +18,18 @@ package org.apache.solr.metrics.reporters;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Slf4jReporter;
import com.codahale.metrics.Timer;
import org.apache.solr.metrics.FilteringSolrMetricReporter;
import org.apache.solr.metrics.SolrMetricManager;
import org.slf4j.Logger;
@@ -50,6 +57,7 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
private String logger = null;
private Slf4jReporter reporter;
private boolean active;
private DiagnosticMetricRegistry diagnosticMetricRegistry;
/**
* Create a SLF4J reporter for metrics managed in a named registry.
@@ -77,8 +85,10 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
} else {
instancePrefix = instancePrefix + "." + registryName;
}
MetricRegistry registry = metricManager.registry(registryName);
diagnosticMetricRegistry = new DiagnosticMetricRegistry(registry);
Slf4jReporter.Builder builder = Slf4jReporter
.forRegistry(metricManager.registry(registryName))
.forRegistry(diagnosticMetricRegistry)
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS);
@@ -123,4 +133,43 @@ public class SolrSlf4jReporter extends FilteringSolrMetricReporter {
boolean isActive() {
return active;
}
// for unit tests
int getCount() {
return diagnosticMetricRegistry != null ? diagnosticMetricRegistry.count : -1;
}
static class DiagnosticMetricRegistry extends MetricRegistry {
MetricRegistry delegate;
int count = 0;
DiagnosticMetricRegistry(MetricRegistry delegate) {
this.delegate = delegate;
}
@Override
public SortedMap<String, Counter> getCounters(MetricFilter filter) {
return delegate.getCounters(filter);
}
@Override
public SortedMap<String, Histogram> getHistograms(MetricFilter filter) {
return delegate.getHistograms(filter);
}
@Override
public SortedMap<String, Meter> getMeters(MetricFilter filter) {
return delegate.getMeters(filter);
}
@Override
public SortedMap<String, Timer> getTimers(MetricFilter filter) {
return delegate.getTimers(filter);
}
@Override
public SortedMap<String, Gauge> getGauges(MetricFilter filter) {
count++;
return delegate.getGauges(filter);
}
}
}
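The wrapper above works because Dropwizard's ScheduledReporter (the base class of Slf4jReporter) reads metrics once per report interval through the getGauges/getCounters/getHistograms/getMeters/getTimers(MetricFilter) accessors, so counting getGauges calls effectively counts report cycles. A hedged wiring sketch follows; it assumes it is compiled in the org.apache.solr.metrics.reporters package (the nested class and its count field are package-private), and the one-second interval, logger name, and counter name are illustrative:

package org.apache.solr.metrics.reporters;

import java.util.concurrent.TimeUnit;

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Slf4jReporter;
import org.slf4j.LoggerFactory;

public class DiagnosticRegistrySketch {
  public static void main(String[] args) throws InterruptedException {
    MetricRegistry real = new MetricRegistry();
    real.counter("requests").inc(); // something for the reporter to log

    SolrSlf4jReporter.DiagnosticMetricRegistry diag =
        new SolrSlf4jReporter.DiagnosticMetricRegistry(real);

    Slf4jReporter reporter = Slf4jReporter.forRegistry(diag)
        .outputTo(LoggerFactory.getLogger("metrics.sketch"))
        .convertRatesTo(TimeUnit.SECONDS)
        .convertDurationsTo(TimeUnit.MILLISECONDS)
        .build();
    reporter.start(1, TimeUnit.SECONDS); // report once per second

    Thread.sleep(3500);
    reporter.stop();
    System.out.println("report cycles observed: " + diag.count); // about 3
  }
}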

SolrSlf4jReporterTest.java

@@ -78,14 +78,19 @@ public class SolrSlf4jReporterTest extends SolrTestCaseJ4 {
}
Thread.sleep(5000);
int count1 = ((SolrSlf4jReporter)reporter1).getCount();
assertTrue("test1 count should be greater than 0", count1 > 0);
int count2 = ((SolrSlf4jReporter)reporter2).getCount();
assertTrue("test2 count should be greater than 0", count2 > 0);
SolrDocumentList history = watcher.getHistory(-1, null);
// dot-separated names are treated like class names and collapsed
// in regular log output, but here we get the full name
if (history.stream().filter(d -> "solr.node".equals(d.getFirstValue("logger"))).count() == 0) {
fail("No 'solr.node' logs in: " + history.toString());
fail("count1=" + count1 + ", count2=" + count2 + " - no 'solr.node' logs in: " + history.toString());
}
if (history.stream().filter(d -> "foobar".equals(d.getFirstValue("logger"))).count() == 0) {
fail("No 'foobar' logs in: " + history.toString());
fail("count1=" + count1 + ", count2=" + count2 + " - no 'foobar' logs in: " + history.toString());
}
}
}