Query DSL: custom score (scripted) query, closes #220.
parent 2d0785078c
commit 751c56f31e
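For context, a minimal usage sketch (not part of the commit) of the new query through the Java API this change adds, i.e. QueryBuilders.customScoreQuery plus CustomScoreQueryBuilder. The import paths, the termQuery helper, the field name and the field accessor used inside the script are assumptions for illustration; the "doc" and "score" script variables are the ones the parser below actually binds.

import org.elasticsearch.index.query.xcontent.CustomScoreQueryBuilder;

import static org.elasticsearch.index.query.xcontent.QueryBuilders.*;

public class CustomScoreQueryExample {
    public static void main(String[] args) {
        // Wrap a term query and rescore every matching document with an MVEL script.
        // The parser binds the current document's field data under "doc" and the
        // wrapped query's score under "score"; the field name and accessor in the
        // script are illustrative only.
        CustomScoreQueryBuilder query = customScoreQuery(termQuery("user", "kimchy"))
                .script("score * doc['popularity'].value")
                .boost(2.0f);
        // The builder is then set as the query of a search request in the usual way.
    }
}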
@ -71,6 +71,7 @@
|
|||
<w>multi</w>
|
||||
<w>multicast</w>
|
||||
<w>multiline</w>
|
||||
<w>mvel</w>
|
||||
<w>nanos</w>
|
||||
<w>newcount</w>
|
||||
<w>ngram</w>
|
||||
|
@ -109,6 +110,7 @@
|
|||
<w>traslog</w>
|
||||
<w>trie</w>
|
||||
<w>tuple</w>
|
||||
<w>unboxed</w>
|
||||
<w>unicast</w>
|
||||
<w>unregister</w>
|
||||
<w>uptime</w>
|
||||
|
|
|
@ -40,6 +40,17 @@
|
|||
</SOURCES>
|
||||
</library>
|
||||
</orderEntry>
|
||||
<orderEntry type="module-library">
|
||||
<library name="mvel">
|
||||
<CLASSES>
|
||||
<root url="jar://$GRADLE_REPOSITORY$/org.mvel/mvel2/jars/mvel2-2.0.17.jar!/" />
|
||||
</CLASSES>
|
||||
<JAVADOC />
|
||||
<SOURCES>
|
||||
<root url="jar://$MODULE_DIR$/../../../../../opt/mvel/mvel2-2.0.17-sources.jar!/" />
|
||||
</SOURCES>
|
||||
</library>
|
||||
</orderEntry>
|
||||
<orderEntry type="library" scope="TEST" name="testng" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="hamcrest" level="project" />
|
||||
<orderEntry type="module" module-name="test-testng" scope="TEST" />
|
||||
|
|
|
@ -37,6 +37,7 @@ allprojects {
|
|||
repositories {
|
||||
mavenCentral()
|
||||
mavenRepo urls: 'https://repository.jboss.org/nexus/content/groups/public'
|
||||
mavenRepo urls: 'http://repository.codehaus.org/'
|
||||
mavenRepo urls: 'http://elasticsearch.googlecode.com/svn/maven'
|
||||
}
|
||||
}
|
||||
|
@ -82,7 +83,8 @@ task explodedDist(dependsOn: [configurations.distLib], description: 'Builds a mi
|
|||
ant.delete { fileset(dir: explodedDistLibDir, includes: "joda-*.jar") } // no need joda, we jarjar it
|
||||
ant.delete { fileset(dir: explodedDistLibDir, includes: "snakeyaml-*.jar") } // no need snakeyaml, we jarjar it
|
||||
ant.delete { fileset(dir: explodedDistLibDir, includes: "sigar-*.jar") } // no need sigar directly under lib...
|
||||
ant.delete { fileset(dir: explodedDistLibDir, includes: "netty-*.jar") } // no need sigar directly under lib...
|
||||
ant.delete { fileset(dir: explodedDistLibDir, includes: "netty-*.jar") } // no need netty directly under lib...
|
||||
ant.delete { fileset(dir: explodedDistLibDir, includes: "mvel2-*.jar") } // no need mvel2 directly under lib...
|
||||
|
||||
ant.chmod(dir: "$explodedDistDir/bin", perm: "ugo+rx", includes: "**/*")
|
||||
}
|
||||
|
|
|
@ -31,6 +31,8 @@ dependencies {
|
|||
|
||||
compile 'joda-time:joda-time:1.6'
|
||||
|
||||
compile 'org.mvel:mvel2:2.0.17'
|
||||
|
||||
compile 'org.codehaus.jackson:jackson-core-asl:1.5.2'
|
||||
compile 'org.yaml:snakeyaml:1.6'
|
||||
|
||||
|
@ -70,12 +72,13 @@ jar << {
|
|||
jarjar(jarfile: jarjarArchivePath) {
|
||||
zipfileset(src: jar.archivePath)
|
||||
configurations.compile.files.findAll {file ->
|
||||
['jackson', 'joda', 'snakeyaml', 'netty'].any { file.name.contains(it) }
|
||||
['mvel', 'jackson', 'joda', 'snakeyaml', 'netty'].any { file.name.contains(it) }
|
||||
}.each { jarjarFile ->
|
||||
zipfileset(src: jarjarFile) {
|
||||
exclude(name: "META-INF/**")
|
||||
}
|
||||
}
|
||||
rule pattern: "org.mvel2.**", result: "org.elasticsearch.util.mvel2.@1"
|
||||
rule pattern: "org.codehaus.jackson.**", result: "org.elasticsearch.util.jackson.@1"
|
||||
rule pattern: "org.joda.**", result: "org.elasticsearch.util.joda.@1"
|
||||
rule pattern: "org.yaml.**", result: "org.elasticsearch.util.yaml.@1"
|
||||
|
@ -88,6 +91,7 @@ jar << {
|
|||
// seems like empty dirs still exist, unjar and clean them
|
||||
unjar(src: jar.archivePath, dest: "build/tmp/extracted")
|
||||
delete(dir: "build/tmp/extracted/org/codehaus")
|
||||
delete(dir: "build/tmp/extracted/org/mvel2")
|
||||
delete(dir: "build/tmp/extracted/org/joda")
|
||||
delete(dir: "build/tmp/extracted/org/yaml")
|
||||
delete(dir: "build/tmp/extracted/org/jboss")
|
||||
|
@ -162,6 +166,7 @@ uploadArchives {
|
|||
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('joda') }
|
||||
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('snakeyaml') }
|
||||
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('netty') }
|
||||
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('mvel') }
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.cache;
|
||||
|
||||
import org.elasticsearch.util.component.AbstractComponent;
|
||||
import org.elasticsearch.util.inject.Inject;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class NodeCache extends AbstractComponent {
|
||||
|
||||
@Inject public NodeCache(Settings settings) {
|
||||
super(settings);
|
||||
}
|
||||
}
|
|
@ -17,14 +17,16 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.lucene.search.function;
|
||||
package org.elasticsearch.cache;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.elasticsearch.util.inject.AbstractModule;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public interface FunctionProvider {
|
||||
public class NodeCacheModule extends AbstractModule {
|
||||
|
||||
Function function(IndexReader reader);
|
||||
@Override protected void configure() {
|
||||
bind(NodeCache.class).asEagerSingleton();
|
||||
}
|
||||
}
|
|
@ -48,6 +48,10 @@ public abstract class DocFieldData<T extends FieldData> {
|
|||
return fieldData.stringValue(docId);
|
||||
}
|
||||
|
||||
public String getStringValue() {
|
||||
return stringValue();
|
||||
}
|
||||
|
||||
public FieldData.Type getType() {
|
||||
return fieldData.type();
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.index.field.data.longs;
|
||||
|
||||
import org.elasticsearch.index.field.data.NumericDocFieldData;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
|
@ -37,4 +38,12 @@ public class LongDocFieldData extends NumericDocFieldData<LongFieldData> {
|
|||
public long[] getValues() {
|
||||
return fieldData.values(docId);
|
||||
}
|
||||
|
||||
public MutableDateTime getDate() {
|
||||
return fieldData.date(docId);
|
||||
}
|
||||
|
||||
public MutableDateTime[] getDates() {
|
||||
return fieldData.dates(docId);
|
||||
}
|
||||
}
|
|
@ -24,7 +24,9 @@ import org.apache.lucene.search.FieldCache;
|
|||
import org.elasticsearch.index.field.data.FieldDataOptions;
|
||||
import org.elasticsearch.index.field.data.NumericFieldData;
|
||||
import org.elasticsearch.index.field.data.support.FieldDataLoader;
|
||||
import org.elasticsearch.util.ThreadLocals;
|
||||
import org.elasticsearch.util.gnu.trove.TLongArrayList;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -34,7 +36,13 @@ import java.io.IOException;
|
|||
public abstract class LongFieldData extends NumericFieldData<LongDocFieldData> {
|
||||
|
||||
static final long[] EMPTY_LONG_ARRAY = new long[0];
|
||||
static final MutableDateTime[] EMPTY_DATETIME_ARRAY = new MutableDateTime[0];
|
||||
|
||||
private ThreadLocal<ThreadLocals.CleanableValue<MutableDateTime>> dateTimeCache = new ThreadLocal<ThreadLocals.CleanableValue<MutableDateTime>>() {
|
||||
@Override protected ThreadLocals.CleanableValue<MutableDateTime> initialValue() {
|
||||
return new ThreadLocals.CleanableValue<MutableDateTime>(new MutableDateTime());
|
||||
}
|
||||
};
|
||||
|
||||
protected final long[] values;
|
||||
protected final int[] freqs;
|
||||
|
@ -49,6 +57,14 @@ public abstract class LongFieldData extends NumericFieldData<LongDocFieldData> {
|
|||
|
||||
abstract public long[] values(int docId);
|
||||
|
||||
public MutableDateTime date(int docId) {
|
||||
MutableDateTime dateTime = dateTimeCache.get().get();
|
||||
dateTime.setMillis(value(docId));
|
||||
return dateTime;
|
||||
}
|
||||
|
||||
public abstract MutableDateTime[] dates(int docId);
|
||||
|
||||
@Override public LongDocFieldData docFieldData(int docId) {
|
||||
return super.docFieldData(docId);
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.field.data.longs;
|
|||
import org.elasticsearch.index.field.data.FieldDataOptions;
|
||||
import org.elasticsearch.index.field.data.doubles.DoubleFieldData;
|
||||
import org.elasticsearch.util.ThreadLocals;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
|
@ -40,6 +41,20 @@ public class MultiValueLongFieldData extends LongFieldData {
|
|||
}
|
||||
};
|
||||
|
||||
private ThreadLocal<ThreadLocals.CleanableValue<MutableDateTime[][]>> dateTimesCache = new ThreadLocal<ThreadLocals.CleanableValue<MutableDateTime[][]>>() {
|
||||
@Override protected ThreadLocals.CleanableValue<MutableDateTime[][]> initialValue() {
|
||||
MutableDateTime[][] value = new MutableDateTime[VALUE_CACHE_SIZE][];
|
||||
for (int i = 0; i < value.length; i++) {
|
||||
value[i] = new MutableDateTime[i];
|
||||
for (int j = 0; j < i; j++) {
|
||||
value[i][j] = new MutableDateTime();
|
||||
}
|
||||
}
|
||||
return new ThreadLocals.CleanableValue<MutableDateTime[][]>(value);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
private ThreadLocal<ThreadLocals.CleanableValue<long[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<long[][]>>() {
|
||||
@Override protected ThreadLocals.CleanableValue<long[][]> initialValue() {
|
||||
long[][] value = new long[VALUE_CACHE_SIZE][];
|
||||
|
@ -86,6 +101,26 @@ public class MultiValueLongFieldData extends LongFieldData {
|
|||
}
|
||||
}
|
||||
|
||||
@Override public MutableDateTime[] dates(int docId) {
|
||||
int[] docOrders = order[docId];
|
||||
if (docOrders == null) {
|
||||
return EMPTY_DATETIME_ARRAY;
|
||||
}
|
||||
MutableDateTime[] dates;
|
||||
if (docOrders.length < VALUE_CACHE_SIZE) {
|
||||
dates = dateTimesCache.get().get()[docOrders.length];
|
||||
} else {
|
||||
dates = new MutableDateTime[docOrders.length];
|
||||
for (int i = 0; i < dates.length; i++) {
|
||||
dates[i] = new MutableDateTime();
|
||||
}
|
||||
}
|
||||
for (int i = 0; i < docOrders.length; i++) {
|
||||
dates[i].setMillis(values[docOrders[i]]);
|
||||
}
|
||||
return dates;
|
||||
}
|
||||
|
||||
@Override public double[] doubleValues(int docId) {
|
||||
int[] docOrders = order[docId];
|
||||
if (docOrders == null) {
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.field.data.longs;
|
|||
|
||||
import org.elasticsearch.index.field.data.FieldDataOptions;
|
||||
import org.elasticsearch.index.field.data.doubles.DoubleFieldData;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
|
@ -33,6 +34,14 @@ public class SingleValueLongFieldData extends LongFieldData {
|
|||
}
|
||||
};
|
||||
|
||||
private ThreadLocal<MutableDateTime[]> datesValuesCache = new ThreadLocal<MutableDateTime[]>() {
|
||||
@Override protected MutableDateTime[] initialValue() {
|
||||
MutableDateTime[] date = new MutableDateTime[1];
|
||||
date[0] = new MutableDateTime();
|
||||
return date;
|
||||
}
|
||||
};
|
||||
|
||||
private ThreadLocal<long[]> valuesCache = new ThreadLocal<long[]>() {
|
||||
@Override protected long[] initialValue() {
|
||||
return new long[1];
|
||||
|
@ -71,6 +80,16 @@ public class SingleValueLongFieldData extends LongFieldData {
|
|||
proc.onValue(docId, values[loc]);
|
||||
}
|
||||
|
||||
@Override public MutableDateTime[] dates(int docId) {
|
||||
int loc = order[docId];
|
||||
if (loc == 0) {
|
||||
return EMPTY_DATETIME_ARRAY;
|
||||
}
|
||||
MutableDateTime[] ret = datesValuesCache.get();
|
||||
ret[0].setMillis(values[loc]);
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override public double[] doubleValues(int docId) {
|
||||
int loc = order[docId];
|
||||
if (loc == 0) {
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.field.function;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public interface FieldsFunction {
|
||||
|
||||
void setNextReader(IndexReader reader);
|
||||
|
||||
/**
|
||||
* @param docId The id of the document to evaluate the script against
* @param vars  Additional parameters for the script; the map is reused and has values added to it during execute
* @return The result of evaluating the script
|
||||
*/
|
||||
Object execute(int docId, Map vars);
|
||||
}
|
|
@ -0,0 +1,147 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.field.function.script;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
||||
import org.elasticsearch.index.cache.field.data.FieldDataCache;
|
||||
import org.elasticsearch.index.field.data.FieldData;
|
||||
import org.elasticsearch.index.field.data.FieldDataOptions;
|
||||
import org.elasticsearch.index.field.function.FieldsFunction;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.util.ThreadLocals;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class ScriptFieldsFunction implements FieldsFunction, Map {
|
||||
|
||||
private static ThreadLocal<ThreadLocals.CleanableValue<Map<String, FieldData>>> cachedFieldData = new ThreadLocal<ThreadLocals.CleanableValue<Map<String, FieldData>>>() {
|
||||
@Override protected ThreadLocals.CleanableValue<Map<String, FieldData>> initialValue() {
|
||||
return new ThreadLocals.CleanableValue<Map<String, FieldData>>(new HashMap<String, FieldData>());
|
||||
}
|
||||
};
|
||||
|
||||
final Object script;
|
||||
|
||||
final MapperService mapperService;
|
||||
|
||||
final FieldDataCache fieldDataCache;
|
||||
|
||||
final ScriptService scriptService;
|
||||
|
||||
final Map<String, FieldData> localCacheFieldData = cachedFieldData.get().get();
|
||||
|
||||
IndexReader reader;
|
||||
|
||||
int docId;
|
||||
|
||||
public ScriptFieldsFunction(String script, ScriptService scriptService, MapperService mapperService, FieldDataCache fieldDataCache) {
|
||||
this.scriptService = scriptService;
|
||||
this.mapperService = mapperService;
|
||||
this.fieldDataCache = fieldDataCache;
|
||||
this.script = scriptService.compile(script);
|
||||
}
|
||||
|
||||
@Override public void setNextReader(IndexReader reader) {
|
||||
this.reader = reader;
|
||||
localCacheFieldData.clear();
|
||||
}
|
||||
|
||||
@Override public Object execute(int docId, Map vars) {
|
||||
this.docId = docId;
|
||||
vars.put("doc", this);
|
||||
return scriptService.execute(script, vars);
|
||||
}
|
||||
|
||||
// --- Map implementation for doc field data lookup
|
||||
|
||||
@Override public Object get(Object key) {
|
||||
// assume it's a string...
|
||||
String fieldName = key.toString();
|
||||
FieldData fieldData = localCacheFieldData.get(fieldName);
|
||||
if (fieldData == null) {
|
||||
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
|
||||
if (mapper == null) {
|
||||
throw new ElasticSearchIllegalArgumentException("No field found for [" + fieldName + "]");
|
||||
}
|
||||
try {
|
||||
fieldData = fieldDataCache.cache(mapper.fieldDataType(), reader, mapper.names().indexName(), FieldDataOptions.fieldDataOptions().withFreqs(false));
|
||||
} catch (IOException e) {
|
||||
throw new ElasticSearchException("Failed to load field data for [" + fieldName + "]", e);
|
||||
}
|
||||
localCacheFieldData.put(fieldName, fieldData);
|
||||
}
|
||||
return fieldData.docFieldData(docId);
|
||||
}
|
||||
|
||||
public int size() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public boolean containsKey(Object key) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public boolean containsValue(Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public Object put(Object key, Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public Object remove(Object key) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public void putAll(Map m) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public Set keySet() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public Collection values() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public Set entrySet() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
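To make the flow above concrete, here is a small sketch (not part of the commit) of how ScriptFieldsFunction is driven: the caller points it at a segment reader, then evaluates the compiled script per document, with the script's "doc" variable resolved through the Map view implemented above. The helper method and its signature are hypothetical; only the constructor, setNextReader and execute calls come from this diff.

import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ScriptService;

import java.util.HashMap;
import java.util.Map;

public class ScriptFieldsFunctionSketch {

    // Hypothetical helper: evaluate one script for one document of one segment.
    static Object evalForDoc(String script, int docId, IndexReader reader,
                             ScriptService scriptService, MapperService mapperService,
                             FieldDataCache fieldDataCache) {
        FieldsFunction function = new ScriptFieldsFunction(script, scriptService, mapperService, fieldDataCache);
        function.setNextReader(reader);       // clears the per-reader field data cache
        Map<String, Object> vars = new HashMap<String, Object>();
        return function.execute(docId, vars); // the script sees fields via doc['field']
    }
}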
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.MapperService;
|
|||
import org.elasticsearch.index.query.xcontent.XContentIndexQueryParser;
|
||||
import org.elasticsearch.index.settings.IndexSettings;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.util.collect.ImmutableMap;
|
||||
import org.elasticsearch.util.inject.Inject;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
|
@ -39,7 +40,7 @@ import static org.elasticsearch.util.collect.Maps.*;
|
|||
import static org.elasticsearch.util.settings.ImmutableSettings.Builder.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class IndexQueryParserService extends AbstractIndexComponent {
|
||||
|
||||
|
@ -53,10 +54,11 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
private final Map<String, IndexQueryParser> indexQueryParsers;
|
||||
|
||||
public IndexQueryParserService(Index index, MapperService mapperService, IndexCache indexCache, IndexEngine indexEngine, AnalysisService analysisService) {
|
||||
this(index, EMPTY_SETTINGS, mapperService, indexCache, indexEngine, analysisService, null, null);
|
||||
this(index, EMPTY_SETTINGS, new ScriptService(EMPTY_SETTINGS), mapperService, indexCache, indexEngine, analysisService, null, null);
|
||||
}
|
||||
|
||||
@Inject public IndexQueryParserService(Index index, @IndexSettings Settings indexSettings,
|
||||
ScriptService scriptService,
|
||||
MapperService mapperService, IndexCache indexCache,
|
||||
IndexEngine indexEngine, AnalysisService analysisService,
|
||||
@Nullable SimilarityService similarityService,
|
||||
|
@ -80,7 +82,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
}
|
||||
}
|
||||
if (!qparsers.containsKey(Defaults.DEFAULT)) {
|
||||
IndexQueryParser defaultQueryParser = new XContentIndexQueryParser(index, indexSettings, mapperService, indexCache, indexEngine, analysisService, similarityService, null, null, Defaults.DEFAULT, null);
|
||||
IndexQueryParser defaultQueryParser = new XContentIndexQueryParser(index, indexSettings, scriptService, mapperService, indexCache, indexEngine, analysisService, similarityService, null, null, Defaults.DEFAULT, null);
|
||||
qparsers.put(Defaults.DEFAULT, defaultQueryParser);
|
||||
}
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@ import org.elasticsearch.index.query.QueryParsingException;
|
|||
import org.elasticsearch.index.settings.IndexSettings;
|
||||
import org.elasticsearch.util.Strings;
|
||||
import org.elasticsearch.util.inject.Inject;
|
||||
import org.elasticsearch.util.lucene.search.function.BoostFactorFunctionProvider;
|
||||
import org.elasticsearch.util.lucene.search.function.BoostScoreFunction;
|
||||
import org.elasticsearch.util.lucene.search.function.FunctionScoreQuery;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
import org.elasticsearch.util.xcontent.XContentParser;
|
||||
|
@ -75,7 +75,7 @@ public class CustomBoostFactorQueryParser extends AbstractIndexComponent impleme
|
|||
if (query == null) {
|
||||
throw new QueryParsingException(index, "[constant_factor_query] requires 'query' element");
|
||||
}
|
||||
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(query, new BoostFactorFunctionProvider(boostFactor));
|
||||
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(query, new BoostScoreFunction(boostFactor));
|
||||
functionScoreQuery.setBoost(boost);
|
||||
return functionScoreQuery;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,75 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query.xcontent;
|
||||
|
||||
import org.elasticsearch.util.xcontent.builder.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A query that uses a script to compute the score.
|
||||
*
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class CustomScoreQueryBuilder extends BaseQueryBuilder {
|
||||
|
||||
private final XContentQueryBuilder queryBuilder;
|
||||
|
||||
private String script;
|
||||
|
||||
private float boost = -1;
|
||||
|
||||
/**
* Constructs a query that uses a script to compute the score of documents matching the provided query.
*
* @param queryBuilder The query whose matching documents will be scored by the script.
*/
|
||||
public CustomScoreQueryBuilder(XContentQueryBuilder queryBuilder) {
|
||||
this.queryBuilder = queryBuilder;
|
||||
}
|
||||
|
||||
/**
* Sets the script that will be used to compute the custom score of matching documents.
*/
|
||||
public CustomScoreQueryBuilder script(String script) {
|
||||
this.script = script;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the boost for this query. Documents matching this query will (in addition to the normal
|
||||
* weightings) have their score multiplied by the boost provided.
|
||||
*/
|
||||
public CustomScoreQueryBuilder boost(float boost) {
|
||||
this.boost = boost;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(CustomScoreQueryParser.NAME);
|
||||
builder.field("query");
|
||||
queryBuilder.toXContent(builder, params);
|
||||
builder.field("script", script);
|
||||
if (boost != -1) {
|
||||
builder.field("boost", boost);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,127 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query.xcontent;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.search.Explanation;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.index.AbstractIndexComponent;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.index.settings.IndexSettings;
|
||||
import org.elasticsearch.util.Strings;
|
||||
import org.elasticsearch.util.ThreadLocals;
|
||||
import org.elasticsearch.util.inject.Inject;
|
||||
import org.elasticsearch.util.lucene.search.function.FunctionScoreQuery;
|
||||
import org.elasticsearch.util.lucene.search.function.ScoreFunction;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
import org.elasticsearch.util.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class CustomScoreQueryParser extends AbstractIndexComponent implements XContentQueryParser {
|
||||
|
||||
public static final String NAME = "custom_score";
|
||||
|
||||
@Inject public CustomScoreQueryParser(Index index, @IndexSettings Settings settings) {
|
||||
super(index, settings);
|
||||
}
|
||||
|
||||
@Override public String[] names() {
|
||||
return new String[]{NAME, Strings.toCamelCase(NAME)};
|
||||
}
|
||||
|
||||
@Override public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
Query query = null;
|
||||
float boost = 1.0f;
|
||||
String script = null;
|
||||
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("query".equals(currentFieldName)) {
|
||||
query = parseContext.parseInnerQuery();
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("script".equals(currentFieldName)) {
|
||||
script = parser.text();
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (query == null) {
|
||||
throw new QueryParsingException(index, "[custom_score] requires 'query' field");
|
||||
}
|
||||
if (script == null) {
|
||||
throw new QueryParsingException(index, "[custom_score] requires 'script' field");
|
||||
}
|
||||
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(query,
|
||||
new ScriptScoreFunction(new ScriptFieldsFunction(script, parseContext.scriptService(), parseContext.mapperService(), parseContext.indexCache().fieldData())));
|
||||
functionScoreQuery.setBoost(boost);
|
||||
return functionScoreQuery;
|
||||
}
|
||||
|
||||
private static ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>> cachedVars = new ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>>() {
|
||||
@Override protected ThreadLocals.CleanableValue<Map<String, Object>> initialValue() {
|
||||
return new ThreadLocals.CleanableValue<Map<String, Object>>(new HashMap<String, Object>());
|
||||
}
|
||||
};
|
||||
|
||||
public static class ScriptScoreFunction implements ScoreFunction {
|
||||
|
||||
private final ScriptFieldsFunction scriptFieldsFunction;
|
||||
|
||||
private Map<String, Object> vars;
|
||||
|
||||
private ScriptScoreFunction(ScriptFieldsFunction scriptFieldsFunction) {
|
||||
this.scriptFieldsFunction = scriptFieldsFunction;
|
||||
}
|
||||
|
||||
@Override public void setNextReader(IndexReader reader) {
|
||||
scriptFieldsFunction.setNextReader(reader);
|
||||
vars = cachedVars.get().get();
|
||||
vars.clear();
|
||||
}
|
||||
|
||||
@Override public float score(int docId, float subQueryScore) {
|
||||
vars.put("score", subQueryScore);
|
||||
return ((Number) scriptFieldsFunction.execute(docId, vars)).floatValue();
|
||||
}
|
||||
|
||||
@Override public Explanation explain(int docId, Explanation subQueryExpl) {
|
||||
float score = score(docId, subQueryExpl.getValue());
|
||||
Explanation exp = new Explanation(score, "script score function: product of:");
|
||||
exp.addDetail(subQueryExpl);
|
||||
return exp;
|
||||
}
|
||||
}
|
||||
}
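For clarity, this is roughly the query clause the parser above accepts. The element names (custom_score, query, script, boost) come from the parse loop; the wrapped term query and the script text are invented examples, and in a real request this clause sits inside the usual outer query object.

public class CustomScoreRequestBodyExample {
    // Mirrors what CustomScoreQueryBuilder.doXContent emits and what
    // CustomScoreQueryParser.parse consumes; values are illustrative only.
    static final String CUSTOM_SCORE_QUERY =
            "{\n" +
            "  \"custom_score\" : {\n" +
            "    \"query\" : { \"term\" : { \"user\" : \"kimchy\" } },\n" +
            "    \"script\" : \"score * 2.0\",\n" +
            "    \"boost\" : 2.0\n" +
            "  }\n" +
            "}";
}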
|
|
@ -280,6 +280,15 @@ public abstract class QueryBuilders {
|
|||
return new CustomBoostFactorQueryBuilder(queryBuilder);
|
||||
}
|
||||
|
||||
/**
|
||||
* A query that allows a custom scoring script to be defined.
|
||||
*
|
||||
* @param queryBuilder The query to custom score
|
||||
*/
|
||||
public static CustomScoreQueryBuilder customScoreQuery(XContentQueryBuilder queryBuilder) {
|
||||
return new CustomScoreQueryBuilder(queryBuilder);
|
||||
}
|
||||
|
||||
/**
|
||||
* A more like this query that finds documents that are "like" the provided {@link MoreLikeThisQueryBuilder#likeText(String)}
|
||||
* which is checked against the fields the query is constructed with.
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.FieldMappers;
|
|||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.util.xcontent.XContentParser;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
@ -42,6 +43,8 @@ public class QueryParseContext {
|
|||
|
||||
private final Index index;
|
||||
|
||||
private final ScriptService scriptService;
|
||||
|
||||
private final MapperService mapperService;
|
||||
|
||||
private final SimilarityService similarityService;
|
||||
|
@ -55,10 +58,12 @@ public class QueryParseContext {
|
|||
private XContentParser parser;
|
||||
|
||||
public QueryParseContext(Index index, XContentQueryParserRegistry queryParserRegistry,
|
||||
ScriptService scriptService,
|
||||
MapperService mapperService, SimilarityService similarityService,
|
||||
IndexCache indexCache, IndexEngine indexEngine) {
|
||||
this.index = index;
|
||||
this.queryParserRegistry = queryParserRegistry;
|
||||
this.scriptService = scriptService;
|
||||
this.mapperService = mapperService;
|
||||
this.similarityService = similarityService;
|
||||
this.indexCache = indexCache;
|
||||
|
@ -73,6 +78,10 @@ public class QueryParseContext {
|
|||
return parser;
|
||||
}
|
||||
|
||||
public ScriptService scriptService() {
|
||||
return scriptService;
|
||||
}
|
||||
|
||||
public MapperService mapperService() {
|
||||
return mapperService;
|
||||
}
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.elasticsearch.index.query.QueryBuilder;
|
|||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.index.settings.IndexSettings;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.util.ThreadLocals;
|
||||
import org.elasticsearch.util.inject.Inject;
|
||||
import org.elasticsearch.util.inject.assistedinject.Assisted;
|
||||
|
@ -61,12 +62,14 @@ public class XContentIndexQueryParser extends AbstractIndexComponent implements
|
|||
|
||||
private ThreadLocal<ThreadLocals.CleanableValue<QueryParseContext>> cache = new ThreadLocal<ThreadLocals.CleanableValue<QueryParseContext>>() {
|
||||
@Override protected ThreadLocals.CleanableValue<QueryParseContext> initialValue() {
|
||||
return new ThreadLocals.CleanableValue<QueryParseContext>(new QueryParseContext(index, queryParserRegistry, mapperService, similarityService, indexCache, indexEngine));
|
||||
return new ThreadLocals.CleanableValue<QueryParseContext>(new QueryParseContext(index, queryParserRegistry, scriptService, mapperService, similarityService, indexCache, indexEngine));
|
||||
}
|
||||
};
|
||||
|
||||
private final String name;
|
||||
|
||||
private final ScriptService scriptService;
|
||||
|
||||
private final MapperService mapperService;
|
||||
|
||||
private final SimilarityService similarityService;
|
||||
|
@ -78,7 +81,7 @@ public class XContentIndexQueryParser extends AbstractIndexComponent implements
|
|||
private final XContentQueryParserRegistry queryParserRegistry;
|
||||
|
||||
@Inject public XContentIndexQueryParser(Index index,
|
||||
@IndexSettings Settings indexSettings,
|
||||
@IndexSettings Settings indexSettings, ScriptService scriptService,
|
||||
MapperService mapperService, IndexCache indexCache, IndexEngine indexEngine,
|
||||
AnalysisService analysisService, @Nullable SimilarityService similarityService,
|
||||
@Nullable Map<String, XContentQueryParserFactory> namedQueryParsers,
|
||||
|
@ -86,6 +89,7 @@ public class XContentIndexQueryParser extends AbstractIndexComponent implements
|
|||
@Assisted String name, @Assisted @Nullable Settings settings) {
|
||||
super(index, indexSettings);
|
||||
this.name = name;
|
||||
this.scriptService = scriptService;
|
||||
this.mapperService = mapperService;
|
||||
this.similarityService = similarityService;
|
||||
this.indexCache = indexCache;
|
||||
|
|
|
@ -60,6 +60,7 @@ public class XContentQueryParserRegistry {
|
|||
add(queryParsersMap, new FilteredQueryParser(index, indexSettings));
|
||||
add(queryParsersMap, new ConstantScoreQueryParser(index, indexSettings));
|
||||
add(queryParsersMap, new CustomBoostFactorQueryParser(index, indexSettings));
|
||||
add(queryParsersMap, new CustomScoreQueryParser(index, indexSettings));
|
||||
add(queryParsersMap, new SpanTermQueryParser(index, indexSettings));
|
||||
add(queryParsersMap, new SpanNotQueryParser(index, indexSettings));
|
||||
add(queryParsersMap, new SpanFirstQueryParser(index, indexSettings));
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.node.internal;
|
|||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.TransportActionModule;
|
||||
import org.elasticsearch.cache.NodeCacheModule;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.node.NodeClientModule;
|
||||
import org.elasticsearch.cluster.ClusterModule;
|
||||
|
@ -49,6 +50,7 @@ import org.elasticsearch.plugins.PluginsModule;
|
|||
import org.elasticsearch.plugins.PluginsService;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestModule;
|
||||
import org.elasticsearch.script.ScriptModule;
|
||||
import org.elasticsearch.search.SearchModule;
|
||||
import org.elasticsearch.search.SearchService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
@ -114,12 +116,14 @@ public final class InternalNode implements Node {
|
|||
|
||||
ArrayList<Module> modules = new ArrayList<Module>();
|
||||
modules.add(new PluginsModule(settings, pluginsService));
|
||||
modules.add(new SettingsModule(settings));
|
||||
modules.add(new NodeModule(this));
|
||||
modules.add(new NetworkModule());
|
||||
modules.add(new NodeCacheModule());
|
||||
modules.add(new ScriptModule());
|
||||
modules.add(new JmxModule(settings));
|
||||
modules.add(new EnvironmentModule(environment));
|
||||
modules.add(new ClusterNameModule(settings));
|
||||
modules.add(new SettingsModule(settings));
|
||||
modules.add(new ThreadPoolModule(settings));
|
||||
modules.add(new TimerModule());
|
||||
modules.add(new DiscoveryModule(settings));
|
||||
|
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.script;
|
||||
|
||||
import org.elasticsearch.util.inject.AbstractModule;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class ScriptModule extends AbstractModule {
|
||||
|
||||
@Override protected void configure() {
|
||||
bind(ScriptService.class).asEagerSingleton();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,83 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.script;
|
||||
|
||||
import org.elasticsearch.util.component.AbstractComponent;
|
||||
import org.elasticsearch.util.concurrent.ConcurrentCollections;
|
||||
import org.elasticsearch.util.inject.Inject;
|
||||
import org.elasticsearch.util.math.UnboxedMathUtils;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
import org.mvel2.MVEL;
|
||||
import org.mvel2.ParserContext;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class ScriptService extends AbstractComponent {
|
||||
|
||||
private final ConcurrentMap<String, Object> cache = ConcurrentCollections.newConcurrentMap();
|
||||
|
||||
private final ParserContext parserContext;
|
||||
|
||||
@Inject public ScriptService(Settings settings) {
|
||||
super(settings);
|
||||
|
||||
parserContext = new ParserContext();
|
||||
parserContext.addPackageImport("java.util");
|
||||
parserContext.addPackageImport("org.elasticsearch.util.gnu.trove");
|
||||
parserContext.addPackageImport("org.elasticsearch.util.joda");
|
||||
parserContext.addImport("time", MVEL.getStaticMethod(System.class, "currentTimeMillis", new Class[0]));
|
||||
// unboxed version of Math, better performance since conversion from boxed to unboxed by MVEL is not needed
|
||||
for (Method m : UnboxedMathUtils.class.getMethods()) {
|
||||
if ((m.getModifiers() & Modifier.STATIC) > 0) {
|
||||
parserContext.addImport(m.getName(), m);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Object compile(String script) {
|
||||
Object compiled = cache.get(script);
|
||||
if (compiled != null) {
|
||||
return compiled;
|
||||
}
|
||||
synchronized (cache) {
|
||||
compiled = cache.get(script);
|
||||
if (compiled != null) {
|
||||
return compiled;
|
||||
}
|
||||
compiled = MVEL.compileExpression(script, parserContext);
|
||||
cache.put(script, compiled);
|
||||
}
|
||||
return compiled;
|
||||
}
|
||||
|
||||
public Object execute(Object script, Map vars) {
|
||||
return MVEL.executeExpression(script, vars);
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
cache.clear();
|
||||
}
|
||||
}
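A minimal usage sketch for the new ScriptService (not part of the commit): the constructor, compile and execute signatures are the ones added above, and EMPTY_SETTINGS is the same constant the commit uses elsewhere; the expression and variables are arbitrary examples.

import org.elasticsearch.script.ScriptService;

import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.util.settings.ImmutableSettings.Builder.*;

public class ScriptServiceExample {
    public static void main(String[] args) {
        ScriptService scriptService = new ScriptService(EMPTY_SETTINGS);
        // compile() caches the compiled MVEL expression, keyed by the script source
        Object compiled = scriptService.compile("a * b");
        Map<String, Object> vars = new HashMap<String, Object>();
        vars.put("a", 6);
        vars.put("b", 7);
        Number result = (Number) scriptService.execute(compiled, vars);
        System.out.println(result); // 42
    }
}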
|
|
@ -45,6 +45,8 @@ public class HistogramFacetCollector extends AbstractFacetCollector {
|
|||
|
||||
private final String fieldName;
|
||||
|
||||
private final String indexFieldName;
|
||||
|
||||
private final long interval;
|
||||
|
||||
private final HistogramFacet.ComparatorType comparatorType;
|
||||
|
@ -68,6 +70,7 @@ public class HistogramFacetCollector extends AbstractFacetCollector {
|
|||
if (mapper == null) {
|
||||
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
|
||||
}
|
||||
indexFieldName = mapper.names().indexName();
|
||||
fieldDataType = mapper.fieldDataType();
|
||||
|
||||
histoProc = new HistogramProc(interval);
|
||||
|
@ -78,7 +81,7 @@ public class HistogramFacetCollector extends AbstractFacetCollector {
|
|||
}
|
||||
|
||||
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
|
||||
fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, fieldName, fieldDataOptions().withFreqs(false));
|
||||
fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexFieldName, fieldDataOptions().withFreqs(false));
|
||||
}
|
||||
|
||||
@Override public Facet facet() {
|
||||
|
|
|
@ -43,8 +43,10 @@ import static org.elasticsearch.index.field.data.FieldDataOptions.*;
|
|||
public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
|
||||
|
||||
private final String keyFieldName;
|
||||
private final String keyIndexFieldName;
|
||||
|
||||
private final String valueFieldName;
|
||||
private final String valueIndexFieldName;
|
||||
|
||||
private final long interval;
|
||||
|
||||
|
@ -73,12 +75,14 @@ public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
|
|||
if (mapper == null) {
|
||||
throw new FacetPhaseExecutionException(facetName, "No mapping found for key_field [" + keyFieldName + "]");
|
||||
}
|
||||
keyIndexFieldName = mapper.names().indexName();
|
||||
keyFieldDataType = mapper.fieldDataType();
|
||||
|
||||
mapper = mapperService.smartNameFieldMapper(valueFieldName);
|
||||
if (mapper == null) {
|
||||
throw new FacetPhaseExecutionException(facetName, "No mapping found for value_field [" + valueFieldName + "]");
|
||||
}
|
||||
valueIndexFieldName = mapper.names().indexName();
|
||||
valueFieldDataType = mapper.fieldDataType();
|
||||
}
|
||||
|
||||
|
@ -121,8 +125,8 @@ public class KeyValueHistogramFacetCollector extends AbstractFacetCollector {
|
|||
}
|
||||
|
||||
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
|
||||
keyFieldData = (NumericFieldData) fieldDataCache.cache(keyFieldDataType, reader, keyFieldName, fieldDataOptions().withFreqs(false));
|
||||
valueFieldData = (NumericFieldData) fieldDataCache.cache(valueFieldDataType, reader, valueFieldName, fieldDataOptions().withFreqs(false));
|
||||
keyFieldData = (NumericFieldData) fieldDataCache.cache(keyFieldDataType, reader, keyIndexFieldName, fieldDataOptions().withFreqs(false));
|
||||
valueFieldData = (NumericFieldData) fieldDataCache.cache(valueFieldDataType, reader, valueIndexFieldName, fieldDataOptions().withFreqs(false));
|
||||
}
|
||||
|
||||
@Override public Facet facet() {
|
||||
|
|
|
@ -40,6 +40,8 @@ public class StatisticalFacetCollector extends AbstractFacetCollector {
|
|||
|
||||
private final String fieldName;
|
||||
|
||||
private final String indexFieldName;
|
||||
|
||||
private final FieldDataCache fieldDataCache;
|
||||
|
||||
private final FieldData.Type fieldDataType;
|
||||
|
@ -57,6 +59,7 @@ public class StatisticalFacetCollector extends AbstractFacetCollector {
|
|||
if (mapper == null) {
|
||||
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
|
||||
}
|
||||
indexFieldName = mapper.names().indexName();
|
||||
fieldDataType = mapper.fieldDataType();
|
||||
}
|
||||
|
||||
|
@ -65,7 +68,7 @@ public class StatisticalFacetCollector extends AbstractFacetCollector {
|
|||
}
|
||||
|
||||
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
|
||||
fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, fieldName, fieldDataOptions().withFreqs(false));
|
||||
fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexFieldName, fieldDataOptions().withFreqs(false));
|
||||
}
|
||||
|
||||
@Override public Facet facet() {
|
||||
|
|
|
@ -54,6 +54,8 @@ public class TermsFacetCollector extends AbstractFacetCollector {
|
|||
|
||||
private final String fieldName;
|
||||
|
||||
private final String indexFieldName;
|
||||
|
||||
private final int size;
|
||||
|
||||
private final FieldData.Type fieldDataType;
|
||||
|
@ -68,11 +70,12 @@ public class TermsFacetCollector extends AbstractFacetCollector {
|
|||
this.size = size;
|
||||
|
||||
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
|
||||
this.fieldName = fieldName;
|
||||
if (mapper != null) {
|
||||
this.fieldName = mapper.names().indexName();
|
||||
this.indexFieldName = mapper.names().indexName();
|
||||
this.fieldDataType = mapper.fieldDataType();
|
||||
} else {
|
||||
this.fieldName = fieldName;
|
||||
this.indexFieldName = fieldName;
|
||||
this.fieldDataType = FieldData.Type.STRING;
|
||||
}
|
||||
|
||||
|
@ -80,7 +83,7 @@ public class TermsFacetCollector extends AbstractFacetCollector {
|
|||
}
|
||||
|
||||
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
|
||||
fieldData = fieldDataCache.cache(fieldDataType, reader, fieldName, fieldDataOptions().withFreqs(false));
|
||||
fieldData = fieldDataCache.cache(fieldDataType, reader, indexFieldName, fieldDataOptions().withFreqs(false));
|
||||
}
|
||||
|
||||
@Override protected void doCollect(int doc) throws IOException {
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.elasticsearch.search.internal.SearchContext;
|
|||
import org.elasticsearch.util.collect.ImmutableMap;
|
||||
import org.elasticsearch.util.inject.Inject;
|
||||
import org.elasticsearch.util.lucene.search.TermFilter;
|
||||
import org.elasticsearch.util.lucene.search.function.BoostFactorFunctionProvider;
|
||||
import org.elasticsearch.util.lucene.search.function.BoostScoreFunction;
|
||||
import org.elasticsearch.util.lucene.search.function.FunctionScoreQuery;
|
||||
|
||||
import java.util.Map;
|
||||
|
@ -63,7 +63,7 @@ public class QueryPhase implements SearchPhase {
|
|||
throw new SearchParseException(context, "No query specified in search request");
|
||||
}
|
||||
if (context.queryBoost() != 1.0f) {
|
||||
context.query(new FunctionScoreQuery(context.query(), new BoostFactorFunctionProvider(context.queryBoost())));
|
||||
context.query(new FunctionScoreQuery(context.query(), new BoostScoreFunction(context.queryBoost())));
|
||||
}
|
||||
facetsPhase.preProcess(context);
|
||||
}
|
||||
|
|
|
@ -25,11 +25,11 @@ import org.apache.lucene.search.Explanation;
|
|||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class BoostFactorFunctionProvider implements FunctionProvider, Function {
|
||||
public class BoostScoreFunction implements ScoreFunction {
|
||||
|
||||
private final float boost;
|
||||
|
||||
public BoostFactorFunctionProvider(float boost) {
|
||||
public BoostScoreFunction(float boost) {
|
||||
this.boost = boost;
|
||||
}
|
||||
|
||||
|
@ -38,8 +38,8 @@ public class BoostFactorFunctionProvider implements FunctionProvider, Function {
|
|||
return boost;
|
||||
}
|
||||
|
||||
@Override public Function function(IndexReader reader) {
|
||||
return this;
|
||||
@Override public void setNextReader(IndexReader reader) {
|
||||
// nothing to do here...
|
||||
}
|
||||
|
||||
@Override public float score(int docId, float subQueryScore) {
|
||||
|
@ -57,7 +57,7 @@ public class BoostFactorFunctionProvider implements FunctionProvider, Function {
|
|||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
BoostFactorFunctionProvider that = (BoostFactorFunctionProvider) o;
|
||||
BoostScoreFunction that = (BoostScoreFunction) o;
|
||||
|
||||
if (Float.compare(that.boost, boost) != 0) return false;
|
||||
|
|
@ -34,20 +34,20 @@ import java.util.Set;
|
|||
*/
|
||||
public class FunctionScoreQuery extends Query {
|
||||
|
||||
private Query subQuery;
|
||||
private FunctionProvider functionProvider;
|
||||
Query subQuery;
|
||||
final ScoreFunction function;
|
||||
|
||||
public FunctionScoreQuery(Query subQuery, FunctionProvider functionProvider) {
|
||||
public FunctionScoreQuery(Query subQuery, ScoreFunction function) {
|
||||
this.subQuery = subQuery;
|
||||
this.functionProvider = functionProvider;
|
||||
this.function = function;
|
||||
}
|
||||
|
||||
public Query getSubQuery() {
|
||||
return subQuery;
|
||||
}
|
||||
|
||||
public FunctionProvider getFunctionProvider() {
|
||||
return functionProvider;
|
||||
public ScoreFunction getFunction() {
|
||||
return function;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -69,7 +69,7 @@ public class FunctionScoreQuery extends Query {
|
|||
return new CustomBoostFactorWeight(searcher);
|
||||
}
|
||||
|
||||
private class CustomBoostFactorWeight extends Weight {
|
||||
class CustomBoostFactorWeight extends Weight {
|
||||
Searcher searcher;
|
||||
Weight subQueryWeight;
|
||||
|
||||
|
@ -105,7 +105,8 @@ public class FunctionScoreQuery extends Query {
|
|||
if (subQueryScorer == null) {
|
||||
return null;
|
||||
}
|
||||
return new CustomBoostFactorScorer(getSimilarity(searcher), this, subQueryScorer, functionProvider.function(reader));
|
||||
function.setNextReader(reader);
|
||||
return new CustomBoostFactorScorer(getSimilarity(searcher), this, subQueryScorer);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -115,7 +116,8 @@ public class FunctionScoreQuery extends Query {
|
|||
return subQueryExpl;
|
||||
}
|
||||
|
||||
Explanation functionExplanation = functionProvider.function(reader).explain(doc, subQueryExpl);
|
||||
function.setNextReader(reader);
|
||||
Explanation functionExplanation = function.explain(doc, subQueryExpl);
|
||||
float sc = getValue() * functionExplanation.getValue();
|
||||
Explanation res = new ComplexExplanation(true, sc, "custom score, product of:");
|
||||
res.addDetail(functionExplanation);
|
||||
|
@ -125,16 +127,14 @@ public class FunctionScoreQuery extends Query {
|
|||
}
|
||||
|
||||
|
||||
private class CustomBoostFactorScorer extends Scorer {
|
||||
class CustomBoostFactorScorer extends Scorer {
|
||||
private final float subQueryWeight;
|
||||
private final Scorer scorer;
|
||||
private final Function function;
|
||||
|
||||
private CustomBoostFactorScorer(Similarity similarity, CustomBoostFactorWeight w, Scorer scorer, Function function) throws IOException {
|
||||
private CustomBoostFactorScorer(Similarity similarity, CustomBoostFactorWeight w, Scorer scorer) throws IOException {
|
||||
super(similarity);
|
||||
this.subQueryWeight = w.getValue();
|
||||
this.scorer = scorer;
|
||||
this.function = function;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -161,7 +161,7 @@ public class FunctionScoreQuery extends Query {
|
|||
|
||||
public String toString(String field) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("custom score (").append(subQuery.toString(field)).append(",function=").append(functionProvider).append(')');
|
||||
sb.append("custom score (").append(subQuery.toString(field)).append(",function=").append(function).append(')');
|
||||
sb.append(ToStringUtils.boost(getBoost()));
|
||||
return sb.toString();
|
||||
}
|
||||
|
@ -171,11 +171,11 @@ public class FunctionScoreQuery extends Query {
|
|||
FunctionScoreQuery other = (FunctionScoreQuery) o;
|
||||
return this.getBoost() == other.getBoost()
|
||||
&& this.subQuery.equals(other.subQuery)
|
||||
&& this.functionProvider.equals(other.functionProvider);
|
||||
&& this.function.equals(other.function);
|
||||
}
|
||||
|
||||
public int hashCode() {
|
||||
return subQuery.hashCode() + 31 * functionProvider.hashCode() ^ Float.floatToIntBits(getBoost());
|
||||
return subQuery.hashCode() + 31 * function.hashCode() ^ Float.floatToIntBits(getBoost());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -19,12 +19,15 @@
|
|||
|
||||
package org.elasticsearch.util.lucene.search.function;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.search.Explanation;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public interface Function {
|
||||
public interface ScoreFunction {
|
||||
|
||||
void setNextReader(IndexReader reader);
|
||||
|
||||
float score(int docId, float subQueryScore);
|
||||
|
|
@ -0,0 +1,540 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.math;
|
||||
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.io.PrintWriter;
|
||||
import java.text.MessageFormat;
|
||||
import java.text.ParseException;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* Base class for commons-math unchecked exceptions.
|
||||
*
|
||||
* @version $Revision: 822850 $ $Date: 2009-10-07 14:56:42 -0400 (Wed, 07 Oct 2009) $
|
||||
* @since 2.0
|
||||
*/
|
||||
public class MathRuntimeException extends RuntimeException {
|
||||
|
||||
/**
|
||||
* Serializable version identifier.
|
||||
*/
|
||||
private static final long serialVersionUID = -5128983364075381060L;
|
||||
|
||||
/**
|
||||
* Pattern used to build the message.
|
||||
*/
|
||||
private final String pattern;
|
||||
|
||||
/**
|
||||
* Arguments used to build the message.
|
||||
*/
|
||||
private final Object[] arguments;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>MathRuntimeException</code> with specified
|
||||
* formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
*/
|
||||
public MathRuntimeException(final String pattern, final Object... arguments) {
|
||||
this.pattern = pattern;
|
||||
this.arguments = (arguments == null) ? new Object[0] : arguments.clone();
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>MathRuntimeException</code> with specified
|
||||
* nested <code>Throwable</code> root cause.
|
||||
*
|
||||
* @param rootCause the exception or error that caused this exception
|
||||
* to be thrown.
|
||||
*/
|
||||
public MathRuntimeException(final Throwable rootCause) {
|
||||
super(rootCause);
|
||||
this.pattern = getMessage();
|
||||
this.arguments = new Object[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>MathRuntimeException</code> with specified
|
||||
* formatted detail message and nested <code>Throwable</code> root cause.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param rootCause the exception or error that caused this exception
|
||||
* to be thrown.
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
*/
|
||||
public MathRuntimeException(final Throwable rootCause,
|
||||
final String pattern, final Object... arguments) {
|
||||
super(rootCause);
|
||||
this.pattern = pattern;
|
||||
this.arguments = (arguments == null) ? new Object[0] : arguments.clone();
|
||||
}
|
||||
|
||||
/**
|
||||
* Translate a string to a given locale.
|
||||
*
|
||||
* @param s string to translate
|
||||
* @param locale locale into which to translate the string
|
||||
* @return translated string or original string
|
||||
* for unsupported locales or unknown strings
|
||||
*/
|
||||
private static String translate(final String s, final Locale locale) {
|
||||
try {
|
||||
ResourceBundle bundle =
|
||||
ResourceBundle.getBundle("org.apache.commons.math.MessagesResources", locale);
|
||||
if (bundle.getLocale().getLanguage().equals(locale.getLanguage())) {
|
||||
// the value of the resource is the translated string
|
||||
return bundle.getString(s);
|
||||
}
|
||||
|
||||
} catch (MissingResourceException mre) {
|
||||
// do nothing here
|
||||
}
|
||||
|
||||
// the locale is not supported or the resource is unknown
|
||||
// don't translate and fall back to using the string as is
|
||||
return s;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a message string by from a pattern and its arguments.
|
||||
*
|
||||
* @param locale Locale in which the message should be translated
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return a message string
|
||||
*/
|
||||
private static String buildMessage(final Locale locale, final String pattern,
|
||||
final Object... arguments) {
|
||||
return (pattern == null) ? "" : new MessageFormat(translate(pattern, locale), locale).format(arguments);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the pattern used to build the message of this throwable.
|
||||
*
|
||||
* @return the pattern used to build the message of this throwable
|
||||
*/
|
||||
public String getPattern() {
|
||||
return pattern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the arguments used to build the message of this throwable.
|
||||
*
|
||||
* @return the arguments used to build the message of this throwable
|
||||
*/
|
||||
public Object[] getArguments() {
|
||||
return arguments.clone();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the message in a specified locale.
|
||||
*
|
||||
* @param locale Locale in which the message should be translated
|
||||
* @return localized message
|
||||
*/
|
||||
public String getMessage(final Locale locale) {
|
||||
return buildMessage(locale, pattern, arguments);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return getMessage(Locale.US);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return getMessage(Locale.getDefault());
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints the stack trace of this exception to the standard error stream.
|
||||
*/
|
||||
@Override
|
||||
public void printStackTrace() {
|
||||
printStackTrace(System.err);
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints the stack trace of this exception to the specified stream.
|
||||
*
|
||||
* @param out the <code>PrintStream</code> to use for output
|
||||
*/
|
||||
@Override
|
||||
public void printStackTrace(final PrintStream out) {
|
||||
synchronized (out) {
|
||||
PrintWriter pw = new PrintWriter(out, false);
|
||||
printStackTrace(pw);
|
||||
// Flush the PrintWriter before it's GC'ed.
|
||||
pw.flush();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>ArithmeticException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static ArithmeticException createArithmeticException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new ArithmeticException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = 7705628723242533939L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>ArrayIndexOutOfBoundsException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static ArrayIndexOutOfBoundsException createArrayIndexOutOfBoundsException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new ArrayIndexOutOfBoundsException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = -3394748305449283486L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>EOFException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static EOFException createEOFException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new EOFException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = 279461544586092584L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>IOException</code> with specified nested
|
||||
* <code>Throwable</code> root cause.
|
||||
* <p>This factory method allows chaining of other exceptions within an
|
||||
* <code>IOException</code> even for Java 5. The constructor for
|
||||
* <code>IOException</code> with a cause parameter was introduced only
|
||||
* with Java 6.</p>
|
||||
*
|
||||
* @param rootCause the exception or error that caused this exception
|
||||
* to be thrown.
|
||||
* @return built exception
|
||||
*/
|
||||
public static IOException createIOException(final Throwable rootCause) {
|
||||
IOException ioe = new IOException(rootCause.getLocalizedMessage());
|
||||
ioe.initCause(rootCause);
|
||||
return ioe;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>IllegalArgumentException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static IllegalArgumentException createIllegalArgumentException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new IllegalArgumentException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = -6555453980658317913L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>IllegalArgumentException</code> with specified nested
|
||||
* <code>Throwable</code> root cause.
|
||||
*
|
||||
* @param rootCause the exception or error that caused this exception
|
||||
* to be thrown.
|
||||
* @return built exception
|
||||
*/
|
||||
public static IllegalArgumentException createIllegalArgumentException(final Throwable rootCause) {
|
||||
IllegalArgumentException iae = new IllegalArgumentException(rootCause.getLocalizedMessage());
|
||||
iae.initCause(rootCause);
|
||||
return iae;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>IllegalStateException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static IllegalStateException createIllegalStateException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new IllegalStateException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = -95247648156277208L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>ConcurrentModificationException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static ConcurrentModificationException createConcurrentModificationException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new ConcurrentModificationException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = 6134247282754009421L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>NoSuchElementException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static NoSuchElementException createNoSuchElementException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new NoSuchElementException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = 7304273322489425799L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>NullPointerException</code> with specified formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static NullPointerException createNullPointerException(final String pattern,
|
||||
final Object... arguments) {
|
||||
return new NullPointerException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = -3075660477939965216L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new <code>ParseException</code> with specified
|
||||
* formatted detail message.
|
||||
* Message formatting is delegated to {@link java.text.MessageFormat}.
|
||||
*
|
||||
* @param offset offset at which error occurred
|
||||
* @param pattern format specifier
|
||||
* @param arguments format arguments
|
||||
* @return built exception
|
||||
*/
|
||||
public static ParseException createParseException(final int offset,
|
||||
final String pattern,
|
||||
final Object... arguments) {
|
||||
return new ParseException(null, offset) {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = -1103502177342465975L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, arguments);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, arguments);
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an {@link java.lang.RuntimeException} for an internal error.
|
||||
*
|
||||
* @param cause underlying cause
|
||||
* @return an {@link java.lang.RuntimeException} for an internal error
|
||||
*/
|
||||
public static RuntimeException createInternalError(final Throwable cause) {
|
||||
|
||||
final String pattern = "internal error, please fill a bug report at {0}";
|
||||
final String argument = "https://issues.apache.org/jira/browse/MATH";
|
||||
|
||||
return new RuntimeException() {
|
||||
|
||||
/** Serializable version identifier. */
|
||||
private static final long serialVersionUID = -201865440834027016L;
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getMessage() {
|
||||
return buildMessage(Locale.US, pattern, argument);
|
||||
}
|
||||
|
||||
/** {@inheritDoc} */
|
||||
@Override
|
||||
public String getLocalizedMessage() {
|
||||
return buildMessage(Locale.getDefault(), pattern, argument);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
}
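(As a quick orientation for this vendored class: a hypothetical caller, not part of this diff and using a made-up scaleFactor variable, would use the factory methods like so.)

    if (scaleFactor < 0) {
        // builds a plain IllegalArgumentException whose message is rendered through MessageFormat
        throw MathRuntimeException.createIllegalArgumentException(
                "scale factor must be positive, got {0}", scaleFactor);
    }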
|
File diff suppressed because it is too large
@@ -0,0 +1,244 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.math;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class UnboxedMathUtils {
|
||||
|
||||
public static double sin(Double a) {
|
||||
return Math.sin(a);
|
||||
}
|
||||
|
||||
public static double cos(Double a) {
|
||||
return Math.cos(a); // default impl. delegates to StrictMath
|
||||
}
|
||||
|
||||
public static double tan(Double a) {
|
||||
return Math.tan(a); // default impl. delegates to StrictMath
|
||||
}
|
||||
|
||||
public static double asin(Double a) {
|
||||
return Math.asin(a); // default impl. delegates to StrictMath
|
||||
}
|
||||
|
||||
public static double acos(Double a) {
|
||||
return Math.acos(a); // default impl. delegates to StrictMath
|
||||
}
|
||||
|
||||
public static double atan(Double a) {
|
||||
return Math.atan(a); // default impl. delegates to StrictMath
|
||||
}
|
||||
|
||||
public static double toRadians(Double angdeg) {
|
||||
return Math.toRadians(angdeg);
|
||||
}
|
||||
|
||||
public static double toDegrees(Double angrad) {
|
||||
return Math.toDegrees(angrad);
|
||||
}
|
||||
|
||||
public static double exp(Double a) {
|
||||
return Math.exp(a);
|
||||
}
|
||||
|
||||
public static double log(Double a) {
|
||||
return Math.log(a);
|
||||
}
|
||||
|
||||
public static double log10(Double a) {
|
||||
return Math.log10(a);
|
||||
}
|
||||
|
||||
public static double sqrt(Double a) {
|
||||
return Math.sqrt(a);
|
||||
}
|
||||
|
||||
|
||||
public static double cbrt(Double a) {
|
||||
return Math.cbrt(a);
|
||||
}
|
||||
|
||||
public static double IEEEremainder(Double f1, Double f2) {
|
||||
return Math.IEEEremainder(f1, f2);
|
||||
}
|
||||
|
||||
public static double ceil(Double a) {
|
||||
return Math.ceil(a);
|
||||
}
|
||||
|
||||
public static double floor(Double a) {
|
||||
return Math.floor(a);
|
||||
}
|
||||
|
||||
public static double rint(Double a) {
|
||||
return Math.rint(a);
|
||||
}
|
||||
|
||||
public static double atan2(Double y, Double x) {
|
||||
return Math.atan2(y, x);
|
||||
}
|
||||
|
||||
public static double pow(Double a, Double b) {
|
||||
return Math.pow(a, b);
|
||||
}
|
||||
|
||||
public static int round(Float a) {
|
||||
return Math.round(a);
|
||||
}
|
||||
|
||||
public static long round(Double a) {
|
||||
return Math.round(a);
|
||||
}
|
||||
|
||||
public static double random() {
|
||||
return Math.random();
|
||||
}
|
||||
|
||||
public static int abs(Integer a) {
|
||||
return Math.abs(a);
|
||||
}
|
||||
|
||||
public static long abs(Long a) {
|
||||
return Math.abs(a);
|
||||
}
|
||||
|
||||
public static float abs(Float a) {
|
||||
return Math.abs(a);
|
||||
}
|
||||
|
||||
public static double abs(Double a) {
|
||||
return Math.abs(a);
|
||||
}
|
||||
|
||||
public static int max(Integer a, Integer b) {
|
||||
return Math.max(a, b);
|
||||
}
|
||||
|
||||
public static long max(Long a, Long b) {
|
||||
return Math.max(a, b);
|
||||
}
|
||||
|
||||
public static float max(Float a, Float b) {
|
||||
return Math.max(a, b);
|
||||
}
|
||||
|
||||
public static double max(Double a, Double b) {
|
||||
return Math.max(a, b);
|
||||
}
|
||||
|
||||
public static int min(Integer a, Integer b) {
|
||||
return Math.min(a, b);
|
||||
}
|
||||
|
||||
public static long min(Long a, Long b) {
|
||||
return Math.min(a, b);
|
||||
}
|
||||
|
||||
public static float min(Float a, Float b) {
|
||||
return Math.min(a, b);
|
||||
}
|
||||
|
||||
public static double min(Double a, Double b) {
|
||||
return Math.min(a, b);
|
||||
}
|
||||
|
||||
public static double ulp(Double d) {
|
||||
return Math.ulp(d);
|
||||
}
|
||||
|
||||
public static float ulp(Float f) {
|
||||
return Math.ulp(f);
|
||||
}
|
||||
|
||||
public static double signum(Double d) {
|
||||
return Math.signum(d);
|
||||
}
|
||||
|
||||
public static float signum(Float f) {
|
||||
return Math.signum(f);
|
||||
}
|
||||
|
||||
public static double sinh(Double x) {
|
||||
return Math.sinh(x);
|
||||
}
|
||||
|
||||
public static double cosh(Double x) {
|
||||
return Math.cosh(x);
|
||||
}
|
||||
|
||||
public static double tanh(Double x) {
|
||||
return Math.tanh(x);
|
||||
}
|
||||
|
||||
public static double hypot(Double x, Double y) {
|
||||
return Math.hypot(x, y);
|
||||
}
|
||||
|
||||
public static double expm1(Double x) {
|
||||
return Math.expm1(x);
|
||||
}
|
||||
|
||||
public static double log1p(Double x) {
|
||||
return Math.log1p(x);
|
||||
}
|
||||
|
||||
public static double copySign(Double magnitude, Double sign) {
|
||||
return Math.copySign(magnitude, sign);
|
||||
}
|
||||
|
||||
public static float copySign(Float magnitude, Float sign) {
|
||||
return Math.copySign(magnitude, sign);
|
||||
}
|
||||
|
||||
public static int getExponent(Float f) {
|
||||
return Math.getExponent(f);
|
||||
}
|
||||
|
||||
public static int getExponent(Double d) {
|
||||
return Math.getExponent(d);
|
||||
}
|
||||
|
||||
public static double nextAfter(Double start, Double direction) {
|
||||
return Math.nextAfter(start, direction);
|
||||
}
|
||||
|
||||
public static float nextAfter(Float start, Double direction) {
|
||||
return Math.nextAfter(start, direction);
|
||||
}
|
||||
|
||||
public static double nextUp(Double d) {
|
||||
return Math.nextUp(d);
|
||||
}
|
||||
|
||||
public static float nextUp(Float f) {
|
||||
return Math.nextUp(f);
|
||||
}
|
||||
|
||||
|
||||
public static double scalb(Double d, Integer scaleFactor) {
|
||||
return Math.scalb(d, scaleFactor);
|
||||
}
|
||||
|
||||
public static float scalb(Float f, Integer scaleFactor) {
|
||||
return Math.scalb(f, scaleFactor);
|
||||
}
|
||||
}
|
|
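(A note on why these overloads take wrapper types: script engines such as MVEL hand values around boxed and resolve calls reflectively, so exposing max, pow and friends over Double/Float/Integer/Long lets expressions like pow(doc['num1'].value, 2) bind cleanly. A rough sketch, with hypothetical variable names that are not part of the diff:)

    Double num1 = 2.0d;                          // script values typically arrive boxed
    double viaJdk = Math.max(num1, 1.0d);        // compiled Java auto-unboxes at the call site...
    // ...a reflective resolver instead looks for max(Double, Double), which these overloads provide:
    double viaUtils = UnboxedMathUtils.max(num1, Double.valueOf(1.0d));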
@@ -91,10 +91,12 @@ public class LongFieldDataTests {
        assertThat(sFieldData.hasValue(0), equalTo(true));
        assertThat(sFieldData.docFieldData(0).isEmpty(), equalTo(false));
        assertThat(sFieldData.value(0), equalTo(4l));
        assertThat(sFieldData.date(0).getMillis(), equalTo(4l));
        assertThat(sFieldData.docFieldData(0).getValue(), equalTo(4l));
        assertThat(sFieldData.values(0).length, equalTo(1));
        assertThat(sFieldData.docFieldData(0).getValues().length, equalTo(1));
        assertThat(sFieldData.values(0)[0], equalTo(4l));
        assertThat(sFieldData.dates(0)[0].getMillis(), equalTo(4l));
        assertThat(sFieldData.docFieldData(0).getValues()[0], equalTo(4l));

        assertThat(sFieldData.hasValue(1), equalTo(true));

@@ -141,9 +143,12 @@ public class LongFieldDataTests {

        assertThat(mFieldData.hasValue(1), equalTo(true));
        assertThat(mFieldData.value(1), equalTo(104l));
        assertThat(mFieldData.date(1).getMillis(), equalTo(104l));
        assertThat(mFieldData.values(1).length, equalTo(2));
        assertThat(mFieldData.values(1)[0], equalTo(104l));
        assertThat(mFieldData.dates(1)[0].getMillis(), equalTo(104l));
        assertThat(mFieldData.values(1)[1], equalTo(105l));
        assertThat(mFieldData.dates(1)[1].getMillis(), equalTo(105l));

        assertThat(mFieldData.hasValue(2), equalTo(false));

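(The new date()/dates() accessors presumably back date handling from scripts and field data consumers; a hypothetical call site, not part of this diff and assuming a LongFieldData instance named fieldData, would read:)

    long single = fieldData.date(0).getMillis();            // single-valued document
    long firstOfMany = fieldData.dates(1)[0].getMillis();   // first value of a multi-valued document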
@@ -30,8 +30,9 @@ import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.robin.RobinIndexEngine;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParser;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.util.lucene.search.*;
import org.elasticsearch.util.lucene.search.function.BoostFactorFunctionProvider;
import org.elasticsearch.util.lucene.search.function.BoostScoreFunction;
import org.elasticsearch.util.lucene.search.function.FunctionScoreQuery;
import org.testng.annotations.Test;

@@ -712,13 +713,23 @@ public class SimpleIndexQueryParserTests {
        assertThat(((TermFilter) constantScoreQuery.getFilter()).getTerm(), equalTo(new Term("name.last", "banon")));
    }

    @Test public void testCustomScoreQuery1() throws IOException {
        IndexQueryParser queryParser = newQueryParser();
        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/xcontent/custom_score1.json");
        Query parsedQuery = queryParser.parse(query);
        assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class));
        FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery;
        assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term("name.last", "banon")));
        assertThat(functionScoreQuery.getFunction(), instanceOf(CustomScoreQueryParser.ScriptScoreFunction.class));
    }

    @Test public void testCustomBoostFactorQueryBuilder() throws IOException {
        IndexQueryParser queryParser = newQueryParser();
        Query parsedQuery = queryParser.parse(customBoostFactorQuery(termQuery("name.last", "banon")).boostFactor(1.3f));
        assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class));
        FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery;
        assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term("name.last", "banon")));
        assertThat((double) ((BoostFactorFunctionProvider) functionScoreQuery.getFunctionProvider()).getBoost(), closeTo(1.3, 0.001));
        assertThat((double) ((BoostScoreFunction) functionScoreQuery.getFunction()).getBoost(), closeTo(1.3, 0.001));
    }

@@ -729,7 +740,7 @@ public class SimpleIndexQueryParserTests {
        assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class));
        FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery;
        assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term("name.last", "banon")));
        assertThat((double) ((BoostFactorFunctionProvider) functionScoreQuery.getFunctionProvider()).getBoost(), closeTo(1.3, 0.001));
        assertThat((double) ((BoostScoreFunction) functionScoreQuery.getFunction()).getBoost(), closeTo(1.3, 0.001));
    }

    @Test public void testSpanTermQueryBuilder() throws IOException {

@@ -946,7 +957,7 @@ public class SimpleIndexQueryParserTests {
    }

    private XContentIndexQueryParser newQueryParser() throws IOException {
        return new XContentIndexQueryParser(new Index("test"), EMPTY_SETTINGS,
        return new XContentIndexQueryParser(new Index("test"), EMPTY_SETTINGS, new ScriptService(EMPTY_SETTINGS),
                newMapperService(), new IndexCache(index), new RobinIndexEngine(index), new AnalysisService(index), null, null, null, "test", null);
    }

@@ -0,0 +1,8 @@
{
    "custom_score" : {
        "query" : {
            "term" : { "name.last" : "banon"}
        },
        "script" : "score * doc['name.first']"
    }
}

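(For comparison, the same query built through the Java API; a hypothetical sketch using the builders exercised elsewhere in this commit, with static imports from QueryBuilders and SearchSourceBuilder assumed.)

    // equivalent of custom_score1.json, expressed with the query builders
    searchSource().query(
            customScoreQuery(termQuery("name.last", "banon"))
                    .script("score * doc['name.first']"));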
@@ -30,9 +30,11 @@ import org.elasticsearch.index.query.xcontent.XContentIndexQueryParser;
|
|||
import org.elasticsearch.index.query.xcontent.XContentQueryParserRegistry;
|
||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||
import org.elasticsearch.index.similarity.SimilarityModule;
|
||||
import org.elasticsearch.script.ScriptModule;
|
||||
import org.elasticsearch.util.inject.Guice;
|
||||
import org.elasticsearch.util.inject.Injector;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
import org.elasticsearch.util.settings.SettingsModule;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import static org.elasticsearch.util.settings.ImmutableSettings.*;
|
||||
|
@@ -55,6 +57,8 @@ public class IndexQueryParserModuleTests {
|
|||
|
||||
Index index = new Index("test");
|
||||
Injector injector = Guice.createInjector(
|
||||
new SettingsModule(settings),
|
||||
new ScriptModule(),
|
||||
new IndexSettingsModule(settings),
|
||||
new IndexCacheModule(settings),
|
||||
new AnalysisModule(settings),
|
||||
|
|
|
@@ -30,10 +30,12 @@ import org.elasticsearch.index.query.xcontent.XContentIndexQueryParser;
|
|||
import org.elasticsearch.index.query.xcontent.XContentQueryParserRegistry;
|
||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||
import org.elasticsearch.index.similarity.SimilarityModule;
|
||||
import org.elasticsearch.script.ScriptModule;
|
||||
import org.elasticsearch.util.inject.Guice;
|
||||
import org.elasticsearch.util.inject.Injector;
|
||||
import org.elasticsearch.util.settings.ImmutableSettings;
|
||||
import org.elasticsearch.util.settings.Settings;
|
||||
import org.elasticsearch.util.settings.SettingsModule;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.*;
|
||||
|
@@ -60,6 +62,8 @@ public class IndexQueryParserPluginTests {
|
|||
|
||||
Index index = new Index("test");
|
||||
Injector injector = Guice.createInjector(
|
||||
new SettingsModule(settings),
|
||||
new ScriptModule(),
|
||||
new IndexSettingsModule(settings),
|
||||
new IndexCacheModule(settings),
|
||||
new AnalysisModule(settings),
|
||||
|
|
|
@@ -0,0 +1,146 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.test.integration.search.customscore;
|
||||
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.test.integration.AbstractNodesTests;
|
||||
import org.testng.annotations.AfterMethod;
|
||||
import org.testng.annotations.BeforeMethod;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import static org.elasticsearch.client.Requests.*;
|
||||
import static org.elasticsearch.index.query.xcontent.QueryBuilders.*;
|
||||
import static org.elasticsearch.search.builder.SearchSourceBuilder.*;
|
||||
import static org.elasticsearch.util.xcontent.XContentFactory.*;
|
||||
import static org.hamcrest.MatcherAssert.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
@Test
|
||||
public class CustomScoreSearchTests extends AbstractNodesTests {
|
||||
|
||||
private Client client;
|
||||
|
||||
@BeforeMethod public void createNodes() throws Exception {
|
||||
startNode("server1");
|
||||
client = getClient();
|
||||
}
|
||||
|
||||
@AfterMethod public void closeNodes() {
|
||||
client.close();
|
||||
closeAllNodes();
|
||||
}
|
||||
|
||||
protected Client getClient() {
|
||||
return client("server1");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCustomScriptBoost() throws Exception {
|
||||
// execute a search before we create an index
|
||||
try {
|
||||
client.prepareSearch().setQuery(termQuery("test", "value")).execute().actionGet();
|
||||
assert false : "should fail";
|
||||
} catch (Exception e) {
|
||||
// ignore, no indices
|
||||
}
|
||||
|
||||
try {
|
||||
client.prepareSearch("test").setQuery(termQuery("test", "value")).execute().actionGet();
|
||||
assert false : "should fail";
|
||||
} catch (Exception e) {
|
||||
// ignore, no indices
|
||||
}
|
||||
|
||||
client.admin().indices().create(createIndexRequest("test")).actionGet();
|
||||
client.index(indexRequest("test").type("type1").id("1")
|
||||
.source(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())).actionGet();
|
||||
client.index(indexRequest("test").type("type1").id("2")
|
||||
.source(jsonBuilder().startObject().field("test", "value check").field("num1", 2.0f).endObject())).actionGet();
|
||||
client.admin().indices().refresh(refreshRequest()).actionGet();
|
||||
|
||||
logger.info("--- QUERY_THEN_FETCH");
|
||||
|
||||
logger.info("running doc['num1'].value");
|
||||
SearchResponse response = client.search(searchRequest()
|
||||
.searchType(SearchType.QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.hits().totalHits(), equalTo(2l));
|
||||
logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
|
||||
assertThat(response.hits().getAt(0).id(), equalTo("2"));
|
||||
assertThat(response.hits().getAt(1).id(), equalTo("1"));
|
||||
|
||||
logger.info("running -doc['num1'].value");
|
||||
response = client.search(searchRequest()
|
||||
.searchType(SearchType.QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("-doc['num1'].value")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.hits().totalHits(), equalTo(2l));
|
||||
logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
|
||||
assertThat(response.hits().getAt(0).id(), equalTo("1"));
|
||||
assertThat(response.hits().getAt(1).id(), equalTo("2"));
|
||||
|
||||
|
||||
logger.info("running pow(doc['num1'].value, 2)");
|
||||
response = client.search(searchRequest()
|
||||
.searchType(SearchType.QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("pow(doc['num1'].value, 2)")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.hits().totalHits(), equalTo(2l));
|
||||
logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
|
||||
assertThat(response.hits().getAt(0).id(), equalTo("2"));
|
||||
assertThat(response.hits().getAt(1).id(), equalTo("1"));
|
||||
|
||||
logger.info("running max(doc['num1'].value, 1)");
|
||||
response = client.search(searchRequest()
|
||||
.searchType(SearchType.QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("max(doc['num1'].value, 1d)")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.hits().totalHits(), equalTo(2l));
|
||||
logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
|
||||
assertThat(response.hits().getAt(0).id(), equalTo("2"));
|
||||
assertThat(response.hits().getAt(1).id(), equalTo("1"));
|
||||
|
||||
logger.info("running doc['num1'].value * score");
|
||||
response = client.search(searchRequest()
|
||||
.searchType(SearchType.QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value * score")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.hits().totalHits(), equalTo(2l));
|
||||
logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
|
||||
assertThat(response.hits().getAt(0).id(), equalTo("2"));
|
||||
assertThat(response.hits().getAt(1).id(), equalTo("1"));
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,6 @@
cluster:
    routing:
        schedule: 100ms
index:
    number_of_shards: 1
    number_of_replicas: 0