Remove IndexQueryParseService#parse methods used only in tests

SimpleIndexQueryParserTests was the main responsible: deleted lots of duplicated tests, moved the ones that made sense to keep to their corresponding unit tests (note they were ESSingleNode tests before while are now converted to unit tests).

Closes #13750
This commit is contained in:
javanna 2015-09-23 19:09:15 +02:00 committed by Luca Cavanna
parent eb8ea63626
commit 8976934d3b
140 changed files with 1038 additions and 3562 deletions

View File

@ -191,7 +191,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
/** /**
* @return the string representation of the minimumShouldMatch settings for this query * @return the string representation of the minimumShouldMatch settings for this query
*/ */
public String minimumNumberShouldMatch() { public String minimumShouldMatch() {
return this.minimumShouldMatch; return this.minimumShouldMatch;
} }

View File

@ -153,24 +153,6 @@ public class IndexQueryParserService extends AbstractIndexComponent {
return indicesQueriesRegistry; return indicesQueriesRegistry;
} }
//norelease this needs to go away
public ParsedQuery parse(QueryBuilder queryBuilder) {
XContentParser parser = null;
try {
BytesReference bytes = queryBuilder.buildAsBytes();
parser = XContentFactory.xContent(bytes).createParser(bytes);
return innerParse(cache.get(), parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(BytesReference source) { public ParsedQuery parse(BytesReference source) {
return parse(cache.get(), source); return parse(cache.get(), source);
} }
@ -192,22 +174,6 @@ public class IndexQueryParserService extends AbstractIndexComponent {
} }
} }
public ParsedQuery parse(String source) throws ParsingException, QueryShardException {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return innerParse(cache.get(), parser);
} catch (QueryShardException|ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse [" + source + "]", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(XContentParser parser) { public ParsedQuery parse(XContentParser parser) {
try { try {
return innerParse(cache.get(), parser); return innerParse(cache.get(), parser);

View File

@ -23,19 +23,8 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField; import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.*;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.*;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity;
@ -45,11 +34,7 @@ import org.elasticsearch.test.ESTestCase;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.*;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;

View File

@ -404,6 +404,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
/** /**
* Serialize the given query builder and asserts that both are equal * Serialize the given query builder and asserts that both are equal
*/ */
@SuppressWarnings("unchecked")
protected QB assertSerialization(QB testQuery) throws IOException { protected QB assertSerialization(QB testQuery) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) { try (BytesStreamOutput output = new BytesStreamOutput()) {
testQuery.writeTo(output); testQuery.writeTo(output);

View File

@ -19,15 +19,17 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanQuery; import org.hamcrest.Matchers;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.*; import java.util.*;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.instanceOf;
@ -80,6 +82,7 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
} else { } else {
assertThat(query, instanceOf(BooleanQuery.class)); assertThat(query, instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) query; BooleanQuery booleanQuery = (BooleanQuery) query;
assertThat(booleanQuery.isCoordDisabled(), equalTo(queryBuilder.disableCoord()));
if (queryBuilder.adjustPureNegative()) { if (queryBuilder.adjustPureNegative()) {
boolean isNegative = true; boolean isNegative = true;
for (BooleanClause clause : clauses) { for (BooleanClause clause : clauses) {
@ -173,4 +176,30 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
} }
} }
// https://github.com/elasticsearch/elasticsearch/issues/7240
@Test
public void testEmptyBooleanQuery() throws Exception {
String query = jsonBuilder().startObject().startObject("bool").endObject().endObject().string();
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class));
}
public void testDefaultMinShouldMatch() throws Exception {
// Queries have a minShouldMatch of 0
BooleanQuery bq = (BooleanQuery) parseQuery(boolQuery().must(termQuery("foo", "bar")).buildAsBytes()).toQuery(createShardContext());
assertEquals(0, bq.getMinimumNumberShouldMatch());
bq = (BooleanQuery) parseQuery(boolQuery().should(termQuery("foo", "bar")).buildAsBytes()).toQuery(createShardContext());
assertEquals(0, bq.getMinimumNumberShouldMatch());
// Filters have a minShouldMatch of 0/1
ConstantScoreQuery csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().must(termQuery("foo", "bar"))).buildAsBytes()).toQuery(createShardContext());
bq = (BooleanQuery) csq.getQuery();
assertEquals(0, bq.getMinimumNumberShouldMatch());
csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().should(termQuery("foo", "bar"))).buildAsBytes()).toQuery(createShardContext());
bq = (BooleanQuery) csq.getQuery();
assertEquals(1, bq.getMinimumNumberShouldMatch());
}
} }

View File

@ -25,8 +25,11 @@ import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTermsQueryBuilder> { public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTermsQueryBuilder> {
@ -110,4 +113,44 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
context.setAllowUnmappedFields(true); context.setAllowUnmappedFields(true);
assertNull(builder.toQuery(context)); assertNull(builder.toQuery(context));
} }
@Test
public void testCommonTermsQuery1() throws IOException {
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query1.json");
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue());
assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2"));
}
@Test
public void testCommonTermsQuery2() throws IOException {
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query2.json");
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo("50%"));
assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("5<20%"));
}
@Test
public void testCommonTermsQuery3() throws IOException {
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query3.json");
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue());
assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2"));
}
@Test // see #11730
public void testCommonTermsQuery4() throws IOException {
boolean disableCoord = randomBoolean();
Query parsedQuery = parseQuery(commonTermsQuery("field", "text").disableCoord(disableCoord).buildAsBytes()).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.isCoordDisabled(), equalTo(disableCoord));
}
} }

View File

@ -19,17 +19,17 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.*;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import static org.hamcrest.CoreMatchers.*; import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.*;
public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBuilder> { public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBuilder> {
@ -113,4 +113,34 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBu
// expected // expected
} }
} }
@Test
public void testToQueryInnerPrefixQuery() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String queryAsString = "{\n" +
" \"dis_max\":{\n" +
" \"queries\":[\n" +
" {\n" +
" \"prefix\":{\n" +
" \"" + STRING_FIELD_NAME + "\":{\n" +
" \"value\":\"sh\",\n" +
" \"boost\":1.2\n" +
" }\n" +
" }\n" +
" }\n" +
" ]\n" +
" }\n" +
"}";
Query query = parseQuery(queryAsString).toQuery(createShardContext());
assertThat(query, instanceOf(DisjunctionMaxQuery.class));
DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query;
List<Query> disjuncts = disjunctionMaxQuery.getDisjuncts();
assertThat(disjuncts.size(), equalTo(1));
PrefixQuery firstQ = (PrefixQuery) disjuncts.get(0);
// since age is automatically registered in data, we encode it as numeric
assertThat(firstQ.getPrefix(), equalTo(new Term(STRING_FIELD_NAME, "sh")));
assertThat((double) firstQ.getBoost(), closeTo(1.2, 0.00001));
}
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
@ -29,6 +30,7 @@ import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuilder> { public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuilder> {
@ -103,4 +105,45 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
assertThat(e.getMessage(), Matchers.containsString("For input string")); assertThat(e.getMessage(), Matchers.containsString("For input string"));
} }
} }
@Test
public void testToQueryWithStringField() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"fuzzy\":{\n" +
" \"" + STRING_FIELD_NAME + "\":{\n" +
" \"value\":\"sh\",\n" +
" \"fuzziness\": \"AUTO\",\n" +
" \"prefix_length\":1,\n" +
" \"boost\":2.0\n" +
" }\n" +
" }\n" +
"}";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(FuzzyQuery.class));
FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery;
assertThat(fuzzyQuery.getTerm(), equalTo(new Term(STRING_FIELD_NAME, "sh")));
assertThat(fuzzyQuery.getMaxEdits(), equalTo(Fuzziness.AUTO.asDistance("sh")));
assertThat(fuzzyQuery.getPrefixLength(), equalTo(1));
assertThat(fuzzyQuery.getBoost(), equalTo(2.0f));
}
@Test
public void testToQueryWithNumericField() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"fuzzy\":{\n" +
" \"" + INT_FIELD_NAME + "\":{\n" +
" \"value\":12,\n" +
" \"fuzziness\":5,\n" +
" \"boost\":2.0\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery;
assertThat(fuzzyQuery.getMin().longValue(), equalTo(7l));
assertThat(fuzzyQuery.getMax().longValue(), equalTo(17l));
}
} }

View File

@ -21,12 +21,7 @@ package org.elasticsearch.index.query;
import com.spatial4j.core.io.GeohashUtils; import com.spatial4j.core.io.GeohashUtils;
import com.spatial4j.core.shape.Rectangle; import com.spatial4j.core.shape.Rectangle;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery; import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
@ -35,6 +30,9 @@ import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBoundingBoxQueryBuilder> { public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBoundingBoxQueryBuilder> {
/** Randomly generate either NaN or one of the two infinity values. */ /** Randomly generate either NaN or one of the two infinity values. */
private static Double[] brokenDoubles = {Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY}; private static Double[] brokenDoubles = {Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY};
@ -258,7 +256,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
} }
} }
// Java really could do with function pointers - is there any Java8 feature that would help me here which I don't know of?
public abstract class PointTester { public abstract class PointTester {
private double brokenCoordinate = randomFrom(brokenDoubles); private double brokenCoordinate = randomFrom(brokenDoubles);
private double invalidCoordinate; private double invalidCoordinate;
@ -319,4 +316,106 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
qb.setCorners(qb.topLeft().getLat(), qb.topLeft().getLon(), qb.topLeft().getLat(), coordinate); qb.setCorners(qb.topLeft().getLat(), qb.topLeft().getLon(), qb.topLeft().getLat(), coordinate);
} }
} }
@Test
public void testParsingAndToQuery1() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":[-70, 40],\n" +
" \"bottom_right\":[-80, 30]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery2() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" },\n" +
" \"bottom_right\":{\n" +
" \"lat\":30,\n" +
" \"lon\":-80\n" +
" }\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery3() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":\"40, -70\",\n" +
" \"bottom_right\":\"30, -80\"\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery4() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":\"drn5x1g8cu2y\",\n" +
" \"bottom_right\":\"30, -80\"\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery5() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_right\":\"40, -80\",\n" +
" \"bottom_left\":\"30, -70\"\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery6() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"right\": -80,\n" +
" \"top\": 40,\n" +
" \"left\": -70,\n" +
" \"bottom\": 30\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
private void assertGeoBoundingBoxQuery(String query) throws IOException {
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.topLeft().lat(), closeTo(40, 0.00001));
assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001));
assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001));
assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001));
}
} }

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import com.spatial4j.core.shape.Point; import com.spatial4j.core.shape.Point;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
@ -31,9 +30,7 @@ import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> { public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
@ -180,4 +177,202 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
assertThat(geoQuery.maxInclusiveDistance(), closeTo(distance, Math.abs(distance) / 1000)); assertThat(geoQuery.maxInclusiveDistance(), closeTo(distance, Math.abs(distance) / 1000));
} }
@Test
public void testParsingAndToQuery1() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery2() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":[-70, 40]\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery3() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":\"40, -70\"\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery4() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":\"drn5x1g8cu2y\"\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery5() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":12,\n" +
" \"unit\":\"mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery6() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12\",\n" +
" \"unit\":\"mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery7() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"19.312128\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(0.012, DistanceUnit.MILES), 0.00001));
}
@Test
public void testParsingAndToQuery8() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":19.312128,\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.KILOMETERS.convert(12, DistanceUnit.MILES), 0.00001));
}
@Test
public void testParsingAndToQuery9() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"19.312128\",\n" +
" \"unit\":\"km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery10() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":19.312128,\n" +
" \"unit\":\"km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery11() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"19.312128km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery12() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"unit\":\"km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
private void assertGeoDistanceRangeQuery(String query) throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001));
}
} }

View File

@ -23,6 +23,7 @@ import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Coordinate;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder;
@ -37,6 +38,8 @@ import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
@ -70,7 +73,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
assertThat(queryPoints[i], equalTo(queryBuilderPoints.get(i))); assertThat(queryPoints[i], equalTo(queryBuilderPoints.get(i)));
} }
} }
} }
/** /**
@ -155,4 +157,114 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
assertEquals("Deprecated field [normalize] used, expected [coerce] instead", ex.getMessage()); assertEquals("Deprecated field [normalize] used, expected [coerce] instead", ex.getMessage());
} }
} }
@Test
public void testParsingAndToQueryParsingExceptions() throws IOException {
String[] brokenFiles = new String[]{
"/org/elasticsearch/index/query/geo_polygon_exception_1.json",
"/org/elasticsearch/index/query/geo_polygon_exception_2.json",
"/org/elasticsearch/index/query/geo_polygon_exception_3.json",
"/org/elasticsearch/index/query/geo_polygon_exception_4.json",
"/org/elasticsearch/index/query/geo_polygon_exception_5.json"
};
for (String brokenFile : brokenFiles) {
String query = copyToStringFromClasspath(brokenFile);
try {
parseQuery(query);
fail("parsing a broken geo_polygon filter didn't fail as expected while parsing: " + brokenFile);
} catch (ParsingException e) {
// success!
}
}
}
@Test
public void testParsingAndToQuery1() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_polygon\":{\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"points\":[\n" +
" [-70, 40],\n" +
" [-80, 30],\n" +
" [-90, 20]\n" +
" ]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoPolygonQuery(query);
}
@Test
public void testParsingAndToQuery2() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_polygon\":{\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"points\":[\n" +
" {\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" },\n" +
" {\n" +
" \"lat\":30,\n" +
" \"lon\":-80\n" +
" },\n" +
" {\n" +
" \"lat\":20,\n" +
" \"lon\":-90\n" +
" }\n" +
" ]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoPolygonQuery(query);
}
@Test
public void testParsingAndToQuery3() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_polygon\":{\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"points\":[\n" +
" \"40, -70\",\n" +
" \"30, -80\",\n" +
" \"20, -90\"\n" +
" ]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoPolygonQuery(query);
}
// Mixed point notation: the first point is a geohash ("drn5x1g8cu2y" decodes to
// approximately lat 40, lon -70 — TODO confirm precision tolerance), the others
// are "lat, lon" strings.
@Test
public void testParsingAndToQuery4() throws IOException {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    String query = "{\n" +
            "    \"geo_polygon\":{\n" +
            "        \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
            "            \"points\":[\n" +
            "                \"drn5x1g8cu2y\",\n" +
            "                \"30, -80\",\n" +
            "                \"20, -90\"\n" +
            "            ]\n" +
            "        }\n" +
            "    }\n" +
            "}\n";
    assertGeoPolygonQuery(query);
}
/**
 * Parses the given geo_polygon query JSON into a Lucene query and verifies the
 * resulting {@link GeoPolygonQuery}: correct field name, four points (the parser
 * closes the polygon by repeating the first point), and the expected coordinates
 * for the first three points.
 */
private void assertGeoPolygonQuery(String query) throws IOException {
    Query parsedQuery = parseQuery(query).toQuery(createShardContext());
    GeoPolygonQuery geoPolygonQuery = (GeoPolygonQuery) parsedQuery;
    assertThat(geoPolygonQuery.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
    assertThat(geoPolygonQuery.points().length, equalTo(4));
    // expected (lat, lon) pairs for the first three polygon vertices
    double[][] expectedPoints = {{40, -70}, {30, -80}, {20, -90}};
    for (int i = 0; i < expectedPoints.length; i++) {
        assertThat(geoPolygonQuery.points()[i].lat(), closeTo(expectedPoints[i][0], 0.00001));
        assertThat(geoPolygonQuery.points()[i].lon(), closeTo(expectedPoints[i][1], 0.00001));
    }
}
} }

View File

@ -1,172 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.TestSearchContext;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
/**
*
*/
/**
 * Tests that date range queries and filters honour a custom {@code format} when
 * parsing date boundaries, and that boundary inclusiveness (gte/lte vs gt/lt)
 * is reflected in the resulting {@link NumericRangeQuery}.
 *
 * Runs against a single node so the "person" mapping plus dynamic mapping
 * updates from the sample document are applied before queries are parsed.
 */
public class IndexQueryParserFilterDateRangeFormatTests extends ESSingleNodeTestCase {

    private Injector injector;
    private IndexQueryParserService queryParser;

    @Before
    public void setup() throws IOException {
        // Create the index, merge the static mapping, then index-parse a sample
        // document and apply its dynamic mapping update so date fields exist.
        IndexService indexService = createIndex("test");
        injector = indexService.injector();
        MapperService mapperService = indexService.mapperService();
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
        mapperService.merge("person", new CompressedXContent(mapping), true, false);
        ParsedDocument doc = mapperService.documentMapper("person").parse("test", "person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
        assertNotNull(doc.dynamicMappingsUpdate());
        client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
        queryParser = injector.getInstance(IndexQueryParserService.class);
    }

    private IndexQueryParserService queryParser() throws IOException {
        return this.queryParser;
    }

    @Test
    public void testDateRangeFilterFormat() throws IOException {
        IndexQueryParserService queryParser = queryParser();
        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_format.json");
        queryParser.parse(query).query();
        // Sadly from NoCacheFilter, we can not access to the delegate filter so we can not check
        // it's the one we are expecting

        // Test Invalid format: a date that does not match the declared format must fail at rewrite time.
        query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_format_invalid.json");
        try {
            SearchContext.setCurrent(new TestSearchContext());
            // We need to rewrite, because range on date field initially returns LateParsingQuery
            queryParser.parse(query).query().rewrite(null);
            fail("A Range Filter with a specific format but with an unexpected date should raise a ParsingException");
        } catch (ElasticsearchParseException e) {
            // We expect it
        } finally {
            // Always clear the thread-static search context set above.
            SearchContext.removeCurrent();
        }
    }

    @Test
    public void testDateRangeQueryFormat() throws IOException {
        IndexQueryParserService queryParser = queryParser();
        // We test 01/01/2012 from gte and 2030 for lt
        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_format.json");
        Query parsedQuery;
        try {
            SearchContext.setCurrent(new TestSearchContext());
            // We need to rewrite, because range on date field initially returns LateParsingQuery
            parsedQuery = queryParser.parse(query).query().rewrite(null);
        } finally {
            // fixed: stray second semicolon removed here
            SearchContext.removeCurrent();
        }
        assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));

        // Min value was 01/01/2012 (dd/MM/yyyy)
        DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00");
        assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));

        // Max value was 2030 (yyyy)
        DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00");
        assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis()));

        // Test Invalid format
        query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_format_invalid.json");
        try {
            SearchContext.setCurrent(new TestSearchContext());
            queryParser.parse(query).query().rewrite(null);
            fail("A Range Query with a specific format but with an unexpected date should raise a ParsingException");
        } catch (ElasticsearchParseException e) {
            // We expect it
        } finally {
            SearchContext.removeCurrent();
        }
    }

    @Test
    public void testDateRangeBoundaries() throws IOException {
        IndexQueryParserService queryParser = queryParser();
        // gte/lte boundaries: both endpoints must be inclusive.
        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_boundaries_inclusive.json");
        Query parsedQuery;
        try {
            SearchContext.setCurrent(new TestSearchContext());
            // We need to rewrite, because range on date field initially returns LateParsingQuery
            parsedQuery = queryParser.parse(query).query().rewrite(null);
        } finally {
            SearchContext.removeCurrent();
        }
        assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
        NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;

        DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00");
        assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
        assertTrue(rangeQuery.includesMin());

        DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00");
        assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
        assertTrue(rangeQuery.includesMax());

        // gt/lt boundaries: both endpoints must be exclusive.
        query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_boundaries_exclusive.json");
        try {
            SearchContext.setCurrent(new TestSearchContext());
            // We need to rewrite, because range on date field initially returns LateParsingQuery
            parsedQuery = queryParser.parse(query).query().rewrite(null);
        } finally {
            SearchContext.removeCurrent();
        }
        assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
        rangeQuery = (NumericRangeQuery) parsedQuery;

        min = DateTime.parse("2014-11-30T23:59:59.999+00");
        assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
        assertFalse(rangeQuery.includesMin());

        max = DateTime.parse("2014-12-08T00:00:00.000+00");
        assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
        assertFalse(rangeQuery.includesMax());
    }
}

View File

@ -1,129 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.TestSearchContext;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
*
*/
/**
 * Tests that the {@code time_zone} parameter of date range queries/filters is
 * applied when parsing date boundaries, and that using a time zone on a plain
 * numeric (non-date) field is rejected.
 *
 * NOTE: these tests set/remove the thread-static current SearchContext; the
 * try/finally ordering around setCurrent/removeCurrent is load-bearing.
 */
public class IndexQueryParserFilterDateRangeTimezoneTests extends ESSingleNodeTestCase {

    private Injector injector;
    private IndexQueryParserService queryParser;

    @Before
    public void setup() throws IOException {
        // Create the index, merge the static mapping, then apply the dynamic
        // mapping update produced by parsing the sample document so date fields exist.
        IndexService indexService = createIndex("test");
        injector = indexService.injector();
        MapperService mapperService = indexService.mapperService();
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
        mapperService.merge("person", new CompressedXContent(mapping), true, false);
        ParsedDocument doc = mapperService.documentMapper("person").parse("test", "person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
        assertNotNull(doc.dynamicMappingsUpdate());
        client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
        queryParser = injector.getInstance(IndexQueryParserService.class);
    }

    private IndexQueryParserService queryParser() throws IOException {
        return this.queryParser;
    }

    @Test
    public void testDateRangeFilterTimezone() throws IOException {
        IndexQueryParserService queryParser = queryParser();
        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_timezone.json");
        queryParser.parse(query).query();
        // Sadly from NoCacheFilter, we can not access to the delegate filter so we can not check
        // it's the one we are expecting

        // A time_zone on a numeric (non-date) field must be rejected.
        // NOTE(review): the fail message mentions ParsingException but the catch is
        // QueryShardException — the message text looks stale; verify intent.
        query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_timezone_numeric_field.json");
        try {
            SearchContext.setCurrent(new TestSearchContext());
            queryParser.parse(query).query();
            fail("A Range Filter on a numeric field with a TimeZone should raise a ParsingException");
        } catch (QueryShardException e) {
            // We expect it
        } finally {
            SearchContext.removeCurrent();
        }
    }

    @Test
    public void testDateRangeQueryTimezone() throws IOException {
        // Captured before parsing: the query's upper bound is "now", so it should be
        // close to (>=) this timestamp.
        long startDate = System.currentTimeMillis();
        IndexQueryParserService queryParser = queryParser();
        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_timezone.json");
        Query parsedQuery;
        try {
            SearchContext.setCurrent(new TestSearchContext());
            parsedQuery = queryParser.parse(query).query().rewrite(null);
        } finally {
            SearchContext.removeCurrent();
        }
        assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));

        // Min value was 2012-01-01 (UTC) so we need to remove one hour
        DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
        // Max value is when we started the test. So it should be some ms from now
        DateTime max = new DateTime(startDate, DateTimeZone.UTC);

        assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));

        // We should not have a big difference here (should be some ms)
        assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));

        // A time_zone on a numeric (non-date) field must be rejected.
        query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_timezone_numeric_field.json");
        try {
            SearchContext.setCurrent(new TestSearchContext());
            queryParser.parse(query).query();
            fail("A Range Query on a numeric field with a TimeZone should raise a ParsingException");
        } catch (QueryShardException e) {
            // We expect it
        } finally {
            SearchContext.removeCurrent();
        }
    }
}

View File

@ -23,6 +23,8 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import java.io.IOException; import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.instanceOf;
@ -33,6 +35,16 @@ public class MatchAllQueryBuilderTests extends AbstractQueryTestCase<MatchAllQue
return new MatchAllQueryBuilder(); return new MatchAllQueryBuilder();
} }
/**
 * Alternate serialized form: {@code match_all} may also be written with an empty
 * array body; it must parse to the same (default) MatchAllQueryBuilder.
 */
@Override
protected Map<String, MatchAllQueryBuilder> getAlternateVersions() {
    Map<String, MatchAllQueryBuilder> alternateVersions = new HashMap<>();
    String queryAsString = "{\n" +
            "    \"match_all\": []\n" +
            "}";
    alternateVersions.put(queryAsString, new MatchAllQueryBuilder());
    return alternateVersions;
}
@Override @Override
protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
assertThat(query, instanceOf(MatchAllDocsQuery.class)); assertThat(query, instanceOf(MatchAllDocsQuery.class));

View File

@ -21,12 +21,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.*;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
@ -40,6 +35,7 @@ import java.util.Locale;
import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> { public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {
@ -147,6 +143,9 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
if (query instanceof BooleanQuery) { if (query instanceof BooleanQuery) {
BooleanQuery bq = (BooleanQuery) query; BooleanQuery bq = (BooleanQuery) query;
if (queryBuilder.minimumShouldMatch() != null) {
assertThat(bq.getMinimumNumberShouldMatch(), greaterThan(0));
}
if (queryBuilder.analyzer() == null && queryBuilder.value().toString().length() > 0) { if (queryBuilder.analyzer() == null && queryBuilder.value().toString().length() > 0) {
assertEquals(bq.clauses().size(), queryBuilder.value().toString().split(" ").length); assertEquals(bq.clauses().size(), queryBuilder.value().toString().split(" ").length);
} }
@ -155,13 +154,13 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
if (query instanceof ExtendedCommonTermsQuery) { if (query instanceof ExtendedCommonTermsQuery) {
assertTrue(queryBuilder.cutoffFrequency() != null); assertTrue(queryBuilder.cutoffFrequency() != null);
ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query; ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query;
assertEquals((float) queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE); assertEquals(queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
} }
if (query instanceof FuzzyQuery) { if (query instanceof FuzzyQuery) {
assertTrue(queryBuilder.fuzziness() != null); assertTrue(queryBuilder.fuzziness() != null);
FuzzyQuery fuzzyQuery = (FuzzyQuery) query; FuzzyQuery fuzzyQuery = (FuzzyQuery) query;
fuzzyQuery.getTerm().equals(new Term(STRING_FIELD_NAME, BytesRefs.toBytesRef(queryBuilder.value()))); assertThat(fuzzyQuery.getTerm(), equalTo(new Term(STRING_FIELD_NAME, BytesRefs.toBytesRef(queryBuilder.value()))));
assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength())); assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength()));
assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions())); assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
} }
@ -223,6 +222,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
public void testBadAnalyzer() throws IOException { public void testBadAnalyzer() throws IOException {
MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text"); MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
matchQuery.analyzer("bogusAnalyzer"); matchQuery.analyzer("bogusAnalyzer");
matchQuery.doToQuery(createShardContext()); matchQuery.toQuery(createShardContext());
} }
} }

View File

@ -41,7 +41,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.VersionType; import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
import org.hamcrest.Matchers;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -52,6 +51,9 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.stream.Stream; import java.util.stream.Stream;
import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery;
import static org.hamcrest.Matchers.*;
public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> { public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> {
private static String[] randomFields; private static String[] randomFields;
@ -205,7 +207,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
if (request.doc() != null) { if (request.doc() != null) {
generatedFields = generateFields(randomFields, request.doc().toUtf8()); generatedFields = generateFields(randomFields, request.doc().toUtf8());
} else { } else {
generatedFields = generateFields(request.selectedFields().toArray(new String[0]), request.id()); generatedFields = generateFields(request.selectedFields().toArray(new String[request.selectedFields().size()]), request.id());
} }
EnumSet<TermVectorsRequest.Flag> flags = EnumSet.of(TermVectorsRequest.Flag.Positions, TermVectorsRequest.Flag.Offsets); EnumSet<TermVectorsRequest.Flag> flags = EnumSet.of(TermVectorsRequest.Flag.Positions, TermVectorsRequest.Flag.Offsets);
response.setFields(generatedFields, request.selectedFields(), flags, generatedFields); response.setFields(generatedFields, request.selectedFields(), flags, generatedFields);
@ -233,10 +235,10 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
@Override @Override
protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) { if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) {
assertThat(query, Matchers.instanceOf(BooleanQuery.class)); assertThat(query, instanceOf(BooleanQuery.class));
} else { } else {
// we rely on integration tests for a deeper check here // we rely on integration tests for a deeper check here
assertThat(query, Matchers.instanceOf(MoreLikeThisQuery.class)); assertThat(query, instanceOf(MoreLikeThisQuery.class));
} }
} }
@ -262,10 +264,21 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
queryBuilder.toQuery(createShardContext()); queryBuilder.toQuery(createShardContext());
fail("should have failed with IllegalArgumentException for field: " + unsupportedField); fail("should have failed with IllegalArgumentException for field: " + unsupportedField);
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
assertThat(e.getMessage(), Matchers.containsString("more_like_this doesn't support binary/numeric fields")); assertThat(e.getMessage(), containsString("more_like_this doesn't support binary/numeric fields"));
} }
} }
// Builds an MLT query through the QueryBuilders helper, round-trips it through
// parseQuery/toQuery, and verifies the settings survive into the Lucene
// MoreLikeThisQuery (fields, like-text, min term freq, max query terms).
@Test
public void testMoreLikeThisBuilder() throws Exception {
    Query parsedQuery = parseQuery(moreLikeThisQuery(new String[]{"name.first", "name.last"}, new String[]{"something"}, null).minTermFreq(1).maxQueryTerms(12).buildAsBytes()).toQuery(createShardContext());
    assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class));
    MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery;
    assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first"));
    assertThat(mltQuery.getLikeText(), equalTo("something"));
    assertThat(mltQuery.getMinTermFrequency(), equalTo(1));
    assertThat(mltQuery.getMaxQueryTerms(), equalTo(12));
}
@Test @Test
public void testItemSerialization() throws IOException { public void testItemSerialization() throws IOException {
Item expectedItem = generateRandomItem(); Item expectedItem = generateRandomItem();

View File

@ -28,7 +28,9 @@ import org.elasticsearch.index.search.MatchQuery;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
@ -101,6 +103,19 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
return query; return query;
} }
/**
 * Alternate serialized form: {@code fields} may be given as a single string
 * instead of an array; it must parse to the equivalent builder.
 */
@Override
protected Map<String, MultiMatchQueryBuilder> getAlternateVersions() {
    Map<String, MultiMatchQueryBuilder> alternateVersions = new HashMap<>();
    String query = "{\n" +
            "    \"multi_match\": {\n" +
            "        \"query\": \"foo bar\",\n" +
            "        \"fields\": \"myField\"\n" +
            "    }\n" +
            "}";
    alternateVersions.put(query, new MultiMatchQueryBuilder("foo bar", "myField"));
    return alternateVersions;
}
@Override @Override
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
// we rely on integration tests for deeper checks here // we rely on integration tests for deeper checks here
@ -133,6 +148,13 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
// expected // expected
} }
try {
new MultiMatchQueryBuilder("value", "field").type(null);
fail("type must not be null");
} catch (IllegalArgumentException e) {
// expected
}
} }
@Override @Override

View File

@ -19,12 +19,16 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
@ -47,6 +51,7 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
assertThat(query, instanceOf(PrefixQuery.class)); assertThat(query, instanceOf(PrefixQuery.class));
PrefixQuery prefixQuery = (PrefixQuery) query; PrefixQuery prefixQuery = (PrefixQuery) query;
assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName())); assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName()));
assertThat(prefixQuery.getPrefix().text(), equalTo(queryBuilder.value()));
} }
@Test @Test
@ -69,4 +74,15 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
// expected // expected
} }
} }
// Both spellings of the rewrite method name (snake_case and camelCase) must be
// accepted and resolve to the TopTermsBlendedFreqScoringRewrite implementation.
@Test
public void testBlendedRewriteMethod() throws IOException {
    for (String rewrite : Arrays.asList("top_terms_blended_freqs_10", "topTermsBlendedFreqs10")) {
        Query parsedQuery = parseQuery(prefixQuery("field", "val").rewrite(rewrite).buildAsBytes()).toQuery(createShardContext());
        assertThat(parsedQuery, instanceOf(PrefixQuery.class));
        PrefixQuery prefixQuery = (PrefixQuery) parsedQuery;
        assertThat(prefixQuery.getPrefix(), equalTo(new Term("field", "val")));
        assertThat(prefixQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class));
    }
}
} }

View File

@ -24,6 +24,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.common.lucene.all.AllTermQuery; import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.joda.time.DateTimeZone;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
@ -296,7 +297,37 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
public void testToQueryNumericRangeQuery() throws Exception { public void testToQueryNumericRangeQuery() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext()); Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext());
assertThat(query, instanceOf(NumericRangeQuery.class)); NumericRangeQuery fuzzyQuery = (NumericRangeQuery) query;
assertThat(fuzzyQuery.getMin().longValue(), equalTo(12l));
assertThat(fuzzyQuery.getMax().longValue(), equalTo(12l));
} }
/**
 * Verifies that a valid {@code time_zone} in a query_string query is parsed into
 * the builder, and that an unknown time zone id is rejected with an
 * IllegalArgumentException.
 */
@Test
public void testTimezone() throws Exception {
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    String queryAsString = "{\n" +
            "    \"query_string\":{\n" +
            "        \"time_zone\":\"Europe/Paris\",\n" +
            "        \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
            "    }\n" +
            "}";
    QueryBuilder<?> queryBuilder = parseQuery(queryAsString);
    assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class));
    QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder;
    assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris")));

    try {
        queryAsString = "{\n" +
                "    \"query_string\":{\n" +
                "        \"time_zone\":\"This timezone does not exist\",\n" +
                "        \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
                "    }\n" +
                "}";
        parseQuery(queryAsString);
        // fixed typo in message: "time_zome" -> "time_zone"
        fail("we expect a ParsingException as we are providing an unknown time_zone");
    } catch (IllegalArgumentException e) {
        // We expect this one
    }
}
} }

View File

@ -61,10 +61,27 @@ public class RandomQueryBuilder {
public static MultiTermQueryBuilder createMultiTermQuery(Random r) { public static MultiTermQueryBuilder createMultiTermQuery(Random r) {
// for now, only use String Rangequeries for MultiTerm test, numeric and date makes little sense // for now, only use String Rangequeries for MultiTerm test, numeric and date makes little sense
// see issue #12123 for discussion // see issue #12123 for discussion
// Prefix / Fuzzy / RegEx / Wildcard can go here later once refactored and they have random query generators switch(RandomInts.randomIntBetween(r, 0, 5)) {
RangeQueryBuilder query = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME); case 0:
query.from("a" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10)); RangeQueryBuilder stringRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME);
query.to("z" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10)); stringRangeQuery.from("a" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
return query; stringRangeQuery.to("z" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
return stringRangeQuery;
case 1:
RangeQueryBuilder numericRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME);
numericRangeQuery.from(RandomInts.randomIntBetween(r, 1, 100));
numericRangeQuery.to(RandomInts.randomIntBetween(r, 101, 200));
return numericRangeQuery;
case 2:
return new FuzzyQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME, RandomInts.randomInt(r, 1000));
case 3:
return new FuzzyQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME, RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
case 4:
return new PrefixQueryBuilderTests().createTestQueryBuilder();
case 5:
return new WildcardQueryBuilderTests().createTestQueryBuilder();
default:
throw new UnsupportedOperationException();
}
} }
} }

View File

@ -22,13 +22,18 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.lucene.BytesRefs;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.DateTimeZone; import org.joda.time.DateTimeZone;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.instanceOf; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.hamcrest.Matchers.*;
public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> { public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> {
@ -38,17 +43,10 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
// switch between numeric and date ranges // switch between numeric and date ranges
switch (randomIntBetween(0, 2)) { switch (randomIntBetween(0, 2)) {
case 0: case 0:
if (randomBoolean()) {
// use mapped integer field for numeric range queries // use mapped integer field for numeric range queries
query = new RangeQueryBuilder(INT_FIELD_NAME); query = new RangeQueryBuilder(INT_FIELD_NAME);
query.from(randomIntBetween(1, 100)); query.from(randomIntBetween(1, 100));
query.to(randomIntBetween(101, 200)); query.to(randomIntBetween(101, 200));
} else {
// use unmapped field for numeric range queries
query = new RangeQueryBuilder(randomAsciiOfLengthBetween(1, 10));
query.from(0.0 - randomDouble());
query.to(randomDouble());
}
break; break;
case 1: case 1:
// use mapped date field, using date string representation // use mapped date field, using date string representation
@ -83,14 +81,46 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
return query; return query;
} }
@Override
protected Map<String, RangeQueryBuilder> getAlternateVersions() {
Map<String, RangeQueryBuilder> alternateVersions = new HashMap<>();
RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder(INT_FIELD_NAME);
rangeQueryBuilder.from(randomIntBetween(1, 100)).to(randomIntBetween(101, 200));
rangeQueryBuilder.includeLower(randomBoolean());
rangeQueryBuilder.includeUpper(randomBoolean());
String query =
"{\n" +
" \"range\":{\n" +
" \"" + INT_FIELD_NAME + "\": {\n" +
" \"" + (rangeQueryBuilder.includeLower() ? "gte" : "gt") + "\": " + rangeQueryBuilder.from() + ",\n" +
" \"" + (rangeQueryBuilder.includeUpper() ? "lte" : "lt") + "\": " + rangeQueryBuilder.to() + "\n" +
" }\n" +
" }\n" +
"}";
alternateVersions.put(query, rangeQueryBuilder);
return alternateVersions;
}
@Override @Override
protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) { if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
assertThat(query, instanceOf(TermRangeQuery.class)); assertThat(query, instanceOf(TermRangeQuery.class));
TermRangeQuery termRangeQuery = (TermRangeQuery) query;
assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
assertThat(termRangeQuery.getLowerTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.from())));
assertThat(termRangeQuery.getUpperTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.to())));
assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower()));
assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper()));
} else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) { } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
//we can't properly test unmapped dates because LateParsingQuery is package private //we can't properly test unmapped dates because LateParsingQuery is package private
} else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) { } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) {
assertThat(query, instanceOf(NumericRangeQuery.class)); assertThat(query, instanceOf(NumericRangeQuery.class));
NumericRangeQuery numericRangeQuery = (NumericRangeQuery) query;
assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from()));
assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to()));
assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower()));
assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper()));
} else { } else {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@ -152,4 +182,147 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
query.from(1).to(10).timeZone("UTC"); query.from(1).to(10).timeZone("UTC");
query.toQuery(createShardContext()); query.toQuery(createShardContext());
} }
@Test
public void testToQueryNumericField() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext());
// since age is automatically registered in data, we encode it as numeric
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;
assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME));
assertThat(rangeQuery.getMin().intValue(), equalTo(23));
assertThat(rangeQuery.getMax().intValue(), equalTo(54));
assertThat(rangeQuery.includesMin(), equalTo(true));
assertThat(rangeQuery.includesMax(), equalTo(false));
}
@Test
public void testDateRangeQueryFormat() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
// We test 01/01/2012 from gte and 2030 for lt
String query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"01/01/2012\",\n" +
" \"lt\": \"2030\",\n" +
" \"format\": \"dd/MM/yyyy||yyyy\"\n" +
" }\n" +
" }\n" +
"}";
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
// Min value was 01/01/2012 (dd/MM/yyyy)
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00");
assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
// Max value was 2030 (yyyy)
DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00");
assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis()));
// Test Invalid format
query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"01/01/2012\",\n" +
" \"lt\": \"2030\",\n" +
" \"format\": \"yyyy\"\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(query).toQuery(createShardContext()).rewrite(null);
fail("A Range Query with a specific format but with an unexpected date should raise a ParsingException");
} catch (ElasticsearchParseException e) {
// We expect it
}
}
@Test
public void testDateRangeBoundaries() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"2014-11-05||/M\",\n" +
" \"lte\": \"2014-12-08||/d\"\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;
DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00");
assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
assertTrue(rangeQuery.includesMin());
DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00");
assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
assertTrue(rangeQuery.includesMax());
query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gt\": \"2014-11-05||/M\",\n" +
" \"lt\": \"2014-12-08||/d\"\n" +
" }\n" +
" }\n" +
"}";
parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
rangeQuery = (NumericRangeQuery) parsedQuery;
min = DateTime.parse("2014-11-30T23:59:59.999+00");
assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
assertFalse(rangeQuery.includesMin());
max = DateTime.parse("2014-12-08T00:00:00.000+00");
assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
assertFalse(rangeQuery.includesMax());
}
@Test
public void testDateRangeQueryTimezone() throws IOException {
long startDate = System.currentTimeMillis();
String query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"2012-01-01\",\n" +
" \"lte\": \"now\",\n" +
" \"time_zone\": \"+01:00\"\n" +
" }\n" +
" }\n" +
"}";
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
// Min value was 2012-01-01 (UTC) so we need to remove one hour
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
// Max value is when we started the test. So it should be some ms from now
DateTime max = new DateTime(startDate, DateTimeZone.UTC);
assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
// We should not have a big difference here (should be some ms)
assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));
query = "{\n" +
" \"range\" : {\n" +
" \"" + INT_FIELD_NAME + "\" : {\n" +
" \"gte\": \"0\",\n" +
" \"lte\": \"100\",\n" +
" \"time_zone\": \"-01:00\"\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(query).toQuery(createShardContext());
fail("A Range Query on a numeric field with a TimeZone should raise a ParsingException");
} catch (QueryShardException e) {
// We expect it
}
}
} }

View File

@ -27,6 +27,7 @@ import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBuilder> { public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBuilder> {
@ -58,6 +59,8 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
@Override @Override
protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
assertThat(query, instanceOf(RegexpQuery.class)); assertThat(query, instanceOf(RegexpQuery.class));
RegexpQuery regexpQuery = (RegexpQuery) query;
assertThat(regexpQuery.getField(), equalTo(queryBuilder.fieldName()));
} }
@Test @Test

View File

@ -22,8 +22,10 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Test;
import java.io.IOException; import java.io.IOException;
@ -53,4 +55,14 @@ public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBu
assertThat(termQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value()))); assertThat(termQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value())));
} }
} }
@Test(expected = ParsingException.class)
public void testTermArray() throws IOException {
String queryAsString = "{\n" +
" \"term\": {\n" +
" \"age\": [34, 35]\n" +
" }\n" +
"}";
parseQuery(queryAsString);
}
} }

View File

@ -31,6 +31,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -110,6 +111,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
// compare whether we have the expected list of terms returned // compare whether we have the expected list of terms returned
final List<Term> booleanTerms = new ArrayList<>(); final List<Term> booleanTerms = new ArrayList<>();
for (BooleanClause booleanClause : booleanQuery) { for (BooleanClause booleanClause : booleanQuery) {
assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class)); assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class));
Term term = ((TermQuery) booleanClause.getQuery()).getTerm(); Term term = ((TermQuery) booleanClause.getQuery()).getTerm();
booleanTerms.add(term); booleanTerms.add(term);
@ -212,15 +214,21 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
TermsQueryBuilder copy = assertSerialization(queryBuilder); TermsQueryBuilder copy = assertSerialization(queryBuilder);
assertTrue(queryBuilder.disableCoord()); assertTrue(queryBuilder.disableCoord());
assertTrue(copy.disableCoord()); assertTrue(copy.disableCoord());
Query luceneQuery = queryBuilder.toQuery(createShardContext());
assertThat(luceneQuery, instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) luceneQuery;
assertThat(booleanQuery.isCoordDisabled(), equalTo(true));
String randomMinShouldMatch = RandomPicks.randomFrom(random(), Arrays.asList("min_match", "min_should_match", "minimum_should_match")); String randomMinShouldMatch = RandomPicks.randomFrom(random(), Arrays.asList("min_match", "min_should_match", "minimum_should_match"));
query = "{\n" + query = "{\n" +
" \"terms\": {\n" + " \"terms\": {\n" +
" \"field\": [\n" + " \"field\": [\n" +
" \"blue\",\n" + " \"value1\",\n" +
" \"pill\"\n" + " \"value2\",\n" +
" \"value3\",\n" +
" \"value4\"\n" +
" ],\n" + " ],\n" +
" \"" + randomMinShouldMatch +"\": \"42%\"\n" + " \"" + randomMinShouldMatch +"\": \"25%\"\n" +
" }\n" + " }\n" +
"}"; "}";
try { try {
@ -231,8 +239,12 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
} }
queryBuilder = (TermsQueryBuilder) parseQuery(query, ParseFieldMatcher.EMPTY); queryBuilder = (TermsQueryBuilder) parseQuery(query, ParseFieldMatcher.EMPTY);
copy = assertSerialization(queryBuilder); copy = assertSerialization(queryBuilder);
assertEquals("42%", queryBuilder.minimumShouldMatch()); assertEquals("25%", queryBuilder.minimumShouldMatch());
assertEquals("42%", copy.minimumShouldMatch()); assertEquals("25%", copy.minimumShouldMatch());
luceneQuery = queryBuilder.toQuery(createShardContext());
assertThat(luceneQuery, instanceOf(BooleanQuery.class));
booleanQuery = (BooleanQuery) luceneQuery;
assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(1));
} }
@Override @Override
@ -241,7 +253,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
try { try {
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject(); builder.startObject();
builder.array(termsPath, randomTerms.toArray(new Object[0])); builder.array(termsPath, randomTerms.toArray(new Object[randomTerms.size()]));
builder.endObject(); builder.endObject();
json = builder.string(); json = builder.string();
} catch (IOException ex) { } catch (IOException ex) {
@ -276,5 +288,18 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
assertEquals(Arrays.asList(1l, 3l, 4l), values); assertEquals(Arrays.asList(1l, 3l, 4l), values);
} }
} }
@Test
public void testTermsQueryWithMultipleFields() throws IOException {
String query = XContentFactory.jsonBuilder().startObject()
.startObject("terms").array("foo", 123).array("bar", 456).endObject()
.endObject().string();
try {
parseQuery(query);
fail("parsing should have failed");
} catch (ParsingException ex) {
assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields"));
}
}
} }

View File

@ -25,6 +25,7 @@ import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQueryBuilder> { public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQueryBuilder> {
@ -49,6 +50,10 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQue
@Override @Override
protected void doAssertLuceneQuery(WildcardQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(WildcardQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
assertThat(query, instanceOf(WildcardQuery.class)); assertThat(query, instanceOf(WildcardQuery.class));
WildcardQuery wildcardQuery = (WildcardQuery) query;
assertThat(wildcardQuery.getField(), equalTo(queryBuilder.fieldName()));
assertThat(wildcardQuery.getTerm().field(), equalTo(queryBuilder.fieldName()));
assertThat(wildcardQuery.getTerm().text(), equalTo(queryBuilder.value()));
} }
@Test @Test

View File

@ -23,7 +23,6 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesService;
@ -78,7 +77,7 @@ public class CustomQueryParserIT extends ESIntegTestCase {
@Test //see #11120 @Test //see #11120
public void testConstantScoreParsesFilter() throws Exception { public void testConstantScoreParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser(); IndexQueryParserService queryParser = queryParser();
Query q = queryParser.parse(constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder())).query(); Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
Query inner = ((ConstantScoreQuery) q).getQuery(); Query inner = ((ConstantScoreQuery) q).getQuery();
assertThat(inner, instanceOf(DummyQueryParserPlugin.DummyQuery.class)); assertThat(inner, instanceOf(DummyQueryParserPlugin.DummyQuery.class));
assertEquals(true, ((DummyQueryParserPlugin.DummyQuery) inner).isFilter); assertEquals(true, ((DummyQueryParserPlugin.DummyQuery) inner).isFilter);
@ -88,11 +87,11 @@ public class CustomQueryParserIT extends ESIntegTestCase {
public void testBooleanParsesFilter() throws Exception { public void testBooleanParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser(); IndexQueryParserService queryParser = queryParser();
// single clause, serialized as inner object // single clause, serialized as inner object
Query q = queryParser.parse(boolQuery() Query q = boolQuery()
.should(new DummyQueryParserPlugin.DummyQueryBuilder()) .should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder()) .must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder()) .filter(new DummyQueryParserPlugin.DummyQueryBuilder())
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder())).query(); .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
assertThat(q, instanceOf(BooleanQuery.class)); assertThat(q, instanceOf(BooleanQuery.class));
BooleanQuery bq = (BooleanQuery) q; BooleanQuery bq = (BooleanQuery) q;
assertEquals(4, bq.clauses().size()); assertEquals(4, bq.clauses().size());
@ -113,11 +112,11 @@ public class CustomQueryParserIT extends ESIntegTestCase {
} }
// multiple clauses, serialized as inner arrays // multiple clauses, serialized as inner arrays
q = queryParser.parse(boolQuery() q = boolQuery()
.should(new DummyQueryParserPlugin.DummyQueryBuilder()).should(new DummyQueryParserPlugin.DummyQueryBuilder()) .should(new DummyQueryParserPlugin.DummyQueryBuilder()).should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder()).must(new DummyQueryParserPlugin.DummyQueryBuilder()) .must(new DummyQueryParserPlugin.DummyQueryBuilder()).must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder()).filter(new DummyQueryParserPlugin.DummyQueryBuilder()) .filter(new DummyQueryParserPlugin.DummyQueryBuilder()).filter(new DummyQueryParserPlugin.DummyQueryBuilder())
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder())).query(); .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
assertThat(q, instanceOf(BooleanQuery.class)); assertThat(q, instanceOf(BooleanQuery.class));
bq = (BooleanQuery) q; bq = (BooleanQuery) q;
assertEquals(8, bq.clauses().size()); assertEquals(8, bq.clauses().size());

View File

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
public class MultiMatchQueryTests extends ESSingleNodeTestCase {
private IndexQueryParserService queryParser;
private IndexService indexService;
@Before
public void setup() throws IOException {
IndexService indexService = createIndex("test");
MapperService mapperService = indexService.mapperService();
String mapping = "{\n" +
" \"person\":{\n" +
" \"properties\":{\n" +
" \"name\":{\n" +
" \"properties\":{\n" +
" \"first\": {\n" +
" \"type\":\"string\"\n" +
" }," +
" \"last\": {\n" +
" \"type\":\"string\"\n" +
" }" +
" }" +
" }\n" +
" }\n" +
" }\n" +
"}";
mapperService.merge("person", new CompressedXContent(mapping), true, false);
this.indexService = indexService;
queryParser = indexService.queryParserService();
}
public void testCrossFieldMultiMatchQuery() throws IOException {
QueryShardContext queryShardContext = new QueryShardContext(new Index("test"), queryParser);
queryShardContext.setAllowUnmappedFields(true);
Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) {
Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery);
BooleanQuery.Builder expected = new BooleanQuery.Builder();
expected.add(new TermQuery(new Term("foobar", "banon")), BooleanClause.Occur.SHOULD);
Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2);
Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3);
expected.add(new DisjunctionMaxQuery(Arrays.<Query>asList(tq1, tq2), 0f), BooleanClause.Occur.SHOULD);
assertEquals(expected.build(), rewrittenQuery);
}
}
}

View File

@ -19,16 +19,16 @@
package org.elasticsearch.search.query; package org.elasticsearch.search.query;
import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchHits;
@ -41,6 +41,7 @@ import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;

View File

@ -1,35 +0,0 @@
{
filtered:{
query:{
term:{
"name.first":"shay"
}
},
filter:{
bool:{
must:[
{
term:{
"name.first":"shay1"
}
},
{
term:{
"name.first":"shay4"
}
}
],
must_not:{
term:{
"name.first":"shay2"
}
},
should:{
term:{
"name.first":"shay3"
}
}
}
}
}
}

View File

@ -1,30 +0,0 @@
{
bool:{
must:[
{
query_string:{
default_field:"content",
query:"test1"
}
},
{
query_string:{
default_field:"content",
query:"test4"
}
}
],
must_not:{
query_string:{
default_field:"content",
query:"test2"
}
},
should:{
query_string:{
default_field:"content",
query:"test3"
}
}
}
}

View File

@ -1,15 +0,0 @@
{
"boosting":{
"positive":{
"term":{
"field1":"value1"
}
},
"negative":{
"term":{
"field2":"value2"
}
},
"negative_boost":0.2
}
}

View File

@ -1,12 +0,0 @@
{
"child":{
"properties":{
"field":{
"type":"string"
}
},
"_parent" : {
"type" : "person"
}
}
}

View File

@ -1,9 +0,0 @@
{
constant_score:{
filter:{
term:{
"name.last":"banon"
}
}
}
}

View File

@ -1,43 +0,0 @@
{
name:{
first:"shay",
last:"banon"
},
address:{
first:{
location:"first location"
},
last:{
location:"last location"
}
},
age:32,
birthDate:"1977-11-15",
nerd:true,
dogs:["buck", "mia"],
complex:[
{
value1:"value1"
},
{
value2:"value2"
}
],
complex2:[
[
{
value1:"value1"
}
],
[
{
value2:"value2"
}
]
],
nullValue:null,
"location":{
"lat":1.1,
"lon":1.2
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"born" : {
"gte": "01/01/2012",
"lt": "2030",
"format": "dd/MM/yyyy||yyyy"
}
}
}
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"born" : {
"gte": "01/01/2012",
"lt": "2030",
"format": "yyyy"
}
}
}
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"born" : {
"gte": "2012-01-01",
"lte": "now",
"time_zone": "+01:00"
}
}
}
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"age" : {
"gte": "0",
"lte": "100",
"time_zone": "-01:00"
}
}
}
}
}

View File

@ -1,8 +0,0 @@
{
"range" : {
"born" : {
"gt": "2014-11-05||/M",
"lt": "2014-12-08||/d"
}
}
}

View File

@ -1,8 +0,0 @@
{
"range" : {
"born" : {
"gte": "2014-11-05||/M",
"lte": "2014-12-08||/d"
}
}
}

View File

@ -1,9 +0,0 @@
{
"range" : {
"born" : {
"gte": "01/01/2012",
"lt": "2030",
"format": "dd/MM/yyyy||yyyy"
}
}
}

View File

@ -1,9 +0,0 @@
{
"range" : {
"born" : {
"gte": "01/01/2012",
"lt": "2030",
"format": "yyyy"
}
}
}

View File

@ -1,9 +0,0 @@
{
"range" : {
"born" : {
"gte": "2012-01-01",
"lte": "now",
"time_zone": "+01:00"
}
}
}

View File

@ -1,9 +0,0 @@
{
"range" : {
"age" : {
"gte": "0",
"lte": "100",
"time_zone": "-01:00"
}
}
}

View File

@ -1,18 +0,0 @@
{
dis_max:{
tie_breaker:0.7,
boost:1.2,
queries:[
{
term:{
"name.first":"first"
}
},
{
term:{
"name.last":"last"
}
}
]
}
}

View File

@ -1,14 +0,0 @@
{
"dis_max":{
"queries":[
{
"prefix":{
"name.first":{
"value":"sh",
"boost":1.2
}
}
}
]
}
}

View File

@ -1,9 +0,0 @@
{
field:{
age:{
query:34,
boost:2.0,
enable_position_increments:false
}
}
}

View File

@ -1,24 +0,0 @@
{
"function_score":{
"query":{
"term":{
"name.last":"banon"
}
},
"functions": [
{
"weight": 3,
"filter": {
"term":{
"name.last":"banon"
}
}
}
],
"boost" : 3,
"score_mode" : "avg",
"max_boost" : 10
}
}

View File

@ -1,10 +0,0 @@
{
"fuzzy":{
"name.first":{
"value":"sh",
"fuzziness": "AUTO",
"prefix_length":1,
"boost":2.0
}
}
}

View File

@ -1,9 +0,0 @@
{
"fuzzy":{
"age":{
"value":12,
"fuzziness":5,
"boost":2.0
}
}
}

View File

@ -1,5 +0,0 @@
{
"fuzzy":{
"name.first":"sh"
}
}

View File

@ -1,14 +0,0 @@
{
"geo_shape" : {
"country" : {
"shape" : {
"type" : "Envelope",
"coordinates" : [
[-45, 45],
[45, -45]
]
},
"relation" : "intersects"
}
}
}

View File

@ -1,14 +0,0 @@
{
"geo_shape" : {
"country" : {
"shape" : {
"type" : "Envelope",
"coordinates" : [
[-45, 45],
[45, -45]
]
},
"relation" : "intersects"
}
}
}

View File

@ -1,9 +0,0 @@
{
"geo_bounding_box":{
"location":{
"top_left":[-70, 40],
"bottom_right":[-80, 30]
},
"_name":"test"
}
}

View File

@ -1,8 +0,0 @@
{
"geo_bounding_box":{
"location":{
"top_left":[-70, 40],
"bottom_right":[-80, 30]
}
}
}

View File

@ -1,14 +0,0 @@
{
"geo_bounding_box":{
"location":{
"top_left":{
"lat":40,
"lon":-70
},
"bottom_right":{
"lat":30,
"lon":-80
}
}
}
}

View File

@ -1,8 +0,0 @@
{
"geo_bounding_box":{
"location":{
"top_left":"40, -70",
"bottom_right":"30, -80"
}
}
}

View File

@ -1,8 +0,0 @@
{
"geo_bounding_box":{
"location":{
"top_left":"drn5x1g8cu2y",
"bottom_right":"30, -80"
}
}
}

View File

@ -1,8 +0,0 @@
{
"geo_bounding_box":{
"location":{
"top_right":"40, -80",
"bottom_left":"30, -70"
}
}
}

View File

@ -1,10 +0,0 @@
{
"geo_bounding_box":{
"location":{
"right": -80,
"top": 40,
"left": -70,
"bottom": 30
}
}
}

View File

@ -1,10 +0,0 @@
{
"geo_distance":{
"distance":"12mi",
"location":{
"lat":40,
"lon":-70
},
"_name":"test"
}
}

View File

@ -1,9 +0,0 @@
{
"geo_distance":{
"distance":"12mi",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,10 +0,0 @@
{
"geo_distance":{
"distance":19.312128,
"unit":"km",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,9 +0,0 @@
{
"geo_distance":{
"distance":"19.312128km",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,10 +0,0 @@
{
"geo_distance":{
"distance":"12mi",
"unit":"km",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,6 +0,0 @@
{
"geo_distance":{
"distance":"12mi",
"location":[-70, 40]
}
}

View File

@ -1,6 +0,0 @@
{
"geo_distance":{
"distance":"12mi",
"location":"40, -70"
}
}

View File

@ -1,6 +0,0 @@
{
"geo_distance":{
"distance":"12mi",
"location":"drn5x1g8cu2y"
}
}

View File

@ -1,10 +0,0 @@
{
"geo_distance":{
"distance":12,
"unit":"mi",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,10 +0,0 @@
{
"geo_distance":{
"distance":"12",
"unit":"mi",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,9 +0,0 @@
{
"geo_distance":{
"distance":"19.312128",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,9 +0,0 @@
{
"geo_distance":{
"distance":19.312128,
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,10 +0,0 @@
{
"geo_distance":{
"distance":"19.312128",
"unit":"km",
"location":{
"lat":40,
"lon":-70
}
}
}

View File

@ -1,12 +0,0 @@
{
"geo_polygon":{
"location":{
"points":[
[-70, 40],
[-80, 30],
[-90, 20]
]
},
"_name":"test"
}
}

View File

@ -1,11 +0,0 @@
{
"geo_polygon":{
"location":{
"points":[
[-70, 40],
[-80, 30],
[-90, 20]
]
}
}
}

View File

@ -1,20 +0,0 @@
{
"geo_polygon":{
"location":{
"points":[
{
"lat":40,
"lon":-70
},
{
"lat":30,
"lon":-80
},
{
"lat":20,
"lon":-90
}
]
}
}
}

View File

@ -1,11 +0,0 @@
{
"geo_polygon":{
"location":{
"points":[
"40, -70",
"30, -80",
"20, -90"
]
}
}
}

View File

@ -1,11 +0,0 @@
{
"geo_polygon":{
"location":{
"points":[
"drn5x1g8cu2y",
"30, -80",
"20, -90"
]
}
}
}

View File

@ -1,19 +0,0 @@
{
"filtered":{
"filter":{
"and" : {
"filters" : [
{
"has_child" : {
"type" : "child",
"query" : {
"match_all" : {}
}
}
}
],
"_cache" : true
}
}
}
}

View File

@ -1,13 +0,0 @@
{
"filtered":{
"filter":{
"has_child" : {
"type" : "child",
"query" : {
"match_all" : {}
},
"_cache" : true
}
}
}
}

View File

@ -1,15 +0,0 @@
{
"person":{
"properties":{
"location":{
"type":"geo_point"
},
"country" : {
"type" : "geo_shape"
},
"born":{
"type":"date"
}
}
}
}

View File

@ -1,8 +0,0 @@
{
"match" : {
"message" : {
"query" : "this is a test",
"type" : "doesNotExist"
}
}
}

View File

@ -1 +0,0 @@
{ "match": { "body": { "query": "fuzzy", "fuzziness": 1, "fuzzy_transpositions": true }} }

View File

@ -1 +0,0 @@
{ "match": { "body": { "query": "fuzzy", "fuzziness": 1, "fuzzy_transpositions": false }} }

View File

@ -1,5 +0,0 @@
{
match_all:{
boost:1.2
}
}

View File

@ -1,3 +0,0 @@
{
"match_all": {}
}

View File

@ -1,3 +0,0 @@
{
"match_all": []
}

View File

@ -1,22 +0,0 @@
{
"more_like_this" : {
"fields" : ["name.first", "name.last"],
"like_text": "Apache Lucene",
"like" : [
{
"_index" : "test",
"_type" : "person",
"_id" : "1"
},
{
"_index" : "test",
"_type" : "person",
"_id" : "2"
}
],
"ids" : ["3", "4"],
"include" : true,
"min_term_freq" : 1,
"max_query_terms" : 12
}
}

View File

@ -1,8 +0,0 @@
{
"more_like_this" : {
"fields" : ["name.first", "name.last"],
"like_text" : "something",
"min_term_freq" : 1,
"max_query_terms" : 12
}
}

View File

@ -1,7 +0,0 @@
{
"multi_match": {
"query": "foo bar",
"fields": [ "myField", "otherField" ],
"type":"doesNotExist"
}
}

View File

@ -1,6 +0,0 @@
{
"multi_match": {
"query": "foo bar",
"fields": "myField"
}
}

View File

@ -1,6 +0,0 @@
{
"multi_match": {
"query": "foo bar",
"fields": [ "myField", "otherField" ]
}
}

View File

@ -1,9 +0,0 @@
{
"not":{
"query":{
"term":{
"name.first":"shay1"
}
}
}
}

View File

@ -1,7 +0,0 @@
{
"not":{
"term":{
"name.first":"shay1"
}
}
}

View File

@ -1,7 +0,0 @@
{
"not":{
"term":{
"name.first":"shay1"
}
}
}

View File

@ -1,8 +0,0 @@
{
"prefix":{
"name.first":{
"value":"sh",
"boost":1.2
}
}
}

View File

@ -1,8 +0,0 @@
{
prefix:{
"name.first":{
prefix:"sh",
boost:2.0
}
}
}

View File

@ -1,5 +0,0 @@
{
prefix:{
"name.first":"sh"
}
}

View File

@ -1,7 +0,0 @@
{
query_string:{
fields:["name.*"],
use_dis_max:false,
query:"test"
}
}

Some files were not shown because too many files have changed in this diff Show More