Mirror of https://github.com/apache/lucene.git
Commit 9d8d3b0f29: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr

@@ -861,7 +861,7 @@ def testSolrExample(unpackPath, javaPath, isSrc):
   if s.find('"numFound":3,"start":0') == -1:
     print('FAILED: response is:\n%s' % s)
     raise RuntimeError('query on solr example instance failed')
-  s = load('http://localhost:8983/v2/cores')
+  s = load('http://localhost:8983/api/cores')
   if s.find('"status":0,') == -1:
     print('FAILED: response is:\n%s' % s)
     raise RuntimeError('query api v2 on solr example instance failed')
@@ -136,7 +136,7 @@ org.apache.directory.server.version = 2.0.0-M15
 /org.apache.directory.server/apacheds-protocol-shared = ${org.apache.directory.server.version}
 /org.apache.directory.server/apacheds-xdbm-partition = ${org.apache.directory.server.version}

-org.apache.hadoop.version = 2.7.2
+org.apache.hadoop.version = 2.7.4
 /org.apache.hadoop/hadoop-annotations = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-auth = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-common = ${org.apache.hadoop.version}

@@ -252,6 +252,7 @@ org.gagravarr.vorbis.java.version = 0.8

 org.mortbay.jetty.version = 6.1.26
 /org.mortbay.jetty/jetty = ${org.mortbay.jetty.version}
+/org.mortbay.jetty/jetty-sslengine = ${org.mortbay.jetty.version}
 /org.mortbay.jetty/jetty-util = ${org.mortbay.jetty.version}

 /org.noggit/noggit = 0.8
@@ -134,12 +134,12 @@ class Geo3DUtil {
       final GeoPolygon component = fromPolygon(polygons[0]);
       if (component == null) {
         // Polygon is degenerate
-        shape = new GeoCompositePolygon();
+        shape = new GeoCompositePolygon(PlanetModel.WGS84);
       } else {
         shape = component;
       }
     } else {
-      final GeoCompositePolygon poly = new GeoCompositePolygon();
+      final GeoCompositePolygon poly = new GeoCompositePolygon(PlanetModel.WGS84);
       for (final Polygon p : polygons) {
         final GeoPolygon component = fromPolygon(p);
         if (component != null) {
@@ -22,7 +22,7 @@ package org.apache.lucene.spatial3d.geom;
  *
  * @lucene.internal
  */
-public abstract class BasePlanetObject {
+public abstract class BasePlanetObject implements PlanetObject {

   /** This is the planet model embedded in all objects derived from this
    * class. */
@@ -35,7 +35,7 @@ public abstract class BasePlanetObject {
     this.planetModel = planetModel;
   }

-  /** Returns the {@link PlanetModel} provided when this shape was created. */
+  @Override
   public PlanetModel getPlanetModel() {
     return planetModel;
   }
@@ -94,6 +94,9 @@ abstract class GeoBaseAreaShape extends GeoBaseMembershipShape implements GeoAre

   @Override
   public int getRelationship(GeoShape geoShape) {
+    if (!geoShape.getPlanetModel().equals(planetModel)) {
+      throw new IllegalArgumentException("Cannot relate shapes with different planet models.");
+    }
     final int insideGeoAreaShape = isShapeInsideGeoAreaShape(geoShape);
     if (insideGeoAreaShape == SOME_INSIDE) {
       return GeoArea.OVERLAPS;
@@ -35,7 +35,8 @@ abstract class GeoBaseCompositeAreaShape<T extends GeoAreaShape> extends GeoBase
   /**
    * Constructor.
    */
-  public GeoBaseCompositeAreaShape() {
+  public GeoBaseCompositeAreaShape(PlanetModel planetModel) {
+    super(planetModel);
   }

   @Override
@@ -50,6 +51,9 @@ abstract class GeoBaseCompositeAreaShape<T extends GeoAreaShape> extends GeoBase

   @Override
   public int getRelationship(GeoShape geoShape) {
+    if (!geoShape.getPlanetModel().equals(planetModel)) {
+      throw new IllegalArgumentException("Cannot relate shapes with different planet models.");
+    }
     final int insideGeoAreaShape = isShapeInsideGeoAreaShape(geoShape);
     if (insideGeoAreaShape == SOME_INSIDE) {
       return GeoArea.OVERLAPS;
@@ -29,7 +29,8 @@ abstract class GeoBaseCompositeMembershipShape<T extends GeoMembershipShape>
   /**
    * Constructor.
    */
-  GeoBaseCompositeMembershipShape() {
+  GeoBaseCompositeMembershipShape(PlanetModel planetModel) {
+    super(planetModel);
   }

   @Override
@@ -27,7 +27,7 @@ import java.util.List;
  * @param <T> is the type of GeoShapes of the composite.
  * @lucene.experimental
  */
-public abstract class GeoBaseCompositeShape<T extends GeoShape> implements GeoShape {
+public abstract class GeoBaseCompositeShape<T extends GeoShape> extends BasePlanetObject implements GeoShape {

   /**
    * Shape's container
@@ -37,7 +37,8 @@ public abstract class GeoBaseCompositeShape<T extends GeoShape> implements GeoSh
   /**
    * Constructor.
    */
-  public GeoBaseCompositeShape() {
+  public GeoBaseCompositeShape(PlanetModel planetModel) {
+    super(planetModel);
   }

   /**
@@ -46,6 +47,9 @@ public abstract class GeoBaseCompositeShape<T extends GeoShape> implements GeoSh
    * @param shape is the shape to add.
    */
   public void addShape(final T shape) {
+    if (!shape.getPlanetModel().equals(planetModel)) {
+      throw new IllegalArgumentException("Cannot add a shape into a composite with different planet models.");
+    }
     shapes.add(shape);
   }

@@ -108,7 +112,7 @@ public abstract class GeoBaseCompositeShape<T extends GeoShape> implements GeoSh

   @Override
   public int hashCode() {
-    return shapes.hashCode();
+    return super.hashCode() + shapes.hashCode();
   }

   @Override
@@ -116,6 +120,6 @@ public abstract class GeoBaseCompositeShape<T extends GeoShape> implements GeoSh
     if (!(o instanceof GeoBaseCompositeShape<?>))
      return false;
     GeoBaseCompositeShape<?> other = (GeoBaseCompositeShape<?>) o;
-    return shapes.equals(other.shapes);
+    return super.equals(other) && shapes.equals(other.shapes);
   }
 }
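Taken together, the changes above bind every composite to a PlanetModel at construction time and make addShape() reject members built against a different model, while equals()/hashCode() now factor in the planet model through the BasePlanetObject super calls. A minimal sketch of the resulting behavior (points1/points2 stand for suitable List<GeoPoint> ring lists, as in the tests further below; this example is illustrative, not part of the commit):

    GeoCompositeMembershipShape composite = new GeoCompositeMembershipShape(PlanetModel.SPHERE);
    GeoPolygon onSphere = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points1);
    composite.addShape(onSphere);  // OK: planet models match
    GeoPolygon onWgs84 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points2);
    composite.addShape(onWgs84);   // throws IllegalArgumentException: different planet models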
@@ -27,7 +27,15 @@ public class GeoCompositeAreaShape extends GeoBaseCompositeAreaShape<GeoAreaShap
   /**
    * Constructor.
    */
-  public GeoCompositeAreaShape() {
+  public GeoCompositeAreaShape(PlanetModel planetModel) {
+    super(planetModel);
   }

+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof GeoCompositeAreaShape))
+      return false;
+    return super.equals(o);
+  }
+
   @Override
@@ -26,7 +26,15 @@ public class GeoCompositeMembershipShape extends GeoBaseCompositeMembershipShape
   /**
    * Constructor.
    */
-  public GeoCompositeMembershipShape() {
+  public GeoCompositeMembershipShape(PlanetModel planetModel) {
+    super(planetModel);
   }

+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof GeoCompositeMembershipShape))
+      return false;
+    return super.equals(o);
+  }
+
   @Override
@@ -25,7 +25,15 @@ public class GeoCompositePolygon extends GeoBaseCompositeAreaShape<GeoPolygon> i
   /**
    * Constructor.
    */
-  public GeoCompositePolygon() {
+  public GeoCompositePolygon(PlanetModel planetModel) {
+    super(planetModel);
   }

+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof GeoCompositePolygon))
+      return false;
+    return super.equals(o);
+  }
+
   @Override
@@ -39,6 +39,11 @@ class GeoDegeneratePoint extends GeoPoint implements GeoBBox, GeoCircle {
     this.edgePoints = new GeoPoint[]{this};
   }

+  @Override
+  public PlanetModel getPlanetModel() {
+    return planetModel;
+  }
+
   @Override
   public GeoBBox expand(final double angle) {
     final double newTopLat = latitude + angle;
@@ -336,19 +336,19 @@ public class GeoPolygonFactory {
       final SidedPlane initialPlane = new SidedPlane(testPoint, filteredPointList.get(0), filteredPointList.get(1));
       // We don't know if this is the correct siding choice.  We will only know as we build the complex polygon.
       // So we need to be prepared to try both possibilities.
-      GeoCompositePolygon rval = new GeoCompositePolygon();
+      GeoCompositePolygon rval = new GeoCompositePolygon(planetModel);
       MutableBoolean seenConcave = new MutableBoolean();
       if (buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, initialPlane, holes, testPoint) == false) {
         // The testPoint was within the shape.  Was that intended?
         if (testPointInside) {
           // Yes: build it for real
-          rval = new GeoCompositePolygon();
+          rval = new GeoCompositePolygon(planetModel);
           seenConcave = new MutableBoolean();
           buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, initialPlane, holes, null);
           return rval;
         }
         // No: do the complement and return that.
-        rval = new GeoCompositePolygon();
+        rval = new GeoCompositePolygon(planetModel);
         seenConcave = new MutableBoolean();
         buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, new SidedPlane(initialPlane), holes, null);
         return rval;
@@ -359,7 +359,7 @@ public class GeoPolygonFactory {
         return rval;
       }
       // No: return the complement
-      rval = new GeoCompositePolygon();
+      rval = new GeoCompositePolygon(planetModel);
       seenConcave = new MutableBoolean();
       buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, new SidedPlane(initialPlane), holes, null);
       return rval;
@@ -23,7 +23,7 @@ package org.apache.lucene.spatial3d.geom;
  *
  * @lucene.experimental
  */
-public interface GeoShape extends Membership {
+public interface GeoShape extends Membership, PlanetObject {

   /**
    * Return a sample point that is on the outside edge/boundary of the shape.
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.spatial3d.geom;
+
+/**
+ * Relates all Geo3d shape with a specific {@link PlanetModel}.
+ *
+ * @lucene.experimental
+ */
+public interface PlanetObject {
+
+  /** Returns the {@link PlanetModel} provided when this shape was created. */
+  PlanetModel getPlanetModel();
+}
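With the PlanetObject interface above (implemented by BasePlanetObject, and extended by XYZSolid below), every Geo3d shape can report the model it was built against, which is what the new getRelationship() guards rely on. A hedged sketch of calling code (shapeA and shapeB stand for any two GeoAreaShape instances; illustrative only):

    if (shapeA.getPlanetModel().equals(shapeB.getPlanetModel())) {
      int rel = shapeA.getRelationship(shapeB);  // e.g. GeoArea.OVERLAPS or GeoArea.WITHIN
    } else {
      // shapeA.getRelationship(shapeB) would throw IllegalArgumentException here
    }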
@@ -21,6 +21,6 @@ package org.apache.lucene.spatial3d.geom;
  *
  * @lucene.internal
  */
-public interface XYZSolid extends GeoArea {
+public interface XYZSolid extends GeoArea, PlanetObject {
 }
@@ -833,7 +833,7 @@ public class CompositeGeoPolygonRelationshipsTest {
     points2.add(point8);
     GeoPolygon p1 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points1);
     GeoPolygon p2 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points2);
-    GeoCompositeMembershipShape compositeMembershipShape = new GeoCompositeMembershipShape();
+    GeoCompositeMembershipShape compositeMembershipShape = new GeoCompositeMembershipShape(PlanetModel.SPHERE);
     compositeMembershipShape.addShape(p1);
     compositeMembershipShape.addShape(p2);
     return compositeMembershipShape;
@@ -441,7 +441,7 @@ public class GeoPolygonTest {

     PlanetModel pm = new PlanetModel(0.7563871189161702, 1.2436128810838298);
     // Build the polygon
-    GeoCompositeMembershipShape c = new GeoCompositeMembershipShape();
+    GeoCompositeMembershipShape c = new GeoCompositeMembershipShape(pm);
     List<GeoPoint> points1 = new ArrayList<>();
     points1.add(new GeoPoint(pm, 0.014071770744627236, 0.011030818292803128));
     points1.add(new GeoPoint(pm, 0.006772117088906782, -0.0012531892445234592));
@@ -500,7 +500,7 @@ shape:
     */
     PlanetModel pm = new PlanetModel(0.8568069516722363, 1.1431930483277637);
     // Build the polygon
-    GeoCompositeMembershipShape c = new GeoCompositeMembershipShape();
+    GeoCompositeMembershipShape c = new GeoCompositeMembershipShape(pm);
     List<GeoPoint> points1 = new ArrayList<>();
     points1.add(new GeoPoint(pm, 1.1577814487635816, 1.6283601832010004));
     points1.add(new GeoPoint(pm, 0.6664570999069251, 2.0855825542851574));
@@ -626,7 +626,7 @@ shape:
     points.add(p1);

     final BitSet internal = new BitSet();
-    final GeoCompositePolygon rval = new GeoCompositePolygon();
+    final GeoCompositePolygon rval = new GeoCompositePolygon(PlanetModel.WGS84);
     final GeoPolygonFactory.MutableBoolean mutableBoolean = new GeoPolygonFactory.MutableBoolean();

     boolean result = GeoPolygonFactory.buildPolygonShape(rval, mutableBoolean, PlanetModel.WGS84, points, internal, 0, 1,
@@ -380,7 +380,7 @@ public class RandomGeoShapeGenerator extends LuceneTestCase {
     while (iterations < MAX_SHAPE_ITERATIONS) {
       iterations++;
       int numberShapes = random().nextInt(3) + 2;
-      GeoCompositeAreaShape collection = new GeoCompositeAreaShape();
+      GeoCompositeAreaShape collection = new GeoCompositeAreaShape(planetModel);
       for(int i=0; i<numberShapes;i++){
         GeoPolygon member = convexPolygon(planetModel, constraints);
         if (member != null){
@@ -829,7 +829,7 @@ public class SimpleGeoPolygonRelationshipsTest {
     points2.add(point7);
     points2.add(point8);
     GeoPolygon pol2 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points2);
-    GeoCompositeMembershipShape composite = new GeoCompositeMembershipShape();
+    GeoCompositeMembershipShape composite = new GeoCompositeMembershipShape(PlanetModel.SPHERE);
     composite.addShape(pol1);
     composite.addShape(pol2);
     return composite;
@@ -884,6 +884,8 @@ public class AssertingLeafReader extends FilterLeafReader {
       assertStats(maxDoc);
     }

+    public PointValues getWrapped() { return in; }
+
     private void assertStats(int maxDoc) {
       assert in.size() > 0;
       assert in.getDocCount() > 0;
@@ -95,9 +95,6 @@ Bug Fixes
 * SOLR-11190: GraphQuery also supports string fields which are indexed=false and docValues=true. Please refer to the
   Javadocs for DocValuesTermsQuery for it's performance characteristics. (Karthik Ramachandran, Varun Thacker)

-* SOLR-11228: Exclude static html files in the partials directory from authentication and authorization checks. The UI
-  will open correctly with kerberos enabled (Ishan Chattopadhyaya, Varun Thacker)
-
 * SOLR-11084: Issue with starting script with solr.home (-s) == solr (Leil Ireson, Amrit Sarkar via Erick Erickson)

 Optimizations
@@ -296,6 +293,9 @@ Upgrading from Solr 6.x
 * SOLR-11239: The use of maxShardsPerNode is not supported when a cluster policy is in effect or
   when a collection specific policy is specified during collection creation.

+* V2 APIs are now available at /api, in addition to /v2 (which is now deprecated). Legacy APIs continue to remain
+  available at /solr.
+
 New Features
 ----------------------
 * SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. (ab)
@@ -375,6 +375,8 @@ New Features
 * SOLR-10939: Add support for PointsFields to {!join} query. Joined fields should
   also have docValues enabled. (yonik)

+* SOLR-11173 TermsComponent support for Points fields. (yonik)
+
 Bug Fixes
 ----------------------
 * SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509.
@@ -469,6 +471,8 @@ Bug Fixes

 * SOLR-11243: Replica Placement rules are ignored if a cluster policy exists. (shalin)

+* SOLR-11268: AtomicUpdateProcessor complains missing UpdateLog (noble, Ishan Chattopadhyaya)
+
 Optimizations
 ----------------------

@@ -678,6 +682,8 @@ Other Changes
   collection specific policy is specified during collection creation.
   (Noble Paul, shalin)

+* SOLR-11183: V2 APIs are now available at /api endpoint. (Ishan Chattopadhyaya)
+
 ================== 6.7.0 ==================

 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -863,7 +869,7 @@ Other Changes
   with point fields and provides control over dynamic fields used for the raw amount and currency
   code sub-fields. (hossman, Steve Rowe)

-* SOLR-10966: Add workaround for Hadoop-Common 2.7.2 incompatibility with Java 9.
+* SOLR-11261, SOLR-10966: Upgrade to Hadoop 2.7.4 to fix incompatibility with Java 9.
   (Uwe Schindler)

 ================== 6.6.1 ==================
@@ -891,6 +897,15 @@ Bug Fixes

 * SOLR-11069: CDCR bootstrapping can get into an infinite loop when a core is reloaded (Amrit Sarkar, Erick Erickson)

+* SOLR-11221: SolrJmxReporter broken on core reload. This resulted in some or most metrics not being reported
+  via JMX after core reloads, depending on timing. (ab)
+
+* SOLR-11261, SOLR-10966: Upgrade to Hadoop 2.7.4 to fix incompatibility with Java 9.
+  (Uwe Schindler)
+
+* SOLR-11228: Exclude static html files in the partials directory from authentication and authorization checks. The UI
+  will open correctly with kerberos enabled (Ishan Chattopadhyaya, Varun Thacker)
+
 ================== 6.6.0 ==================

 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -87,6 +87,7 @@
     <dependency org="org.mortbay.jetty" name="jetty" rev="${/org.mortbay.jetty/jetty}" conf="test.DfsMiniCluster"/>
     <dependency org="org.codehaus.janino" name="commons-compiler" rev="${/org.codehaus.janino/commons-compiler}" conf="compile"/>
     <dependency org="org.mortbay.jetty" name="jetty-util" rev="${/org.mortbay.jetty/jetty-util}" conf="test.DfsMiniCluster"/>
+    <dependency org="org.mortbay.jetty" name="jetty-sslengine" rev="${/org.mortbay.jetty/jetty-sslengine}" conf="test.DfsMiniCluster"/>
     <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test.DfsMiniCluster"/>
     <dependency org="com.sun.jersey" name="jersey-server" rev="${/com.sun.jersey/jersey-server}" conf="test.DfsMiniCluster"/>
     <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="test.DfsMiniCluster"/>
@@ -30,10 +30,12 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.StringHelper;
+import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.solr.client.solrj.response.TermsResponse;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -45,7 +47,9 @@ import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.request.SimpleFacets.CountPair;
 import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.SchemaField;
 import org.apache.solr.schema.StrField;
+import org.apache.solr.search.PointMerger;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.util.BoundedTreeSet;

@@ -108,16 +112,6 @@ public class TermsComponent extends SearchComponent {
     rb.rsp.add("terms", termsResult);

     if (fields == null || fields.length==0) return;

-    for (String field : fields) {
-      FieldType fieldType = rb.req.getSchema().getFieldTypeNoEx(field);
-      if (null != fieldType) {
-        if (fieldType.isPointField()) {
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-              "The terms component does not support Points-based field " + field);
-        }
-      }
-    }
-
     boolean termStats = params.getBool(TermsParams.TERMS_STATS, false);

@@ -134,10 +128,8 @@ public class TermsComponent extends SearchComponent {
       return;
     }

-    int limit = params.getInt(TermsParams.TERMS_LIMIT, 10);
-    if (limit < 0) {
-      limit = Integer.MAX_VALUE;
-    }
+    int _limit = params.getInt(TermsParams.TERMS_LIMIT, 10);
+    final int limit = _limit < 0 ? Integer.MAX_VALUE : _limit;

     String lowerStr = params.get(TermsParams.TERMS_LOWER);
     String upperStr = params.get(TermsParams.TERMS_UPPER);
@@ -146,10 +138,9 @@ public class TermsComponent extends SearchComponent {
     boolean sort = !TermsParams.TERMS_SORT_INDEX.equals(
         params.get(TermsParams.TERMS_SORT, TermsParams.TERMS_SORT_COUNT));
     int freqmin = params.getInt(TermsParams.TERMS_MINCOUNT, 1);
-    int freqmax = params.getInt(TermsParams.TERMS_MAXCOUNT, UNLIMITED_MAX_COUNT);
-    if (freqmax<0) {
-      freqmax = Integer.MAX_VALUE;
-    }
+    int _freqmax = params.getInt(TermsParams.TERMS_MAXCOUNT, UNLIMITED_MAX_COUNT);
+    final int freqmax = _freqmax < 0 ? Integer.MAX_VALUE : _freqmax;

     String prefix = params.get(TermsParams.TERMS_PREFIX_STR);
     String regexp = params.get(TermsParams.TERMS_REGEXP_STR);
     Pattern pattern = regexp != null ? Pattern.compile(regexp, resolveRegexpFlags(params)) : null;
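Rewriting the limit and freqmax clamps as single final locals is not just style: only (effectively) final locals can be captured by lambdas, and the next hunk introduces a commented-out MapWriter streaming path that captures limit, freqmin, and freqmax inside a closure. The pattern in isolation (readConfiguredLimit() is a hypothetical stand-in, not a Solr API):

    int _limit = readConfiguredLimit();  // hypothetical helper; may return a negative value meaning "unlimited"
    final int limit = _limit < 0 ? Integer.MAX_VALUE : _limit;
    Runnable task = () -> System.out.println("limit=" + limit);  // capture requires an (effectively) final local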
@@ -161,13 +152,76 @@ public class TermsComponent extends SearchComponent {

     for (String field : fields) {
       NamedList<Integer> fieldTerms = new NamedList<>();
-      termsResult.add(field, fieldTerms);

       Terms terms = indexReader.terms(field);
       if (terms == null) {
-        // field does not exist
+        // field does not exist in terms index.  Check points.
+        SchemaField sf = rb.req.getSchema().getFieldOrNull(field);
+        if (sf != null && sf.getType().isPointField()) {
+          if (lowerStr!=null || upperStr!=null || prefix!=null || regexp!=null) {
+            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                String.format(Locale.ROOT, "The terms component does not support Points-based fields with sorting or with parameters %s,%s,%s,%s ", TermsParams.TERMS_LOWER, TermsParams.TERMS_UPPER, TermsParams.TERMS_PREFIX_STR, TermsParams.TERMS_REGEXP_STR));
+          }
+
+          if (sort) {
+            PointMerger.ValueIterator valueIterator = new PointMerger.ValueIterator(sf, rb.req.getSearcher().getRawReader().leaves());
+            MutableValue mv = valueIterator.getMutableValue();
+            BoundedTreeSet<CountPair<MutableValue, Integer>> queue = (sort ? new BoundedTreeSet<>(limit) : null);
+
+            for (; ; ) {
+              long count = valueIterator.getNextCount();
+              if (count < 0) break;
+              if (count < freqmin || count > freqmax) continue;
+              if (queue.size() < limit || queue.last().val < count || (queue.last().val == count && queue.last().key.compareTo(mv) < 0)) {
+                queue.add(new CountPair<>(mv.duplicate(), (int) count));
+              }
+            }
+
+            for (CountPair<MutableValue, Integer> item : queue) {
+              fieldTerms.add(item.key.toString(), item.val);
+            }
+            termsResult.add(field, fieldTerms);
+            continue;
+          }
+
+          if (!sort) {
+            /***
+            // streaming solution that is deferred until writing the response
+            // TODO: we can't use the streaming solution until XML writer supports PushWriter!
+            termsResult.add(field, (MapWriter) ew -> {
+              PointMerger.ValueIterator valueIterator = new PointMerger.ValueIterator(sf, rb.req.getSearcher().getRawReader().leaves());
+              MutableValue mv = valueIterator.getMutableValue();
+              int num = 0;
+              for(;;) {
+                long count = valueIterator.getNextCount();
+                if (count < 0) break;
+                if (count < freqmin || count > freqmax) continue;
+                if (++num > limit) break;
+                ew.put(mv.toString(), (int)count);  // match the numeric type of terms
+              }
+            });
+            ***/
+
+            PointMerger.ValueIterator valueIterator = new PointMerger.ValueIterator(sf, rb.req.getSearcher().getRawReader().leaves());
+            MutableValue mv = valueIterator.getMutableValue();
+            int num = 0;
+            for(;;) {
+              long count = valueIterator.getNextCount();
+              if (count < 0) break;
+              if (count < freqmin || count > freqmax) continue;
+              if (++num > limit) break;
+              fieldTerms.add(mv.toString(), (int)count);  // match the numeric type of terms
+            }
+
+            termsResult.add(field, fieldTerms);
+            continue;
+          }
+        }
+
+        termsResult.add(field, fieldTerms);  // add empty
         continue;
       }
+      termsResult.add(field, fieldTerms);

       FieldType ft = raw ? null : rb.req.getSchema().getFieldTypeNoEx(field);
       if (ft==null) ft = new StrField();
@@ -545,7 +599,19 @@ public class TermsComponent extends SearchComponent {

     IndexReaderContext topReaderContext = indexSearcher.getTopReaderContext();
     for (String field : fields) {
-      FieldType fieldType = indexSearcher.getSchema().getField(field).getType();
+      SchemaField sf = indexSearcher.getSchema().getField(field);
+      FieldType fieldType = sf.getType();
+
+      if (fieldType.isPointField()) {
+        NamedList<Object> termsMap = new SimpleOrderedMap<>();
+        for (String term : splitTerms) {
+          Query q = fieldType.getFieldQuery(null, sf, term);
+          int count = indexSearcher.getDocSet(q).size();
+          termsMap.add(term, count);
+        }
+        result.add(field, termsMap);
+        continue;
+      }

       // Since splitTerms is already sorted, this array will also be sorted
       Term[] terms = new Term[splitTerms.length];
@@ -0,0 +1,454 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search;
+
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.util.PriorityQueue;
+import org.apache.lucene.util.mutable.MutableValue;
+import org.apache.lucene.util.mutable.MutableValueDate;
+import org.apache.lucene.util.mutable.MutableValueDouble;
+import org.apache.lucene.util.mutable.MutableValueFloat;
+import org.apache.lucene.util.mutable.MutableValueInt;
+import org.apache.lucene.util.mutable.MutableValueLong;
+import org.apache.solr.schema.SchemaField;
+
+/**
+ * Merge multiple numeric point fields (segments) together.
+ *
+ * @lucene.internal
+ * @lucene.experimental
+ */
+public class PointMerger {
+  public static int TOTAL_BUFFER_SIZE = 1000000;  // target number of elements to cache across all segments
+  public static int MIN_SEG_BUFFER_SIZE = 100;  // minimum buffer size on any segment (to limit unnecessary exception throws)
+
+  public static class ValueIterator {
+    PQueue queue;
+    MutableValue topVal;
+
+    public ValueIterator(SchemaField field, List<LeafReaderContext> readers) throws IOException {
+      this(field, readers, TOTAL_BUFFER_SIZE, MIN_SEG_BUFFER_SIZE);
+    }
+
+    public ValueIterator(SchemaField field, List<LeafReaderContext> readers, int totalBufferSize, int minSegBufferSize) throws IOException {
+      assert field.getType().isPointField();
+      queue = new PQueue(readers.size());
+      long ndocs = readers.get(readers.size()-1).docBase + readers.get(readers.size()-1).reader().maxDoc();
+      for (LeafReaderContext ctx : readers) {
+        PointValues pv = ctx.reader().getPointValues(field.getName());
+        if (pv == null) continue;
+        BaseSeg seg = null;
+        // int capacity = 2;
+        int capacity = (int)((long)totalBufferSize * ctx.reader().maxDoc() / ndocs);
+        capacity = Math.max(capacity, minSegBufferSize);
+
+        switch (field.getType().getNumberType()) {
+          case INTEGER:
+            seg = new IntSeg(pv, capacity);
+            break;
+          case LONG:
+            seg = new LongSeg(pv, capacity);
+            break;
+          case FLOAT:
+            seg = new FloatSeg(pv, capacity);
+            break;
+          case DOUBLE:
+            seg = new DoubleSeg(pv, capacity);
+            break;
+        }
+        int count = seg.setNextValue();
+        if (count >= 0) {
+          queue.add(seg);
+        }
+      }
+      if (queue.size() > 0) topVal = queue.top().getMutableValue().duplicate();
+    }
+
+    // gets the mutable value that is updated after every call to getNextCount().
+    // getMutableValue only needs to be called a single time since the instance is reused for every call to getNextCount().
+    public MutableValue getMutableValue() {
+      return topVal;
+    }
+
+    public long getNextCount() throws IOException {
+      if (queue.size() == 0) return -1;
+
+      BaseSeg seg = queue.top();
+      topVal.copy(seg.getMutableValue());
+      long count = 0;
+
+      do {
+        count += seg.getCurrentCount();
+        int nextCount = seg.setNextValue();
+        if (nextCount < 0) {
+          queue.pop();
+          if (queue.size() == 0) break;
+        } else {
+          queue.updateTop();
+        }
+        seg = queue.top();
+      } while (seg.getMutableValue().equalsSameType(topVal));
+
+      return count;
+    }
+
+  }
+
+  static class PQueue extends PriorityQueue<BaseSeg> {
+    public PQueue(int maxSize) {
+      super(maxSize);
+    }
+
+    @Override
+    protected boolean lessThan(BaseSeg a, BaseSeg b) {
+      return BaseSeg.lessThan(a,b);
+    }
+  }
+
+
+
+  abstract static class BaseSeg implements PointValues.IntersectVisitor {
+    final PointValues points;
+    final int[] count;
+    int pos = -1;  // index of the last valid entry
+    int readPos = -1;  // last position read from
+
+    MutableValue currentValue;  // subclass constructor will fill this in
+    int currentCount;
+
+    BaseSeg(PointValues points, int capacity) {
+      this.points = points;
+      this.count = new int[capacity];
+    }
+
+    public static boolean lessThan(BaseSeg a, BaseSeg b) {
+      return a.currentValue.compareTo(b.currentValue) < 0;
+    }
+
+    public MutableValue getMutableValue() {
+      return currentValue;
+    }
+
+    // returns -1 count if there are no more values
+    public int getCurrentCount() {
+      return currentCount;
+    }
+
+    // sets the next value and returns getCurrentCount()
+    public int setNextValue() throws IOException {
+      return 0;
+    };
+
+
+    void refill() throws IOException {
+      assert readPos >= pos;
+      readPos = -1;
+      pos = -1;
+      try {
+        points.intersect(this);
+      } catch (BreakException e) {
+        // nothing to do
+      }
+    }
+
+    @Override
+    public void visit(int docID) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+  }
+
+
+  static class IntSeg extends BaseSeg {
+    final int[] values;
+    int last = Integer.MIN_VALUE;
+    final MutableValueInt mval;
+
+    IntSeg(PointValues points, int capacity) {
+      super(points, capacity);
+      this.values = new int[capacity];
+      this.currentValue = this.mval = new MutableValueInt();
+    }
+
+    public int setNextValue() throws IOException {
+      if (readPos >= pos) {
+        if (last != Integer.MAX_VALUE) {
+          ++last;
+          refill();
+        }
+        if (readPos >= pos) {
+          last = Integer.MAX_VALUE;
+          currentCount = -1;
+          return -1;
+        }
+      }
+
+      ++readPos;
+      mval.value = values[readPos];
+      currentCount = count[readPos];
+      return currentCount;
+    }
+
+
+    @Override
+    public void visit(int docID, byte[] packedValue) throws IOException {
+      // TODO: handle filter or deleted documents?
+      int v = IntPoint.decodeDimension(packedValue, 0);
+      if (v < last) return;
+
+      if (v == last && pos >= 0) {
+        count[pos]++;
+      } else {
+        if (pos+1 < values.length) {
+          last = v;
+          ++pos;
+          values[pos] = v;
+          count[pos] = 1;
+        } else {
+          // a new value we don't have room for
+          throw breakException;
+        }
+      }
+    }
+
+    @Override
+    public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+      int v = IntPoint.decodeDimension(maxPackedValue, 0);
+      if (v >= last) {
+        return PointValues.Relation.CELL_CROSSES_QUERY;
+      } else {
+        return PointValues.Relation.CELL_OUTSIDE_QUERY;
+      }
+    }
+  }
+
+  static class LongSeg extends BaseSeg {
+    final long[] values;
+    long last = Long.MIN_VALUE;
+    MutableValueLong mval;
+
+    LongSeg(PointValues points, int capacity) {
+      super(points, capacity);
+      this.values = new long[capacity];
+      this.currentValue = this.mval = new MutableValueLong();
+    }
+
+    public int setNextValue() throws IOException {
+      if (readPos >= pos) {
+        if (last != Long.MAX_VALUE) {
+          ++last;
+          refill();
+        }
+        if (readPos >= pos) {
+          last = Long.MAX_VALUE;
+          currentCount = -1;
+          return -1;
+        }
+      }
+
+      ++readPos;
+      mval.value = values[readPos];
+      currentCount = count[readPos];
+      return currentCount;
+    }
+
+
+    @Override
+    public void visit(int docID, byte[] packedValue) throws IOException {
+      // TODO: handle filter or deleted documents?
+      long v = LongPoint.decodeDimension(packedValue, 0);
+      if (v < last) return;
+
+      if (v == last && pos >= 0) {
+        count[pos]++;
+      } else {
+        if (pos+1 < values.length) {
+          last = v;
+          ++pos;
+          values[pos] = v;
+          count[pos] = 1;
+        } else {
+          // a new value we don't have room for
+          throw breakException;
+        }
+      }
+    }
+
+    @Override
+    public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+      long v = LongPoint.decodeDimension(maxPackedValue, 0);
+      if (v >= last) {
+        return PointValues.Relation.CELL_CROSSES_QUERY;
+      } else {
+        return PointValues.Relation.CELL_OUTSIDE_QUERY;
+      }
+    }
+  }
+
+  static class FloatSeg extends BaseSeg {
+    final float[] values;
+    float last = -Float.MAX_VALUE;
+    final MutableValueFloat mval;
+
+    FloatSeg(PointValues points, int capacity) {
+      super(points, capacity);
+      this.values = new float[capacity];
+      this.currentValue = this.mval = new MutableValueFloat();
+    }
+
+    public int setNextValue() throws IOException {
+      if (readPos >= pos) {
+        if (last != Float.MAX_VALUE) {
+          last = Math.nextUp(last);
+          refill();
+        }
+        if (readPos >= pos) {
+          last = Float.MAX_VALUE;
+          currentCount = -1;
+          return -1;
+        }
+      }
+
+      ++readPos;
+      mval.value = values[readPos];
+      currentCount = count[readPos];
+      return currentCount;
+    }
+
+
+    @Override
+    public void visit(int docID, byte[] packedValue) throws IOException {
+      // TODO: handle filter or deleted documents?
+      float v = FloatPoint.decodeDimension(packedValue, 0);
+      if (v < last) return;
+
+      if (v == last && pos >= 0) {
+        count[pos]++;
+      } else {
+        if (pos+1 < values.length) {
+          last = v;
+          ++pos;
+          values[pos] = v;
+          count[pos] = 1;
+        } else {
+          // a new value we don't have room for
+          throw breakException;
+        }
+      }
+    }
+
+    @Override
+    public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+      float v = FloatPoint.decodeDimension(maxPackedValue, 0);
+      if (v >= last) {
+        return PointValues.Relation.CELL_CROSSES_QUERY;
+      } else {
+        return PointValues.Relation.CELL_OUTSIDE_QUERY;
+      }
+    }
+  }
+
+  static class DoubleSeg extends BaseSeg {
+    final double[] values;
+    double last = -Double.MAX_VALUE;
+    final MutableValueDouble mval;
+
+    DoubleSeg(PointValues points, int capacity) {
+      super(points, capacity);
+      this.values = new double[capacity];
+      this.currentValue = this.mval = new MutableValueDouble();
+    }
+
+    public int setNextValue() throws IOException {
+      if (readPos >= pos) {
+        if (last != Double.MAX_VALUE) {
+          last = Math.nextUp(last);
+          refill();
+        }
+        if (readPos >= pos) {
+          last = Double.MAX_VALUE;
+          currentCount = -1;
+          return -1;
+        }
+      }
+
+      ++readPos;
+      mval.value = values[readPos];
+      currentCount = count[readPos];
+      return currentCount;
+    }
+
+
+    @Override
+    public void visit(int docID, byte[] packedValue) throws IOException {
+      // TODO: handle filter or deleted documents?
+      double v = DoublePoint.decodeDimension(packedValue, 0);
+      if (v < last) return;
+
+      if (v == last && pos >= 0) {
+        count[pos]++;
+      } else {
+        if (pos+1 < values.length) {
+          last = v;
+          ++pos;
+          values[pos] = v;
+          count[pos] = 1;
+        } else {
+          // a new value we don't have room for
+          throw breakException;
+        }
+      }
+    }
+
+    @Override
+    public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+      double v = DoublePoint.decodeDimension(maxPackedValue, 0);
+      if (v >= last) {
+        return PointValues.Relation.CELL_CROSSES_QUERY;
+      } else {
+        return PointValues.Relation.CELL_OUTSIDE_QUERY;
+      }
+    }
+  }
+
+  static class DateSeg extends LongSeg {
+    DateSeg(PointValues points, int capacity) {
+      super(points, capacity);
+      this.currentValue = this.mval = new MutableValueDate();
+    }
+  }
+
+  static class BreakException extends RuntimeException {
+    @Override
+    public synchronized Throwable fillInStackTrace() {
+      return this;
+    }
+  }
+
+  static BreakException breakException = new BreakException();
+
+}
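PointMerger performs a k-way merge: each segment's BaseSeg buffers a slice of its point values in sorted order via intersect() callbacks (throwing the stackless BreakException to stop filling early), and the PriorityQueue repeatedly surfaces the globally smallest value while getNextCount() sums the counts of equal values across segments. A hedged usage sketch (sf is assumed to be a point-typed SchemaField and leaves the searcher's leaf contexts; this mirrors the test further below):

    PointMerger.ValueIterator it = new PointMerger.ValueIterator(sf, leaves);
    MutableValue val = it.getMutableValue();  // one reused instance, updated by each getNextCount() call
    for (;;) {
      long count = it.getNextCount();
      if (count < 0) break;                   // a negative count signals exhaustion
      System.out.println(val.toString() + " x " + count);
    }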
@@ -18,8 +18,6 @@ package org.apache.solr.servlet;

 import javax.servlet.Filter;

-import org.apache.solr.util.Java9InitHack;
-
 /**
  * All Solr filters available to the user's webapp should
  * extend this class and not just implement {@link Filter}.
@@ -30,7 +28,6 @@ abstract class BaseSolrFilter implements Filter {

   static {
     CheckLoggingConfiguration.check();
-    Java9InitHack.initJava9();
   }

 }
@@ -18,8 +18,6 @@ package org.apache.solr.servlet;

 import javax.servlet.http.HttpServlet;

-import org.apache.solr.util.Java9InitHack;
-
 /**
  * All Solr servlets available to the user's webapp should
  * extend this class and not {@link HttpServlet}.
@@ -31,7 +29,6 @@ abstract class BaseSolrServlet extends HttpServlet {

   static {
     CheckLoggingConfiguration.check();
-    Java9InitHack.initJava9();
   }

 }
@@ -36,6 +36,7 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.util.plugin.PluginInfoInitialized;
+import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -278,7 +279,9 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized
       PluginInfo pluginInfo = new PluginInfo("updateProcessor",
           Utils.makeMap("name", s,
               "class", factoryClass.getName()));
-      core.getUpdateProcessors().put(s, p = core.getUpdateProcessors().createPlugin(pluginInfo).get());
+      UpdateRequestProcessorFactory plugin = p = core.getUpdateProcessors().createPlugin(pluginInfo).get();
+      if (plugin instanceof SolrCoreAware) ((SolrCoreAware) plugin).inform(core);
+      core.getUpdateProcessors().put(s, plugin);
     }
     if (p == null)
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No such processor " + s);
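This is the SOLR-11268 fix: a factory instantiated lazily by name (for example processor=atomic) previously never received its inform(SolrCore) callback, so a SolrCoreAware factory such as AtomicUpdateProcessorFactory complained about a missing UpdateLog. A sketch of the kind of factory this now supports (MyCoreAwareFactory is hypothetical, not part of the commit):

    public class MyCoreAwareFactory extends UpdateRequestProcessorFactory implements SolrCoreAware {
      private SolrCore core;

      @Override
      public void inform(SolrCore core) {
        this.core = core;  // now guaranteed to run before first use when the factory is created by name
      }

      @Override
      public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
        // a real factory would consult 'core' here, e.g. core.getUpdateHandler().getUpdateLog()
        return next;
      }
    }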
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.util;
-
-import java.lang.invoke.MethodHandles;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-import java.util.Objects;
-
-import org.apache.lucene.util.Constants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This class works around a bug in hadoop-common-2.7.2 where the Hadoop Shell class cannot
- * initialize on Java 9 (due to a bug while parsing Java's version number).
- * This class does some early checks and fakes the java version for a very short time
- * during class loading of Solr's web application or Solr's test framework.
- * <p>
- * Be sure to run this only in static initializers, as soon as possible after JVM startup!
- * <p>
- * Related issues: HADOOP-14586, SOLR-10966
- * <p>
- * TODO: <b>Remove this ASAP, once we have upgraded Hadoop (SOLR-10951)!</b>
- *
- * @lucene.internal
- */
-public final class Java9InitHack {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  private static final String JAVA_VERSION_PROP = "java.version";
-  private static boolean done = false;
-
-  /**
-   * Runs the hack. Should be done as early as possible on JVM startup, from a static initializer
-   * to prevent concurrency issues - because we change temporarily some 'important' system properties.
-   */
-  public static synchronized void initJava9() {
-    if (Constants.JRE_IS_MINIMUM_JAVA9 && done == false) {
-      AccessController.doPrivileged((PrivilegedAction<Void>) Java9InitHack::initPrivileged);
-      done = true;
-    }
-  }
-
-  private static Void initPrivileged() {
-    log.info("Adding temporary workaround for Hadoop's Shell class to allow running on Java 9 (please ignore any warnings/failures).");
-    String oldVersion = System.getProperty(JAVA_VERSION_PROP);
-    try {
-      System.setProperty(JAVA_VERSION_PROP, "1.9");
-      Class.forName("org.apache.hadoop.util.Shell");
-    } catch (Throwable t) {
-      log.warn("Cannot initialize Hadoop's Shell class on Java 9.", t);
-    } finally {
-      if (!Objects.equals(System.getProperty(JAVA_VERSION_PROP), oldVersion)) {
-        System.setProperty(JAVA_VERSION_PROP, oldVersion);
-      }
-    }
-    return null;
-  }
-
-  private Java9InitHack() {}
-
-}
@@ -633,7 +633,11 @@
   <dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
   <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>

-  <dynamicField name="*_pi" type="pint" indexed="true" stored="true" docValues="false" multiValued="false"/>
+  <dynamicField name="*_pi" type="pint" indexed="true" multiValued="false"/>
+  <dynamicField name="*_pl" type="plong" indexed="true" multiValued="false"/>
+  <dynamicField name="*_pf" type="pfloat" indexed="true" multiValued="false"/>
+  <dynamicField name="*_pd" type="pdouble" indexed="true" multiValued="false"/>
+  <dynamicField name="*_pdt" type="pdate" indexed="true" multiValued="false"/>

   <!-- some trie-coded dynamic fields for faster range queries -->
   <dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
@@ -15,22 +15,24 @@
  * limitations under the License.
  */
 package org.apache.solr.handler.component;

+import java.util.Arrays;
+import java.util.regex.Pattern;
+
+import org.apache.lucene.util.mutable.MutableValueDouble;
+import org.apache.lucene.util.mutable.MutableValueFloat;
+import org.apache.lucene.util.mutable.MutableValueInt;
+import org.apache.lucene.util.mutable.MutableValueLong;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.TermsParams;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.search.PointMerger;
 import org.junit.BeforeClass;
 import org.junit.Test;

-import java.util.regex.Pattern;
-
 /**
  *
  *
 **/
-// TermsComponent not currently supported for PointFields
-@SolrTestCaseJ4.SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-11173")
 public class TermsComponentTest extends SolrTestCaseJ4 {

   @BeforeClass
@@ -283,10 +285,12 @@ public class TermsComponentTest extends SolrTestCaseJ4 {
        ,"//int[@name='1'][.='2']"
     );

+    /* terms.raw only applies to indexed fields
     assertQ(req("indent","true", "qt","/terms", "terms","true",
         "terms.fl","foo_i", "terms.raw","true")
        ,"not(//int[@name='1'][.='2'])"
     );
+    */

     // check something at the end of the index
     assertQ(req("indent","true", "qt","/terms", "terms","true",
@@ -381,28 +385,121 @@ public class TermsComponentTest extends SolrTestCaseJ4 {

   @Test
   public void testPointField() throws Exception {
-    assertU(adoc("id", "10000", "foo_pi", "1"));
-    assertU(commit());
-
-    try {
-      final SolrQueryRequest req = req(
-          "qt", "/terms",
-          "terms", "true",
-          "terms.fl", "foo_pi");
-      Exception e = expectThrows(SolrException.class, () -> h.query(req));
-      assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException) e).code());
-      assertTrue(e.getMessage().contains("The terms component does not support Points-based field foo_pi"));
+    int nvals = 10000; int maxval = 1000000;
+    // int nvals = 5; int maxval = 2;
+    final int vals[] = new int[nvals];
+    for (int i=0; i<nvals; i++) {
+      vals[i] = random().nextInt(maxval);
+      String v = Integer.toString(vals[i]);
+      assertU(adoc("id", Integer.toString(100000+i), "foo_pi",v, "foo_pl",v, "foo_pf",v, "foo_pd",v) );
+      if (random().nextInt(1000) == 0) assertU(commit()); // make multiple segments
+    }
+
+    assertU(commit());
+    // assertU(optimize());
+
+    Arrays.sort(vals);
+
+    // find the first two values and account for dups
+    int val1 = vals[0];
+    int val2 = vals[1];
+    for (int i=2; i<vals.length; i++) {
+      if (val2 != val1) break;
+      val2 = vals[i];
+    }
+
+    SolrQueryRequest req = req(
+        "qt", "/terms",
+        "terms", "true",
+        "terms.fl", "foo_pi");
+    ;
+    try {
+      SchemaField sf = req.getSchema().getField("foo_pi");
+
+      /**
+      LeafReader r = req.getSearcher().getIndexReader().leaves().get(0).reader();
+      PointValues pv = r.getPointValues("foo_pi");
+      System.out.println("pv=" + pv);
+      if (pv instanceof AssertingLeafReader.AssertingPointValues) {
+        pv = ((AssertingLeafReader.AssertingPointValues) pv).getWrapped();
+      }
+      System.out.println("pv=" + pv);
+      BKDReader bkdr = (BKDReader)pv;
+
+      for (int i=0; i<Math.min(10,nvals); i++) { System.out.println("INDEXED VAL=" + vals[i]); }
+      **/
+
+
+      //
+      // iterate all values
+      //
+      int totBuff = random().nextInt(50)+1;
+      int minSegBuff = random().nextInt(10)+1;
+      PointMerger.ValueIterator iter = new PointMerger.ValueIterator(req.getSchema().getField("foo_pi"), req.getSearcher().getIndexReader().leaves(), totBuff, minSegBuff);
+      MutableValueInt v = (MutableValueInt)iter.getMutableValue();
+      int i=0;
+      for (;;) {
+        long count = iter.getNextCount();
+        if (count < 0) break;
+        assertEquals( vals[i], v.value );
+        i += count;
+        // if (i < 10) System.out.println("COUNT=" + count + " OBJ="+v.toObject());
+      }
+      assert(i==nvals);
+
+      totBuff = random().nextInt(50)+1;
+      minSegBuff = random().nextInt(10)+1;
+      iter = new PointMerger.ValueIterator(req.getSchema().getField("foo_pl"), req.getSearcher().getIndexReader().leaves());
+      MutableValueLong lv = (MutableValueLong)iter.getMutableValue();
+      i=0;
+      for (;;) {
+        long count = iter.getNextCount();
+        if (count < 0) break;
+        assertEquals( vals[i], lv.value );
+        i += count;
+        // if (i < 10) System.out.println("COUNT=" + count + " OBJ="+v.toObject());
+      }
+      assert(i==nvals);
+
+      totBuff = random().nextInt(50)+1;
+      minSegBuff = random().nextInt(10)+1;
+      iter = new PointMerger.ValueIterator(req.getSchema().getField("foo_pf"), req.getSearcher().getIndexReader().leaves());
+      MutableValueFloat fv = (MutableValueFloat)iter.getMutableValue();
+      i=0;
+      for (;;) {
+        long count = iter.getNextCount();
+        if (count < 0) break;
+        assertEquals( vals[i], fv.value, 0);
+        i += count;
+        // if (i < 10) System.out.println("COUNT=" + count + " OBJ="+v.toObject());
+      }
+      assert(i==nvals);
+
+      totBuff = random().nextInt(50)+1;
+      minSegBuff = random().nextInt(10)+1;
+      iter = new PointMerger.ValueIterator(req.getSchema().getField("foo_pd"), req.getSearcher().getIndexReader().leaves());
+      MutableValueDouble dv = (MutableValueDouble)iter.getMutableValue();
+      i=0;
+      for (;;) {
+        long count = iter.getNextCount();
+        if (count < 0) break;
+        assertEquals( vals[i], dv.value, 0);
+        i += count;
+        // if (i < 10) System.out.println("COUNT=" + count + " OBJ="+v.toObject());
+      }
+      assert(i==nvals);
+
+      assertQ(req("indent","true", "qt","/terms", "terms","true",
+          "terms.fl","foo_pi", "terms.sort","index", "terms.limit","2")
+          ,"count(//lst[@name='foo_pi']/*)=2"
+          ,"//lst[@name='foo_pi']/int[1][@name='" +val1+ "']"
+          ,"//lst[@name='foo_pi']/int[2][@name='" +val2+ "']"
+      );
+

       final SolrQueryRequest req2 = req(
           "qt", "/terms",
           "terms", "true",
           "terms.fl", "foo_pi",
           "terms.list", "1");
       e = expectThrows(SolrException.class, () -> h.query(req2));
       assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException) e).code());
       assertTrue(e.getMessage().contains("The terms component does not support Points-based field foo_pi"));
     } finally {
-      assertU(delI("10000"));
+      req.close();
+      assertU(delQ("foo_pi:[* TO *]"));
       assertU(commit());
     }
   }
@ -83,15 +83,16 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
|
|||
|
||||
public void testBasics() throws Exception {
|
||||
|
||||
ModifiableSolrParams params = new ModifiableSolrParams()
|
||||
.add("processor", "atomic")
|
||||
.add("atomic.cat", "add")
|
||||
.add("atomic.title", "set")
|
||||
.add("atomic.count_i", "set")
|
||||
.add("atomic.name_s", "set")
|
||||
.add("atomic.multiDefault", "set")
|
||||
.add("commit", "true");
|
||||
AddUpdateCommand cmd = new AddUpdateCommand(new LocalSolrQueryRequest(h.getCore(),
|
||||
new ModifiableSolrParams()
|
||||
.add("processor", "atomic")
|
||||
.add("atomic.cat", "add")
|
||||
.add("atomic.title", "set")
|
||||
.add("atomic.count_i", "set")
|
||||
.add("atomic.name_s", "set")
|
||||
.add("atomic.multiDefault", "set")
|
||||
.add("commit","true")
|
||||
params
|
||||
));
|
||||
|
||||
cmd.solrDoc = new SolrInputDocument();
|
||||
|
@@ -102,11 +103,10 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
    cmd.solrDoc.addField("name_s", "Virat");
    cmd.solrDoc.addField("multiDefault", "Delhi");

    AtomicUpdateProcessorFactory factory = new AtomicUpdateProcessorFactory();
    factory.inform(h.getCore());
    factory.getInstance(cmd.getReq(), new SolrQueryResponse(),
        new DistributedUpdateProcessor(cmd.getReq(), new SolrQueryResponse(),
            new RunUpdateProcessor(cmd.getReq(), null))).processAdd(cmd);
    UpdateRequestProcessor chain = h.getCore()
        .getUpdateProcessorChain(params)
        .createProcessor(cmd.getReq(), new SolrQueryResponse());
    chain.processAdd(cmd);

    assertU(commit());

@@ -134,16 +134,15 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
        req("q", "multiDefault:Delhi")
        , "//result[@numFound=1]");

    cmd = new AddUpdateCommand(new LocalSolrQueryRequest(h.getCore(),
        new ModifiableSolrParams()
            .add("processor", "atomic")
            .add("atomic.cat", "add")
            .add("atomic.title", "set")
            .add("atomic.count_i", "inc")
            .add("atomic.name_s", "remove")
            .add("atomic.multiDefault", "removeregex")
            .add("commit","true")
        ));
    params = new ModifiableSolrParams()
        .add("processor", "atomic")
        .add("atomic.cat", "add")
        .add("atomic.title", "set")
        .add("atomic.count_i", "inc")
        .add("atomic.name_s", "remove")
        .add("atomic.multiDefault", "removeregex")
        .add("commit", "true");
    cmd = new AddUpdateCommand(new LocalSolrQueryRequest(h.getCore(), params));

    cmd.solrDoc = new SolrInputDocument();
    cmd.solrDoc.addField("id", 1);

@@ -152,12 +151,8 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
    cmd.solrDoc.addField("count_i", 20);
    cmd.solrDoc.addField("name_s", "Virat");
    cmd.solrDoc.addField("multiDefault", ".elh.");

    factory = new AtomicUpdateProcessorFactory();
    factory.inform(h.getCore());
    factory.getInstance(cmd.getReq(), new SolrQueryResponse(),
        new DistributedUpdateProcessor(cmd.getReq(), new SolrQueryResponse(),
            new RunUpdateProcessor(cmd.getReq(), null))).processAdd(cmd);
    chain = h.getCore().getUpdateProcessorChain(params).createProcessor(cmd.getReq(), new SolrQueryResponse());
    chain.processAdd(cmd);

    assertU(commit());

@@ -1 +0,0 @@
80693ef2884927ee3c5464a7539fcfa4af382e14

@@ -0,0 +1 @@
d8e0a3abcc3fb46e1418b99d6d1328a95d9bd7b1

@@ -1 +0,0 @@
bf613cfec06a1f3d3a91d7f82f9e4af75bc01f72

@@ -0,0 +1 @@
a2d5d89a6acfb11dd1a125e86b84fcef549483ae

@@ -1 +0,0 @@
482b3051dc384c1e15182a6ad1402649ef24be02

@@ -1 +0,0 @@
422eb48913fa6f81835b3192c97a576505b6c192

@@ -0,0 +1 @@
a2aa0905c8f980d36f4e861283dccfcad6dd3dec

@@ -0,0 +1 @@
9afa8d2004a0bbd930d1ac10d221d927917067be

@@ -1 +0,0 @@
dfb6840b97211044e87a0345f7edad51b942fd2a

@@ -1 +0,0 @@
3c304b3d9227fbf8af8bc1cab013271538c3cf0a

@@ -0,0 +1 @@
3e5dbc6eb1d4d5d4c19a06c0a443f5bdc3740a35

@@ -0,0 +1 @@
3e1414e3ae47e97f66b2eb904d3ec6c50a3e29d0

@@ -1 +0,0 @@
59d112c8683f563f7aaf05fde7bc4022b90b44a7

@@ -0,0 +1 @@
3964a7984a19e553e090a2279569ec0060b87d5b

@@ -0,0 +1 @@
60367999cee49a3b09fa86bdcb52310b6c896014

@@ -105,6 +105,14 @@
        </New>
      </Arg>
    </Call>
    <Call name="addRule">
      <Arg>
        <New class="org.eclipse.jetty.rewrite.handler.RewritePatternRule">
          <Set name="pattern">/api/*</Set>
          <Set name="replacement">/solr/____v2</Set>
        </New>
      </Arg>
    </Call>
    <Set name="handler">
      <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection">
        <Set name="handlers">

@@ -165,7 +165,7 @@ Returns the following response:
}
----

== Arrays
== Array (data structure)

The first data structure we'll explore is the *array*.

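As a minimal sketch (an illustrative editor's example, not taken from this change), an array literal can be built with the `array` function:

[source,text]
----
array(1, 2, 3)
----

Sent as the `expr` parameter of the `/stream` handler, an expression like this should echo the three values back in the response.
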
@@ -370,7 +370,7 @@ Returns the following response:
----

== Tuple
== Tuple (data structure)

The *tuple* is the next data structure we'll explore.

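As a minimal sketch (the field names and values here are illustrative), a tuple with named values can be created with the `tuple` function:

[source,text]
----
tuple(title="hello world", count=10)
----
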
@@ -419,7 +419,7 @@ Returns the following response:
}
----

== List
== List (data structure)

Next we have the *list* data structure.

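As a minimal sketch (again with illustrative values), a list wrapping two tuples can be built with the `list` function, which emits its members in order:

[source,text]
----
list(tuple(id="1"), tuple(id="2"))
----
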
@@ -467,7 +467,7 @@ Returns the following response:
}
----

== Let
== Let (setting variables)

The `let` function sets *variables* and runs a Streaming Expression that references the variables. The `let` function can be used to
write small statistical programs.

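A minimal sketch of the idea: `let` binds two arrays to the variables `a` and `b`, then runs a final expression that references them:

[source,text]
----
let(a=array(1, 2, 3),
    b=array(10, 20, 30),
    tuple(x=a, y=b))
----
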
@@ -529,7 +529,7 @@ Here is the output:
}
----

== Col
== Creating arrays with the `col` function

The `col` function is used to move a column of numbers from a list of tuples into an `array`.
This is an important function because Streaming Expressions such as `sql`, `random`, and `timeseries` return tuples,

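A minimal sketch, built on a literal list of tuples so it is self-contained (the `price` field is illustrative, and the same pattern is assumed to apply to tuples returned by `sql` or `random`): `col` extracts the `price` column into an array bound to `b`:

[source,text]
----
let(a=list(tuple(price=10), tuple(price=20), tuple(price=30)),
    b=col(a, price),
    tuple(prices=b))
----
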
@@ -586,7 +586,7 @@ The response shows the arrays:
}
----

== Statistical Programming
== Statistical Programming Example

We've covered how the *data structures*, *variables* and a few *statistical functions* work.
Let's dive into an example that puts these tools to use.

@@ -623,7 +623,7 @@ The use case we're reasoning about can often be approached using a K Nearest Nei
With knn we use a *distance* measure to compare vectors of data to find the k nearest neighbors to
a specific vector.

=== Distance
=== Euclidean Distance

The Streaming Expression statistical function library has a function called `distance`. The `distance` function
computes the Euclidean distance between two vectors. This looks promising for comparing vectors of room rates.

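A minimal sketch with literal vectors (the values are illustrative): applied to two arrays of equal length, `distance` returns a single number, the Euclidean distance between them:

[source,text]
----
let(a=array(20, 30, 40, 50),
    b=array(21, 29, 41, 49),
    tuple(dist=distance(a, b)))
----
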
@@ -41,38 +41,38 @@ Following are some v2 API URL paths and path prefixes, along with some of the op
[width="100%",options="header",]
|===
|Path prefix |Some Supported Operations
|`/v2/collections` or equivalently: `/v2/c` |Create, alias, backup, and restore a collection.
|`/v2/c/_collection-name_/update` |Update requests.
|`/v2/c/_collection-name_/config` |Configuration requests.
|`/v2/c/_collection-name_/schema` |Schema requests.
|`/v2/c/_collection-name_/_handler-name_` |Handler-specific requests.
|`/v2/c/_collection-name_/shards` |Split a shard, create a shard, add a replica.
|`/v2/c/_collection-name_/shards/_shard-name_` |Delete a shard, force leader election.
|`/v2/c/_collection-name_/shards/_shard-name_/_replica-name_` |Delete a replica.
|`/v2/cores` |Create a core.
|`/v2/cores/_core-name_` |Reload, rename, delete, and unload a core.
|`/v2/node` |Perform overseer operation, rejoin leader election.
|`/v2/cluster` |Add role, remove role, set cluster property.
|`/v2/c/.system/blob` |Upload and download blobs and metadata.
|`/api/collections` or equivalently: `/api/c` |Create, alias, backup, and restore a collection.
|`/api/c/_collection-name_/update` |Update requests.
|`/api/c/_collection-name_/config` |Configuration requests.
|`/api/c/_collection-name_/schema` |Schema requests.
|`/api/c/_collection-name_/_handler-name_` |Handler-specific requests.
|`/api/c/_collection-name_/shards` |Split a shard, create a shard, add a replica.
|`/api/c/_collection-name_/shards/_shard-name_` |Delete a shard, force leader election.
|`/api/c/_collection-name_/shards/_shard-name_/_replica-name_` |Delete a replica.
|`/api/cores` |Create a core.
|`/api/cores/_core-name_` |Reload, rename, delete, and unload a core.
|`/api/node` |Perform overseer operation, rejoin leader election.
|`/api/cluster` |Add role, remove role, set cluster property.
|`/api/c/.system/blob` |Upload and download blobs and metadata.
|===

== Introspect

Append `/_introspect` to any valid v2 API path and the API specification will be returned in JSON format.

`\http://localhost:8983/v2/c/_introspect`
`\http://localhost:8983/api/c/_introspect`

To limit the introspect output to include just one particular HTTP method, add request param `method` with value `GET`, `POST`, or `DELETE`.

`\http://localhost:8983/v2/c/_introspect?method=POST`
`\http://localhost:8983/api/c/_introspect?method=POST`

Most endpoints support commands provided in a body sent via POST. To limit the introspect output to only one command, add request param `command=_command-name_`.

`\http://localhost:8983/v2/c/gettingstarted/_introspect?method=POST&command=modify`
`\http://localhost:8983/api/c/gettingstarted/_introspect?method=POST&command=modify`

=== Interpreting the Introspect Output

Example: `\http://localhost:8983/v2/c/gettingstarted/get/_introspect`
Example: `\http://localhost:8983/api/c/gettingstarted/get/_introspect`

[source,json]
----

@@ -107,7 +107,7 @@ Description of some of the keys in the above example:
* `**spec/url/params**` : List of supported URL request params
* `**availableSubPaths**` : List of valid URL subpaths and the HTTP method(s) each supports

Example of introspect for a POST API: `\http://localhost:8983/v2/c/gettingstarted/_introspect?method=POST&command=modify`
Example of introspect for a POST API: `\http://localhost:8983/api/c/gettingstarted/_introspect?method=POST&command=modify`

[source,json]
----

@@ -161,7 +161,7 @@ For the "gettingstarted" collection, set the replication factor and whether to a

[source,bash]
----
$ curl http://localhost:8983/v2/c/gettingstarted -H 'Content-type:application/json' -d '
$ curl http://localhost:8983/api/c/gettingstarted -H 'Content-type:application/json' -d '
{ modify: { replicationFactor: "3", autoAddReplicas: false } }'

{"responseHeader":{"status":0,"QTime":842}}

@@ -171,7 +171,7 @@ See the state of the cluster:

[source,bash]
----
$ curl http://localhost:8983/v2/cluster
$ curl http://localhost:8983/api/cluster

{"responseHeader":{"status":0,"QTime":0},"collections":["gettingstarted",".system"]}
----

@@ -180,7 +180,7 @@ Set a cluster property:

[source,bash]
----
$ curl http://localhost:8983/v2/cluster -H 'Content-type: application/json' -d '
$ curl http://localhost:8983/api/cluster -H 'Content-type: application/json' -d '
{ set-property: { name: autoAddReplicas, val: "false" } }'

{"responseHeader":{"status":0,"QTime":4}}

@@ -355,7 +355,7 @@ public class HttpSolrClient extends SolrClient {

    if (request instanceof V2Request) {
      if (System.getProperty("solr.v2RealPath") == null) {
        basePath = baseUrl.replace("/solr", "/v2");
        basePath = baseUrl.replace("/solr", "/api");
      } else {
        basePath = baseUrl + "/____v2";
      }

@@ -116,7 +116,6 @@ import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.servlet.DirectSolrConnection;
import org.apache.solr.util.AbstractSolrTestCase;
import org.apache.solr.util.Java9InitHack;
import org.apache.solr.util.LogLevel;
import org.apache.solr.util.RandomizeSSL;
import org.apache.solr.util.RandomizeSSL.SSLRandomizer;

@@ -162,11 +161,6 @@ import static java.util.Objects.requireNonNull;
public abstract class SolrTestCaseJ4 extends LuceneTestCase {

  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  // this must be a static init block to be safe!
  static {
    Java9InitHack.initJava9();
  }

  private static final List<String> DEFAULT_STACK_FILTERS = Arrays.asList(new String [] {
      "org.junit.",

@@ -29,6 +29,8 @@ public class BadHdfsThreadsFilter implements ThreadFilter {
      return true;
    } else if (name.startsWith("LeaseRenewer")) { // SOLR-7287
      return true;
    } else if (name.startsWith("org.apache.hadoop.fs.FileSystem$Statistics")) { // SOLR-11261
      return true;
    }
    return false;
  }