Merge branch 'master' into feature/query-refactoring

core/src/test/java/org/elasticsearch/plugins/PluginManagerIT.java
	core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java
This commit is contained in:
Christoph Büscher 2015-08-20 12:20:30 +02:00
commit 345a30a2a7
216 changed files with 2000 additions and 1882 deletions

View File

@@ -5,15 +5,14 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<parent> <parent>
<groupId>org.elasticsearch</groupId> <groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-parent</artifactId> <artifactId>parent</artifactId>
<version>2.1.0-SNAPSHOT</version> <version>2.1.0-SNAPSHOT</version>
</parent> </parent>
<groupId>org.elasticsearch</groupId> <groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId> <artifactId>elasticsearch</artifactId>
<packaging>jar</packaging> <name>Elasticsearch: Core</name>
<name>Elasticsearch Core</name>
<description>Elasticsearch - Open Source, Distributed, RESTful Search Engine</description> <description>Elasticsearch - Open Source, Distributed, RESTful Search Engine</description>
<dependencies> <dependencies>
@ -162,10 +161,6 @@
<groupId>org.hdrhistogram</groupId> <groupId>org.hdrhistogram</groupId>
<artifactId>HdrHistogram</artifactId> <artifactId>HdrHistogram</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency> <dependency>
<groupId>commons-cli</groupId> <groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId> <artifactId>commons-cli</artifactId>

View File

@ -23,7 +23,6 @@
<include>com.ning:compress-lzf</include> <include>com.ning:compress-lzf</include>
<include>com.github.spullara.mustache.java:compiler</include> <include>com.github.spullara.mustache.java:compiler</include>
<include>com.tdunning:t-digest</include> <include>com.tdunning:t-digest</include>
<include>org.apache.commons:commons-lang3</include>
<include>commons-cli:commons-cli</include> <include>commons-cli:commons-cli</include>
<include>com.twitter:jsr166e</include> <include>com.twitter:jsr166e</include>
<include>org.hdrhistogram:HdrHistogram</include> <include>org.hdrhistogram:HdrHistogram</include>

View File

@@ -0,0 +1,119 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.queries;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import java.io.IOException;
/** A {@link Query} that only matches documents that are greater than or equal
 * to a configured doc ID. */
public final class MinDocQuery extends Query {

    // Smallest global doc ID (segment docBase + segment-local doc) that may match.
    private final int minDoc;

    /** Sole constructor. */
    public MinDocQuery(int minDoc) {
        this.minDoc = minDoc;
    }

    @Override
    public int hashCode() {
        // Combine Query's own hash with minDoc so queries with different
        // cutoffs hash differently.
        return 31 * super.hashCode() + minDoc;
    }

    @Override
    public boolean equals(Object obj) {
        // super.equals performs the class/Query-level checks; the cast below
        // is safe once it has passed.
        if (super.equals(obj) == false) {
            return false;
        }
        MinDocQuery that = (MinDocQuery) obj;
        return minDoc == that.minDoc;
    }

    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
        // Matching is purely structural (doc ID >= minDoc), so a constant
        // score weight is sufficient.
        return new ConstantScoreWeight(this) {
            @Override
            public Scorer scorer(LeafReaderContext context, final Bits acceptDocs) throws IOException {
                final int maxDoc = context.reader().maxDoc();
                // The whole segment lies below the cutoff: nothing can match.
                if (context.docBase + maxDoc <= minDoc) {
                    return null;
                }
                // Translate the global cutoff into this segment's doc ID space.
                final int segmentMinDoc = Math.max(0, minDoc - context.docBase);
                final DocIdSetIterator disi = new DocIdSetIterator() {

                    // Current segment-local doc ID; -1 means iteration has not started.
                    int doc = -1;

                    @Override
                    public int docID() {
                        return doc;
                    }

                    @Override
                    public int nextDoc() throws IOException {
                        return advance(doc + 1);
                    }

                    @Override
                    public int advance(int target) throws IOException {
                        assert target > doc;
                        if (doc == -1) {
                            // skip directly to minDoc
                            doc = Math.max(target, segmentMinDoc);
                        } else {
                            doc = target;
                        }
                        // Walk forward past docs rejected by the live-docs /
                        // accepted-docs filter.
                        while (doc < maxDoc) {
                            if (acceptDocs == null || acceptDocs.get(doc)) {
                                break;
                            }
                            doc += 1;
                        }
                        if (doc >= maxDoc) {
                            doc = NO_MORE_DOCS;
                        }
                        return doc;
                    }

                    @Override
                    public long cost() {
                        // Upper bound on the number of docs this iterator can visit.
                        return maxDoc - segmentMinDoc;
                    }
                };
                return new ConstantScoreScorer(this, score(), disi);
            }
        };
    }

    @Override
    public String toString(String field) {
        return "MinDocQuery(minDoc=" + minDoc + ")";
    }
}

View File

@ -277,11 +277,19 @@ public class Bootstrap {
closeSysError(); closeSysError();
} }
} catch (Throwable e) { } catch (Throwable e) {
// disable console logging, so user does not see the exception twice (jvm will show it already)
if (foreground) {
Loggers.disableConsoleLogging();
}
ESLogger logger = Loggers.getLogger(Bootstrap.class); ESLogger logger = Loggers.getLogger(Bootstrap.class);
if (INSTANCE.node != null) { if (INSTANCE.node != null) {
logger = Loggers.getLogger(Bootstrap.class, INSTANCE.node.settings().get("name")); logger = Loggers.getLogger(Bootstrap.class, INSTANCE.node.settings().get("name"));
} }
logger.error("Exception", e); logger.error("Exception", e);
// re-enable it if appropriate, so they can see any logging during the shutdown process
if (foreground) {
Loggers.enableConsoleLogging();
}
throw e; throw e;
} }

View File

@ -260,7 +260,7 @@ public class TransportClient extends AbstractClient {
// ignore, might not be bounded // ignore, might not be bounded
} }
for (Class<? extends LifecycleComponent> plugin : injector.getInstance(PluginsService.class).services()) { for (Class<? extends LifecycleComponent> plugin : injector.getInstance(PluginsService.class).nodeServices()) {
injector.getInstance(plugin).close(); injector.getInstance(plugin).close();
} }
try { try {

View File

@ -138,7 +138,7 @@ public class DiscoveryNode implements Streamable, ToXContent {
* @param version the version of the node. * @param version the version of the node.
*/ */
public DiscoveryNode(String nodeName, String nodeId, TransportAddress address, Map<String, String> attributes, Version version) { public DiscoveryNode(String nodeName, String nodeId, TransportAddress address, Map<String, String> attributes, Version version) {
this(nodeName, nodeId, NetworkUtils.getLocalHost().getHostName(), NetworkUtils.getLocalHost().getHostAddress(), address, attributes, version); this(nodeName, nodeId, address.getHost(), address.getAddress(), address, attributes, version);
} }
/** /**

View File

@ -40,6 +40,8 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.*; import org.elasticsearch.common.util.concurrent.*;
import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.Discovery;
@ -159,7 +161,8 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
Map<String, String> nodeAttributes = discoveryNodeService.buildAttributes(); Map<String, String> nodeAttributes = discoveryNodeService.buildAttributes();
// note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling
final String nodeId = DiscoveryService.generateNodeId(settings); final String nodeId = DiscoveryService.generateNodeId(settings);
DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, transportService.boundAddress().publishAddress(), nodeAttributes, version); final TransportAddress publishAddress = transportService.boundAddress().publishAddress();
DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, publishAddress, nodeAttributes, version);
DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id()); DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id());
this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).blocks(initialBlocks).build(); this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).blocks(initialBlocks).build();
this.transportService.setLocalNode(localNode); this.transportService.setLocalNode(localNode);

View File

@ -76,7 +76,7 @@ public abstract class CheckFileCommand extends CliTool.Command {
if (paths != null && paths.length > 0) { if (paths != null && paths.length > 0) {
for (Path path : paths) { for (Path path : paths) {
try { try {
boolean supportsPosixPermissions = Files.getFileStore(path).supportsFileAttributeView(PosixFileAttributeView.class); boolean supportsPosixPermissions = Environment.getFileStore(path).supportsFileAttributeView(PosixFileAttributeView.class);
if (supportsPosixPermissions) { if (supportsPosixPermissions) {
PosixFileAttributes attributes = Files.readAttributes(path, PosixFileAttributes.class); PosixFileAttributes attributes = Files.readAttributes(path, PosixFileAttributes.class);
permissions.put(path, attributes.permissions()); permissions.put(path, attributes.permissions());

View File

@ -23,7 +23,6 @@ import com.google.common.base.Preconditions;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.GnuParser;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;

View File

@ -22,9 +22,9 @@ package org.elasticsearch.common.collect;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.collect.UnmodifiableIterator; import com.google.common.collect.UnmodifiableIterator;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.lucene.util.mutable.MutableValueInt; import org.apache.lucene.util.mutable.MutableValueInt;
import java.lang.reflect.Array;
import java.util.*; import java.util.*;
/** /**
@ -134,12 +134,13 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
@Override @Override
V get(Object key, int hash) { V get(Object key, int hash) {
final int slot = ArrayUtils.indexOf(keys, key); for (int i = 0; i < keys.length; i++) {
if (slot < 0) { if (key.equals(keys[i])) {
return null; return values[i];
} else { }
return values[slot];
} }
return null;
} }
private static <T> T[] replace(T[] array, int index, T value) { private static <T> T[] replace(T[] array, int index, T value) {
@ -151,14 +152,20 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
@Override @Override
Leaf<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue) { Leaf<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue) {
assert hashBits <= 0 : hashBits; assert hashBits <= 0 : hashBits;
final int slot = ArrayUtils.indexOf(keys, key); int slot = -1;
for (int i = 0; i < keys.length; i++) {
if (key.equals(keys[i])) {
slot = i;
break;
}
}
final K[] keys2; final K[] keys2;
final V[] values2; final V[] values2;
if (slot < 0) { if (slot < 0) {
keys2 = ArrayUtils.add(keys, key); keys2 = appendElement(keys, key);
values2 = ArrayUtils.add(values, value); values2 = appendElement(values, value);
newValue.value = 1; newValue.value = 1;
} else { } else {
keys2 = replace(keys, slot, key); keys2 = replace(keys, slot, key);
@ -170,16 +177,49 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
@Override @Override
Leaf<K, V> remove(Object key, int hash) { Leaf<K, V> remove(Object key, int hash) {
final int slot = ArrayUtils.indexOf(keys, key); int slot = -1;
for (int i = 0; i < keys.length; i++) {
if (key.equals(keys[i])) {
slot = i;
break;
}
}
if (slot < 0) { if (slot < 0) {
return this; return this;
} }
final K[] keys2 = ArrayUtils.remove(keys, slot); final K[] keys2 = removeArrayElement(keys, slot);
final V[] values2 = ArrayUtils.remove(values, slot); final V[] values2 = removeArrayElement(values, slot);
return new Leaf<>(keys2, values2); return new Leaf<>(keys2, values2);
} }
} }
private static <T> T[] removeArrayElement(T[] array, int index) {
final Object result = Array.newInstance(array.getClass().getComponentType(), array.length - 1);
System.arraycopy(array, 0, result, 0, index);
if (index < array.length - 1) {
System.arraycopy(array, index + 1, result, index, array.length - index - 1);
}
return (T[]) result;
}
public static <T> T[] appendElement(final T[] array, final T element) {
final T[] newArray = Arrays.copyOf(array, array.length + 1);
newArray[newArray.length - 1] = element;
return newArray;
}
public static <T> T[] insertElement(final T[] array, final T element, final int index) {
final T[] result = Arrays.copyOf(array, array.length + 1);
System.arraycopy(array, 0, result, 0, index);
result[index] = element;
if (index < array.length) {
System.arraycopy(array, index, result, index + 1, array.length - index);
}
return result;
}
/** /**
* An inner node in this trie. Inner nodes store up to 64 key-value pairs * An inner node in this trie. Inner nodes store up to 64 key-value pairs
* and use a bitmap in order to associate hashes to them. For example, if * and use a bitmap in order to associate hashes to them. For example, if
@ -320,8 +360,8 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
private InnerNode<K, V> putNew(K key, int hash6, int slot, V value) { private InnerNode<K, V> putNew(K key, int hash6, int slot, V value) {
final long mask2 = mask | (1L << hash6); final long mask2 = mask | (1L << hash6);
final K[] keys2 = ArrayUtils.add(keys, slot, key); final K[] keys2 = insertElement(keys, key, slot);
final Object[] subNodes2 = ArrayUtils.add(subNodes, slot, value); final Object[] subNodes2 = insertElement(subNodes, value, slot);
return new InnerNode<>(mask2, keys2, subNodes2); return new InnerNode<>(mask2, keys2, subNodes2);
} }
@ -342,8 +382,8 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
private InnerNode<K, V> removeSlot(int hash6, int slot) { private InnerNode<K, V> removeSlot(int hash6, int slot) {
final long mask2 = mask & ~(1L << hash6); final long mask2 = mask & ~(1L << hash6);
final K[] keys2 = ArrayUtils.remove(keys, slot); final K[] keys2 = removeArrayElement(keys, slot);
final Object[] subNodes2 = ArrayUtils.remove(subNodes, slot); final Object[] subNodes2 = removeArrayElement(subNodes, slot);
return new InnerNode<>(mask2, keys2, subNodes2); return new InnerNode<>(mask2, keys2, subNodes2);
} }

View File

@ -23,7 +23,7 @@ import com.google.common.collect.Sets;
import com.spatial4j.core.exception.InvalidShapeException; import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.Shape;
import com.vividsolutions.jts.geom.*; import com.vividsolutions.jts.geom.*;
import org.apache.commons.lang3.tuple.Pair; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException; import java.io.IOException;
@ -98,7 +98,6 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
/** /**
* build new hole to the polygon * build new hole to the polygon
* @param hole linear ring defining the hole
* @return this * @return this
*/ */
public Ring<E> hole() { public Ring<E> hole() {
@ -285,7 +284,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
Edge current = edge; Edge current = edge;
Edge prev = edge; Edge prev = edge;
// bookkeep the source and sink of each visited coordinate // bookkeep the source and sink of each visited coordinate
HashMap<Coordinate, Pair<Edge, Edge>> visitedEdge = new HashMap<>(); HashMap<Coordinate, Tuple<Edge, Edge>> visitedEdge = new HashMap<>();
do { do {
current.coordinate = shift(current.coordinate, shiftOffset); current.coordinate = shift(current.coordinate, shiftOffset);
current.component = id; current.component = id;
@ -301,7 +300,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
// since we're splitting connected components, we want the edges method to visit // since we're splitting connected components, we want the edges method to visit
// the newly separated component // the newly separated component
final int visitID = -id; final int visitID = -id;
Edge firstAppearance = visitedEdge.get(current.coordinate).getRight(); Edge firstAppearance = visitedEdge.get(current.coordinate).v2();
// correct the graph pointers by correcting the 'next' pointer for both the // correct the graph pointers by correcting the 'next' pointer for both the
// first appearance and this appearance of the edge // first appearance and this appearance of the edge
Edge temp = firstAppearance.next; Edge temp = firstAppearance.next;
@ -312,12 +311,12 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
// a non-visited value (anything positive) // a non-visited value (anything positive)
do { do {
prev.component = visitID; prev.component = visitID;
prev = visitedEdge.get(prev.coordinate).getLeft(); prev = visitedEdge.get(prev.coordinate).v1();
++splitIndex; ++splitIndex;
} while (!current.coordinate.equals(prev.coordinate)); } while (!current.coordinate.equals(prev.coordinate));
++connectedComponents; ++connectedComponents;
} else { } else {
visitedEdge.put(current.coordinate, Pair.of(prev, current)); visitedEdge.put(current.coordinate, new Tuple<Edge, Edge>(prev, current));
} }
edges.add(current); edges.add(current);
prev = current; prev = current;

View File

@ -26,9 +26,8 @@ import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.GeometryFactory;
import org.apache.commons.lang3.tuple.Pair;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.unit.DistanceUnit.Distance; import org.elasticsearch.common.unit.DistanceUnit.Distance;
@ -487,7 +486,7 @@ public abstract class ShapeBuilder implements ToXContent {
return top; return top;
} }
private static final Pair range(Coordinate[] points, int offset, int length) { private static final double[] range(Coordinate[] points, int offset, int length) {
double minX = points[0].x; double minX = points[0].x;
double maxX = points[0].x; double maxX = points[0].x;
double minY = points[0].y; double minY = points[0].y;
@ -507,7 +506,7 @@ public abstract class ShapeBuilder implements ToXContent {
maxY = points[offset + i].y; maxY = points[offset + i].y;
} }
} }
return Pair.of(Pair.of(minX, maxX), Pair.of(minY, maxY)); return new double[] {minX, maxX, minY, maxY};
} }
/** /**
@ -585,8 +584,8 @@ public abstract class ShapeBuilder implements ToXContent {
// and convert to a right handed system // and convert to a right handed system
// compute the bounding box and calculate range // compute the bounding box and calculate range
Pair<Pair, Pair> range = range(points, offset, length); double[] range = range(points, offset, length);
final double rng = (Double)range.getLeft().getRight() - (Double)range.getLeft().getLeft(); final double rng = range[1] - range[0];
// translate the points if the following is true // translate the points if the following is true
// 1. shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres // 1. shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres
// (translation would result in a collapsed poly) // (translation would result in a collapsed poly)

View File

@ -360,10 +360,11 @@ public class HttpDownloadHelper {
if (connection instanceof HttpURLConnection) { if (connection instanceof HttpURLConnection) {
((HttpURLConnection) connection).setInstanceFollowRedirects(false); ((HttpURLConnection) connection).setInstanceFollowRedirects(false);
((HttpURLConnection) connection).setUseCaches(true); connection.setUseCaches(true);
((HttpURLConnection) connection).setConnectTimeout(5000); connection.setConnectTimeout(5000);
} }
connection.setRequestProperty("ES-Version", Version.CURRENT.toString()); connection.setRequestProperty("ES-Version", Version.CURRENT.toString());
connection.setRequestProperty("ES-Build-Hash", Build.CURRENT.hashShort());
connection.setRequestProperty("User-Agent", "elasticsearch-plugin-manager"); connection.setRequestProperty("User-Agent", "elasticsearch-plugin-manager");
// connect to the remote site (may take some time) // connect to the remote site (may take some time)

View File

@ -553,6 +553,8 @@ public abstract class StreamInput extends InputStream {
return (T) readStackTrace(new IllegalStateException(readOptionalString(), readThrowable()), this); return (T) readStackTrace(new IllegalStateException(readOptionalString(), readThrowable()), this);
case 17: case 17:
return (T) readStackTrace(new LockObtainFailedException(readOptionalString(), readThrowable()), this); return (T) readStackTrace(new LockObtainFailedException(readOptionalString(), readThrowable()), this);
case 18:
return (T) readStackTrace(new InterruptedException(readOptionalString()), this);
default: default:
assert false : "no such exception for id: " + key; assert false : "no such exception for id: " + key;
} }

View File

@ -591,6 +591,9 @@ public abstract class StreamOutput extends OutputStream {
writeVInt(16); writeVInt(16);
} else if (throwable instanceof LockObtainFailedException) { } else if (throwable instanceof LockObtainFailedException) {
writeVInt(17); writeVInt(17);
} else if (throwable instanceof InterruptedException) {
writeVInt(18);
writeCause = false;
} else { } else {
ElasticsearchException ex; ElasticsearchException ex;
final String name = throwable.getClass().getName(); final String name = throwable.getClass().getName();

View File

@ -275,9 +275,9 @@ public class Joda {
.toFormatter() .toFormatter()
.withZoneUTC(); .withZoneUTC();
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(), new DateTimeParser[] {longFormatter.getParser(), shortFormatter.getParser()}); DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(), new DateTimeParser[]{longFormatter.getParser(), shortFormatter.getParser(), new EpochTimeParser(true)});
return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd", builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT); return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
} }

View File

@ -20,6 +20,7 @@
package org.elasticsearch.common.logging; package org.elasticsearch.common.logging;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.lucene.util.SuppressForbidden;
import org.elasticsearch.common.Classes; import org.elasticsearch.common.Classes;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
@ -74,20 +75,27 @@ public class Loggers {
return getLogger(buildClassLoggerName(clazz), settings, prefixes); return getLogger(buildClassLoggerName(clazz), settings, prefixes);
} }
@SuppressForbidden(reason = "using localhost for logging on which host it is is fine")
private static InetAddress getHostAddress() {
try {
return InetAddress.getLocalHost();
} catch (UnknownHostException e) {
return null;
}
}
public static ESLogger getLogger(String loggerName, Settings settings, String... prefixes) { public static ESLogger getLogger(String loggerName, Settings settings, String... prefixes) {
List<String> prefixesList = newArrayList(); List<String> prefixesList = newArrayList();
if (settings.getAsBoolean("logger.logHostAddress", false)) { if (settings.getAsBoolean("logger.logHostAddress", false)) {
try { final InetAddress addr = getHostAddress();
prefixesList.add(InetAddress.getLocalHost().getHostAddress()); if (addr != null) {
} catch (UnknownHostException e) { prefixesList.add(addr.getHostAddress());
// ignore
} }
} }
if (settings.getAsBoolean("logger.logHostName", false)) { if (settings.getAsBoolean("logger.logHostName", false)) {
try { final InetAddress addr = getHostAddress();
prefixesList.add(InetAddress.getLocalHost().getHostName()); if (addr != null) {
} catch (UnknownHostException e) { prefixesList.add(addr.getHostName());
// ignore
} }
} }
String name = settings.get("name"); String name = settings.get("name");

View File

@@ -0,0 +1,167 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.network;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InterfaceAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.List;
import java.util.Locale;
/**
 * Simple class to log {@code ifconfig}-style output at DEBUG logging.
 */
final class IfConfig {

    private static final ESLogger logger = Loggers.getLogger(IfConfig.class);
    // Prefix used to indent per-interface detail lines under the interface name.
    private static final String INDENT = " ";

    /** log interface configuration at debug level, if its enabled */
    static void logIfNecessary() {
        if (logger.isDebugEnabled()) {
            try {
                doLogging();
            } catch (IOException | SecurityException e) {
                // Best-effort diagnostics only: never let interface enumeration
                // failures propagate out of startup logging.
                logger.warn("unable to gather network information", e);
            }
        }
    }

    /** perform actual logging: might throw exception if things go wrong */
    private static void doLogging() throws IOException {
        StringBuilder msg = new StringBuilder();
        for (NetworkInterface nic : NetworkUtils.getInterfaces()) {
            msg.append(System.lineSeparator());

            // ordinary name
            msg.append(nic.getName());
            msg.append(System.lineSeparator());

            // display name (e.g. on windows)
            if (!nic.getName().equals(nic.getDisplayName())) {
                msg.append(INDENT);
                msg.append(nic.getDisplayName());
                msg.append(System.lineSeparator());
            }

            // addresses: v4 first, then v6
            List<InterfaceAddress> addresses = nic.getInterfaceAddresses();
            for (InterfaceAddress address : addresses) {
                if (address.getAddress() instanceof Inet6Address == false) {
                    msg.append(INDENT);
                    msg.append(formatAddress(address));
                    msg.append(System.lineSeparator());
                }
            }
            for (InterfaceAddress address : addresses) {
                if (address.getAddress() instanceof Inet6Address) {
                    msg.append(INDENT);
                    msg.append(formatAddress(address));
                    msg.append(System.lineSeparator());
                }
            }

            // hardware address, rendered as colon-separated upper-case hex
            byte hardware[] = nic.getHardwareAddress();
            if (hardware != null) {
                msg.append(INDENT);
                msg.append("hardware ");
                for (int i = 0; i < hardware.length; i++) {
                    if (i > 0) {
                        msg.append(":");
                    }
                    msg.append(String.format(Locale.ROOT, "%02X", hardware[i]));
                }
                msg.append(System.lineSeparator());
            }

            // attributes (UP/MULTICAST/... flags, mtu, index)
            msg.append(INDENT);
            msg.append(formatFlags(nic));
            msg.append(System.lineSeparator());
        }
        logger.debug("configuration:" + System.lineSeparator() + "{}", msg.toString());
    }

    /** format internet address: java's default doesn't include everything useful */
    private static String formatAddress(InterfaceAddress interfaceAddress) throws IOException {
        StringBuilder sb = new StringBuilder();
        InetAddress address = interfaceAddress.getAddress();
        if (address instanceof Inet6Address) {
            sb.append("inet6 ");
            // InetAddress.toString() is "hostname/literal"; substring(1) strips
            // the leading '/' of an unresolved address.
            sb.append(address.toString().substring(1));
            sb.append(" prefixlen:");
            sb.append(interfaceAddress.getNetworkPrefixLength());
        } else {
            sb.append("inet ");
            sb.append(address.toString().substring(1));
            // BUGFIX: compute the netmask in long arithmetic. With an int
            // left operand, a prefix length of 0 would shift by 32, which Java
            // masks to a shift by 0 and yields 255.255.255.255 instead of the
            // correct 0.0.0.0 (JLS 15.19: int shift counts are taken mod 32).
            int netmask = (int) (0xFFFFFFFFL << (32 - interfaceAddress.getNetworkPrefixLength()));
            sb.append(" netmask:" + InetAddress.getByAddress(new byte[] {
                    (byte)(netmask >>> 24),
                    (byte)(netmask >>> 16 & 0xFF),
                    (byte)(netmask >>> 8 & 0xFF),
                    (byte)(netmask & 0xFF)
            }).toString().substring(1));
            InetAddress broadcast = interfaceAddress.getBroadcast();
            if (broadcast != null) {
                sb.append(" broadcast:" + broadcast.toString().substring(1));
            }
        }
        // scope classification mirrors ifconfig's scope column
        if (address.isLoopbackAddress()) {
            sb.append(" scope:host");
        } else if (address.isLinkLocalAddress()) {
            sb.append(" scope:link");
        } else if (address.isSiteLocalAddress()) {
            sb.append(" scope:site");
        }
        return sb.toString();
    }

    /** format network interface flags */
    private static String formatFlags(NetworkInterface nic) throws SocketException {
        StringBuilder flags = new StringBuilder();
        if (nic.isUp()) {
            flags.append("UP ");
        }
        if (nic.supportsMulticast()) {
            flags.append("MULTICAST ");
        }
        if (nic.isLoopback()) {
            flags.append("LOOPBACK ");
        }
        if (nic.isPointToPoint()) {
            flags.append("POINTOPOINT ");
        }
        if (nic.isVirtual()) {
            flags.append("VIRTUAL ");
        }
        flags.append("mtu:" + nic.getMTU());
        flags.append(" index:" + nic.getIndex());
        return flags.toString();
    }
}

View File

@ -82,6 +82,7 @@ public class NetworkService extends AbstractComponent {
@Inject @Inject
public NetworkService(Settings settings) { public NetworkService(Settings settings) {
super(settings); super(settings);
IfConfig.logIfNecessary();
InetSocketTransportAddress.setResolveAddress(settings.getAsBoolean("network.address.serialization.resolve", false)); InetSocketTransportAddress.setResolveAddress(settings.getAsBoolean("network.address.serialization.resolve", false));
} }

View File

@ -21,8 +21,6 @@ package org.elasticsearch.common.network;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Constants; import org.apache.lucene.util.Constants;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.net.Inet4Address; import java.net.Inet4Address;
import java.net.Inet6Address; import java.net.Inet6Address;
@ -34,10 +32,12 @@ import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashSet;
import java.util.List; import java.util.List;
/** /**
* Utilities for network interfaces / addresses * Utilities for network interfaces / addresses binding and publishing.
* Its only intended for that purpose, not general purpose usage!!!!
*/ */
public abstract class NetworkUtils { public abstract class NetworkUtils {
@ -84,7 +84,7 @@ public abstract class NetworkUtils {
* @deprecated remove this when multihoming is really correct * @deprecated remove this when multihoming is really correct
*/ */
@Deprecated @Deprecated
private static void sortAddresses(List<InetAddress> list) { static void sortAddresses(List<InetAddress> list) {
Collections.sort(list, new Comparator<InetAddress>() { Collections.sort(list, new Comparator<InetAddress>() {
@Override @Override
public int compare(InetAddress left, InetAddress right) { public int compare(InetAddress left, InetAddress right) {
@ -97,8 +97,6 @@ public abstract class NetworkUtils {
}); });
} }
private final static ESLogger logger = Loggers.getLogger(NetworkUtils.class);
/** Return all interfaces (and subinterfaces) on the system */ /** Return all interfaces (and subinterfaces) on the system */
static List<NetworkInterface> getInterfaces() throws SocketException { static List<NetworkInterface> getInterfaces() throws SocketException {
List<NetworkInterface> all = new ArrayList<>(); List<NetworkInterface> all = new ArrayList<>();
@ -127,19 +125,8 @@ public abstract class NetworkUtils {
return Constants.WINDOWS ? false : true; return Constants.WINDOWS ? false : true;
} }
/** Returns localhost, or if its misconfigured, falls back to loopback. Use with caution!!!! */
// TODO: can we remove this?
public static InetAddress getLocalHost() {
try {
return InetAddress.getLocalHost();
} catch (UnknownHostException e) {
logger.warn("failed to resolve local host, fallback to loopback", e);
return InetAddress.getLoopbackAddress();
}
}
/** Returns addresses for all loopback interfaces that are up. */ /** Returns addresses for all loopback interfaces that are up. */
public static InetAddress[] getLoopbackAddresses() throws SocketException { static InetAddress[] getLoopbackAddresses() throws SocketException {
List<InetAddress> list = new ArrayList<>(); List<InetAddress> list = new ArrayList<>();
for (NetworkInterface intf : getInterfaces()) { for (NetworkInterface intf : getInterfaces()) {
if (intf.isLoopback() && intf.isUp()) { if (intf.isLoopback() && intf.isUp()) {
@ -154,7 +141,7 @@ public abstract class NetworkUtils {
} }
/** Returns addresses for the first non-loopback interface that is up. */ /** Returns addresses for the first non-loopback interface that is up. */
public static InetAddress[] getFirstNonLoopbackAddresses() throws SocketException { static InetAddress[] getFirstNonLoopbackAddresses() throws SocketException {
List<InetAddress> list = new ArrayList<>(); List<InetAddress> list = new ArrayList<>();
for (NetworkInterface intf : getInterfaces()) { for (NetworkInterface intf : getInterfaces()) {
if (intf.isLoopback() == false && intf.isUp()) { if (intf.isLoopback() == false && intf.isUp()) {
@ -170,7 +157,7 @@ public abstract class NetworkUtils {
} }
/** Returns addresses for the given interface (it must be marked up) */ /** Returns addresses for the given interface (it must be marked up) */
public static InetAddress[] getAddressesForInterface(String name) throws SocketException { static InetAddress[] getAddressesForInterface(String name) throws SocketException {
NetworkInterface intf = NetworkInterface.getByName(name); NetworkInterface intf = NetworkInterface.getByName(name);
if (intf == null) { if (intf == null) {
throw new IllegalArgumentException("No interface named '" + name + "' found, got " + getInterfaces()); throw new IllegalArgumentException("No interface named '" + name + "' found, got " + getInterfaces());
@ -187,14 +174,17 @@ public abstract class NetworkUtils {
} }
/** Returns addresses for the given host, sorted by order of preference */ /** Returns addresses for the given host, sorted by order of preference */
public static InetAddress[] getAllByName(String host) throws UnknownHostException { static InetAddress[] getAllByName(String host) throws UnknownHostException {
InetAddress addresses[] = InetAddress.getAllByName(host); InetAddress addresses[] = InetAddress.getAllByName(host);
sortAddresses(Arrays.asList(addresses)); // deduplicate, in case of resolver misconfiguration
return addresses; // stuff like https://bugzilla.redhat.com/show_bug.cgi?id=496300
List<InetAddress> unique = new ArrayList<>(new HashSet<>(Arrays.asList(addresses)));
sortAddresses(unique);
return unique.toArray(new InetAddress[unique.size()]);
} }
/** Returns only the IPV4 addresses in {@code addresses} */ /** Returns only the IPV4 addresses in {@code addresses} */
public static InetAddress[] filterIPV4(InetAddress addresses[]) { static InetAddress[] filterIPV4(InetAddress addresses[]) {
List<InetAddress> list = new ArrayList<>(); List<InetAddress> list = new ArrayList<>();
for (InetAddress address : addresses) { for (InetAddress address : addresses) {
if (address instanceof Inet4Address) { if (address instanceof Inet4Address) {
@ -208,7 +198,7 @@ public abstract class NetworkUtils {
} }
/** Returns only the IPV6 addresses in {@code addresses} */ /** Returns only the IPV6 addresses in {@code addresses} */
public static InetAddress[] filterIPV6(InetAddress addresses[]) { static InetAddress[] filterIPV6(InetAddress addresses[]) {
List<InetAddress> list = new ArrayList<>(); List<InetAddress> list = new ArrayList<>();
for (InetAddress address : addresses) { for (InetAddress address : addresses) {
if (address instanceof Inet6Address) { if (address instanceof Inet6Address) {

View File

@ -44,6 +44,21 @@ public class DummyTransportAddress implements TransportAddress {
return other == INSTANCE; return other == INSTANCE;
} }
@Override
public String getHost() {
return "dummy";
}
@Override
public String getAddress() {
return "0.0.0.0"; // see https://en.wikipedia.org/wiki/0.0.0.0
}
@Override
public int getPort() {
return 42;
}
@Override @Override
public DummyTransportAddress readFrom(StreamInput in) throws IOException { public DummyTransportAddress readFrom(StreamInput in) throws IOException {
return INSTANCE; return INSTANCE;

View File

@ -30,7 +30,7 @@ import java.net.InetSocketAddress;
/** /**
* A transport address used for IP socket address (wraps {@link java.net.InetSocketAddress}). * A transport address used for IP socket address (wraps {@link java.net.InetSocketAddress}).
*/ */
public class InetSocketTransportAddress implements TransportAddress { public final class InetSocketTransportAddress implements TransportAddress {
private static boolean resolveAddress = false; private static boolean resolveAddress = false;
@ -92,6 +92,25 @@ public class InetSocketTransportAddress implements TransportAddress {
address.getAddress().equals(((InetSocketTransportAddress) other).address.getAddress()); address.getAddress().equals(((InetSocketTransportAddress) other).address.getAddress());
} }
@Override
public String getHost() {
if (resolveAddress) {
return address.getHostName();
} else {
return getAddress();
}
}
@Override
public String getAddress() {
return address.getAddress().getHostAddress();
}
@Override
public int getPort() {
return address.getPort();
}
public InetSocketAddress address() { public InetSocketAddress address() {
return this.address; return this.address;
} }

View File

@ -29,7 +29,7 @@ import java.io.IOException;
/** /**
* *
*/ */
public class LocalTransportAddress implements TransportAddress { public final class LocalTransportAddress implements TransportAddress {
public static final LocalTransportAddress PROTO = new LocalTransportAddress("_na"); public static final LocalTransportAddress PROTO = new LocalTransportAddress("_na");
@ -57,6 +57,21 @@ public class LocalTransportAddress implements TransportAddress {
return other instanceof LocalTransportAddress && id.equals(((LocalTransportAddress) other).id); return other instanceof LocalTransportAddress && id.equals(((LocalTransportAddress) other).id);
} }
@Override
public String getHost() {
return "local";
}
@Override
public String getAddress() {
return "0.0.0.0"; // see https://en.wikipedia.org/wiki/0.0.0.0
}
@Override
public int getPort() {
return 0;
}
@Override @Override
public LocalTransportAddress readFrom(StreamInput in) throws IOException { public LocalTransportAddress readFrom(StreamInput in) throws IOException {
return new LocalTransportAddress(in); return new LocalTransportAddress(in);

View File

@ -28,7 +28,24 @@ import org.elasticsearch.common.io.stream.Writeable;
*/ */
public interface TransportAddress extends Writeable<TransportAddress> { public interface TransportAddress extends Writeable<TransportAddress> {
/**
* Returns the host string for this transport address
*/
String getHost();
/**
* Returns the address string for this transport address
*/
String getAddress();
/**
* Returns the port of this transport address if applicable
*/
int getPort();
short uniqueAddressTypeId(); short uniqueAddressTypeId();
boolean sameHost(TransportAddress other); boolean sameHost(TransportAddress other);
public String toString();
} }

View File

@ -131,13 +131,16 @@ public abstract class ExtensionPoint {
* the settings object. * the settings object.
* *
* @param binder the binder to use * @param binder the binder to use
* @param settings the settings to look up the key to find the implemetation to bind * @param settings the settings to look up the key to find the implementation to bind
* @param settingsKey the key to use with the settings * @param settingsKey the key to use with the settings
* @param defaultValue the default value if they settings doesn't contain the key * @param defaultValue the default value if the settings do not contain the key, or null if there is no default
* @return the actual bound type key * @return the actual bound type key
*/ */
public String bindType(Binder binder, Settings settings, String settingsKey, String defaultValue) { public String bindType(Binder binder, Settings settings, String settingsKey, String defaultValue) {
final String type = settings.get(settingsKey, defaultValue); final String type = settings.get(settingsKey, defaultValue);
if (type == null) {
throw new IllegalArgumentException("Missing setting [" + settingsKey + "]");
}
final Class<? extends T> instance = getExtension(type); final Class<? extends T> instance = getExtension(type);
if (instance == null) { if (instance == null) {
throw new IllegalArgumentException("Unknown [" + this.name + "] type [" + type + "]"); throw new IllegalArgumentException("Unknown [" + this.name + "] type [" + type + "]");

View File

@ -26,10 +26,12 @@ import org.elasticsearch.common.io.PathUtils;
import java.io.IOException; import java.io.IOException;
import java.nio.file.FileStore; import java.nio.file.FileStore;
import java.nio.file.FileSystemException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.attribute.FileAttributeView; import java.nio.file.attribute.FileAttributeView;
import java.nio.file.attribute.FileStoreAttributeView; import java.nio.file.attribute.FileStoreAttributeView;
import java.util.Arrays;
/** /**
* Implementation of FileStore that supports * Implementation of FileStore that supports
@ -73,13 +75,16 @@ class ESFileStore extends FileStore {
} }
} }
/** Files.getFileStore(Path) useless here! Don't complain, just try it yourself. */ /**
static FileStore getMatchingFileStore(Path path, FileStore fileStores[]) throws IOException { * Files.getFileStore(Path) useless here! Don't complain, just try it yourself.
FileStore store = Files.getFileStore(path); */
@SuppressForbidden(reason = "works around the bugs")
static FileStore getMatchingFileStore(Path path, FileStore fileStores[]) throws IOException {
if (Constants.WINDOWS) { if (Constants.WINDOWS) {
return store; // be defensive, don't even try to do anything fancy. return getFileStoreWindows(path, fileStores);
} }
FileStore store = Files.getFileStore(path);
try { try {
String mount = getMountPointLinux(store); String mount = getMountPointLinux(store);
@ -110,6 +115,57 @@ class ESFileStore extends FileStore {
// fall back to crappy one we got from Files.getFileStore // fall back to crappy one we got from Files.getFileStore
return store; return store;
} }
/**
* remove this code and just use getFileStore for windows on java 9
* works around https://bugs.openjdk.java.net/browse/JDK-8034057
*/
@SuppressForbidden(reason = "works around https://bugs.openjdk.java.net/browse/JDK-8034057")
static FileStore getFileStoreWindows(Path path, FileStore fileStores[]) throws IOException {
assert Constants.WINDOWS;
try {
return Files.getFileStore(path);
} catch (FileSystemException possibleBug) {
final char driveLetter;
// look for a drive letter to see if its the SUBST bug,
// it might be some other type of path, like a windows share
// if something goes wrong, we just deliver the original exception
try {
String root = path.toRealPath().getRoot().toString();
if (root.length() < 2) {
throw new RuntimeException("root isn't a drive letter: " + root);
}
driveLetter = Character.toLowerCase(root.charAt(0));
if (Character.isAlphabetic(driveLetter) == false || root.charAt(1) != ':') {
throw new RuntimeException("root isn't a drive letter: " + root);
}
} catch (Throwable checkFailed) {
// something went wrong,
possibleBug.addSuppressed(checkFailed);
throw possibleBug;
}
// we have a drive letter: the hack begins!!!!!!!!
try {
// we have no choice but to parse toString of all stores and find the matching drive letter
for (FileStore store : fileStores) {
String toString = store.toString();
int length = toString.length();
if (length > 3 && toString.endsWith(":)") && toString.charAt(length - 4) == '(') {
if (Character.toLowerCase(toString.charAt(length - 3)) == driveLetter) {
return store;
}
}
}
throw new RuntimeException("no filestores matched");
} catch (Throwable weTried) {
IOException newException = new IOException("Unable to retrieve filestore for '" + path + "', tried matching against " + Arrays.toString(fileStores), weTried);
newException.addSuppressed(possibleBug);
throw newException;
}
}
}
@Override @Override
public String name() { public String name() {

View File

@ -19,6 +19,7 @@
package org.elasticsearch.env; package org.elasticsearch.env;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtils;
@ -302,9 +303,37 @@ public class Environment {
* <li>Only requires the security permissions of {@link Files#getFileStore(Path)}, * <li>Only requires the security permissions of {@link Files#getFileStore(Path)},
* no permissions to the actual mount point are required. * no permissions to the actual mount point are required.
* <li>Exception handling has the same semantics as {@link Files#getFileStore(Path)}. * <li>Exception handling has the same semantics as {@link Files#getFileStore(Path)}.
* <li>Works around https://bugs.openjdk.java.net/browse/JDK-8034057.
* </ul> * </ul>
*/ */
public FileStore getFileStore(Path path) throws IOException { public static FileStore getFileStore(Path path) throws IOException {
return ESFileStore.getMatchingFileStore(path, fileStores); return ESFileStore.getMatchingFileStore(path, fileStores);
} }
/**
* Returns true if the path is writable.
* Acts just like {@link Files#isWritable(Path)}, except won't
* falsely return false for paths on SUBST'd drive letters
* See https://bugs.openjdk.java.net/browse/JDK-8034057
* Note this will set the file modification time (to its already-set value)
* to test access.
*/
@SuppressForbidden(reason = "works around https://bugs.openjdk.java.net/browse/JDK-8034057")
public static boolean isWritable(Path path) throws IOException {
boolean v = Files.isWritable(path);
if (v || Constants.WINDOWS == false) {
return v;
}
// isWritable returned false on windows, the hack begins!!!!!!
// resetting the modification time is the least destructive/simplest
// way to check for both files and directories, and fails early just
// in getting the current value if file doesn't exist, etc
try {
Files.setLastModifiedTime(path, Files.getLastModifiedTime(path));
return true;
} catch (Throwable e) {
return false;
}
}
} }

View File

@ -230,7 +230,7 @@ public class Node implements Releasable {
// hack around dependency injection problem (for now...) // hack around dependency injection problem (for now...)
injector.getInstance(Discovery.class).setRoutingService(injector.getInstance(RoutingService.class)); injector.getInstance(Discovery.class).setRoutingService(injector.getInstance(RoutingService.class));
for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) { for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
injector.getInstance(plugin).start(); injector.getInstance(plugin).start();
} }
@ -297,7 +297,7 @@ public class Node implements Releasable {
injector.getInstance(RestController.class).stop(); injector.getInstance(RestController.class).stop();
injector.getInstance(TransportService.class).stop(); injector.getInstance(TransportService.class).stop();
for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) { for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
injector.getInstance(plugin).stop(); injector.getInstance(plugin).stop();
} }
// we should stop this last since it waits for resources to get released // we should stop this last since it waits for resources to get released
@ -364,7 +364,7 @@ public class Node implements Releasable {
stopWatch.stop().start("percolator_service"); stopWatch.stop().start("percolator_service");
injector.getInstance(PercolatorService.class).close(); injector.getInstance(PercolatorService.class).close();
for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) { for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
stopWatch.stop().start("plugin(" + plugin.getName() + ")"); stopWatch.stop().start("plugin(" + plugin.getName() + ")");
injector.getInstance(plugin).close(); injector.getInstance(plugin).close();
} }

View File

@ -27,7 +27,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector; import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
@ -53,7 +52,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.aggregations.SearchContextAggregations;
@ -68,6 +66,7 @@ import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField; import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.LeafSearchLookup;
@ -348,12 +347,12 @@ public class PercolateContext extends SearchContext {
} }
@Override @Override
public Scroll scroll() { public ScrollContext scrollContext() {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@Override @Override
public SearchContext scroll(Scroll scroll) { public SearchContext scrollContext(ScrollContext scroll) {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@ -621,16 +620,6 @@ public class PercolateContext extends SearchContext {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@Override
public void lastEmittedDoc(ScoreDoc doc) {
throw new UnsupportedOperationException();
}
@Override
public ScoreDoc lastEmittedDoc() {
throw new UnsupportedOperationException();
}
@Override @Override
public DfsSearchResult dfsResult() { public DfsSearchResult dfsResult() {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();

View File

@ -1,121 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import java.io.Closeable;
import java.util.Collection;
/**
* A base class for a plugin.
* <p/>
* A plugin can be dynamically injected with {@link Module} by implementing <tt>onModule(AnyModule)</tt> method
* removing the need to override {@link #processModule(org.elasticsearch.common.inject.Module)} and check using
* instanceof.
*/
public abstract class AbstractPlugin implements Plugin {
/**
* Defaults to return an empty list of node-level module classes.
*/
@Override
public Collection<Class<? extends Module>> modules() {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of node-level module instances.
*/
@Override
public Collection<Module> modules(Settings settings) {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of node-level lifecycle services.
*/
@Override
public Collection<Class<? extends LifecycleComponent>> services() {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of per-index module classes.
*/
@Override
public Collection<Class<? extends Module>> indexModules() {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of per-index module instances.
*/
@Override
public Collection<Module> indexModules(Settings settings) {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of per-index closeable services.
*/
@Override
public Collection<Class<? extends Closeable>> indexServices() {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of per-shard module classes.
*/
@Override
public Collection<Class<? extends Module>> shardModules() {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of per-shard module instances.
*/
@Override
public Collection<Module> shardModules(Settings settings) {
return ImmutableList.of();
}
/**
* Defaults to return an empty list of per-shard closeable services.
*/
@Override
public Collection<Class<? extends Closeable>> shardServices() {
return ImmutableList.of();
}
@Override
public void processModule(Module module) {
// no-op by default; subclasses normally rely on onModule(AnyModule) hooks instead
}
@Override
public Settings additionalSettings() {
// contributes no additional node settings by default
return Settings.Builder.EMPTY_SETTINGS;
}
}

View File

@ -19,18 +19,12 @@
package org.elasticsearch.plugins; package org.elasticsearch.plugins;
import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.PreProcessModule; import org.elasticsearch.common.inject.PreProcessModule;
import org.elasticsearch.common.inject.SpawnModules; import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import java.util.Collection;
import java.util.List;
import static org.elasticsearch.common.inject.Modules.createModule;
/** /**
* *
*/ */
@ -47,13 +41,7 @@ public class IndexPluginsModule extends AbstractModule implements SpawnModules,
@Override @Override
public Iterable<? extends Module> spawnModules() { public Iterable<? extends Module> spawnModules() {
List<Module> modules = Lists.newArrayList(); return pluginsService.indexModules(settings);
Collection<Class<? extends Module>> modulesClasses = pluginsService.indexModules();
for (Class<? extends Module> moduleClass : modulesClasses) {
modules.add(createModule(moduleClass, settings));
}
modules.addAll(pluginsService.indexModules(settings));
return modules;
} }
@Override @Override

View File

@ -25,82 +25,73 @@ import org.elasticsearch.common.settings.Settings;
import java.io.Closeable; import java.io.Closeable;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
/** /**
* An extension point allowing to plug in custom functionality. * An extension point allowing to plug in custom functionality.
* <p/> * <p/>
* A plugin can be dynamically injected with {@link Module} by implementing <tt>onModule(AnyModule)</tt> method * A plugin can be register custom extensions to builtin behavior by implementing <tt>onModule(AnyModule)</tt>,
* removing the need to override {@link #processModule(org.elasticsearch.common.inject.Module)} and check using * and registering the extension with the given module.
* instanceof.
*/ */
public interface Plugin { public abstract class Plugin {
/** /**
* The name of the plugin. * The name of the plugin.
*/ */
String name(); public abstract String name();
/** /**
* The description of the plugin. * The description of the plugin.
*/ */
String description(); public abstract String description();
/** /**
* Node level modules (classes, will automatically be created). * Node level modules.
*/ */
Collection<Class<? extends Module>> modules(); public Collection<Module> nodeModules() {
return Collections.emptyList();
/** }
* Node level modules (instances)
*
* @param settings The node level settings.
*/
Collection<? extends Module> modules(Settings settings);
/** /**
* Node level services that will be automatically started/stopped/closed. * Node level services that will be automatically started/stopped/closed.
*/ */
Collection<Class<? extends LifecycleComponent>> services(); public Collection<Class<? extends LifecycleComponent>> nodeServices() {
return Collections.emptyList();
}
/** /**
* Per index modules. * Per index modules.
*/ */
Collection<Class<? extends Module>> indexModules(); public Collection<Module> indexModules(Settings indexSettings) {
return Collections.emptyList();
/** }
* Per index modules.
*/
Collection<? extends Module> indexModules(Settings settings);
/** /**
* Per index services that will be automatically closed. * Per index services that will be automatically closed.
*/ */
Collection<Class<? extends Closeable>> indexServices(); public Collection<Class<? extends Closeable>> indexServices() {
return Collections.emptyList();
}
/** /**
* Per index shard module. * Per index shard module.
*/ */
Collection<Class<? extends Module>> shardModules(); public Collection<Module> shardModules(Settings indexSettings) {
return Collections.emptyList();
/** }
* Per index shard module.
*/
Collection<? extends Module> shardModules(Settings settings);
/** /**
* Per index shard service that will be automatically closed. * Per index shard service that will be automatically closed.
*/ */
Collection<Class<? extends Closeable>> shardServices(); public Collection<Class<? extends Closeable>> shardServices() {
return Collections.emptyList();
/** }
* Process a specific module. Note, its simpler to implement a custom <tt>onModule(AnyModule module)</tt>
* method, which will be automatically be called by the relevant type.
*/
void processModule(Module module);
/** /**
* Additional node settings loaded by the plugin. Note that settings that are explicit in the nodes settings can't be * Additional node settings loaded by the plugin. Note that settings that are explicit in the nodes settings can't be
* overwritten with the additional settings. These settings added if they don't exist. * overwritten with the additional settings. These settings added if they don't exist.
*/ */
Settings additionalSettings(); public Settings additionalSettings() {
return Settings.Builder.EMPTY_SETTINGS;
}
} }

View File

@ -52,7 +52,7 @@ public class PluginInfo implements Streamable, ToXContent {
private String description; private String description;
private boolean site; private boolean site;
private String version; private String version;
private boolean jvm; private boolean jvm;
private String classname; private String classname;
private boolean isolated; private boolean isolated;
@ -86,7 +86,11 @@ public class PluginInfo implements Streamable, ToXContent {
try (InputStream stream = Files.newInputStream(descriptor)) { try (InputStream stream = Files.newInputStream(descriptor)) {
props.load(stream); props.load(stream);
} }
String name = dir.getFileName().toString(); String name = props.getProperty("name");
if (name == null || name.isEmpty()) {
throw new IllegalArgumentException("Property [name] is missing in [" + descriptor + "]");
}
PluginManager.checkForForbiddenName(name);
String description = props.getProperty("description"); String description = props.getProperty("description");
if (description == null) { if (description == null) {
throw new IllegalArgumentException("Property [description] is missing for plugin [" + name + "]"); throw new IllegalArgumentException("Property [description] is missing for plugin [" + name + "]");
@ -95,6 +99,7 @@ public class PluginInfo implements Streamable, ToXContent {
if (version == null) { if (version == null) {
throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]"); throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]");
} }
boolean jvm = Boolean.parseBoolean(props.getProperty("jvm")); boolean jvm = Boolean.parseBoolean(props.getProperty("jvm"));
boolean site = Boolean.parseBoolean(props.getProperty("site")); boolean site = Boolean.parseBoolean(props.getProperty("site"));
if (jvm == false && site == false) { if (jvm == false && site == false) {
@ -122,7 +127,7 @@ public class PluginInfo implements Streamable, ToXContent {
throw new IllegalArgumentException("Property [classname] is missing for jvm plugin [" + name + "]"); throw new IllegalArgumentException("Property [classname] is missing for jvm plugin [" + name + "]");
} }
} }
if (site) { if (site) {
if (!Files.exists(dir.resolve("_site"))) { if (!Files.exists(dir.resolve("_site"))) {
throw new IllegalArgumentException("Plugin [" + name + "] is a site plugin but has no '_site/' directory"); throw new IllegalArgumentException("Plugin [" + name + "] is a site plugin but has no '_site/' directory");
@ -159,14 +164,14 @@ public class PluginInfo implements Streamable, ToXContent {
public boolean isJvm() { public boolean isJvm() {
return jvm; return jvm;
} }
/** /**
* @return true if jvm plugin has isolated classloader * @return true if jvm plugin has isolated classloader
*/ */
public boolean isIsolated() { public boolean isIsolated() {
return isolated; return isolated;
} }
/** /**
* @return jvm plugin's classname * @return jvm plugin's classname
*/ */

View File

@ -75,27 +75,27 @@ public class PluginManager {
static final ImmutableSet<String> OFFICIAL_PLUGINS = ImmutableSet.<String>builder() static final ImmutableSet<String> OFFICIAL_PLUGINS = ImmutableSet.<String>builder()
.add( .add(
"elasticsearch-analysis-icu", "analysis-icu",
"elasticsearch-analysis-kuromoji", "analysis-kuromoji",
"elasticsearch-analysis-phonetic", "analysis-phonetic",
"elasticsearch-analysis-smartcn", "analysis-smartcn",
"elasticsearch-analysis-stempel", "analysis-stempel",
"elasticsearch-cloud-aws", "cloud-aws",
"elasticsearch-cloud-azure", "cloud-azure",
"elasticsearch-cloud-gce", "cloud-gce",
"elasticsearch-delete-by-query", "delete-by-query",
"elasticsearch-lang-javascript", "lang-javascript",
"elasticsearch-lang-python", "lang-python",
"elasticsearch-mapper-murmur3", "mapper-murmur3",
"elasticsearch-mapper-size" "mapper-size"
).build(); ).build();
private final Environment environment; private final Environment environment;
private String url; private URL url;
private OutputMode outputMode; private OutputMode outputMode;
private TimeValue timeout; private TimeValue timeout;
public PluginManager(Environment environment, String url, OutputMode outputMode, TimeValue timeout) { public PluginManager(Environment environment, URL url, OutputMode outputMode, TimeValue timeout) {
this.environment = environment; this.environment = environment;
this.url = url; this.url = url;
this.outputMode = outputMode; this.outputMode = outputMode;
@ -103,8 +103,8 @@ public class PluginManager {
} }
public void downloadAndExtract(String name, Terminal terminal) throws IOException { public void downloadAndExtract(String name, Terminal terminal) throws IOException {
if (name == null) { if (name == null && url == null) {
throw new IllegalArgumentException("plugin name must be supplied with install [name]."); throw new IllegalArgumentException("plugin name or url must be supplied with install.");
} }
if (!Files.exists(environment.pluginsFile())) { if (!Files.exists(environment.pluginsFile())) {
@ -112,12 +112,18 @@ public class PluginManager {
Files.createDirectory(environment.pluginsFile()); Files.createDirectory(environment.pluginsFile());
} }
if (!Files.isWritable(environment.pluginsFile())) { if (!Environment.isWritable(environment.pluginsFile())) {
throw new IOException("plugin directory " + environment.pluginsFile() + " is read only"); throw new IOException("plugin directory " + environment.pluginsFile() + " is read only");
} }
PluginHandle pluginHandle = PluginHandle.parse(name); PluginHandle pluginHandle;
checkForForbiddenName(pluginHandle.name); if (name != null) {
pluginHandle = PluginHandle.parse(name);
checkForForbiddenName(pluginHandle.name);
} else {
// if we have no name but url, use temporary name that will be overwritten later
pluginHandle = new PluginHandle("temp_name" + new Random().nextInt(), null, null);
}
Path pluginFile = download(pluginHandle, terminal); Path pluginFile = download(pluginHandle, terminal);
extract(pluginHandle, terminal, pluginFile); extract(pluginHandle, terminal, pluginFile);
@ -138,7 +144,7 @@ public class PluginManager {
// first, try directly from the URL provided // first, try directly from the URL provided
if (url != null) { if (url != null) {
URL pluginUrl = new URL(url); URL pluginUrl = url;
boolean isSecureProcotol = "https".equalsIgnoreCase(pluginUrl.getProtocol()); boolean isSecureProcotol = "https".equalsIgnoreCase(pluginUrl.getProtocol());
boolean isAuthInfoSet = !Strings.isNullOrEmpty(pluginUrl.getUserInfo()); boolean isAuthInfoSet = !Strings.isNullOrEmpty(pluginUrl.getUserInfo());
if (isAuthInfoSet && !isSecureProcotol) { if (isAuthInfoSet && !isSecureProcotol) {
@ -163,7 +169,7 @@ public class PluginManager {
terminal.println("Failed: %s", ExceptionsHelper.detailedMessage(e)); terminal.println("Failed: %s", ExceptionsHelper.detailedMessage(e));
} }
} else { } else {
if (PluginHandle.isOfficialPlugin(pluginHandle.repo, pluginHandle.user, pluginHandle.version)) { if (PluginHandle.isOfficialPlugin(pluginHandle.name, pluginHandle.user, pluginHandle.version)) {
checkForOfficialPlugins(pluginHandle.name); checkForOfficialPlugins(pluginHandle.name);
} }
} }
@ -204,14 +210,10 @@ public class PluginManager {
} }
private void extract(PluginHandle pluginHandle, Terminal terminal, Path pluginFile) throws IOException { private void extract(PluginHandle pluginHandle, Terminal terminal, Path pluginFile) throws IOException {
final Path extractLocation = pluginHandle.extractedDir(environment);
if (Files.exists(extractLocation)) {
throw new IOException("plugin directory " + extractLocation.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + pluginHandle.name + "' command");
}
// unzip plugin to a staging temp dir, named for the plugin // unzip plugin to a staging temp dir, named for the plugin
Path tmp = Files.createTempDirectory(environment.tmpFile(), null); Path tmp = Files.createTempDirectory(environment.tmpFile(), null);
Path root = tmp.resolve(pluginHandle.name); Path root = tmp.resolve(pluginHandle.name);
unzipPlugin(pluginFile, root); unzipPlugin(pluginFile, root);
// find the actual root (in case its unzipped with extra directory wrapping) // find the actual root (in case its unzipped with extra directory wrapping)
@ -226,6 +228,13 @@ public class PluginManager {
jarHellCheck(root, info.isIsolated()); jarHellCheck(root, info.isIsolated());
} }
// update name in handle based on 'name' property found in descriptor file
pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user);
final Path extractLocation = pluginHandle.extractedDir(environment);
if (Files.exists(extractLocation)) {
throw new IOException("plugin directory " + extractLocation.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + pluginHandle.name + "' command");
}
// install plugin // install plugin
FileSystemUtils.copyDirectoryRecursively(root, extractLocation); FileSystemUtils.copyDirectoryRecursively(root, extractLocation);
terminal.println("Installed %s into %s", pluginHandle.name, extractLocation.toAbsolutePath()); terminal.println("Installed %s into %s", pluginHandle.name, extractLocation.toAbsolutePath());
@ -246,7 +255,7 @@ public class PluginManager {
} catch (IOException e) { } catch (IOException e) {
throw new IOException("Could not move [" + binFile + "] to [" + toLocation + "]", e); throw new IOException("Could not move [" + binFile + "] to [" + toLocation + "]", e);
} }
if (Files.getFileStore(toLocation).supportsFileAttributeView(PosixFileAttributeView.class)) { if (Environment.getFileStore(toLocation).supportsFileAttributeView(PosixFileAttributeView.class)) {
// add read and execute permissions to existing perms, so execution will work. // add read and execute permissions to existing perms, so execution will work.
// read should generally be set already, but set it anyway: don't rely on umask... // read should generally be set already, but set it anyway: don't rely on umask...
final Set<PosixFilePermission> executePerms = new HashSet<>(); final Set<PosixFilePermission> executePerms = new HashSet<>();
@ -334,7 +343,7 @@ public class PluginManager {
private void unzipPlugin(Path zip, Path target) throws IOException { private void unzipPlugin(Path zip, Path target) throws IOException {
Files.createDirectories(target); Files.createDirectories(target);
try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) { try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) {
ZipEntry entry; ZipEntry entry;
byte[] buffer = new byte[8192]; byte[] buffer = new byte[8192];
@ -395,7 +404,7 @@ public class PluginManager {
} }
} }
private static void checkForForbiddenName(String name) { static void checkForForbiddenName(String name) {
if (!hasLength(name) || BLACKLIST.contains(name.toLowerCase(Locale.ROOT))) { if (!hasLength(name) || BLACKLIST.contains(name.toLowerCase(Locale.ROOT))) {
throw new IllegalArgumentException("Illegal plugin name: " + name); throw new IllegalArgumentException("Illegal plugin name: " + name);
} }
@ -438,43 +447,41 @@ public class PluginManager {
*/ */
static class PluginHandle { static class PluginHandle {
final String name;
final String version; final String version;
final String user; final String user;
final String repo; final String name;
PluginHandle(String name, String version, String user, String repo) { PluginHandle(String name, String version, String user) {
this.name = name;
this.version = version; this.version = version;
this.user = user; this.user = user;
this.repo = repo; this.name = name;
} }
List<URL> urls() { List<URL> urls() {
List<URL> urls = new ArrayList<>(); List<URL> urls = new ArrayList<>();
if (version != null) { if (version != null) {
// Elasticsearch new download service uses groupId org.elasticsearch.plugins from 2.0.0 // Elasticsearch new download service uses groupId org.elasticsearch.plugin from 2.0.0
if (user == null) { if (user == null) {
// TODO Update to https // TODO Update to https
if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) { if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) {
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/elasticsearch-%s-%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", version, Build.CURRENT.hashShort(), repo, version, repo, version)); addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip", version, Build.CURRENT.hashShort(), name, version, name, version));
} }
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", repo, version, repo, version)); addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip", name, version, name, version));
} else { } else {
// Elasticsearch old download service // Elasticsearch old download service
// TODO Update to https // TODO Update to https
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, repo, version)); addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, name, version));
// Maven central repository // Maven central repository
addUrl(urls, String.format(Locale.ROOT, "http://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version)); addUrl(urls, String.format(Locale.ROOT, "http://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
// Sonatype repository // Sonatype repository
addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version)); addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
// Github repository // Github repository
addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, repo, version)); addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, name, version));
} }
} }
if (user != null) { if (user != null) {
// Github repository for master branch (assume site) // Github repository for master branch (assume site)
addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, repo)); addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, name));
} }
return urls; return urls;
} }
@ -526,20 +533,11 @@ public class PluginManager {
} }
} }
String endname = repo;
if (repo.startsWith("elasticsearch-")) {
// remove elasticsearch- prefix
endname = repo.substring("elasticsearch-".length());
} else if (repo.startsWith("es-")) {
// remove es- prefix
endname = repo.substring("es-".length());
}
if (isOfficialPlugin(repo, user, version)) { if (isOfficialPlugin(repo, user, version)) {
return new PluginHandle(endname, Version.CURRENT.number(), null, repo); return new PluginHandle(repo, Version.CURRENT.number(), null);
} }
return new PluginHandle(endname, version, user, repo); return new PluginHandle(repo, version, user);
} }
static boolean isOfficialPlugin(String repo, String user, String version) { static boolean isOfficialPlugin(String repo, String user, String version) {

View File

@ -20,6 +20,7 @@
package org.elasticsearch.plugins; package org.elasticsearch.plugins;
import com.google.common.base.Strings; import com.google.common.base.Strings;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliToolConfig; import org.elasticsearch.common.cli.CliToolConfig;
@ -32,7 +33,8 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.PluginManager.OutputMode; import org.elasticsearch.plugins.PluginManager.OutputMode;
import java.io.IOException; import java.net.MalformedURLException;
import java.net.URL;
import java.util.Locale; import java.util.Locale;
import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd;
@ -166,19 +168,29 @@ public class PluginManagerCliParser extends CliTool {
private static final String NAME = "install"; private static final String NAME = "install";
private static final CliToolConfig.Cmd CMD = cmd(NAME, Install.class) private static final CliToolConfig.Cmd CMD = cmd(NAME, Install.class)
.options(option("u", "url").required(false).hasArg(true))
.options(option("t", "timeout").required(false).hasArg(false)) .options(option("t", "timeout").required(false).hasArg(false))
.build(); .build();
static Command parse(Terminal terminal, CommandLine cli) { static Command parse(Terminal terminal, CommandLine cli) {
String[] args = cli.getArgs(); String[] args = cli.getArgs();
// install [plugin-name/url]
if ((args == null) || (args.length == 0)) { if ((args == null) || (args.length == 0)) {
return exitCmd(ExitStatus.USAGE, terminal, "plugin name is missing (type -h for help)"); return exitCmd(ExitStatus.USAGE, terminal, "plugin name or url is missing (type -h for help)");
}
String name = args[0];
URL optionalPluginUrl = null;
// try parsing cli argument as URL
try {
optionalPluginUrl = new URL(name);
name = null;
} catch (MalformedURLException e) {
// we tried to parse the cli argument as url and failed
// continue treating it as a symbolic plugin name like `analysis-icu` etc.
} }
String name = args[0];
TimeValue timeout = TimeValue.parseTimeValue(cli.getOptionValue("t"), DEFAULT_TIMEOUT, "cli"); TimeValue timeout = TimeValue.parseTimeValue(cli.getOptionValue("t"), DEFAULT_TIMEOUT, "cli");
String url = cli.getOptionValue("u");
OutputMode outputMode = OutputMode.DEFAULT; OutputMode outputMode = OutputMode.DEFAULT;
if (cli.hasOption("s")) { if (cli.hasOption("s")) {
@ -188,15 +200,15 @@ public class PluginManagerCliParser extends CliTool {
outputMode = OutputMode.VERBOSE; outputMode = OutputMode.VERBOSE;
} }
return new Install(terminal, name, outputMode, url, timeout); return new Install(terminal, name, outputMode, optionalPluginUrl, timeout);
} }
final String name; final String name;
private OutputMode outputMode; private OutputMode outputMode;
final String url; final URL url;
final TimeValue timeout; final TimeValue timeout;
Install(Terminal terminal, String name, OutputMode outputMode, String url, TimeValue timeout) { Install(Terminal terminal, String name, OutputMode outputMode, URL url, TimeValue timeout) {
super(terminal); super(terminal);
this.name = name; this.name = name;
this.outputMode = outputMode; this.outputMode = outputMode;
@ -207,7 +219,11 @@ public class PluginManagerCliParser extends CliTool {
@Override @Override
public ExitStatus execute(Settings settings, Environment env) throws Exception { public ExitStatus execute(Settings settings, Environment env) throws Exception {
PluginManager pluginManager = new PluginManager(env, url, outputMode, timeout); PluginManager pluginManager = new PluginManager(env, url, outputMode, timeout);
terminal.println("-> Installing " + Strings.nullToEmpty(name) + "..."); if (name != null) {
terminal.println("-> Installing " + Strings.nullToEmpty(name) + "...");
} else {
terminal.println("-> Installing from " + url + "...");
}
pluginManager.downloadAndExtract(name, terminal); pluginManager.downloadAndExtract(name, terminal);
return ExitStatus.OK; return ExitStatus.OK;
} }

View File

@ -19,18 +19,12 @@
package org.elasticsearch.plugins; package org.elasticsearch.plugins;
import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.PreProcessModule; import org.elasticsearch.common.inject.PreProcessModule;
import org.elasticsearch.common.inject.SpawnModules; import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import java.util.Collection;
import java.util.List;
import static org.elasticsearch.common.inject.Modules.createModule;
/** /**
* *
*/ */
@ -47,13 +41,7 @@ public class PluginsModule extends AbstractModule implements SpawnModules, PrePr
@Override @Override
public Iterable<? extends Module> spawnModules() { public Iterable<? extends Module> spawnModules() {
List<Module> modules = Lists.newArrayList(); return pluginsService.nodeModules();
Collection<Class<? extends Module>> modulesClasses = pluginsService.modules();
for (Class<? extends Module> moduleClass : modulesClasses) {
modules.add(createModule(moduleClass, settings));
}
modules.addAll(pluginsService.modules(settings));
return modules;
} }
@Override @Override

View File

@ -47,7 +47,6 @@ import java.nio.file.DirectoryStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
@ -184,7 +183,6 @@ public class PluginsService extends AbstractComponent {
public void processModule(Module module) { public void processModule(Module module) {
for (Tuple<PluginInfo, Plugin> plugin : plugins()) { for (Tuple<PluginInfo, Plugin> plugin : plugins()) {
plugin.v2().processModule(module);
// see if there are onModule references // see if there are onModule references
List<OnModuleReference> references = onModuleReferences.get(plugin.v2()); List<OnModuleReference> references = onModuleReferences.get(plugin.v2());
if (references != null) { if (references != null) {
@ -202,49 +200,42 @@ public class PluginsService extends AbstractComponent {
} }
public Settings updatedSettings() { public Settings updatedSettings() {
Map<String, String> foundSettings = new HashMap<>();
final Settings.Builder builder = Settings.settingsBuilder(); final Settings.Builder builder = Settings.settingsBuilder();
for (Tuple<PluginInfo, Plugin> plugin : plugins) { for (Tuple<PluginInfo, Plugin> plugin : plugins) {
builder.put(plugin.v2().additionalSettings()); Settings settings = plugin.v2().additionalSettings();
for (String setting : settings.getAsMap().keySet()) {
String oldPlugin = foundSettings.put(setting, plugin.v1().getName());
if (oldPlugin != null) {
throw new IllegalArgumentException("Cannot have additional setting [" + setting + "] " +
"in plugin [" + plugin.v1().getName() + "], already added in plugin [" + oldPlugin + "]");
}
}
builder.put(settings);
} }
return builder.put(this.settings).build(); return builder.put(this.settings).build();
} }
public Collection<Class<? extends Module>> modules() { public Collection<Module> nodeModules() {
List<Class<? extends Module>> modules = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
modules.addAll(plugin.v2().modules());
}
return modules;
}
public Collection<Module> modules(Settings settings) {
List<Module> modules = new ArrayList<>(); List<Module> modules = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) { for (Tuple<PluginInfo, Plugin> plugin : plugins) {
modules.addAll(plugin.v2().modules(settings)); modules.addAll(plugin.v2().nodeModules());
} }
return modules; return modules;
} }
public Collection<Class<? extends LifecycleComponent>> services() { public Collection<Class<? extends LifecycleComponent>> nodeServices() {
List<Class<? extends LifecycleComponent>> services = new ArrayList<>(); List<Class<? extends LifecycleComponent>> services = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) { for (Tuple<PluginInfo, Plugin> plugin : plugins) {
services.addAll(plugin.v2().services()); services.addAll(plugin.v2().nodeServices());
} }
return services; return services;
} }
public Collection<Class<? extends Module>> indexModules() { public Collection<Module> indexModules(Settings indexSettings) {
List<Class<? extends Module>> modules = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
modules.addAll(plugin.v2().indexModules());
}
return modules;
}
public Collection<Module> indexModules(Settings settings) {
List<Module> modules = new ArrayList<>(); List<Module> modules = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) { for (Tuple<PluginInfo, Plugin> plugin : plugins) {
modules.addAll(plugin.v2().indexModules(settings)); modules.addAll(plugin.v2().indexModules(indexSettings));
} }
return modules; return modules;
} }
@ -257,18 +248,10 @@ public class PluginsService extends AbstractComponent {
return services; return services;
} }
public Collection<Class<? extends Module>> shardModules() { public Collection<Module> shardModules(Settings indexSettings) {
List<Class<? extends Module>> modules = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) {
modules.addAll(plugin.v2().shardModules());
}
return modules;
}
public Collection<Module> shardModules(Settings settings) {
List<Module> modules = new ArrayList<>(); List<Module> modules = new ArrayList<>();
for (Tuple<PluginInfo, Plugin> plugin : plugins) { for (Tuple<PluginInfo, Plugin> plugin : plugins) {
modules.addAll(plugin.v2().shardModules(settings)); modules.addAll(plugin.v2().shardModules(indexSettings));
} }
return modules; return modules;
} }

View File

@ -19,18 +19,12 @@
package org.elasticsearch.plugins; package org.elasticsearch.plugins;
import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.PreProcessModule; import org.elasticsearch.common.inject.PreProcessModule;
import org.elasticsearch.common.inject.SpawnModules; import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import java.util.Collection;
import java.util.List;
import static org.elasticsearch.common.inject.Modules.createModule;
/** /**
* *
*/ */
@ -47,13 +41,7 @@ public class ShardsPluginsModule extends AbstractModule implements SpawnModules,
@Override @Override
public Iterable<? extends Module> spawnModules() { public Iterable<? extends Module> spawnModules() {
List<Module> modules = Lists.newArrayList(); return pluginsService.shardModules(settings);
Collection<Class<? extends Module>> modulesClasses = pluginsService.shardModules();
for (Class<? extends Module> moduleClass : modulesClasses) {
modules.add(createModule(moduleClass, settings));
}
modules.addAll(pluginsService.shardModules(settings));
return modules;
} }
@Override @Override

View File

@ -19,16 +19,8 @@
package org.elasticsearch.plugins; package org.elasticsearch.plugins;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import java.io.Closeable;
import java.util.Collection;
import java.util.Collections;
/** A site-only plugin, just serves resources */ /** A site-only plugin, just serves resources */
final class SitePlugin implements Plugin { final class SitePlugin extends Plugin {
final String name; final String name;
final String description; final String description;
@ -46,58 +38,4 @@ final class SitePlugin implements Plugin {
public String description() { public String description() {
return description; return description;
} }
@Override
public Collection<Class<? extends Module>> modules() {
return Collections.emptyList();
}
@Override
public Collection<? extends Module> modules(Settings settings) {
return Collections.emptyList();
}
@Override
public Collection<Class<? extends LifecycleComponent>> services() {
return Collections.emptyList();
}
@Override
public Collection<Class<? extends Module>> indexModules() {
return Collections.emptyList();
}
@Override
public Collection<? extends Module> indexModules(Settings settings) {
return Collections.emptyList();
}
@Override
public Collection<Class<? extends Closeable>> indexServices() {
return Collections.emptyList();
}
@Override
public Collection<Class<? extends Module>> shardModules() {
return Collections.emptyList();
}
@Override
public Collection<? extends Module> shardModules(Settings settings) {
return Collections.emptyList();
}
@Override
public Collection<Class<? extends Closeable>> shardServices() {
return Collections.emptyList();
}
@Override
public void processModule(Module module) {
}
@Override
public Settings additionalSettings() {
return Settings.EMPTY;
}
} }

View File

@ -19,44 +19,33 @@
package org.elasticsearch.repositories; package org.elasticsearch.repositories;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.elasticsearch.action.admin.cluster.snapshots.status.TransportNodesSnapshotsStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.TransportNodesSnapshotsStatus;
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.repositories.fs.FsRepositoryModule;
import org.elasticsearch.repositories.uri.URLRepository; import org.elasticsearch.repositories.uri.URLRepository;
import org.elasticsearch.repositories.uri.URLRepositoryModule;
import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.snapshots.RestoreService;
import org.elasticsearch.snapshots.SnapshotsService;
import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotShardsService;
import org.elasticsearch.snapshots.SnapshotsService;
import java.util.Map;
/** /**
* Module responsible for registering other repositories. * Sets up classes for Snapshot/Restore.
* <p/> *
* Repositories implemented as plugins should implement {@code onModule(RepositoriesModule module)} method, in which * Plugins can add custom repository types by calling {@link #registerRepository(String, Class, Class)}.
* they should register repository using {@link #registerRepository(String, Class)} method.
*/ */
public class RepositoriesModule extends AbstractModule { public class RepositoriesModule extends AbstractModule {
private Map<String, Class<? extends Module>> repositoryTypes = Maps.newHashMap(); private final RepositoryTypesRegistry repositoryTypes = new RepositoryTypesRegistry();
public RepositoriesModule() { public RepositoriesModule() {
registerRepository(FsRepository.TYPE, FsRepositoryModule.class); registerRepository(FsRepository.TYPE, FsRepository.class, BlobStoreIndexShardRepository.class);
registerRepository(URLRepository.TYPE, URLRepositoryModule.class); registerRepository(URLRepository.TYPE, URLRepository.class, BlobStoreIndexShardRepository.class);
} }
/** /** Registers a custom repository type to the given {@link Repository} and {@link IndexShardRepository}. */
* Registers a custom repository type name against a module. public void registerRepository(String type, Class<? extends Repository> repositoryType, Class<? extends IndexShardRepository> shardRepositoryType) {
* repositoryTypes.registerRepository(type, repositoryType, shardRepositoryType);
* @param type The type
* @param module The module
*/
public void registerRepository(String type, Class<? extends Module> module) {
repositoryTypes.put(type, module);
} }
@Override @Override
@ -66,6 +55,6 @@ public class RepositoriesModule extends AbstractModule {
bind(SnapshotShardsService.class).asEagerSingleton(); bind(SnapshotShardsService.class).asEagerSingleton();
bind(TransportNodesSnapshotsStatus.class).asEagerSingleton(); bind(TransportNodesSnapshotsStatus.class).asEagerSingleton();
bind(RestoreService.class).asEagerSingleton(); bind(RestoreService.class).asEagerSingleton();
bind(RepositoryTypesRegistry.class).toInstance(new RepositoryTypesRegistry(ImmutableMap.copyOf(repositoryTypes))); bind(RepositoryTypesRegistry.class).toInstance(repositoryTypes);
} }
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.repositories; package org.elasticsearch.repositories;
import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.Modules; import org.elasticsearch.common.inject.Modules;
@ -29,12 +28,10 @@ import org.elasticsearch.common.settings.Settings;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import static org.elasticsearch.common.Strings.toCamelCase;
/** /**
* This module spawns specific repository module * Binds repository classes for the specific repository type.
*/ */
public class RepositoryModule extends AbstractModule implements SpawnModules { public class RepositoryModule extends AbstractModule {
private RepositoryName repositoryName; private RepositoryName repositoryName;
@ -59,28 +56,12 @@ public class RepositoryModule extends AbstractModule implements SpawnModules {
this.typesRegistry = typesRegistry; this.typesRegistry = typesRegistry;
} }
/**
* Returns repository module.
* <p/>
* First repository type is looked up in typesRegistry and if it's not found there, this module tries to
* load repository by it's class name.
*
* @return repository module
*/
@Override
public Iterable<? extends Module> spawnModules() {
Class<? extends Module> repoModuleClass = typesRegistry.type(repositoryName.type());
if (repoModuleClass == null) {
throw new IllegalArgumentException("Could not find repository type [" + repositoryName.getType() + "] for repository [" + repositoryName.getName() + "]");
}
return Collections.unmodifiableList(Arrays.asList(Modules.createModule(repoModuleClass, globalSettings)));
}
/** /**
* {@inheritDoc} * {@inheritDoc}
*/ */
@Override @Override
protected void configure() { protected void configure() {
typesRegistry.bindType(binder(), repositoryName.type());
bind(RepositorySettings.class).toInstance(new RepositorySettings(globalSettings, settings)); bind(RepositorySettings.class).toInstance(new RepositorySettings(globalSettings, settings));
} }
} }

View File

@ -19,31 +19,34 @@
package org.elasticsearch.repositories; package org.elasticsearch.repositories;
import com.google.common.collect.ImmutableMap; import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.index.snapshots.IndexShardRepository;
/** /**
* Map of registered repository types and associated with these types modules * A mapping from type name to implementations of {@link Repository} and {@link IndexShardRepository}.
*/ */
public class RepositoryTypesRegistry { public class RepositoryTypesRegistry {
private final ImmutableMap<String, Class<? extends Module>> repositoryTypes; // invariant: repositories and shardRepositories have the same keyset
private final ExtensionPoint.SelectedType<Repository> repositoryTypes =
new ExtensionPoint.SelectedType<>("repository", Repository.class);
private final ExtensionPoint.SelectedType<IndexShardRepository> shardRepositoryTypes =
new ExtensionPoint.SelectedType<>("index_repository", IndexShardRepository.class);
/** /** Adds a new repository type to the registry, bound to the given implementation classes. */
* Creates new repository with given map of types public void registerRepository(String name, Class<? extends Repository> repositoryType, Class<? extends IndexShardRepository> shardRepositoryType) {
* repositoryTypes.registerExtension(name, repositoryType);
* @param repositoryTypes shardRepositoryTypes.registerExtension(name, shardRepositoryType);
*/
public RepositoryTypesRegistry(ImmutableMap<String, Class<? extends Module>> repositoryTypes) {
this.repositoryTypes = repositoryTypes;
} }
/** /**
* Returns repository module class for the given type * Looks up the given type and binds the implementation into the given binder.
* * Throws an {@link IllegalArgumentException} if the given type does not exist.
* @param type repository type
* @return repository module class or null if type is not found
*/ */
public Class<? extends Module> type(String type) { public void bindType(Binder binder, String type) {
return repositoryTypes.get(type); Settings settings = Settings.builder().put("type", type).build();
repositoryTypes.bindType(binder, settings, "type", null);
shardRepositoryTypes.bindType(binder, settings, "type", null);
} }
} }

View File

@ -1,46 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.fs;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.repositories.Repository;
/**
* File system repository module
*/
public class FsRepositoryModule extends AbstractModule {
public FsRepositoryModule() {
super();
}
/**
* {@inheritDoc}
*/
@Override
protected void configure() {
bind(Repository.class).to(FsRepository.class).asEagerSingleton();
bind(IndexShardRepository.class).to(BlobStoreIndexShardRepository.class).asEagerSingleton();
}
}

View File

@ -156,7 +156,7 @@ public class URLRepository extends BlobStoreRepository {
logger.warn("cannot parse the specified url [{}]", url); logger.warn("cannot parse the specified url [{}]", url);
throw new RepositoryException(repositoryName, "cannot parse the specified url [" + url + "]"); throw new RepositoryException(repositoryName, "cannot parse the specified url [" + url + "]");
} }
// We didn't match white list - try to resolve against repo.path // We didn't match white list - try to resolve against path.repo
URL normalizedUrl = environment.resolveRepoURL(url); URL normalizedUrl = environment.resolveRepoURL(url);
if (normalizedUrl == null) { if (normalizedUrl == null) {
logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] or by repositories.url.allowed_urls setting: [{}] ", url, environment.repoFiles()); logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] or by repositories.url.allowed_urls setting: [{}] ", url, environment.repoFiles());

View File

@ -1,46 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.uri;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.repositories.Repository;
/**
* URL repository module
*/
public class URLRepositoryModule extends AbstractModule {
public URLRepositoryModule() {
super();
}
/**
* {@inheritDoc}
*/
@Override
protected void configure() {
bind(Repository.class).to(URLRepository.class).asEagerSingleton();
bind(IndexShardRepository.class).to(BlobStoreIndexShardRepository.class).asEagerSingleton();
}
}

View File

@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableMap;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
@ -54,7 +53,6 @@ import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
@ -82,7 +80,6 @@ import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template; import org.elasticsearch.script.Template;
import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.search.dfs.DfsPhase; import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.*; import org.elasticsearch.search.fetch.*;
@ -274,7 +271,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
throw new IllegalArgumentException("aggregations are not supported with search_type=scan"); throw new IllegalArgumentException("aggregations are not supported with search_type=scan");
} }
if (context.scroll() == null) { if (context.scrollContext() == null || context.scrollContext().scroll == null) {
throw new ElasticsearchException("Scroll must be provided when scanning..."); throw new ElasticsearchException("Scroll must be provided when scanning...");
} }
@ -322,7 +319,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
try { try {
shortcutDocIdsToLoadForScanning(context); shortcutDocIdsToLoadForScanning(context);
fetchPhase.execute(context); fetchPhase.execute(context);
if (context.scroll() == null || context.fetchResult().hits().hits().length < context.size()) { if (context.scrollContext() == null || context.fetchResult().hits().hits().length < context.size()) {
freeContext(request.id()); freeContext(request.id());
} else { } else {
contextProcessedSuccessfully(context); contextProcessedSuccessfully(context);
@ -365,7 +362,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
loadOrExecuteQueryPhase(request, context, queryPhase); loadOrExecuteQueryPhase(request, context, queryPhase);
if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scroll() == null) { if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
freeContext(context.id()); freeContext(context.id());
} else { } else {
contextProcessedSuccessfully(context); contextProcessedSuccessfully(context);
@ -412,23 +409,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
public QuerySearchResult executeQueryPhase(QuerySearchRequest request) { public QuerySearchResult executeQueryPhase(QuerySearchRequest request) {
final SearchContext context = findContext(request.id()); final SearchContext context = findContext(request.id());
contextProcessing(context); contextProcessing(context);
context.searcher().setAggregatedDfs(request.dfs());
IndexShard indexShard = context.indexShard(); IndexShard indexShard = context.indexShard();
try {
final IndexCache indexCache = indexShard.indexService().cache();
final QueryCachingPolicy cachingPolicy = indexShard.getQueryCachingPolicy();
context.searcher().dfSource(new CachedDfSource(context.searcher().getIndexReader(), request.dfs(), context.similarityService().similarity(),
indexCache.query(), cachingPolicy));
} catch (Throwable e) {
processFailure(context, e);
cleanContext(context);
throw new QueryPhaseExecutionException(context, "Failed to set aggregated df", e);
}
ShardSearchStats shardSearchStats = indexShard.searchService(); ShardSearchStats shardSearchStats = indexShard.searchService();
try { try {
shardSearchStats.onPreQueryPhase(context); shardSearchStats.onPreQueryPhase(context);
long time = System.nanoTime(); long time = System.nanoTime();
queryPhase.execute(context); queryPhase.execute(context);
if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scroll() == null) { if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
// no hits, we can release the context since there will be no fetch phase // no hits, we can release the context since there will be no fetch phase
freeContext(context.id()); freeContext(context.id());
} else { } else {
@ -446,6 +434,16 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
} }
} }
private boolean fetchPhaseShouldFreeContext(SearchContext context) {
if (context.scrollContext() == null) {
// simple search, no scroll
return true;
} else {
// scroll request, but the scroll was not extended
return context.scrollContext().scroll == null;
}
}
public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) { public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) {
final SearchContext context = createAndPutContext(request); final SearchContext context = createAndPutContext(request);
contextProcessing(context); contextProcessing(context);
@ -465,7 +463,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
try { try {
shortcutDocIdsToLoad(context); shortcutDocIdsToLoad(context);
fetchPhase.execute(context); fetchPhase.execute(context);
if (context.scroll() == null) { if (fetchPhaseShouldFreeContext(context)) {
freeContext(context.id()); freeContext(context.id());
} else { } else {
contextProcessedSuccessfully(context); contextProcessedSuccessfully(context);
@ -488,17 +486,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
public QueryFetchSearchResult executeFetchPhase(QuerySearchRequest request) { public QueryFetchSearchResult executeFetchPhase(QuerySearchRequest request) {
final SearchContext context = findContext(request.id()); final SearchContext context = findContext(request.id());
contextProcessing(context); contextProcessing(context);
try { context.searcher().setAggregatedDfs(request.dfs());
final IndexShard indexShard = context.indexShard();
final IndexCache indexCache = indexShard.indexService().cache();
final QueryCachingPolicy cachingPolicy = indexShard.getQueryCachingPolicy();
context.searcher().dfSource(new CachedDfSource(context.searcher().getIndexReader(), request.dfs(), context.similarityService().similarity(),
indexCache.query(), cachingPolicy));
} catch (Throwable e) {
freeContext(context.id());
cleanContext(context);
throw new QueryPhaseExecutionException(context, "Failed to set aggregated df", e);
}
try { try {
ShardSearchStats shardSearchStats = context.indexShard().searchService(); ShardSearchStats shardSearchStats = context.indexShard().searchService();
shardSearchStats.onPreQueryPhase(context); shardSearchStats.onPreQueryPhase(context);
@ -515,7 +503,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
try { try {
shortcutDocIdsToLoad(context); shortcutDocIdsToLoad(context);
fetchPhase.execute(context); fetchPhase.execute(context);
if (context.scroll() == null) { if (fetchPhaseShouldFreeContext(context)) {
freeContext(request.id()); freeContext(request.id());
} else { } else {
contextProcessedSuccessfully(context); contextProcessedSuccessfully(context);
@ -555,7 +543,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
try { try {
shortcutDocIdsToLoad(context); shortcutDocIdsToLoad(context);
fetchPhase.execute(context); fetchPhase.execute(context);
if (context.scroll() == null) { if (fetchPhaseShouldFreeContext(context)) {
freeContext(request.id()); freeContext(request.id());
} else { } else {
contextProcessedSuccessfully(context); contextProcessedSuccessfully(context);
@ -581,13 +569,13 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
final ShardSearchStats shardSearchStats = context.indexShard().searchService(); final ShardSearchStats shardSearchStats = context.indexShard().searchService();
try { try {
if (request.lastEmittedDoc() != null) { if (request.lastEmittedDoc() != null) {
context.lastEmittedDoc(request.lastEmittedDoc()); context.scrollContext().lastEmittedDoc = request.lastEmittedDoc();
} }
context.docIdsToLoad(request.docIds(), 0, request.docIdsSize()); context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());
shardSearchStats.onPreFetchPhase(context); shardSearchStats.onPreFetchPhase(context);
long time = System.nanoTime(); long time = System.nanoTime();
fetchPhase.execute(context); fetchPhase.execute(context);
if (context.scroll() == null) { if (fetchPhaseShouldFreeContext(context)) {
freeContext(request.id()); freeContext(request.id());
} else { } else {
contextProcessedSuccessfully(context); contextProcessedSuccessfully(context);
@ -642,7 +630,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout); SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout);
SearchContext.setCurrent(context); SearchContext.setCurrent(context);
try { try {
context.scroll(request.scroll()); if (request.scroll() != null) {
context.scrollContext(new ScrollContext());
context.scrollContext().scroll = request.scroll();
}
parseTemplate(request, context); parseTemplate(request, context);
parseSource(context, request.source()); parseSource(context, request.source());
@ -695,7 +686,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
if (context != null) { if (context != null) {
try { try {
context.indexShard().searchService().onFreeContext(context); context.indexShard().searchService().onFreeContext(context);
if (context.scroll() != null) { if (context.scrollContext() != null) {
context.indexShard().searchService().onFreeScrollContext(context); context.indexShard().searchService().onFreeScrollContext(context);
} }
} finally { } finally {
@ -708,7 +699,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
public void freeAllScrollContexts() { public void freeAllScrollContexts() {
for (SearchContext searchContext : activeContexts.values()) { for (SearchContext searchContext : activeContexts.values()) {
if (searchContext.scroll() != null) { if (searchContext.scrollContext() != null) {
freeContext(searchContext.id()); freeContext(searchContext.id());
} }
} }
@ -902,7 +893,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
private void processScroll(InternalScrollSearchRequest request, SearchContext context) { private void processScroll(InternalScrollSearchRequest request, SearchContext context) {
// process scroll // process scroll
context.from(context.from() + context.size()); context.from(context.from() + context.size());
context.scroll(request.scroll()); context.scrollContext().scroll = request.scroll();
// update the context keep alive based on the new scroll value // update the context keep alive based on the new scroll value
if (request.scroll() != null && request.scroll().keepAlive() != null) { if (request.scroll() != null && request.scroll().keepAlive() != null) {
context.keepAlive(request.scroll().keepAlive().millis()); context.keepAlive(request.scroll().keepAlive().millis());

View File

@ -31,7 +31,7 @@ import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource> { final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource> {
private final long precisionThreshold; private final long precisionThreshold;

View File

@ -69,14 +69,14 @@ public class ValueFormat {
public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format(), ValueFormatter.DateTime.DEFAULT, ValueParser.DateMath.DEFAULT); public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format(), ValueFormatter.DateTime.DEFAULT, ValueParser.DateMath.DEFAULT);
public static DateTime format(String format, DateTimeZone timezone) { public static DateTime format(String format, DateTimeZone timezone) {
return new DateTime(format, new ValueFormatter.DateTime(format, timezone), new ValueParser.DateMath(format)); return new DateTime(format, new ValueFormatter.DateTime(format, timezone), new ValueParser.DateMath(format, timezone));
} }
public static DateTime mapper(DateFieldMapper.DateFieldType fieldType, DateTimeZone timezone) { public static DateTime mapper(DateFieldMapper.DateFieldType fieldType, DateTimeZone timezone) {
return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType, timezone), ValueParser.DateMath.mapper(fieldType)); return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType, timezone), ValueParser.DateMath.mapper(fieldType, timezone));
} }
public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) { private DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {
super(pattern, formatter, parser); super(pattern, formatter, parser);
} }

View File

@ -18,6 +18,7 @@
*/ */
package org.elasticsearch.search.aggregations.support.format; package org.elasticsearch.search.aggregations.support.format;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.joda.Joda;
@ -25,6 +26,7 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;
import java.text.DecimalFormat; import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols; import java.text.DecimalFormatSymbols;
@ -80,16 +82,21 @@ public interface ValueParser {
*/ */
static class DateMath implements ValueParser { static class DateMath implements ValueParser {
public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER)); public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER), DateTimeZone.UTC);
private DateMathParser parser; private DateMathParser parser;
public DateMath(String format) { private DateTimeZone timezone = DateTimeZone.UTC;
this(new DateMathParser(Joda.forPattern(format)));
public DateMath(String format, DateTimeZone timezone) {
this(new DateMathParser(Joda.forPattern(format)), timezone);
} }
public DateMath(DateMathParser parser) { public DateMath(DateMathParser parser, @Nullable DateTimeZone timeZone) {
this.parser = parser; this.parser = parser;
if (timeZone != null) {
this.timezone = timeZone;
}
} }
@Override @Override
@ -100,7 +107,7 @@ public interface ValueParser {
return searchContext.nowInMillis(); return searchContext.nowInMillis();
} }
}; };
return parser.parse(value, now); return parser.parse(value, now, false, timezone);
} }
@Override @Override
@ -108,8 +115,8 @@ public interface ValueParser {
return parseLong(value, searchContext); return parseLong(value, searchContext);
} }
public static DateMath mapper(DateFieldMapper.DateFieldType fieldType) { public static DateMath mapper(DateFieldMapper.DateFieldType fieldType, @Nullable DateTimeZone timezone) {
return new DateMath(new DateMathParser(fieldType.dateTimeFormatter())); return new DateMath(new DateMathParser(fieldType.dateTimeFormatter()), timezone);
} }
} }

View File

@ -1,97 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.dfs;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.similarities.Similarity;
import java.io.IOException;
import java.util.List;
/**
*
*/
public class CachedDfSource extends IndexSearcher {
private final AggregatedDfs aggregatedDfs;
private final int maxDoc;
public CachedDfSource(IndexReader reader, AggregatedDfs aggregatedDfs, Similarity similarity,
QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) throws IOException {
super(reader);
this.aggregatedDfs = aggregatedDfs;
setSimilarity(similarity);
setQueryCache(queryCache);
setQueryCachingPolicy(queryCachingPolicy);
if (aggregatedDfs.maxDoc() > Integer.MAX_VALUE) {
maxDoc = Integer.MAX_VALUE;
} else {
maxDoc = (int) aggregatedDfs.maxDoc();
}
}
@Override
public TermStatistics termStatistics(Term term, TermContext context) throws IOException {
TermStatistics termStatistics = aggregatedDfs.termStatistics().get(term);
if (termStatistics == null) {
// we don't have stats for this - this might be a must_not clauses etc. that doesn't allow extract terms on the query
return super.termStatistics(term, context);
}
return termStatistics;
}
@Override
public CollectionStatistics collectionStatistics(String field) throws IOException {
CollectionStatistics collectionStatistics = aggregatedDfs.fieldStatistics().get(field);
if (collectionStatistics == null) {
// we don't have stats for this - this might be a must_not clauses etc. that doesn't allow extract terms on the query
return super.collectionStatistics(field);
}
return collectionStatistics;
}
public int maxDoc() {
return this.maxDoc;
}
@Override
public Document doc(int i) {
throw new UnsupportedOperationException();
}
@Override
public void doc(int docID, StoredFieldVisitor fieldVisitor) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public Explanation explain(Weight weight, int doc) {
throw new UnsupportedOperationException();
}
@Override
protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
throw new UnsupportedOperationException();
}
}

View File

@ -20,15 +20,13 @@
package org.elasticsearch.search.internal; package org.elasticsearch.search.internal;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector; import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation; import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.*;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.search.dfs.CachedDfSource; import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.internal.SearchContext.Lifetime; import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException; import java.io.IOException;
@ -46,21 +44,23 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
private final SearchContext searchContext; private final SearchContext searchContext;
private CachedDfSource dfSource; private AggregatedDfs aggregatedDfs;
public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) { public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
super(searcher.reader()); super(searcher.reader());
in = searcher.searcher(); in = searcher.searcher();
this.searchContext = searchContext; this.searchContext = searchContext;
setSimilarity(searcher.searcher().getSimilarity(true)); setSimilarity(searcher.searcher().getSimilarity(true));
setQueryCache(searchContext.indexShard().indexService().cache().query());
setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy());
} }
@Override @Override
public void close() { public void close() {
} }
public void dfSource(CachedDfSource dfSource) { public void setAggregatedDfs(AggregatedDfs aggregatedDfs) {
this.dfSource = dfSource; this.aggregatedDfs = aggregatedDfs;
} }
@Override @Override
@ -75,10 +75,12 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
@Override @Override
public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
// During tests we prefer to use the wrapped IndexSearcher, because then we use the AssertingIndexSearcher
// it is hacky, because if we perform a dfs search, we don't use the wrapped IndexSearcher...
try { try {
// if scores are needed and we have dfs data then use it // if scores are needed and we have dfs data then use it
if (dfSource != null && needsScores) { if (aggregatedDfs != null && needsScores) {
return dfSource.createNormalizedWeight(query, needsScores); return super.createNormalizedWeight(query, needsScores);
} }
return in.createNormalizedWeight(query, needsScores); return in.createNormalizedWeight(query, needsScores);
} catch (Throwable t) { } catch (Throwable t) {
@ -104,4 +106,32 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
searchContext.clearReleasables(Lifetime.COLLECTION); searchContext.clearReleasables(Lifetime.COLLECTION);
} }
} }
@Override
public TermStatistics termStatistics(Term term, TermContext context) throws IOException {
if (aggregatedDfs == null) {
// we are either executing the dfs phase or the search_type doesn't include the dfs phase.
return super.termStatistics(term, context);
}
TermStatistics termStatistics = aggregatedDfs.termStatistics().get(term);
if (termStatistics == null) {
// we don't have stats for this - this might be a must_not clauses etc. that doesn't allow extract terms on the query
return super.termStatistics(term, context);
}
return termStatistics;
}
@Override
public CollectionStatistics collectionStatistics(String field) throws IOException {
if (aggregatedDfs == null) {
// we are either executing the dfs phase or the search_type doesn't include the dfs phase.
return super.collectionStatistics(field);
}
CollectionStatistics collectionStatistics = aggregatedDfs.fieldStatistics().get(field);
if (collectionStatistics == null) {
// we don't have stats for this - this might be a must_not clauses etc. that doesn't allow extract terms on the query
return super.collectionStatistics(field);
}
return collectionStatistics;
}
} }

View File

@ -49,7 +49,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.dfs.DfsSearchResult;
@ -98,7 +97,7 @@ public class DefaultSearchContext extends SearchContext {
// terminate after count // terminate after count
private int terminateAfter = DEFAULT_TERMINATE_AFTER; private int terminateAfter = DEFAULT_TERMINATE_AFTER;
private List<String> groupStats; private List<String> groupStats;
private Scroll scroll; private ScrollContext scrollContext;
private boolean explain; private boolean explain;
private boolean version = false; // by default, we don't return versions private boolean version = false; // by default, we don't return versions
private List<String> fieldNames; private List<String> fieldNames;
@ -290,13 +289,13 @@ public class DefaultSearchContext extends SearchContext {
} }
@Override @Override
public Scroll scroll() { public ScrollContext scrollContext() {
return this.scroll; return this.scrollContext;
} }
@Override @Override
public SearchContext scroll(Scroll scroll) { public SearchContext scrollContext(ScrollContext scrollContext) {
this.scroll = scroll; this.scrollContext = scrollContext;
return this; return this;
} }
@ -652,16 +651,6 @@ public class DefaultSearchContext extends SearchContext {
this.keepAlive = keepAlive; this.keepAlive = keepAlive;
} }
@Override
public void lastEmittedDoc(ScoreDoc doc) {
this.lastEmittedDoc = doc;
}
@Override
public ScoreDoc lastEmittedDoc() {
return lastEmittedDoc;
}
@Override @Override
public SearchLookup lookup() { public SearchLookup lookup() {
// TODO: The types should take into account the parsing context in QueryParserContext... // TODO: The types should take into account the parsing context in QueryParserContext...

View File

@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
import org.apache.lucene.search.Collector; import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
@ -42,7 +41,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.dfs.DfsSearchResult;
@ -154,13 +152,13 @@ public abstract class FilteredSearchContext extends SearchContext {
} }
@Override @Override
public Scroll scroll() { public ScrollContext scrollContext() {
return in.scroll(); return in.scrollContext();
} }
@Override @Override
public SearchContext scroll(Scroll scroll) { public SearchContext scrollContext(ScrollContext scroll) {
return in.scroll(scroll); return in.scrollContext(scroll);
} }
@Override @Override
@ -483,16 +481,6 @@ public abstract class FilteredSearchContext extends SearchContext {
in.keepAlive(keepAlive); in.keepAlive(keepAlive);
} }
@Override
public void lastEmittedDoc(ScoreDoc doc) {
in.lastEmittedDoc(doc);
}
@Override
public ScoreDoc lastEmittedDoc() {
return in.lastEmittedDoc();
}
@Override @Override
public SearchLookup lookup() { public SearchLookup lookup() {
return in.lookup(); return in.lookup();

View File

@ -17,24 +17,17 @@
* under the License. * under the License.
*/ */
package org.elasticsearch.nodesinfo.plugin.dummy2; package org.elasticsearch.search.internal;
import org.elasticsearch.plugins.AbstractPlugin; import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.search.Scroll;
public class TestNoVersionPlugin extends AbstractPlugin { /** Wrapper around information that needs to stay around when scrolling. */
public class ScrollContext {
static final public class Fields { public int totalHits = -1;
static public final String NAME = "test-no-version-plugin"; public float maxScore;
static public final String DESCRIPTION = NAME + " description"; public ScoreDoc lastEmittedDoc;
} public Scroll scroll;
@Override
public String name() {
return Fields.NAME;
}
@Override
public String description() {
return Fields.DESCRIPTION;
}
} }

View File

@ -24,7 +24,6 @@ import com.google.common.collect.MultimapBuilder;
import org.apache.lucene.search.Collector; import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
@ -159,9 +158,9 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
protected abstract long nowInMillisImpl(); protected abstract long nowInMillisImpl();
public abstract Scroll scroll(); public abstract ScrollContext scrollContext();
public abstract SearchContext scroll(Scroll scroll); public abstract SearchContext scrollContext(ScrollContext scroll);
public abstract SearchContextAggregations aggregations(); public abstract SearchContextAggregations aggregations();
@ -303,10 +302,6 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
public abstract void keepAlive(long keepAlive); public abstract void keepAlive(long keepAlive);
public abstract void lastEmittedDoc(ScoreDoc doc);
public abstract ScoreDoc lastEmittedDoc();
public abstract SearchLookup lookup(); public abstract SearchLookup lookup();
public abstract DfsSearchResult dfsResult(); public abstract DfsSearchResult dfsResult();

View File

@ -21,13 +21,10 @@ package org.elasticsearch.search.internal;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter; import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
@ -101,7 +98,7 @@ public class SubSearchContext extends FilteredSearchContext {
} }
@Override @Override
public SearchContext scroll(Scroll scroll) { public SearchContext scrollContext(ScrollContext scrollContext) {
throw new UnsupportedOperationException("Not supported"); throw new UnsupportedOperationException("Not supported");
} }
@ -304,11 +301,6 @@ public class SubSearchContext extends FilteredSearchContext {
throw new UnsupportedOperationException("Not supported"); throw new UnsupportedOperationException("Not supported");
} }
@Override
public void lastEmittedDoc(ScoreDoc doc) {
throw new UnsupportedOperationException("Not supported");
}
@Override @Override
public QuerySearchResult queryResult() { public QuerySearchResult queryResult() {
return querySearchResult; return querySearchResult;

View File

@ -21,12 +21,16 @@ package org.elasticsearch.search.query;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector; import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopDocsCollector;
@ -43,8 +47,8 @@ import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.aggregations.AggregationPhase; import org.elasticsearch.search.aggregations.AggregationPhase;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import org.elasticsearch.search.rescore.RescorePhase; import org.elasticsearch.search.rescore.RescorePhase;
import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext.ScanCollector; import org.elasticsearch.search.scan.ScanContext.ScanCollector;
@ -52,7 +56,6 @@ import org.elasticsearch.search.sort.SortParseElement;
import org.elasticsearch.search.sort.TrackScoresParseElement; import org.elasticsearch.search.sort.TrackScoresParseElement;
import org.elasticsearch.search.suggest.SuggestPhase; import org.elasticsearch.search.suggest.SuggestPhase;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -115,6 +118,7 @@ public class QueryPhase implements SearchPhase {
searchContext.queryResult().searchTimedOut(false); searchContext.queryResult().searchTimedOut(false);
final SearchType searchType = searchContext.searchType();
boolean rescore = false; boolean rescore = false;
try { try {
searchContext.queryResult().from(searchContext.from()); searchContext.queryResult().from(searchContext.from());
@ -138,7 +142,7 @@ public class QueryPhase implements SearchPhase {
return new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0); return new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
} }
}; };
} else if (searchContext.searchType() == SearchType.SCAN) { } else if (searchType == SearchType.SCAN) {
query = searchContext.scanContext().wrapQuery(query); query = searchContext.scanContext().wrapQuery(query);
final ScanCollector scanCollector = searchContext.scanContext().collector(searchContext); final ScanCollector scanCollector = searchContext.scanContext().collector(searchContext);
collector = scanCollector; collector = scanCollector;
@ -150,11 +154,32 @@ public class QueryPhase implements SearchPhase {
}; };
} else { } else {
// Perhaps have a dedicated scroll phase? // Perhaps have a dedicated scroll phase?
final ScrollContext scrollContext = searchContext.scrollContext();
assert (scrollContext != null) == (searchContext.request().scroll() != null);
final TopDocsCollector<?> topDocsCollector; final TopDocsCollector<?> topDocsCollector;
ScoreDoc lastEmittedDoc; ScoreDoc lastEmittedDoc;
if (searchContext.request().scroll() != null) { if (searchContext.request().scroll() != null) {
numDocs = Math.min(searchContext.size(), totalNumDocs); numDocs = Math.min(searchContext.size(), totalNumDocs);
lastEmittedDoc = searchContext.lastEmittedDoc(); lastEmittedDoc = scrollContext.lastEmittedDoc;
if (Sort.INDEXORDER.equals(searchContext.sort())) {
if (scrollContext.totalHits == -1) {
// first round
assert scrollContext.lastEmittedDoc == null;
// there is not much that we can optimize here since we want to collect all
// documents in order to get the total number of hits
} else {
// now this gets interesting: since we sort in index-order, we can directly
// skip to the desired doc and stop collecting after ${size} matches
if (scrollContext.lastEmittedDoc != null) {
BooleanQuery bq = new BooleanQuery();
bq.add(query, Occur.MUST);
bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), Occur.FILTER);
query = bq;
}
searchContext.terminateAfter(numDocs);
}
}
} else { } else {
lastEmittedDoc = null; lastEmittedDoc = null;
} }
@ -177,7 +202,31 @@ public class QueryPhase implements SearchPhase {
topDocsCallable = new Callable<TopDocs>() { topDocsCallable = new Callable<TopDocs>() {
@Override @Override
public TopDocs call() throws Exception { public TopDocs call() throws Exception {
return topDocsCollector.topDocs(); TopDocs topDocs = topDocsCollector.topDocs();
if (scrollContext != null) {
if (scrollContext.totalHits == -1) {
// first round
scrollContext.totalHits = topDocs.totalHits;
scrollContext.maxScore = topDocs.getMaxScore();
} else {
// subsequent round: the total number of hits and
// the maximum score were computed on the first round
topDocs.totalHits = scrollContext.totalHits;
topDocs.setMaxScore(scrollContext.maxScore);
}
switch (searchType) {
case QUERY_AND_FETCH:
case DFS_QUERY_AND_FETCH:
// for (DFS_)QUERY_AND_FETCH, we already know the last emitted doc
if (topDocs.scoreDocs.length > 0) {
// set the last emitted doc
scrollContext.lastEmittedDoc = topDocs.scoreDocs[topDocs.scoreDocs.length - 1];
}
default:
break;
}
}
return topDocs;
} }
}; };
} }
@ -227,19 +276,7 @@ public class QueryPhase implements SearchPhase {
searchContext.queryResult().terminatedEarly(false); searchContext.queryResult().terminatedEarly(false);
} }
final TopDocs topDocs = topDocsCallable.call(); searchContext.queryResult().topDocs(topDocsCallable.call());
if (searchContext.request().scroll() != null) {
int size = topDocs.scoreDocs.length;
if (size > 0) {
// In the case of *QUERY_AND_FETCH we don't get back to shards telling them which least
// relevant docs got emitted as hit, we can simply mark the last doc as last emitted
if (searchContext.searchType() == SearchType.QUERY_AND_FETCH ||
searchContext.searchType() == SearchType.DFS_QUERY_AND_FETCH) {
searchContext.lastEmittedDoc(topDocs.scoreDocs[size - 1]);
}
}
}
searchContext.queryResult().topDocs(topDocs);
} catch (Throwable e) { } catch (Throwable e) {
throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e); throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e);
} }

View File

@ -20,18 +20,13 @@
package org.elasticsearch.search.scan; package org.elasticsearch.search.scan;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
@ -118,93 +113,4 @@ public class ScanContext {
} }
} }
/**
* A filtering query that matches all doc IDs that are not deleted and
* greater than or equal to the configured doc ID.
*/
// pkg-private for testing
static class MinDocQuery extends Query {
private final int minDoc;
MinDocQuery(int minDoc) {
this.minDoc = minDoc;
}
@Override
public int hashCode() {
return 31 * super.hashCode() + minDoc;
}
@Override
public boolean equals(Object obj) {
if (super.equals(obj) == false) {
return false;
}
MinDocQuery that = (MinDocQuery) obj;
return minDoc == that.minDoc;
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new ConstantScoreWeight(this) {
@Override
public Scorer scorer(LeafReaderContext context, final Bits acceptDocs) throws IOException {
final int maxDoc = context.reader().maxDoc();
if (context.docBase + maxDoc <= minDoc) {
return null;
}
final int segmentMinDoc = Math.max(0, minDoc - context.docBase);
final DocIdSetIterator disi = new DocIdSetIterator() {
int doc = -1;
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}
@Override
public int advance(int target) throws IOException {
assert target > doc;
if (doc == -1) {
// skip directly to minDoc
doc = Math.max(target, segmentMinDoc);
} else {
doc = target;
}
while (doc < maxDoc) {
if (acceptDocs == null || acceptDocs.get(doc)) {
break;
}
doc += 1;
}
if (doc >= maxDoc) {
doc = NO_MORE_DOCS;
}
return doc;
}
@Override
public long cost() {
return maxDoc - minDoc;
}
};
return new ConstantScoreScorer(this, score(), disi);
}
};
}
@Override
public String toString(String field) {
return "MinDocQuery(minDoc=" + minDoc + ")";
}
}
} }

View File

@ -404,6 +404,9 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
} catch (IOException e) { } catch (IOException e) {
throw new BindTransportException("Failed to resolve host [" + bindHost + "]", e); throw new BindTransportException("Failed to resolve host [" + bindHost + "]", e);
} }
if (logger.isDebugEnabled()) {
logger.debug("binding server bootstrap to: {}", hostAddresses);
}
for (InetAddress hostAddress : hostAddresses) { for (InetAddress hostAddress : hostAddresses) {
bindServerBootstrap(name, hostAddress, settings); bindServerBootstrap(name, hostAddress, settings);
} }
@ -497,7 +500,6 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
serverBootstrap.setOption("child.receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory); serverBootstrap.setOption("child.receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
serverBootstrap.setOption("reuseAddress", reuseAddress); serverBootstrap.setOption("reuseAddress", reuseAddress);
serverBootstrap.setOption("child.reuseAddress", reuseAddress); serverBootstrap.setOption("child.reuseAddress", reuseAddress);
serverBootstraps.put(name, serverBootstrap); serverBootstraps.put(name, serverBootstrap);
} }

View File

@ -4,13 +4,13 @@ NAME
SYNOPSIS SYNOPSIS
plugin install <name> plugin install <name or url>
DESCRIPTION DESCRIPTION
This command installs an elasticsearch plugin This command installs an elasticsearch plugin
<name> can be one of the official plugins, or refer to a github repository, or to one of the official plugins The argument can be a <name> of one of the official plugins, or refer to a github repository
The notation of just specifying a plugin name, downloads an officially supported plugin. The notation of just specifying a plugin name, downloads an officially supported plugin.
@ -20,37 +20,41 @@ DESCRIPTION
The notation of 'username/repository' refers to a github repository. The notation of 'username/repository' refers to a github repository.
The argument can be an valid <url> which points to a download or file location for the plugin to be loaded from.
EXAMPLES EXAMPLES
plugin install elasticsearch-analysis-kuromoji plugin install analysis-kuromoji
plugin install elasticsearch/shield/latest plugin install elasticsearch/shield/latest
plugin install lmenezes/elasticsearch-kopf plugin install lmenezes/elasticsearch-kopf
plugin install http://download.elasticsearch.org/elasticsearch/elasticsearch-analysis-kuromoji/elasticsearch-analysis-kuromoji-2.7.0.zip
plugin install file:/path/to/plugin/elasticsearch-analysis-kuromoji-2.7.0.zip
OFFICIAL PLUGINS OFFICIAL PLUGINS
The following plugins are officially supported and can be installed by just referring to their name The following plugins are officially supported and can be installed by just referring to their name
- elasticsearch-analysis-icu - analysis-icu
- elasticsearch-analysis-kuromoji - analysis-kuromoji
- elasticsearch-analysis-phonetic - analysis-phonetic
- elasticsearch-analysis-smartcn - analysis-smartcn
- elasticsearch-analysis-stempel - analysis-stempel
- elasticsearch-cloud-aws - cloud-aws
- elasticsearch-cloud-azure - cloud-azure
- elasticsearch-cloud-gce - cloud-gce
- elasticsearch-delete-by-query - delete-by-query
- elasticsearch-lang-javascript - lang-javascript
- elasticsearch-lang-python - lang-python
- elasticsearch-mapper-murmur3 - mapper-murmur3
- elasticsearch-mapper-size - mapper-size
OPTIONS OPTIONS
-u,--url URL to retrive the plugin from
-t,--timeout Timeout until the plugin download is abort -t,--timeout Timeout until the plugin download is abort
-v,--verbose Verbose output -v,--verbose Verbose output

View File

@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.queries;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.store.Directory;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
public class MinDocQueryTests extends ESTestCase {
public void testBasics() {
MinDocQuery query1 = new MinDocQuery(42);
MinDocQuery query2 = new MinDocQuery(42);
MinDocQuery query3 = new MinDocQuery(43);
QueryUtils.check(query1);
QueryUtils.checkEqual(query1, query2);
QueryUtils.checkUnequal(query1, query3);
}
public void testRandom() throws IOException {
final int numDocs = randomIntBetween(10, 200);
final Document doc = new Document();
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(getRandom(), dir);
for (int i = 0; i < numDocs; ++i) {
w.addDocument(doc);
}
final IndexReader reader = w.getReader();
final IndexSearcher searcher = newSearcher(reader);
for (int i = 0; i <= numDocs; ++i) {
assertEquals(numDocs - i, searcher.count(new MinDocQuery(i)));
}
w.close();
reader.close();
dir.close();
}
}

View File

@ -622,4 +622,20 @@ public class ExceptionSerializationTests extends ESTestCase {
assertEquals(ex.status(), e.status()); assertEquals(ex.status(), e.status());
assertEquals(RestStatus.UNAUTHORIZED, e.status()); assertEquals(RestStatus.UNAUTHORIZED, e.status());
} }
public void testInterruptedException() throws IOException {
InterruptedException orig = randomBoolean() ? new InterruptedException("boom") : new InterruptedException();
InterruptedException ex = serialize(orig);
assertEquals(orig.getMessage(), ex.getMessage());
}
public static class UnknownException extends Exception {
public UnknownException(String message) {
super(message);
}
public UnknownException(String message, Throwable cause) {
super(message, cause);
}
}
} }

View File

@ -89,7 +89,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.action.SearchServiceTransportAction; import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
@ -144,7 +144,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) { protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder() return Settings.settingsBuilder()
.put(super.nodeSettings(nodeOrdinal)) .put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", InterceptingTransportService.Plugin.class.getName()) .extendArray("plugin.types", InterceptingTransportService.TestPlugin.class.getName())
.build(); .build();
} }
@ -844,7 +844,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
public static class InterceptingTransportService extends TransportService { public static class InterceptingTransportService extends TransportService {
public static class Plugin extends AbstractPlugin { public static class TestPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
return "intercepting-transport-service"; return "intercepting-transport-service";

View File

@ -19,10 +19,10 @@
package org.elasticsearch.benchmark.scripts.expression; package org.elasticsearch.benchmark.scripts.expression;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptModule;
public class NativeScriptPlugin extends AbstractPlugin { public class NativeScriptPlugin extends Plugin {
@Override @Override
public String name() { public String name() {

View File

@ -18,11 +18,15 @@
*/ */
package org.elasticsearch.benchmark.scripts.score.plugin; package org.elasticsearch.benchmark.scripts.score.plugin;
import org.elasticsearch.benchmark.scripts.score.script.*; import org.elasticsearch.benchmark.scripts.score.script.NativeConstantForLoopScoreScript;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.benchmark.scripts.score.script.NativeConstantScoreScript;
import org.elasticsearch.benchmark.scripts.score.script.NativeNaiveTFIDFScoreScript;
import org.elasticsearch.benchmark.scripts.score.script.NativePayloadSumNoRecordScoreScript;
import org.elasticsearch.benchmark.scripts.score.script.NativePayloadSumScoreScript;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptModule;
public class NativeScriptExamplesPlugin extends AbstractPlugin { public class NativeScriptExamplesPlugin extends Plugin {
@Override @Override

View File

@ -25,8 +25,6 @@ import com.google.common.collect.Sets;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.recycler.Recycler.V; import org.elasticsearch.common.recycler.Recycler.V;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.NodeModule;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;

View File

@ -35,9 +35,17 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*; import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportModule;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import org.junit.Test; import org.junit.Test;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
@ -58,7 +66,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTests {
TransportClient client = TransportClient.builder().settings(Settings.builder() TransportClient client = TransportClient.builder().settings(Settings.builder()
.put("client.transport.sniff", false) .put("client.transport.sniff", false)
.put("node.name", "transport_client_" + this.getTestName()) .put("node.name", "transport_client_" + this.getTestName())
.put("plugin.types", InternalTransportService.Plugin.class.getName()) .put("plugin.types", InternalTransportService.TestPlugin.class.getName())
.put(headersSettings) .put(headersSettings)
.build()).build(); .build()).build();
@ -73,7 +81,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTests {
.put("cluster.name", "cluster1") .put("cluster.name", "cluster1")
.put("node.name", "transport_client_" + this.getTestName() + "_1") .put("node.name", "transport_client_" + this.getTestName() + "_1")
.put("client.transport.nodes_sampler_interval", "1s") .put("client.transport.nodes_sampler_interval", "1s")
.put("plugin.types", InternalTransportService.Plugin.class.getName()) .put("plugin.types", InternalTransportService.TestPlugin.class.getName())
.put(HEADER_SETTINGS) .put(HEADER_SETTINGS)
.put("path.home", createTempDir().toString()) .put("path.home", createTempDir().toString())
.build()).build(); .build()).build();
@ -96,7 +104,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTests {
public static class InternalTransportService extends TransportService { public static class InternalTransportService extends TransportService {
public static class Plugin extends AbstractPlugin { public static class TestPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
return "mock-transport-service"; return "mock-transport-service";

View File

@ -35,11 +35,14 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.Store;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.*; import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Test; import org.junit.Test;
@ -64,7 +67,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
public class ClusterInfoServiceIT extends ESIntegTestCase { public class ClusterInfoServiceIT extends ESIntegTestCase {
public static class Plugin extends AbstractPlugin { public static class TestPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
@ -143,7 +146,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase {
return Settings.builder() return Settings.builder()
// manual collection or upon cluster forming. // manual collection or upon cluster forming.
.put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, "1s") .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, "1s")
.putArray("plugin.types", Plugin.class.getName(), MockTransportService.Plugin.class.getName()) .putArray("plugin.types", TestPlugin.class.getName(), MockTransportService.TestPlugin.class.getName())
.build(); .build();
} }

View File

@ -20,7 +20,6 @@ package org.elasticsearch.cluster;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@ -36,7 +35,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton; import org.elasticsearch.common.inject.Singleton;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.MockLogAppender;
@ -44,7 +43,12 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.junit.Test; import org.junit.Test;
import java.util.*; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
@ -52,7 +56,11 @@ import java.util.concurrent.atomic.AtomicBoolean;
import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.ESIntegTestCase.Scope; import static org.elasticsearch.test.ESIntegTestCase.Scope;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/** /**
* *
@ -992,7 +1000,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
} }
} }
public static class TestPlugin extends AbstractPlugin { public static class TestPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
@ -1005,7 +1013,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
} }
@Override @Override
public Collection<Class<? extends LifecycleComponent>> services() { public Collection<Class<? extends LifecycleComponent>> nodeServices() {
List<Class<? extends LifecycleComponent>> services = new ArrayList<>(1); List<Class<? extends LifecycleComponent>> services = new ArrayList<>(1);
services.add(MasterAwareService.class); services.add(MasterAwareService.class);
return services; return services;

View File

@ -28,7 +28,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.MockDiskUsagesIT;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
@ -39,7 +39,7 @@ import java.util.concurrent.CountDownLatch;
*/ */
public class MockInternalClusterInfoService extends InternalClusterInfoService { public class MockInternalClusterInfoService extends InternalClusterInfoService {
public static class Plugin extends AbstractPlugin { public static class TestPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
return "mock-cluster-info-service"; return "mock-cluster-info-service";

View File

@ -27,6 +27,7 @@ import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Test; import org.junit.Test;
@ -50,7 +51,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
return Settings.builder() return Settings.builder()
.put(super.nodeSettings(nodeOrdinal)) .put(super.nodeSettings(nodeOrdinal))
// Use the mock internal cluster info service, which has fake-able disk usages // Use the mock internal cluster info service, which has fake-able disk usages
.extendArray("plugin.types", MockInternalClusterInfoService.Plugin.class.getName()) .extendArray("plugin.types", MockInternalClusterInfoService.TestPlugin.class.getName())
// Update more frequently // Update more frequently
.put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "1s") .put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "1s")
.build(); .build();
@ -167,7 +168,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes()); usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes());
paths[0] = path; paths[0] = path;
FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths); FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths);
return new NodeStats(new DiscoveryNode(nodeName, null, Version.V_2_0_0_beta1), return new NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, Version.CURRENT),
System.currentTimeMillis(), System.currentTimeMillis(),
null, null, null, null, null, null, null, null, null, null,
fsInfo, fsInfo,

View File

@ -25,12 +25,13 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.junit.Test; import org.junit.Test;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Lists.newArrayList;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
@ -49,7 +50,7 @@ public class SettingsFilteringIT extends ESIntegTestCase {
.build(); .build();
} }
public static class SettingsFilteringPlugin extends AbstractPlugin { public static class SettingsFilteringPlugin extends Plugin {
/** /**
* The name of the plugin. * The name of the plugin.
*/ */
@ -67,10 +68,8 @@ public class SettingsFilteringIT extends ESIntegTestCase {
} }
@Override @Override
public Collection<Class<? extends Module>> indexModules() { public Collection<Module> indexModules(Settings indexSettings) {
Collection<Class<? extends Module>> modules = newArrayList(); return Collections.<Module>singletonList(new SettingsFilteringModule());
modules.add(SettingsFilteringModule.class);
return modules;
} }
} }

View File

@ -18,7 +18,6 @@
*/ */
package org.elasticsearch.common; package org.elasticsearch.common;
import org.apache.commons.lang3.ArrayUtils;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.Test; import org.junit.Test;
@ -77,7 +76,10 @@ public class ParseFieldTests extends ESTestCase {
String[] deprecated = new String[]{"text", "same_as_text"}; String[] deprecated = new String[]{"text", "same_as_text"};
String[] allValues = values; String[] allValues = values;
if (withDeprecatedNames) { if (withDeprecatedNames) {
allValues = ArrayUtils.addAll(values, deprecated); String[] newArray = new String[allValues.length + deprecated.length];
System.arraycopy(allValues, 0, newArray, 0, allValues.length);
System.arraycopy(deprecated, 0, newArray, allValues.length, deprecated.length);
allValues = newArray;
} }
ParseField field = new ParseField(randomFrom(values)); ParseField field = new ParseField(randomFrom(values));

View File

@ -24,14 +24,12 @@ import com.carrotsearch.randomizedtesting.SeedUtils;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables; import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.util.Collection; import java.util.Collection;

View File

@ -143,7 +143,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
.put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly
.put("http.enabled", false) // just to make test quicker .put("http.enabled", false) // just to make test quicker
.put("gateway.local.list_timeout", "10s") // still long to induce failures but to long so test won't time out .put("gateway.local.list_timeout", "10s") // still long to induce failures but to long so test won't time out
.put("plugin.types", MockTransportService.Plugin.class.getName()) .put("plugin.types", MockTransportService.TestPlugin.class.getName())
.build(); .build();
private void configureCluster(int numberOfNodes, int minimumMasterNode) throws ExecutionException, InterruptedException { private void configureCluster(int numberOfNodes, int minimumMasterNode) throws ExecutionException, InterruptedException {

View File

@ -416,7 +416,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
Path dataPath = createTempDir(); Path dataPath = createTempDir();
Settings nodeSettings = Settings.builder() Settings nodeSettings = Settings.builder()
.put("node.add_id_to_custom_path", false) .put("node.add_id_to_custom_path", false)
.put("plugin.types", MockTransportService.Plugin.class.getName()) .put("plugin.types", MockTransportService.TestPlugin.class.getName())
.put("path.shared_data", dataPath) .put("path.shared_data", dataPath)
.build(); .build();

View File

@ -56,7 +56,7 @@ public class TransportIndexFailuresIT extends ESIntegTestCase {
.put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly .put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly
.put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly .put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly
.put("discovery.zen.minimum_master_nodes", 1) .put("discovery.zen.minimum_master_nodes", 1)
.put("plugin.types", MockTransportService.Plugin.class.getName()) .put("plugin.types", MockTransportService.TestPlugin.class.getName())
.build(); .build();
@Override @Override

View File

@ -83,6 +83,9 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1"); FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1");
assertThat(fieldMapper, instanceOf(DateFieldMapper.class)); assertThat(fieldMapper, instanceOf(DateFieldMapper.class));
DateFieldMapper dateFieldMapper = (DateFieldMapper)fieldMapper;
assertEquals("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", dateFieldMapper.fieldType().dateTimeFormatter().format());
assertEquals(1265587200000L, dateFieldMapper.fieldType().dateTimeFormatter().parser().parseMillis("1265587200000"));
fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field2"); fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field2");
assertThat(fieldMapper, instanceOf(DateFieldMapper.class)); assertThat(fieldMapper, instanceOf(DateFieldMapper.class));

View File

@ -20,33 +20,25 @@
package org.elasticsearch.index.mapper.externalvalues; package org.elasticsearch.index.mapper.externalvalues;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import static com.google.common.collect.Lists.newArrayList; public class ExternalMapperPlugin extends Plugin {
public class ExternalMapperPlugin extends AbstractPlugin {
/**
* The name of the plugin.
*/
@Override @Override
public String name() { public String name() {
return "external-mappers"; return "external-mappers";
} }
/**
* The description of the plugin.
*/
@Override @Override
public String description() { public String description() {
return "External Mappers Plugin"; return "External Mappers Plugin";
} }
@Override @Override
public Collection<Class<? extends Module>> indexModules() { public Collection<Module> indexModules(Settings indexSettings) {
Collection<Class<? extends Module>> modules = newArrayList(); return Collections.<Module>singletonList(new ExternalIndexModule());
modules.add(ExternalIndexModule.class);
return modules;
} }
} }

View File

@ -27,11 +27,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.*; import org.elasticsearch.index.query.*;
import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import java.io.IOException; import java.io.IOException;
public class DummyQueryParserPlugin extends AbstractPlugin { public class DummyQueryParserPlugin extends Plugin {
@Override @Override
public String name() { public String name() {

View File

@ -19,16 +19,16 @@
package org.elasticsearch.index.shard; package org.elasticsearch.index.shard;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.engine.MockEngineFactory; import org.elasticsearch.test.engine.MockEngineFactory;
import org.elasticsearch.test.engine.MockEngineSupportModule; import org.elasticsearch.test.engine.MockEngineSupportModule;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.Collections;
// this must exist in the same package as IndexShardModule to allow access to setting the impl // this must exist in the same package as IndexShardModule to allow access to setting the impl
public class MockEngineFactoryPlugin extends AbstractPlugin { public class MockEngineFactoryPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
return "mock-engine-factory"; return "mock-engine-factory";
@ -38,10 +38,8 @@ public class MockEngineFactoryPlugin extends AbstractPlugin {
return "a mock engine factory for testing"; return "a mock engine factory for testing";
} }
@Override @Override
public Collection<Class<? extends Module>> indexModules() { public Collection<Module> indexModules(Settings indexSettings) {
List<Class<? extends Module>> modules = new ArrayList<>(); return Collections.<Module>singletonList(new MockEngineSupportModule());
modules.add(MockEngineSupportModule.class);
return modules;
} }
public void onModule(IndexShardModule module) { public void onModule(IndexShardModule module) {
module.engineFactoryImpl = MockEngineFactory.class; module.engineFactoryImpl = MockEngineFactory.class;

View File

@ -98,7 +98,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
// we really need local GW here since this also checks for corruption etc. // we really need local GW here since this also checks for corruption etc.
// and we need to make sure primaries are not just trashed if we don't have replicas // and we need to make sure primaries are not just trashed if we don't have replicas
.put(super.nodeSettings(nodeOrdinal)) .put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName()) .extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
// speed up recoveries // speed up recoveries
.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, 10) .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, 10)
.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 10) .put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 10)

View File

@ -66,7 +66,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase {
// we really need local GW here since this also checks for corruption etc. // we really need local GW here since this also checks for corruption etc.
// and we need to make sure primaries are not just trashed if we don't have replicas // and we need to make sure primaries are not just trashed if we don't have replicas
.put(super.nodeSettings(nodeOrdinal)) .put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName()).build(); .extendArray("plugin.types", MockTransportService.TestPlugin.class.getName()).build();
} }
@Test @Test

View File

@ -54,7 +54,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) { protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder() return Settings.builder()
.put(super.nodeSettings(nodeOrdinal)) .put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName()) .extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
.build(); .build();
} }

View File

@ -19,14 +19,14 @@
package org.elasticsearch.indices.analysis; package org.elasticsearch.indices.analysis;
import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.index.analysis.AnalysisModule; import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
public class DummyAnalysisPlugin extends AbstractPlugin { public class DummyAnalysisPlugin extends Plugin {
/** /**
* The name of the plugin. * The name of the plugin.
*/ */
@ -44,8 +44,8 @@ public class DummyAnalysisPlugin extends AbstractPlugin {
} }
@Override @Override
public Collection<Class<? extends Module>> modules() { public Collection<Module> nodeModules() {
return ImmutableList.<Class<? extends Module>>of(DummyIndicesAnalysisModule.class); return Collections.<Module>singletonList(new DummyIndicesAnalysisModule());
} }
public void onModule(AnalysisModule module) { public void onModule(AnalysisModule module) {

View File

@ -35,7 +35,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport; import org.elasticsearch.test.engine.MockEngineSupport;
@ -107,7 +107,7 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
Settings.Builder settings = settingsBuilder() Settings.Builder settings = settingsBuilder()
.put(indexSettings()) .put(indexSettings())
.extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.Plugin.class.getName()) .extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.TestPlugin.class.getName())
.put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate) .put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate)
.put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate) .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate)
.put(MockEngineSupport.WRAP_READER_RATIO, 1.0d); .put(MockEngineSupport.WRAP_READER_RATIO, 1.0d);
@ -202,7 +202,7 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
// TODO: Generalize this class and add it as a utility // TODO: Generalize this class and add it as a utility
public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {
public static class Plugin extends AbstractPlugin { public static class TestPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
return "random-exception-reader-wrapper"; return "random-exception-reader-wrapper";

View File

@ -519,7 +519,7 @@ public class IndexRecoveryIT extends ESIntegTestCase {
final Settings nodeSettings = Settings.builder() final Settings nodeSettings = Settings.builder()
.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, "100ms") .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, "100ms")
.put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, "1s") .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, "1s")
.put("plugin.types", MockTransportService.Plugin.class.getName()) .put("plugin.types", MockTransportService.TestPlugin.class.getName())
.put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) // restarted recoveries will delete temp files and write them again .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) // restarted recoveries will delete temp files and write them again
.build(); .build();
// start a master node // start a master node

View File

@ -87,7 +87,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
// which is between 1 and 2 sec can cause each of the shard deletion requests to timeout. // which is between 1 and 2 sec can cause each of the shard deletion requests to timeout.
// to prevent this we are setting the timeout here to something highish ie. the default in practice // to prevent this we are setting the timeout here to something highish ie. the default in practice
.put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS)) .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName()) .extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
.build(); .build();
} }

View File

@ -27,7 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.ESIntegTestCase.Scope;
@ -80,7 +80,7 @@ public class IndexTemplateFilteringIT extends ESIntegTestCase {
} }
} }
public static class TestPlugin extends AbstractPlugin { public static class TestPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
return "test-plugin"; return "test-plugin";

View File

@ -20,9 +20,9 @@ package org.elasticsearch.node;
import org.elasticsearch.cache.recycler.MockPageCacheRecycler; import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
public class NodeMocksPlugin extends AbstractPlugin { public class NodeMocksPlugin extends Plugin {
@Override @Override
public String name() { public String name() {

View File

@ -19,9 +19,9 @@
package org.elasticsearch.nodesinfo.plugin.dummy1; package org.elasticsearch.nodesinfo.plugin.dummy1;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
public class TestPlugin extends AbstractPlugin { public class TestPlugin extends Plugin {
static final public class Fields { static final public class Fields {
static public final String NAME = "test-plugin"; static public final String NAME = "test-plugin";

View File

@ -73,7 +73,7 @@ public class PluggableTransportModuleIT extends ESIntegTestCase {
assertThat("Expected send request counter to be greather than zero", countAfterRequest, is(greaterThan(countBeforeRequest))); assertThat("Expected send request counter to be greather than zero", countAfterRequest, is(greaterThan(countBeforeRequest)));
} }
public static class CountingSentRequestsPlugin extends AbstractPlugin { public static class CountingSentRequestsPlugin extends Plugin {
@Override @Override
public String name() { public String name() {
return "counting-pipelines-plugin"; return "counting-pipelines-plugin";

View File

@ -21,6 +21,7 @@ package org.elasticsearch.plugins;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo; import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -53,13 +54,14 @@ public class PluginInfoTests extends ESTestCase {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "my_plugin",
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
"jvm", "true", "jvm", "true",
"classname", "FakePlugin"); "classname", "FakePlugin");
PluginInfo info = PluginInfo.readFromProperties(pluginDir); PluginInfo info = PluginInfo.readFromProperties(pluginDir);
assertEquals("fake-plugin", info.getName()); assertEquals("my_plugin", info.getName());
assertEquals("fake desc", info.getDescription()); assertEquals("fake desc", info.getDescription());
assertEquals("1.0", info.getVersion()); assertEquals("1.0", info.getVersion());
assertEquals("FakePlugin", info.getClassname()); assertEquals("FakePlugin", info.getClassname());
@ -69,9 +71,28 @@ public class PluginInfoTests extends ESTestCase {
assertNull(info.getUrl()); assertNull(info.getUrl());
} }
public void testReadFromPropertiesDescriptionMissing() throws Exception { public void testReadFromPropertiesNameMissing() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir); writeProperties(pluginDir);
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected missing name exception");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Property [name] is missing in"));
}
writeProperties(pluginDir, "name", "");
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected missing name exception");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("Property [name] is missing in"));
}
}
public void testReadFromPropertiesDescriptionMissing() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, "name", "fake-plugin");
try { try {
PluginInfo.readFromProperties(pluginDir); PluginInfo.readFromProperties(pluginDir);
fail("expected missing description exception"); fail("expected missing description exception");
@ -82,7 +103,7 @@ public class PluginInfoTests extends ESTestCase {
public void testReadFromPropertiesVersionMissing() throws Exception { public void testReadFromPropertiesVersionMissing() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, "description", "fake desc"); writeProperties(pluginDir, "description", "fake desc", "name", "fake-plugin");
try { try {
PluginInfo.readFromProperties(pluginDir); PluginInfo.readFromProperties(pluginDir);
fail("expected missing version exception"); fail("expected missing version exception");
@ -95,7 +116,8 @@ public class PluginInfoTests extends ESTestCase {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"version", "1.0"); "version", "1.0",
"name", "my_plugin");
try { try {
PluginInfo.readFromProperties(pluginDir); PluginInfo.readFromProperties(pluginDir);
fail("expected jvm or site exception"); fail("expected jvm or site exception");
@ -108,6 +130,7 @@ public class PluginInfoTests extends ESTestCase {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "my_plugin",
"version", "1.0", "version", "1.0",
"jvm", "true"); "jvm", "true");
try { try {
@ -122,6 +145,7 @@ public class PluginInfoTests extends ESTestCase {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "my_plugin",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"version", "1.0", "version", "1.0",
"jvm", "true"); "jvm", "true");
@ -134,9 +158,11 @@ public class PluginInfoTests extends ESTestCase {
} }
public void testReadFromPropertiesJavaVersionIncompatible() throws Exception { public void testReadFromPropertiesJavaVersionIncompatible() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin"); String pluginName = "fake-plugin";
Path pluginDir = createTempDir().resolve(pluginName);
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", pluginName,
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", "1000000.0", "java.version", "1000000.0",
"classname", "FakePlugin", "classname", "FakePlugin",
@ -146,7 +172,7 @@ public class PluginInfoTests extends ESTestCase {
PluginInfo.readFromProperties(pluginDir); PluginInfo.readFromProperties(pluginDir);
fail("expected incompatible java version exception"); fail("expected incompatible java version exception");
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
assertTrue(e.getMessage(), e.getMessage().contains("fake-plugin requires Java")); assertTrue(e.getMessage(), e.getMessage().contains(pluginName + " requires Java"));
} }
} }
@ -156,6 +182,7 @@ public class PluginInfoTests extends ESTestCase {
"description", "fake desc", "description", "fake desc",
"version", "1.0", "version", "1.0",
"jvm", "true", "jvm", "true",
"name", "my_plugin",
"elasticsearch.version", "bogus"); "elasticsearch.version", "bogus");
try { try {
PluginInfo.readFromProperties(pluginDir); PluginInfo.readFromProperties(pluginDir);
@ -169,6 +196,7 @@ public class PluginInfoTests extends ESTestCase {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "my_plugin",
"version", "1.0", "version", "1.0",
"jvm", "true", "jvm", "true",
"elasticsearch.version", Version.V_1_7_0.toString()); "elasticsearch.version", Version.V_1_7_0.toString());
@ -184,6 +212,7 @@ public class PluginInfoTests extends ESTestCase {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "my_plugin",
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
@ -201,6 +230,7 @@ public class PluginInfoTests extends ESTestCase {
Files.createDirectories(pluginDir.resolve("_site")); Files.createDirectories(pluginDir.resolve("_site"));
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "my_plugin",
"version", "1.0", "version", "1.0",
"site", "true"); "site", "true");
PluginInfo info = PluginInfo.readFromProperties(pluginDir); PluginInfo info = PluginInfo.readFromProperties(pluginDir);
@ -208,11 +238,12 @@ public class PluginInfoTests extends ESTestCase {
assertFalse(info.isJvm()); assertFalse(info.isJvm());
assertEquals("NA", info.getClassname()); assertEquals("NA", info.getClassname());
} }
public void testReadFromPropertiesSitePluginWithoutSite() throws Exception { public void testReadFromPropertiesSitePluginWithoutSite() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
writeProperties(pluginDir, writeProperties(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "my_plugin",
"version", "1.0", "version", "1.0",
"site", "true"); "site", "true");
try { try {

View File

@ -52,9 +52,7 @@ import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext; import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory; import javax.net.ssl.SSLSocketFactory;
import java.io.BufferedWriter; import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult; import java.nio.file.FileVisitResult;
@ -177,11 +175,13 @@ public class PluginManagerIT extends ESIntegTestCase {
Path pluginDir = createTempDir().resolve("fake-plugin"); Path pluginDir = createTempDir().resolve("fake-plugin");
String pluginUrl = createPlugin(pluginDir, String pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "fake-plugin",
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"),
"jvm", "true", "jvm", "true",
"classname", "FakePlugin"); "classname", "FakePlugin");
assertStatus("install --url " + pluginUrl, USAGE); assertStatus("install", USAGE);
} }
@Test @Test
@ -196,6 +196,7 @@ public class PluginManagerIT extends ESIntegTestCase {
String pluginUrl = createPlugin(pluginDir, String pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", pluginName,
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
@ -207,7 +208,7 @@ public class PluginManagerIT extends ESIntegTestCase {
Path pluginBinDir = binDir.resolve(pluginName); Path pluginBinDir = binDir.resolve(pluginName);
Path pluginConfigDir = env.configFile().resolve(pluginName); Path pluginConfigDir = env.configFile().resolve(pluginName);
assertStatusOk("install " + pluginName + " --url " + pluginUrl + " --verbose"); assertStatusOk("install " + pluginUrl + " --verbose");
terminal.getTerminalOutput().clear(); terminal.getTerminalOutput().clear();
assertStatusOk("list"); assertStatusOk("list");
@ -241,6 +242,7 @@ public class PluginManagerIT extends ESIntegTestCase {
String pluginUrl = createPlugin(pluginDir, String pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", pluginName,
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
@ -250,7 +252,7 @@ public class PluginManagerIT extends ESIntegTestCase {
Environment env = initialSettings.v2(); Environment env = initialSettings.v2();
Path pluginConfigDir = env.configFile().resolve(pluginName); Path pluginConfigDir = env.configFile().resolve(pluginName);
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl)); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
/* /*
First time, our plugin contains: First time, our plugin contains:
@ -277,13 +279,14 @@ public class PluginManagerIT extends ESIntegTestCase {
Files.write(pluginDir.resolve("config").resolve("dir").resolve("subdir").resolve("testsubdir.txt"), "version1".getBytes(StandardCharsets.UTF_8)); Files.write(pluginDir.resolve("config").resolve("dir").resolve("subdir").resolve("testsubdir.txt"), "version1".getBytes(StandardCharsets.UTF_8));
pluginUrl = createPlugin(pluginDir, pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", pluginName,
"version", "2.0", "version", "2.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
"jvm", "true", "jvm", "true",
"classname", "FakePlugin"); "classname", "FakePlugin");
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl)); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
assertFileContent(pluginConfigDir, "test.txt", "version1"); assertFileContent(pluginConfigDir, "test.txt", "version1");
assertFileContent(pluginConfigDir, "test.txt.new", "version2"); assertFileContent(pluginConfigDir, "test.txt.new", "version2");
@ -313,13 +316,14 @@ public class PluginManagerIT extends ESIntegTestCase {
Files.write(pluginDir.resolve("config").resolve("dir").resolve("subdir").resolve("testsubdir.txt"), "version2".getBytes(StandardCharsets.UTF_8)); Files.write(pluginDir.resolve("config").resolve("dir").resolve("subdir").resolve("testsubdir.txt"), "version2".getBytes(StandardCharsets.UTF_8));
pluginUrl = createPlugin(pluginDir, pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", pluginName,
"version", "3.0", "version", "3.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
"jvm", "true", "jvm", "true",
"classname", "FakePlugin"); "classname", "FakePlugin");
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl)); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
assertFileContent(pluginConfigDir, "test.txt", "version1"); assertFileContent(pluginConfigDir, "test.txt", "version1");
assertFileContent(pluginConfigDir, "test2.txt", "version1"); assertFileContent(pluginConfigDir, "test2.txt", "version1");
@ -341,6 +345,7 @@ public class PluginManagerIT extends ESIntegTestCase {
Files.createFile(pluginDir.resolve("bin").resolve("tool"));; Files.createFile(pluginDir.resolve("bin").resolve("tool"));;
String pluginUrl = createPlugin(pluginDir, String pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", "fake-plugin",
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
@ -351,7 +356,7 @@ public class PluginManagerIT extends ESIntegTestCase {
Path binDir = env.binFile(); Path binDir = env.binFile();
Path pluginBinDir = binDir.resolve(pluginName); Path pluginBinDir = binDir.resolve(pluginName);
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl)); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
assertThatPluginIsListed(pluginName); assertThatPluginIsListed(pluginName);
assertDirectoryExists(pluginBinDir); assertDirectoryExists(pluginBinDir);
} }
@ -375,12 +380,13 @@ public class PluginManagerIT extends ESIntegTestCase {
Path pluginDir = createTempDir().resolve(pluginName); Path pluginDir = createTempDir().resolve(pluginName);
String pluginUrl = createPlugin(pluginDir, String pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", pluginName,
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
"jvm", "true", "jvm", "true",
"classname", "FakePlugin"); "classname", "FakePlugin");
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl)); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
assertThatPluginIsListed(pluginName); assertThatPluginIsListed(pluginName);
} }
@ -391,10 +397,11 @@ public class PluginManagerIT extends ESIntegTestCase {
Files.createDirectories(pluginDir.resolve("_site")); Files.createDirectories(pluginDir.resolve("_site"));
Files.createFile(pluginDir.resolve("_site").resolve("somefile")); Files.createFile(pluginDir.resolve("_site").resolve("somefile"));
String pluginUrl = createPlugin(pluginDir, String pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"version", "1.0", "name", pluginName,
"site", "true"); "version", "1.0",
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl)); "site", "true");
assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
assertThatPluginIsListed(pluginName); assertThatPluginIsListed(pluginName);
// We want to check that Plugin Manager moves content to _site // We want to check that Plugin Manager moves content to _site
assertFileExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site")); assertFileExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site"));
@ -410,7 +417,7 @@ public class PluginManagerIT extends ESIntegTestCase {
"description", "fake desc", "description", "fake desc",
"version", "1.0", "version", "1.0",
"site", "true"); "site", "true");
assertStatus(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl), assertStatus(String.format(Locale.ROOT, "install %s --verbose", pluginUrl),
ExitStatus.IO_ERROR); ExitStatus.IO_ERROR);
assertThatPluginIsNotListed(pluginName); assertThatPluginIsNotListed(pluginName);
assertFileNotExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site")); assertFileNotExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site"));
@ -421,7 +428,7 @@ public class PluginManagerIT extends ESIntegTestCase {
if (pluginCoordinates == null) { if (pluginCoordinates == null) {
assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginDescriptor)); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginDescriptor));
} else { } else {
assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginDescriptor, pluginCoordinates)); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginCoordinates));
} }
assertThatPluginIsListed(pluginName); assertThatPluginIsListed(pluginName);
@ -498,6 +505,7 @@ public class PluginManagerIT extends ESIntegTestCase {
Path pluginDir = createTempDir().resolve(pluginName); Path pluginDir = createTempDir().resolve(pluginName);
String pluginUrl = createPlugin(pluginDir, String pluginUrl = createPlugin(pluginDir,
"description", "fake desc", "description", "fake desc",
"name", pluginName,
"version", "1.0.0", "version", "1.0.0",
"elasticsearch.version", Version.CURRENT.toString(), "elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"), "java.version", System.getProperty("java.specification.version"),
@ -539,18 +547,19 @@ public class PluginManagerIT extends ESIntegTestCase {
@Test @Test
public void testOfficialPluginName_ThrowsException() throws IOException { public void testOfficialPluginName_ThrowsException() throws IOException {
PluginManager.checkForOfficialPlugins("elasticsearch-analysis-icu"); PluginManager.checkForOfficialPlugins("analysis-icu");
PluginManager.checkForOfficialPlugins("elasticsearch-analysis-kuromoji"); PluginManager.checkForOfficialPlugins("analysis-kuromoji");
PluginManager.checkForOfficialPlugins("elasticsearch-analysis-phonetic"); PluginManager.checkForOfficialPlugins("analysis-phonetic");
PluginManager.checkForOfficialPlugins("elasticsearch-analysis-smartcn"); PluginManager.checkForOfficialPlugins("analysis-smartcn");
PluginManager.checkForOfficialPlugins("elasticsearch-analysis-stempel"); PluginManager.checkForOfficialPlugins("analysis-stempel");
PluginManager.checkForOfficialPlugins("elasticsearch-cloud-aws"); PluginManager.checkForOfficialPlugins("cloud-aws");
PluginManager.checkForOfficialPlugins("elasticsearch-cloud-azure"); PluginManager.checkForOfficialPlugins("cloud-azure");
PluginManager.checkForOfficialPlugins("elasticsearch-cloud-gce"); PluginManager.checkForOfficialPlugins("cloud-gce");
PluginManager.checkForOfficialPlugins("elasticsearch-delete-by-query"); PluginManager.checkForOfficialPlugins("delete-by-query");
PluginManager.checkForOfficialPlugins("elasticsearch-lang-javascript"); PluginManager.checkForOfficialPlugins("lang-javascript");
PluginManager.checkForOfficialPlugins("elasticsearch-lang-python"); PluginManager.checkForOfficialPlugins("lang-python");
PluginManager.checkForOfficialPlugins("elasticsearch-mapper-murmur3"); PluginManager.checkForOfficialPlugins("mapper-murmur3");
PluginManager.checkForOfficialPlugins("mapper-size");
try { try {
PluginManager.checkForOfficialPlugins("elasticsearch-mapper-attachment"); PluginManager.checkForOfficialPlugins("elasticsearch-mapper-attachment");
@ -562,7 +571,7 @@ public class PluginManagerIT extends ESIntegTestCase {
@Test @Test
public void testThatBasicAuthIsRejectedOnHttp() throws Exception { public void testThatBasicAuthIsRejectedOnHttp() throws Exception {
assertStatus(String.format(Locale.ROOT, "install foo --url http://user:pass@localhost:12345/foo.zip --verbose"), CliTool.ExitStatus.IO_ERROR); assertStatus(String.format(Locale.ROOT, "install http://user:pass@localhost:12345/foo.zip --verbose"), CliTool.ExitStatus.IO_ERROR);
assertThat(terminal.getTerminalOutput(), hasItem(containsString("Basic auth is only supported for HTTPS!"))); assertThat(terminal.getTerminalOutput(), hasItem(containsString("Basic auth is only supported for HTTPS!")));
} }
@ -599,7 +608,7 @@ public class PluginManagerIT extends ESIntegTestCase {
Channel channel = serverBootstrap.bind(new InetSocketAddress("localhost", 0)); Channel channel = serverBootstrap.bind(new InetSocketAddress("localhost", 0));
int port = ((InetSocketAddress) channel.getLocalAddress()).getPort(); int port = ((InetSocketAddress) channel.getLocalAddress()).getPort();
// IO_ERROR because there is no real file delivered... // IO_ERROR because there is no real file delivered...
assertStatus(String.format(Locale.ROOT, "install foo --url https://user:pass@localhost:%s/foo.zip --verbose --timeout 1s", port), ExitStatus.IO_ERROR); assertStatus(String.format(Locale.ROOT, "install https://user:pass@localhost:%s/foo.zip --verbose --timeout 1s", port), ExitStatus.IO_ERROR);
// ensure that we did not try any other data source like download.elastic.co, in case we specified our own local URL // ensure that we did not try any other data source like download.elastic.co, in case we specified our own local URL
assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("download.elastic.co")))); assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("download.elastic.co"))));

View File

@ -62,7 +62,7 @@ public class PluginManagerUnitTests extends ESTestCase {
.build(); .build();
Environment environment = new Environment(settings); Environment environment = new Environment(settings);
PluginManager.PluginHandle pluginHandle = new PluginManager.PluginHandle(pluginName, "version", "user", "repo"); PluginManager.PluginHandle pluginHandle = new PluginManager.PluginHandle(pluginName, "version", "user");
String configDirPath = Files.simplifyPath(pluginHandle.configDir(environment).normalize().toString()); String configDirPath = Files.simplifyPath(pluginHandle.configDir(environment).normalize().toString());
String expectedDirPath = Files.simplifyPath(genericConfigFolder.resolve(pluginName).normalize().toString()); String expectedDirPath = Files.simplifyPath(genericConfigFolder.resolve(pluginName).normalize().toString());
@ -82,12 +82,12 @@ public class PluginManagerUnitTests extends ESTestCase {
Iterator<URL> iterator = handle.urls().iterator(); Iterator<URL> iterator = handle.urls().iterator();
if (supportStagingUrls) { if (supportStagingUrls) {
String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/elasticsearch-%s-%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
Version.CURRENT.number(), Build.CURRENT.hashShort(), pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number()); Version.CURRENT.number(), Build.CURRENT.hashShort(), pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number());
assertThat(iterator.next().toExternalForm(), is(expectedStagingURL)); assertThat(iterator.next().toExternalForm(), is(expectedStagingURL));
} }
URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-" + pluginName + "/" + Version.CURRENT.number() + "/elasticsearch-" + URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/" + pluginName + "/" + Version.CURRENT.number() + "/" +
pluginName + "-" + Version.CURRENT.number() + ".zip"); pluginName + "-" + Version.CURRENT.number() + ".zip");
assertThat(iterator.next().toExternalForm(), is(expected.toExternalForm())); assertThat(iterator.next().toExternalForm(), is(expected.toExternalForm()));
@ -95,10 +95,10 @@ public class PluginManagerUnitTests extends ESTestCase {
} }
@Test @Test
public void testTrimmingElasticsearchFromOfficialPluginName() throws IOException { public void testOfficialPluginName() throws IOException {
String randomPluginName = randomFrom(PluginManager.OFFICIAL_PLUGINS.asList()).replaceFirst("elasticsearch-", ""); String randomPluginName = randomFrom(PluginManager.OFFICIAL_PLUGINS.asList());
PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(randomPluginName); PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(randomPluginName);
assertThat(handle.name, is(randomPluginName.replaceAll("^elasticsearch-", ""))); assertThat(handle.name, is(randomPluginName));
boolean supportStagingUrls = randomBoolean(); boolean supportStagingUrls = randomBoolean();
if (supportStagingUrls) { if (supportStagingUrls) {
@ -108,12 +108,12 @@ public class PluginManagerUnitTests extends ESTestCase {
Iterator<URL> iterator = handle.urls().iterator(); Iterator<URL> iterator = handle.urls().iterator();
if (supportStagingUrls) { if (supportStagingUrls) {
String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/elasticsearch-%s-%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
Version.CURRENT.number(), Build.CURRENT.hashShort(), randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number()); Version.CURRENT.number(), Build.CURRENT.hashShort(), randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
assertThat(iterator.next().toExternalForm(), is(expectedStagingUrl)); assertThat(iterator.next().toExternalForm(), is(expectedStagingUrl));
} }
String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number()); randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
assertThat(iterator.next().toExternalForm(), is(releaseUrl)); assertThat(iterator.next().toExternalForm(), is(releaseUrl));
@ -121,12 +121,11 @@ public class PluginManagerUnitTests extends ESTestCase {
} }
@Test @Test
public void testTrimmingElasticsearchFromGithubPluginName() throws IOException { public void testGithubPluginName() throws IOException {
String user = randomAsciiOfLength(6); String user = randomAsciiOfLength(6);
String randomName = randomAsciiOfLength(10); String pluginName = randomAsciiOfLength(10);
String pluginName = randomFrom("elasticsearch-", "es-") + randomName;
PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(user + "/" + pluginName); PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(user + "/" + pluginName);
assertThat(handle.name, is(randomName)); assertThat(handle.name, is(pluginName));
assertThat(handle.urls(), hasSize(1)); assertThat(handle.urls(), hasSize(1));
assertThat(handle.urls().get(0).toExternalForm(), is(new URL("https", "github.com", "/" + user + "/" + pluginName + "/" + "archive/master.zip").toExternalForm())); assertThat(handle.urls().get(0).toExternalForm(), is(new URL("https", "github.com", "/" + user + "/" + pluginName + "/" + "archive/master.zip").toExternalForm()));
} }

View File

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.store.IndexStoreModule;
import org.elasticsearch.test.ESTestCase;
public class PluginsServiceTests extends ESTestCase {

    /**
     * Plugin whose {@link #additionalSettings()} contributes {@code foo.bar=1}
     * plus a default store type.
     */
    public static class AdditionalSettingsPlugin1 extends Plugin {
        @Override
        public String name() {
            return "additional-settings1";
        }

        @Override
        public String description() {
            return "adds additional setting 'foo.bar'";
        }

        @Override
        public Settings additionalSettings() {
            return Settings.builder()
                    .put("foo.bar", "1")
                    .put(IndexStoreModule.STORE_TYPE, IndexStoreModule.Type.MMAPFS.getSettingsKey())
                    .build();
        }
    }

    /**
     * Second plugin that also contributes {@code foo.bar}; loading it together
     * with the first one provokes a settings clash.
     */
    public static class AdditionalSettingsPlugin2 extends Plugin {
        @Override
        public String name() {
            return "additional-settings2";
        }

        @Override
        public String description() {
            return "adds additional setting 'foo.bar'";
        }

        @Override
        public Settings additionalSettings() {
            return Settings.builder().put("foo.bar", "2").build();
        }
    }

    /** Plugin-contributed settings are merged in without overriding node settings. */
    public void testAdditionalSettings() {
        final Settings nodeSettings = Settings.builder()
                .put("path.home", createTempDir())
                .put("my.setting", "test")
                .put(IndexStoreModule.STORE_TYPE, IndexStoreModule.Type.SIMPLEFS.getSettingsKey())
                .putArray("plugin.types", AdditionalSettingsPlugin1.class.getName())
                .build();
        final PluginsService pluginsService = new PluginsService(nodeSettings, new Environment(nodeSettings));
        final Settings merged = pluginsService.updatedSettings();
        assertEquals("test", merged.get("my.setting")); // previous settings still exist
        assertEquals("1", merged.get("foo.bar")); // added setting exists
        // does not override pre existing settings
        assertEquals(IndexStoreModule.Type.SIMPLEFS.getSettingsKey(), merged.get(IndexStoreModule.STORE_TYPE));
    }

    /** Two plugins supplying the same additional setting must be rejected with a clear message. */
    public void testAdditionalSettingsClash() {
        final Settings nodeSettings = Settings.builder()
                .put("path.home", createTempDir())
                .putArray("plugin.types",
                        AdditionalSettingsPlugin1.class.getName(),
                        AdditionalSettingsPlugin2.class.getName())
                .build();
        final PluginsService pluginsService = new PluginsService(nodeSettings, new Environment(nodeSettings));
        try {
            pluginsService.updatedSettings();
            fail("Expected exception when building updated settings");
        } catch (IllegalArgumentException e) {
            final String msg = e.getMessage();
            assertTrue(msg, msg.contains("Cannot have additional setting [foo.bar]"));
            assertTrue(msg, msg.contains("plugin [additional-settings1]"));
            assertTrue(msg, msg.contains("plugin [additional-settings2]"));
        }
    }
}

View File

@ -19,9 +19,9 @@
package org.elasticsearch.plugins.loading.classpath; package org.elasticsearch.plugins.loading.classpath;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
public class InClassPathPlugin extends AbstractPlugin { public class InClassPathPlugin extends Plugin {
@Override @Override
public String name() { public String name() {

View File

@ -19,10 +19,10 @@
package org.elasticsearch.plugins.responseheader; package org.elasticsearch.plugins.responseheader;
import org.elasticsearch.plugins.AbstractPlugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestModule; import org.elasticsearch.rest.RestModule;
public class TestResponseHeaderPlugin extends AbstractPlugin { public class TestResponseHeaderPlugin extends Plugin {
@Override @Override
public String name() { public String name() {

View File

@ -102,7 +102,7 @@ public class RelocationIT extends ESIntegTestCase {
@Override @Override
protected Settings nodeSettings(int nodeOrdinal) { protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder() return Settings.builder()
.put("plugin.types", MockTransportService.Plugin.class.getName()).build(); .put("plugin.types", MockTransportService.TestPlugin.class.getName()).build();
} }

View File

@ -58,7 +58,7 @@ public class TruncatedRecoveryIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) { protected Settings nodeSettings(int nodeOrdinal) {
Settings.Builder builder = Settings.builder() Settings.Builder builder = Settings.builder()
.put(super.nodeSettings(nodeOrdinal)) .put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName()) .extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
.put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, new ByteSizeValue(randomIntBetween(50, 300), ByteSizeUnit.BYTES)); .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, new ByteSizeValue(randomIntBetween(50, 300), ByteSizeUnit.BYTES));
return builder.build(); return builder.build();
} }

Some files were not shown because too many files have changed in this diff Show More