Merge branch 'master' into feature/query-refactoring

core/src/test/java/org/elasticsearch/plugins/PluginManagerIT.java
core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java

commit 345a30a2a7

@@ -5,15 +5,14 @@
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.elasticsearch</groupId>
        <artifactId>elasticsearch-parent</artifactId>
        <artifactId>parent</artifactId>
        <version>2.1.0-SNAPSHOT</version>
    </parent>

    <groupId>org.elasticsearch</groupId>
    <artifactId>elasticsearch</artifactId>

    <packaging>jar</packaging>
    <name>Elasticsearch Core</name>
    <name>Elasticsearch: Core</name>
    <description>Elasticsearch - Open Source, Distributed, RESTful Search Engine</description>

    <dependencies>
@@ -162,10 +161,6 @@
            <groupId>org.hdrhistogram</groupId>
            <artifactId>HdrHistogram</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
        </dependency>
        <dependency>
            <groupId>commons-cli</groupId>
            <artifactId>commons-cli</artifactId>

@@ -23,7 +23,6 @@
                <include>com.ning:compress-lzf</include>
                <include>com.github.spullara.mustache.java:compiler</include>
                <include>com.tdunning:t-digest</include>
                <include>org.apache.commons:commons-lang3</include>
                <include>commons-cli:commons-cli</include>
                <include>com.twitter:jsr166e</include>
                <include>org.hdrhistogram:HdrHistogram</include>

@@ -0,0 +1,119 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.lucene.queries;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;

import java.io.IOException;

/** A {@link Query} that only matches documents that are greater than or equal
 * to a configured doc ID. */
public final class MinDocQuery extends Query {

    private final int minDoc;

    /** Sole constructor. */
    public MinDocQuery(int minDoc) {
        this.minDoc = minDoc;
    }

    @Override
    public int hashCode() {
        return 31 * super.hashCode() + minDoc;
    }

    @Override
    public boolean equals(Object obj) {
        if (super.equals(obj) == false) {
            return false;
        }
        MinDocQuery that = (MinDocQuery) obj;
        return minDoc == that.minDoc;
    }

    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
        return new ConstantScoreWeight(this) {
            @Override
            public Scorer scorer(LeafReaderContext context, final Bits acceptDocs) throws IOException {
                final int maxDoc = context.reader().maxDoc();
                if (context.docBase + maxDoc <= minDoc) {
                    return null;
                }
                final int segmentMinDoc = Math.max(0, minDoc - context.docBase);
                final DocIdSetIterator disi = new DocIdSetIterator() {

                    int doc = -1;

                    @Override
                    public int docID() {
                        return doc;
                    }

                    @Override
                    public int nextDoc() throws IOException {
                        return advance(doc + 1);
                    }

                    @Override
                    public int advance(int target) throws IOException {
                        assert target > doc;
                        if (doc == -1) {
                            // skip directly to minDoc
                            doc = Math.max(target, segmentMinDoc);
                        } else {
                            doc = target;
                        }
                        while (doc < maxDoc) {
                            if (acceptDocs == null || acceptDocs.get(doc)) {
                                break;
                            }
                            doc += 1;
                        }
                        if (doc >= maxDoc) {
                            doc = NO_MORE_DOCS;
                        }
                        return doc;
                    }

                    @Override
                    public long cost() {
                        return maxDoc - segmentMinDoc;
                    }

                };
                return new ConstantScoreScorer(this, score(), disi);
            }
        };
    }

    @Override
    public String toString(String field) {
        return "MinDocQuery(minDoc=" + minDoc + ")";
    }
}

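For readers tracing the new query: a minimal, dependency-free sketch of the advance() logic above. The boolean array stands in for Lucene's Bits live-docs, and every name here is illustrative, not Lucene API.

public class MinDocIterationSketch {
    static final int NO_MORE_DOCS = Integer.MAX_VALUE;

    public static void main(String[] args) {
        boolean[] acceptDocs = {true, false, true, true, false, true};
        int maxDoc = acceptDocs.length;
        int segmentMinDoc = 2; // == max(0, minDoc - docBase)

        int doc = -1;
        while (true) {
            int target = doc + 1;
            // the first call jumps straight to segmentMinDoc, like advance()
            doc = (doc == -1) ? Math.max(target, segmentMinDoc) : target;
            while (doc < maxDoc && !acceptDocs[doc]) {
                doc++; // skip non-accepted (e.g. deleted) documents
            }
            if (doc >= maxDoc) {
                doc = NO_MORE_DOCS;
                break;
            }
            System.out.println("matched doc " + doc); // prints 2, 3, 5
        }
    }
}
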
@@ -277,11 +277,19 @@ public class Bootstrap {
                closeSysError();
            }
        } catch (Throwable e) {
            // disable console logging, so user does not see the exception twice (jvm will show it already)
            if (foreground) {
                Loggers.disableConsoleLogging();
            }
            ESLogger logger = Loggers.getLogger(Bootstrap.class);
            if (INSTANCE.node != null) {
                logger = Loggers.getLogger(Bootstrap.class, INSTANCE.node.settings().get("name"));
            }
            logger.error("Exception", e);
            // re-enable it if appropriate, so they can see any logging during the shutdown process
            if (foreground) {
                Loggers.enableConsoleLogging();
            }

            throw e;
        }

@@ -260,7 +260,7 @@ public class TransportClient extends AbstractClient {
            // ignore, might not be bounded
        }

        for (Class<? extends LifecycleComponent> plugin : injector.getInstance(PluginsService.class).services()) {
        for (Class<? extends LifecycleComponent> plugin : injector.getInstance(PluginsService.class).nodeServices()) {
            injector.getInstance(plugin).close();
        }
        try {

@@ -138,7 +138,7 @@ public class DiscoveryNode implements Streamable, ToXContent {
     * @param version the version of the node.
     */
    public DiscoveryNode(String nodeName, String nodeId, TransportAddress address, Map<String, String> attributes, Version version) {
        this(nodeName, nodeId, NetworkUtils.getLocalHost().getHostName(), NetworkUtils.getLocalHost().getHostAddress(), address, attributes, version);
        this(nodeName, nodeId, address.getHost(), address.getAddress(), address, attributes, version);
    }

    /**

@@ -40,6 +40,8 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.*;
import org.elasticsearch.discovery.Discovery;

@@ -159,7 +161,8 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
        Map<String, String> nodeAttributes = discoveryNodeService.buildAttributes();
        // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling
        final String nodeId = DiscoveryService.generateNodeId(settings);
        DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, transportService.boundAddress().publishAddress(), nodeAttributes, version);
        final TransportAddress publishAddress = transportService.boundAddress().publishAddress();
        DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, publishAddress, nodeAttributes, version);
        DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id());
        this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).blocks(initialBlocks).build();
        this.transportService.setLocalNode(localNode);

@@ -76,7 +76,7 @@ public abstract class CheckFileCommand extends CliTool.Command {
        if (paths != null && paths.length > 0) {
            for (Path path : paths) {
                try {
                    boolean supportsPosixPermissions = Files.getFileStore(path).supportsFileAttributeView(PosixFileAttributeView.class);
                    boolean supportsPosixPermissions = Environment.getFileStore(path).supportsFileAttributeView(PosixFileAttributeView.class);
                    if (supportsPosixPermissions) {
                        PosixFileAttributes attributes = Files.readAttributes(path, PosixFileAttributes.class);
                        permissions.put(path, attributes.permissions());

@@ -23,7 +23,6 @@ import com.google.common.base.Preconditions;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.GnuParser;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@@ -22,9 +22,9 @@ package org.elasticsearch.common.collect;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.UnmodifiableIterator;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.lucene.util.mutable.MutableValueInt;

import java.lang.reflect.Array;
import java.util.*;

/**
@@ -134,12 +134,13 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {

        @Override
        V get(Object key, int hash) {
            final int slot = ArrayUtils.indexOf(keys, key);
            if (slot < 0) {
                return null;
            } else {
                return values[slot];
            for (int i = 0; i < keys.length; i++) {
                if (key.equals(keys[i])) {
                    return values[i];
                }
            }
            return null;

        }

        private static <T> T[] replace(T[] array, int index, T value) {
@@ -151,14 +152,20 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
        @Override
        Leaf<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue) {
            assert hashBits <= 0 : hashBits;
            final int slot = ArrayUtils.indexOf(keys, key);
            int slot = -1;
            for (int i = 0; i < keys.length; i++) {
                if (key.equals(keys[i])) {
                    slot = i;
                    break;
                }
            }

            final K[] keys2;
            final V[] values2;

            if (slot < 0) {
                keys2 = ArrayUtils.add(keys, key);
                values2 = ArrayUtils.add(values, value);
                keys2 = appendElement(keys, key);
                values2 = appendElement(values, value);
                newValue.value = 1;
            } else {
                keys2 = replace(keys, slot, key);
@@ -170,16 +177,49 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {

        @Override
        Leaf<K, V> remove(Object key, int hash) {
            final int slot = ArrayUtils.indexOf(keys, key);
            int slot = -1;
            for (int i = 0; i < keys.length; i++) {
                if (key.equals(keys[i])) {
                    slot = i;
                    break;
                }
            }
            if (slot < 0) {
                return this;
            }
            final K[] keys2 = ArrayUtils.remove(keys, slot);
            final V[] values2 = ArrayUtils.remove(values, slot);
            final K[] keys2 = removeArrayElement(keys, slot);
            final V[] values2 = removeArrayElement(values, slot);
            return new Leaf<>(keys2, values2);
        }
    }

    private static <T> T[] removeArrayElement(T[] array, int index) {
        final Object result = Array.newInstance(array.getClass().getComponentType(), array.length - 1);
        System.arraycopy(array, 0, result, 0, index);
        if (index < array.length - 1) {
            System.arraycopy(array, index + 1, result, index, array.length - index - 1);
        }

        return (T[]) result;
    }

    public static <T> T[] appendElement(final T[] array, final T element) {
        final T[] newArray = Arrays.copyOf(array, array.length + 1);
        newArray[newArray.length - 1] = element;
        return newArray;
    }

    public static <T> T[] insertElement(final T[] array, final T element, final int index) {
        final T[] result = Arrays.copyOf(array, array.length + 1);
        System.arraycopy(array, 0, result, 0, index);
        result[index] = element;
        if (index < array.length) {
            System.arraycopy(array, index, result, index + 1, array.length - index);
        }
        return result;
    }


    /**
     * An inner node in this trie. Inner nodes store up to 64 key-value pairs
     * and use a bitmap in order to associate hashes to them. For example, if
@@ -320,8 +360,8 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {

        private InnerNode<K, V> putNew(K key, int hash6, int slot, V value) {
            final long mask2 = mask | (1L << hash6);
            final K[] keys2 = ArrayUtils.add(keys, slot, key);
            final Object[] subNodes2 = ArrayUtils.add(subNodes, slot, value);
            final K[] keys2 = insertElement(keys, key, slot);
            final Object[] subNodes2 = insertElement(subNodes, value, slot);
            return new InnerNode<>(mask2, keys2, subNodes2);
        }

@@ -342,8 +382,8 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {

        private InnerNode<K, V> removeSlot(int hash6, int slot) {
            final long mask2 = mask & ~(1L << hash6);
            final K[] keys2 = ArrayUtils.remove(keys, slot);
            final Object[] subNodes2 = ArrayUtils.remove(subNodes, slot);
            final K[] keys2 = removeArrayElement(keys, slot);
            final Object[] subNodes2 = removeArrayElement(subNodes, slot);
            return new InnerNode<>(mask2, keys2, subNodes2);
        }

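The reflection-based helpers above replace commons-lang3's ArrayUtils, which this commit drops from the POM. A stand-alone demo of their semantics, mirroring the code in the diff:

import java.lang.reflect.Array;
import java.util.Arrays;

public class ArrayHelpersDemo {
    @SuppressWarnings("unchecked")
    static <T> T[] removeArrayElement(T[] array, int index) {
        // allocate a same-typed array one element shorter and copy around index
        final Object result = Array.newInstance(array.getClass().getComponentType(), array.length - 1);
        System.arraycopy(array, 0, result, 0, index);
        if (index < array.length - 1) {
            System.arraycopy(array, index + 1, result, index, array.length - index - 1);
        }
        return (T[]) result;
    }

    static <T> T[] appendElement(T[] array, T element) {
        final T[] newArray = Arrays.copyOf(array, array.length + 1);
        newArray[newArray.length - 1] = element;
        return newArray;
    }

    public static void main(String[] args) {
        String[] keys = {"a", "b", "c"};
        System.out.println(Arrays.toString(appendElement(keys, "d")));    // [a, b, c, d]
        System.out.println(Arrays.toString(removeArrayElement(keys, 1))); // [a, c]
    }
}
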
@@ -23,7 +23,7 @@ import com.google.common.collect.Sets;
import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Shape;
import com.vividsolutions.jts.geom.*;
import org.apache.commons.lang3.tuple.Pair;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
@@ -98,7 +98,6 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend

    /**
     * build new hole to the polygon
     * @param hole linear ring defining the hole
     * @return this
     */
    public Ring<E> hole() {
@@ -285,7 +284,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        Edge current = edge;
        Edge prev = edge;
        // bookkeep the source and sink of each visited coordinate
        HashMap<Coordinate, Pair<Edge, Edge>> visitedEdge = new HashMap<>();
        HashMap<Coordinate, Tuple<Edge, Edge>> visitedEdge = new HashMap<>();
        do {
            current.coordinate = shift(current.coordinate, shiftOffset);
            current.component = id;
@@ -301,7 +300,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
                // since we're splitting connected components, we want the edges method to visit
                // the newly separated component
                final int visitID = -id;
                Edge firstAppearance = visitedEdge.get(current.coordinate).getRight();
                Edge firstAppearance = visitedEdge.get(current.coordinate).v2();
                // correct the graph pointers by correcting the 'next' pointer for both the
                // first appearance and this appearance of the edge
                Edge temp = firstAppearance.next;
@@ -312,12 +311,12 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
                // a non-visited value (anything positive)
                do {
                    prev.component = visitID;
                    prev = visitedEdge.get(prev.coordinate).getLeft();
                    prev = visitedEdge.get(prev.coordinate).v1();
                    ++splitIndex;
                } while (!current.coordinate.equals(prev.coordinate));
                ++connectedComponents;
            } else {
                visitedEdge.put(current.coordinate, Pair.of(prev, current));
                visitedEdge.put(current.coordinate, new Tuple<Edge, Edge>(prev, current));
            }
            edges.add(current);
            prev = current;

@@ -26,9 +26,8 @@ import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import org.apache.commons.lang3.tuple.Pair;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
@@ -487,7 +486,7 @@ public abstract class ShapeBuilder implements ToXContent {
        return top;
    }

    private static final Pair range(Coordinate[] points, int offset, int length) {
    private static final double[] range(Coordinate[] points, int offset, int length) {
        double minX = points[0].x;
        double maxX = points[0].x;
        double minY = points[0].y;
@@ -507,7 +506,7 @@ public abstract class ShapeBuilder implements ToXContent {
                maxY = points[offset + i].y;
            }
        }
        return Pair.of(Pair.of(minX, maxX), Pair.of(minY, maxY));
        return new double[] {minX, maxX, minY, maxY};
    }

    /**
@@ -585,8 +584,8 @@ public abstract class ShapeBuilder implements ToXContent {
        // and convert to a right handed system

        // compute the bounding box and calculate range
        Pair<Pair, Pair> range = range(points, offset, length);
        final double rng = (Double)range.getLeft().getRight() - (Double)range.getLeft().getLeft();
        double[] range = range(points, offset, length);
        final double rng = range[1] - range[0];
        // translate the points if the following is true
        // 1. shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres
        // (translation would result in a collapsed poly)

@@ -360,10 +360,11 @@ public class HttpDownloadHelper {

        if (connection instanceof HttpURLConnection) {
            ((HttpURLConnection) connection).setInstanceFollowRedirects(false);
            ((HttpURLConnection) connection).setUseCaches(true);
            ((HttpURLConnection) connection).setConnectTimeout(5000);
            connection.setUseCaches(true);
            connection.setConnectTimeout(5000);
        }
        connection.setRequestProperty("ES-Version", Version.CURRENT.toString());
        connection.setRequestProperty("ES-Build-Hash", Build.CURRENT.hashShort());
        connection.setRequestProperty("User-Agent", "elasticsearch-plugin-manager");

        // connect to the remote site (may take some time)

@@ -553,6 +553,8 @@ public abstract class StreamInput extends InputStream {
                return (T) readStackTrace(new IllegalStateException(readOptionalString(), readThrowable()), this);
            case 17:
                return (T) readStackTrace(new LockObtainFailedException(readOptionalString(), readThrowable()), this);
            case 18:
                return (T) readStackTrace(new InterruptedException(readOptionalString()), this);
            default:
                assert false : "no such exception for id: " + key;
        }

@@ -591,6 +591,9 @@ public abstract class StreamOutput extends OutputStream {
            writeVInt(16);
        } else if (throwable instanceof LockObtainFailedException) {
            writeVInt(17);
        } else if (throwable instanceof InterruptedException) {
            writeVInt(18);
            writeCause = false;
        } else {
            ElasticsearchException ex;
            final String name = throwable.getClass().getName();

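The two stream hunks above extend a symmetric id registry: the writer maps a concrete throwable type to a numeric id, and the reader maps the id back to a constructor, so both sides must change together or wire formats diverge. A dependency-free sketch of that shape, using only JDK exceptions (the ids mirror the diff; everything else is illustrative):

public class ExceptionIdRegistryDemo {
    static int idFor(Throwable t) {
        if (t instanceof IllegalStateException) return 16;
        if (t instanceof InterruptedException) return 18;
        return -1; // unknown: the real code falls back to writing the class name
    }

    static Throwable fromId(int id, String message) {
        switch (id) {
            case 16: return new IllegalStateException(message);
            case 18: return new InterruptedException(message);
            default: throw new AssertionError("no such exception for id: " + id);
        }
    }

    public static void main(String[] args) {
        Throwable original = new InterruptedException("interrupted while waiting");
        Throwable roundTripped = fromId(idFor(original), original.getMessage());
        System.out.println(roundTripped); // java.lang.InterruptedException: interrupted while waiting
    }
}
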
@@ -275,9 +275,9 @@ public class Joda {
                .toFormatter()
                .withZoneUTC();

        DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(), new DateTimeParser[] {longFormatter.getParser(), shortFormatter.getParser()});
        DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder().append(longFormatter.withZone(DateTimeZone.UTC).getPrinter(), new DateTimeParser[]{longFormatter.getParser(), shortFormatter.getParser(), new EpochTimeParser(true)});

        return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd", builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
        return new FormatDateTimeFormatter("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", builder.toFormatter().withZone(DateTimeZone.UTC), Locale.ROOT);
    }

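The Joda hunk adds an EpochTimeParser as one more fallback parser. EpochTimeParser is Elasticsearch-internal, so this hedged sketch only shows the underlying Joda-Time pattern the change builds on — one printer plus several candidate parsers tried in order — using stock ISO formatters as stand-ins:

import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import org.joda.time.format.ISODateTimeFormat;

public class MultiParserDemo {
    public static void main(String[] args) {
        DateTimeFormatter date = ISODateTimeFormat.date();         // yyyy-MM-dd
        DateTimeFormatter dateTime = ISODateTimeFormat.dateTime(); // full timestamp
        // one printer, several parsers tried in order -- same shape as the diff
        DateTimeFormatter combined = new DateTimeFormatterBuilder()
                .append(dateTime.getPrinter(), new DateTimeParser[] {dateTime.getParser(), date.getParser()})
                .toFormatter().withZoneUTC();
        System.out.println(combined.parseDateTime("2015-07-14"));
        System.out.println(combined.parseDateTime("2015-07-14T10:00:00.000Z"));
    }
}
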
@@ -20,6 +20,7 @@
package org.elasticsearch.common.logging;

import com.google.common.collect.Lists;
import org.apache.lucene.util.SuppressForbidden;
import org.elasticsearch.common.Classes;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
@@ -74,20 +75,27 @@ public class Loggers {
        return getLogger(buildClassLoggerName(clazz), settings, prefixes);
    }

    @SuppressForbidden(reason = "using localhost for logging on which host it is is fine")
    private static InetAddress getHostAddress() {
        try {
            return InetAddress.getLocalHost();
        } catch (UnknownHostException e) {
            return null;
        }
    }

    public static ESLogger getLogger(String loggerName, Settings settings, String... prefixes) {
        List<String> prefixesList = newArrayList();
        if (settings.getAsBoolean("logger.logHostAddress", false)) {
            try {
                prefixesList.add(InetAddress.getLocalHost().getHostAddress());
            } catch (UnknownHostException e) {
                // ignore
            final InetAddress addr = getHostAddress();
            if (addr != null) {
                prefixesList.add(addr.getHostAddress());
            }
        }
        if (settings.getAsBoolean("logger.logHostName", false)) {
            try {
                prefixesList.add(InetAddress.getLocalHost().getHostName());
            } catch (UnknownHostException e) {
                // ignore
            final InetAddress addr = getHostAddress();
            if (addr != null) {
                prefixesList.add(addr.getHostName());
            }
        }
        String name = settings.get("name");

@@ -0,0 +1,167 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.network;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;

import java.io.IOException;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InterfaceAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.List;
import java.util.Locale;

/**
 * Simple class to log {@code ifconfig}-style output at DEBUG logging.
 */
final class IfConfig {

    private static final ESLogger logger = Loggers.getLogger(IfConfig.class);
    private static final String INDENT = "        ";

    /** log interface configuration at debug level, if its enabled */
    static void logIfNecessary() {
        if (logger.isDebugEnabled()) {
            try {
                doLogging();
            } catch (IOException | SecurityException e) {
                logger.warn("unable to gather network information", e);
            }
        }
    }

    /** perform actual logging: might throw exception if things go wrong */
    private static void doLogging() throws IOException {
        StringBuilder msg = new StringBuilder();
        for (NetworkInterface nic : NetworkUtils.getInterfaces()) {
            msg.append(System.lineSeparator());

            // ordinary name
            msg.append(nic.getName());
            msg.append(System.lineSeparator());

            // display name (e.g. on windows)
            if (!nic.getName().equals(nic.getDisplayName())) {
                msg.append(INDENT);
                msg.append(nic.getDisplayName());
                msg.append(System.lineSeparator());
            }

            // addresses: v4 first, then v6
            List<InterfaceAddress> addresses = nic.getInterfaceAddresses();
            for (InterfaceAddress address : addresses) {
                if (address.getAddress() instanceof Inet6Address == false) {
                    msg.append(INDENT);
                    msg.append(formatAddress(address));
                    msg.append(System.lineSeparator());
                }
            }

            for (InterfaceAddress address : addresses) {
                if (address.getAddress() instanceof Inet6Address) {
                    msg.append(INDENT);
                    msg.append(formatAddress(address));
                    msg.append(System.lineSeparator());
                }
            }

            // hardware address
            byte hardware[] = nic.getHardwareAddress();
            if (hardware != null) {
                msg.append(INDENT);
                msg.append("hardware ");
                for (int i = 0; i < hardware.length; i++) {
                    if (i > 0) {
                        msg.append(":");
                    }
                    msg.append(String.format(Locale.ROOT, "%02X", hardware[i]));
                }
                msg.append(System.lineSeparator());
            }

            // attributes
            msg.append(INDENT);
            msg.append(formatFlags(nic));
            msg.append(System.lineSeparator());
        }
        logger.debug("configuration:" + System.lineSeparator() + "{}", msg.toString());
    }

    /** format internet address: java's default doesn't include everything useful */
    private static String formatAddress(InterfaceAddress interfaceAddress) throws IOException {
        StringBuilder sb = new StringBuilder();

        InetAddress address = interfaceAddress.getAddress();
        if (address instanceof Inet6Address) {
            sb.append("inet6 ");
            sb.append(address.toString().substring(1));
            sb.append(" prefixlen:");
            sb.append(interfaceAddress.getNetworkPrefixLength());
        } else {
            sb.append("inet ");
            sb.append(address.toString().substring(1));
            int netmask = 0xFFFFFFFF << (32 - interfaceAddress.getNetworkPrefixLength());
            sb.append(" netmask:" + InetAddress.getByAddress(new byte[] {
                    (byte)(netmask >>> 24),
                    (byte)(netmask >>> 16 & 0xFF),
                    (byte)(netmask >>> 8 & 0xFF),
                    (byte)(netmask & 0xFF)
            }).toString().substring(1));
            InetAddress broadcast = interfaceAddress.getBroadcast();
            if (broadcast != null) {
                sb.append(" broadcast:" + broadcast.toString().substring(1));
            }
        }
        if (address.isLoopbackAddress()) {
            sb.append(" scope:host");
        } else if (address.isLinkLocalAddress()) {
            sb.append(" scope:link");
        } else if (address.isSiteLocalAddress()) {
            sb.append(" scope:site");
        }
        return sb.toString();
    }

    /** format network interface flags */
    private static String formatFlags(NetworkInterface nic) throws SocketException {
        StringBuilder flags = new StringBuilder();
        if (nic.isUp()) {
            flags.append("UP ");
        }
        if (nic.supportsMulticast()) {
            flags.append("MULTICAST ");
        }
        if (nic.isLoopback()) {
            flags.append("LOOPBACK ");
        }
        if (nic.isPointToPoint()) {
            flags.append("POINTOPOINT ");
        }
        if (nic.isVirtual()) {
            flags.append("VIRTUAL ");
        }
        flags.append("mtu:" + nic.getMTU());
        flags.append(" index:" + nic.getIndex());
        return flags.toString();
    }
}

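A small stand-alone sketch of the netmask computation used in formatAddress() above, turning an IPv4 prefix length into a dotted quad:

import java.net.InetAddress;
import java.net.UnknownHostException;

public class NetmaskDemo {
    static String netmask(int prefixLength) throws UnknownHostException {
        // set the top prefixLength bits, then split into four octets
        int netmask = 0xFFFFFFFF << (32 - prefixLength);
        return InetAddress.getByAddress(new byte[] {
                (byte) (netmask >>> 24),
                (byte) (netmask >>> 16 & 0xFF),
                (byte) (netmask >>> 8 & 0xFF),
                (byte) (netmask & 0xFF)
        }).toString().substring(1); // drop the leading "/"
    }

    public static void main(String[] args) throws UnknownHostException {
        System.out.println(netmask(24)); // 255.255.255.0
        System.out.println(netmask(20)); // 255.255.240.0
    }
}
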
@@ -82,6 +82,7 @@ public class NetworkService extends AbstractComponent {
    @Inject
    public NetworkService(Settings settings) {
        super(settings);
        IfConfig.logIfNecessary();
        InetSocketTransportAddress.setResolveAddress(settings.getAsBoolean("network.address.serialization.resolve", false));
    }

@@ -21,8 +21,6 @@ package org.elasticsearch.common.network;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;

import java.net.Inet4Address;
import java.net.Inet6Address;
@@ -34,10 +32,12 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;

/**
 * Utilities for network interfaces / addresses
 * Utilities for network interfaces / addresses binding and publishing.
 * Its only intended for that purpose, not general purpose usage!!!!
 */
public abstract class NetworkUtils {

@@ -84,7 +84,7 @@ public abstract class NetworkUtils {
     * @deprecated remove this when multihoming is really correct
     */
    @Deprecated
    private static void sortAddresses(List<InetAddress> list) {
    static void sortAddresses(List<InetAddress> list) {
        Collections.sort(list, new Comparator<InetAddress>() {
            @Override
            public int compare(InetAddress left, InetAddress right) {
@@ -97,8 +97,6 @@ public abstract class NetworkUtils {
        });
    }

    private final static ESLogger logger = Loggers.getLogger(NetworkUtils.class);

    /** Return all interfaces (and subinterfaces) on the system */
    static List<NetworkInterface> getInterfaces() throws SocketException {
        List<NetworkInterface> all = new ArrayList<>();
@@ -127,19 +125,8 @@ public abstract class NetworkUtils {
        return Constants.WINDOWS ? false : true;
    }

    /** Returns localhost, or if its misconfigured, falls back to loopback. Use with caution!!!! */
    // TODO: can we remove this?
    public static InetAddress getLocalHost() {
        try {
            return InetAddress.getLocalHost();
        } catch (UnknownHostException e) {
            logger.warn("failed to resolve local host, fallback to loopback", e);
            return InetAddress.getLoopbackAddress();
        }
    }

    /** Returns addresses for all loopback interfaces that are up. */
    public static InetAddress[] getLoopbackAddresses() throws SocketException {
    static InetAddress[] getLoopbackAddresses() throws SocketException {
        List<InetAddress> list = new ArrayList<>();
        for (NetworkInterface intf : getInterfaces()) {
            if (intf.isLoopback() && intf.isUp()) {
@@ -154,7 +141,7 @@ public abstract class NetworkUtils {
    }

    /** Returns addresses for the first non-loopback interface that is up. */
    public static InetAddress[] getFirstNonLoopbackAddresses() throws SocketException {
    static InetAddress[] getFirstNonLoopbackAddresses() throws SocketException {
        List<InetAddress> list = new ArrayList<>();
        for (NetworkInterface intf : getInterfaces()) {
            if (intf.isLoopback() == false && intf.isUp()) {
@@ -170,7 +157,7 @@ public abstract class NetworkUtils {
    }

    /** Returns addresses for the given interface (it must be marked up) */
    public static InetAddress[] getAddressesForInterface(String name) throws SocketException {
    static InetAddress[] getAddressesForInterface(String name) throws SocketException {
        NetworkInterface intf = NetworkInterface.getByName(name);
        if (intf == null) {
            throw new IllegalArgumentException("No interface named '" + name + "' found, got " + getInterfaces());
@@ -187,14 +174,17 @@ public abstract class NetworkUtils {
    }

    /** Returns addresses for the given host, sorted by order of preference */
    public static InetAddress[] getAllByName(String host) throws UnknownHostException {
    static InetAddress[] getAllByName(String host) throws UnknownHostException {
        InetAddress addresses[] = InetAddress.getAllByName(host);
        sortAddresses(Arrays.asList(addresses));
        return addresses;
        // deduplicate, in case of resolver misconfiguration
        // stuff like https://bugzilla.redhat.com/show_bug.cgi?id=496300
        List<InetAddress> unique = new ArrayList<>(new HashSet<>(Arrays.asList(addresses)));
        sortAddresses(unique);
        return unique.toArray(new InetAddress[unique.size()]);
    }

    /** Returns only the IPV4 addresses in {@code addresses} */
    public static InetAddress[] filterIPV4(InetAddress addresses[]) {
    static InetAddress[] filterIPV4(InetAddress addresses[]) {
        List<InetAddress> list = new ArrayList<>();
        for (InetAddress address : addresses) {
            if (address instanceof Inet4Address) {
@@ -208,7 +198,7 @@ public abstract class NetworkUtils {
    }

    /** Returns only the IPV6 addresses in {@code addresses} */
    public static InetAddress[] filterIPV6(InetAddress addresses[]) {
    static InetAddress[] filterIPV6(InetAddress addresses[]) {
        List<InetAddress> list = new ArrayList<>();
        for (InetAddress address : addresses) {
            if (address instanceof Inet6Address) {

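A hedged sketch of the deduplicate-then-sort pattern getAllByName() switches to. The comparator below is a stand-in; the real preference ordering lives in sortAddresses():

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;

public class DedupResolveDemo {
    public static void main(String[] args) throws UnknownHostException {
        InetAddress[] addresses = InetAddress.getAllByName("localhost");
        // misconfigured resolvers can hand back the same address twice, so dedupe first
        List<InetAddress> unique = new ArrayList<>(new HashSet<>(Arrays.asList(addresses)));
        // stand-in comparator: the real code prefers certain address classes
        Collections.sort(unique, new Comparator<InetAddress>() {
            @Override
            public int compare(InetAddress left, InetAddress right) {
                return left.getHostAddress().compareTo(right.getHostAddress());
            }
        });
        System.out.println(unique);
    }
}
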
@@ -44,6 +44,21 @@ public class DummyTransportAddress implements TransportAddress {
        return other == INSTANCE;
    }

    @Override
    public String getHost() {
        return "dummy";
    }

    @Override
    public String getAddress() {
        return "0.0.0.0"; // see https://en.wikipedia.org/wiki/0.0.0.0
    }

    @Override
    public int getPort() {
        return 42;
    }

    @Override
    public DummyTransportAddress readFrom(StreamInput in) throws IOException {
        return INSTANCE;

@@ -30,7 +30,7 @@ import java.net.InetSocketAddress;
/**
 * A transport address used for IP socket address (wraps {@link java.net.InetSocketAddress}).
 */
public class InetSocketTransportAddress implements TransportAddress {
public final class InetSocketTransportAddress implements TransportAddress {

    private static boolean resolveAddress = false;

@@ -92,6 +92,25 @@ public class InetSocketTransportAddress implements TransportAddress {
            address.getAddress().equals(((InetSocketTransportAddress) other).address.getAddress());
    }

    @Override
    public String getHost() {
        if (resolveAddress) {
            return address.getHostName();
        } else {
            return getAddress();
        }
    }

    @Override
    public String getAddress() {
        return address.getAddress().getHostAddress();
    }

    @Override
    public int getPort() {
        return address.getPort();
    }

    public InetSocketAddress address() {
        return this.address;
    }

@@ -29,7 +29,7 @@ import java.io.IOException;
/**
 *
 */
public class LocalTransportAddress implements TransportAddress {
public final class LocalTransportAddress implements TransportAddress {

    public static final LocalTransportAddress PROTO = new LocalTransportAddress("_na");

@@ -57,6 +57,21 @@ public class LocalTransportAddress implements TransportAddress {
        return other instanceof LocalTransportAddress && id.equals(((LocalTransportAddress) other).id);
    }

    @Override
    public String getHost() {
        return "local";
    }

    @Override
    public String getAddress() {
        return "0.0.0.0"; // see https://en.wikipedia.org/wiki/0.0.0.0
    }

    @Override
    public int getPort() {
        return 0;
    }

    @Override
    public LocalTransportAddress readFrom(StreamInput in) throws IOException {
        return new LocalTransportAddress(in);

@@ -28,7 +28,24 @@ import org.elasticsearch.common.io.stream.Writeable;
 */
public interface TransportAddress extends Writeable<TransportAddress> {

    /**
     * Returns the host string for this transport address
     */
    String getHost();

    /**
     * Returns the address string for this transport address
     */
    String getAddress();

    /**
     * Returns the port of this transport address if applicable
     */
    int getPort();

    short uniqueAddressTypeId();

    boolean sameHost(TransportAddress other);

    public String toString();
}

@@ -131,13 +131,16 @@ public abstract class ExtensionPoint {
     * the settings object.
     *
     * @param binder the binder to use
     * @param settings the settings to look up the key to find the implemetation to bind
     * @param settings the settings to look up the key to find the implementation to bind
     * @param settingsKey the key to use with the settings
     * @param defaultValue the default value if they settings doesn't contain the key
     * @param defaultValue the default value if the settings do not contain the key, or null if there is no default
     * @return the actual bound type key
     */
    public String bindType(Binder binder, Settings settings, String settingsKey, String defaultValue) {
        final String type = settings.get(settingsKey, defaultValue);
        if (type == null) {
            throw new IllegalArgumentException("Missing setting [" + settingsKey + "]");
        }
        final Class<? extends T> instance = getExtension(type);
        if (instance == null) {
            throw new IllegalArgumentException("Unknown [" + this.name + "] type [" + type + "]");

@@ -26,10 +26,12 @@ import org.elasticsearch.common.io.PathUtils;

import java.io.IOException;
import java.nio.file.FileStore;
import java.nio.file.FileSystemException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileAttributeView;
import java.nio.file.attribute.FileStoreAttributeView;
import java.util.Arrays;

/**
 * Implementation of FileStore that supports
@@ -73,13 +75,16 @@ class ESFileStore extends FileStore {
        }
    }

    /** Files.getFileStore(Path) useless here! Don't complain, just try it yourself. */
    static FileStore getMatchingFileStore(Path path, FileStore fileStores[]) throws IOException {
        FileStore store = Files.getFileStore(path);

    /**
     * Files.getFileStore(Path) useless here! Don't complain, just try it yourself.
     */
    @SuppressForbidden(reason = "works around the bugs")
    static FileStore getMatchingFileStore(Path path, FileStore fileStores[]) throws IOException {
        if (Constants.WINDOWS) {
            return store; // be defensive, don't even try to do anything fancy.
            return getFileStoreWindows(path, fileStores);
        }

        FileStore store = Files.getFileStore(path);

        try {
            String mount = getMountPointLinux(store);
@@ -110,6 +115,57 @@ class ESFileStore extends FileStore {
        // fall back to crappy one we got from Files.getFileStore
        return store;
    }

    /**
     * remove this code and just use getFileStore for windows on java 9
     * works around https://bugs.openjdk.java.net/browse/JDK-8034057
     */
    @SuppressForbidden(reason = "works around https://bugs.openjdk.java.net/browse/JDK-8034057")
    static FileStore getFileStoreWindows(Path path, FileStore fileStores[]) throws IOException {
        assert Constants.WINDOWS;

        try {
            return Files.getFileStore(path);
        } catch (FileSystemException possibleBug) {
            final char driveLetter;
            // look for a drive letter to see if its the SUBST bug,
            // it might be some other type of path, like a windows share
            // if something goes wrong, we just deliver the original exception
            try {
                String root = path.toRealPath().getRoot().toString();
                if (root.length() < 2) {
                    throw new RuntimeException("root isn't a drive letter: " + root);
                }
                driveLetter = Character.toLowerCase(root.charAt(0));
                if (Character.isAlphabetic(driveLetter) == false || root.charAt(1) != ':') {
                    throw new RuntimeException("root isn't a drive letter: " + root);
                }
            } catch (Throwable checkFailed) {
                // something went wrong,
                possibleBug.addSuppressed(checkFailed);
                throw possibleBug;
            }

            // we have a drive letter: the hack begins!!!!!!!!
            try {
                // we have no choice but to parse toString of all stores and find the matching drive letter
                for (FileStore store : fileStores) {
                    String toString = store.toString();
                    int length = toString.length();
                    if (length > 3 && toString.endsWith(":)") && toString.charAt(length - 4) == '(') {
                        if (Character.toLowerCase(toString.charAt(length - 3)) == driveLetter) {
                            return store;
                        }
                    }
                }
                throw new RuntimeException("no filestores matched");
            } catch (Throwable weTried) {
                IOException newException = new IOException("Unable to retrieve filestore for '" + path + "', tried matching against " + Arrays.toString(fileStores), weTried);
                newException.addSuppressed(possibleBug);
                throw newException;
            }
        }
    }

    @Override
    public String name() {

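A dependency-free sketch of the drive-letter matching inside getFileStoreWindows(): FileStore.toString() conventionally ends with "(C:)", so the code scans for that suffix and compares the letter case-insensitively. The helper and sample strings below are illustrative only:

public class DriveLetterMatchDemo {
    static boolean matches(String storeToString, char driveLetter) {
        int length = storeToString.length();
        return length > 3
                && storeToString.endsWith(":)")
                && storeToString.charAt(length - 4) == '('
                && Character.toLowerCase(storeToString.charAt(length - 3)) == Character.toLowerCase(driveLetter);
    }

    public static void main(String[] args) {
        System.out.println(matches("Local Disk (C:)", 'c')); // true
        System.out.println(matches("Local Disk (D:)", 'c')); // false
    }
}
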
@@ -19,6 +19,7 @@

package org.elasticsearch.env;

import org.apache.lucene.util.Constants;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
@@ -302,9 +303,37 @@ public class Environment {
     * <li>Only requires the security permissions of {@link Files#getFileStore(Path)},
     * no permissions to the actual mount point are required.
     * <li>Exception handling has the same semantics as {@link Files#getFileStore(Path)}.
     * <li>Works around https://bugs.openjdk.java.net/browse/JDK-8034057.
     * </ul>
     */
    public FileStore getFileStore(Path path) throws IOException {
    public static FileStore getFileStore(Path path) throws IOException {
        return ESFileStore.getMatchingFileStore(path, fileStores);
    }

    /**
     * Returns true if the path is writable.
     * Acts just like {@link Files#isWritable(Path)}, except won't
     * falsely return false for paths on SUBST'd drive letters
     * See https://bugs.openjdk.java.net/browse/JDK-8034057
     * Note this will set the file modification time (to its already-set value)
     * to test access.
     */
    @SuppressForbidden(reason = "works around https://bugs.openjdk.java.net/browse/JDK-8034057")
    public static boolean isWritable(Path path) throws IOException {
        boolean v = Files.isWritable(path);
        if (v || Constants.WINDOWS == false) {
            return v;
        }

        // isWritable returned false on windows, the hack begins!!!!!!
        // resetting the modification time is the least destructive/simplest
        // way to check for both files and directories, and fails early just
        // in getting the current value if file doesn't exist, etc
        try {
            Files.setLastModifiedTime(path, Files.getLastModifiedTime(path));
            return true;
        } catch (Throwable e) {
            return false;
        }
    }
}

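A stand-alone sketch of the isWritable() workaround above: where Files.isWritable can report false negatives, rewriting the existing last-modified time is a low-impact probe for write access. Names here are illustrative:

import java.nio.file.Files;
import java.nio.file.Path;

public class WritableProbe {
    static boolean probeWritable(Path path) {
        try {
            // rewrite the current mtime: succeeds only with write access,
            // and fails early if the file does not exist
            Files.setLastModifiedTime(path, Files.getLastModifiedTime(path));
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    public static void main(String[] args) throws Exception {
        Path tmp = Files.createTempFile("probe", ".tmp");
        System.out.println(probeWritable(tmp)); // true for a writable temp dir
        Files.delete(tmp);
    }
}
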
@@ -230,7 +230,7 @@ public class Node implements Releasable {
        // hack around dependency injection problem (for now...)
        injector.getInstance(Discovery.class).setRoutingService(injector.getInstance(RoutingService.class));

        for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
        for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
            injector.getInstance(plugin).start();
        }

@@ -297,7 +297,7 @@ public class Node implements Releasable {
        injector.getInstance(RestController.class).stop();
        injector.getInstance(TransportService.class).stop();

        for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
        for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
            injector.getInstance(plugin).stop();
        }
        // we should stop this last since it waits for resources to get released
@@ -364,7 +364,7 @@ public class Node implements Releasable {
        stopWatch.stop().start("percolator_service");
        injector.getInstance(PercolatorService.class).close();

        for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
        for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
            stopWatch.stop().start("plugin(" + plugin.getName() + ")");
            injector.getInstance(plugin).close();
        }

@@ -27,7 +27,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
@@ -53,7 +52,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
@@ -68,6 +66,7 @@ import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.LeafSearchLookup;
@@ -348,12 +347,12 @@ public class PercolateContext extends SearchContext {
    }

    @Override
    public Scroll scroll() {
    public ScrollContext scrollContext() {
        throw new UnsupportedOperationException();
    }

    @Override
    public SearchContext scroll(Scroll scroll) {
    public SearchContext scrollContext(ScrollContext scroll) {
        throw new UnsupportedOperationException();
    }

@@ -621,16 +620,6 @@ public class PercolateContext extends SearchContext {
        throw new UnsupportedOperationException();
    }

    @Override
    public void lastEmittedDoc(ScoreDoc doc) {
        throw new UnsupportedOperationException();
    }

    @Override
    public ScoreDoc lastEmittedDoc() {
        throw new UnsupportedOperationException();
    }

    @Override
    public DfsSearchResult dfsResult() {
        throw new UnsupportedOperationException();

@ -1,121 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.plugins;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import org.elasticsearch.common.component.LifecycleComponent;
|
||||
import org.elasticsearch.common.inject.Module;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.util.Collection;
|
||||
|
||||
/**
|
||||
* A base class for a plugin.
|
||||
* <p/>
|
||||
* A plugin can be dynamically injected with {@link Module} by implementing <tt>onModule(AnyModule)</tt> method
|
||||
* removing the need to override {@link #processModule(org.elasticsearch.common.inject.Module)} and check using
|
||||
* instanceof.
|
||||
*/
|
||||
public abstract class AbstractPlugin implements Plugin {
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Class<? extends Module>> modules() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Module> modules(Settings settings) {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Class<? extends LifecycleComponent>> services() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Class<? extends Module>> indexModules() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Module> indexModules(Settings settings) {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Class<? extends Closeable>> indexServices() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Class<? extends Module>> shardModules() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Module> shardModules(Settings settings) {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
/**
|
||||
* Defaults to return an empty list.
|
||||
*/
|
||||
@Override
|
||||
public Collection<Class<? extends Closeable>> shardServices() {
|
||||
return ImmutableList.of();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void processModule(Module module) {
|
||||
// nothing to do here
|
||||
}
|
||||
|
||||
@Override
|
||||
public Settings additionalSettings() {
|
||||
return Settings.Builder.EMPTY_SETTINGS;
|
||||
}
|
||||
|
||||
}
|
|
@@ -19,18 +19,12 @@

package org.elasticsearch.plugins;

import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.PreProcessModule;
import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings;

import java.util.Collection;
import java.util.List;

import static org.elasticsearch.common.inject.Modules.createModule;

/**
 *
 */
@@ -47,13 +41,7 @@ public class IndexPluginsModule extends AbstractModule implements SpawnModules,

    @Override
    public Iterable<? extends Module> spawnModules() {
        List<Module> modules = Lists.newArrayList();
        Collection<Class<? extends Module>> modulesClasses = pluginsService.indexModules();
        for (Class<? extends Module> moduleClass : modulesClasses) {
            modules.add(createModule(moduleClass, settings));
        }
        modules.addAll(pluginsService.indexModules(settings));
        return modules;
        return pluginsService.indexModules(settings);
    }

    @Override

@@ -25,82 +25,73 @@ import org.elasticsearch.common.settings.Settings;

import java.io.Closeable;
import java.util.Collection;
import java.util.Collections;

/**
 * An extension point allowing to plug in custom functionality.
 * <p/>
 * A plugin can be dynamically injected with {@link Module} by implementing <tt>onModule(AnyModule)</tt> method
 * removing the need to override {@link #processModule(org.elasticsearch.common.inject.Module)} and check using
 * instanceof.
 * A plugin can register custom extensions to builtin behavior by implementing <tt>onModule(AnyModule)</tt>,
 * and registering the extension with the given module.
 */
public interface Plugin {
public abstract class Plugin {

    /**
     * The name of the plugin.
     */
    String name();
    public abstract String name();

    /**
     * The description of the plugin.
     */
    String description();
    public abstract String description();

    /**
     * Node level modules (classes, will automatically be created).
     * Node level modules.
     */
    Collection<Class<? extends Module>> modules();

    /**
     * Node level modules (instances)
     *
     * @param settings The node level settings.
     */
    Collection<? extends Module> modules(Settings settings);
    public Collection<Module> nodeModules() {
        return Collections.emptyList();
    }

    /**
     * Node level services that will be automatically started/stopped/closed.
     */
    Collection<Class<? extends LifecycleComponent>> services();
    public Collection<Class<? extends LifecycleComponent>> nodeServices() {
        return Collections.emptyList();
    }

    /**
     * Per index modules.
     */
    Collection<Class<? extends Module>> indexModules();

    /**
     * Per index modules.
     */
    Collection<? extends Module> indexModules(Settings settings);
    public Collection<Module> indexModules(Settings indexSettings) {
        return Collections.emptyList();
    }

    /**
     * Per index services that will be automatically closed.
     */
    Collection<Class<? extends Closeable>> indexServices();
    public Collection<Class<? extends Closeable>> indexServices() {
        return Collections.emptyList();
    }

    /**
     * Per index shard module.
     */
    Collection<Class<? extends Module>> shardModules();

    /**
     * Per index shard module.
     */
    Collection<? extends Module> shardModules(Settings settings);
    public Collection<Module> shardModules(Settings indexSettings) {
        return Collections.emptyList();
    }

    /**
     * Per index shard service that will be automatically closed.
     */
    Collection<Class<? extends Closeable>> shardServices();

    /**
     * Process a specific module. Note, it's simpler to implement a custom <tt>onModule(AnyModule module)</tt>
     * method, which will automatically be called by the relevant type.
     */
    void processModule(Module module);
    public Collection<Class<? extends Closeable>> shardServices() {
        return Collections.emptyList();
    }

    /**
     * Additional node settings loaded by the plugin. Note that settings that are explicit in the node's settings can't be
     * overwritten with the additional settings. These settings are added if they don't exist.
     */
    Settings additionalSettings();
    public Settings additionalSettings() {
        return Settings.Builder.EMPTY_SETTINGS;
    }
}

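Editor's note: for reference, a minimal sketch of what a plugin class looks like against the new abstract base class above. `MyPlugin` and `MyPluginModule` are hypothetical names; only the hooks a plugin actually needs have to be overridden now, since everything else inherits an empty default.

    public class MyPlugin extends Plugin {
        @Override
        public String name() {
            return "my-plugin"; // hypothetical plugin id
        }

        @Override
        public String description() {
            return "Example plugin for illustration";
        }

        // Optional: contribute a node-level module. All other hooks
        // (nodeServices, indexModules, shardModules, additionalSettings)
        // keep the empty defaults from the abstract base class.
        @Override
        public Collection<Module> nodeModules() {
            return Collections.<Module>singletonList(new MyPluginModule());
        }
    }
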
@@ -52,7 +52,7 @@ public class PluginInfo implements Streamable, ToXContent {
    private String description;
    private boolean site;
    private String version;

    private boolean jvm;
    private String classname;
    private boolean isolated;

@@ -86,7 +86,11 @@ public class PluginInfo implements Streamable, ToXContent {
        try (InputStream stream = Files.newInputStream(descriptor)) {
            props.load(stream);
        }
        String name = dir.getFileName().toString();
        String name = props.getProperty("name");
        if (name == null || name.isEmpty()) {
            throw new IllegalArgumentException("Property [name] is missing in [" + descriptor + "]");
        }
        PluginManager.checkForForbiddenName(name);
        String description = props.getProperty("description");
        if (description == null) {
            throw new IllegalArgumentException("Property [description] is missing for plugin [" + name + "]");

@@ -95,6 +99,7 @@ public class PluginInfo implements Streamable, ToXContent {
        if (version == null) {
            throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]");
        }

        boolean jvm = Boolean.parseBoolean(props.getProperty("jvm"));
        boolean site = Boolean.parseBoolean(props.getProperty("site"));
        if (jvm == false && site == false) {

@@ -122,7 +127,7 @@ public class PluginInfo implements Streamable, ToXContent {
                throw new IllegalArgumentException("Property [classname] is missing for jvm plugin [" + name + "]");
            }
        }

        if (site) {
            if (!Files.exists(dir.resolve("_site"))) {
                throw new IllegalArgumentException("Plugin [" + name + "] is a site plugin but has no '_site/' directory");

@@ -159,14 +164,14 @@ public class PluginInfo implements Streamable, ToXContent {
    public boolean isJvm() {
        return jvm;
    }

    /**
     * @return true if jvm plugin has isolated classloader
     */
    public boolean isIsolated() {
        return isolated;
    }

    /**
     * @return jvm plugin's classname
     */

@@ -75,27 +75,27 @@ public class PluginManager {

    static final ImmutableSet<String> OFFICIAL_PLUGINS = ImmutableSet.<String>builder()
            .add(
                    "elasticsearch-analysis-icu",
                    "elasticsearch-analysis-kuromoji",
                    "elasticsearch-analysis-phonetic",
                    "elasticsearch-analysis-smartcn",
                    "elasticsearch-analysis-stempel",
                    "elasticsearch-cloud-aws",
                    "elasticsearch-cloud-azure",
                    "elasticsearch-cloud-gce",
                    "elasticsearch-delete-by-query",
                    "elasticsearch-lang-javascript",
                    "elasticsearch-lang-python",
                    "elasticsearch-mapper-murmur3",
                    "elasticsearch-mapper-size"
                    "analysis-icu",
                    "analysis-kuromoji",
                    "analysis-phonetic",
                    "analysis-smartcn",
                    "analysis-stempel",
                    "cloud-aws",
                    "cloud-azure",
                    "cloud-gce",
                    "delete-by-query",
                    "lang-javascript",
                    "lang-python",
                    "mapper-murmur3",
                    "mapper-size"
            ).build();

    private final Environment environment;
    private String url;
    private URL url;
    private OutputMode outputMode;
    private TimeValue timeout;

    public PluginManager(Environment environment, String url, OutputMode outputMode, TimeValue timeout) {
    public PluginManager(Environment environment, URL url, OutputMode outputMode, TimeValue timeout) {
        this.environment = environment;
        this.url = url;
        this.outputMode = outputMode;

@@ -103,8 +103,8 @@ public class PluginManager {
    }

    public void downloadAndExtract(String name, Terminal terminal) throws IOException {
        if (name == null) {
            throw new IllegalArgumentException("plugin name must be supplied with install [name].");
        if (name == null && url == null) {
            throw new IllegalArgumentException("plugin name or url must be supplied with install.");
        }

        if (!Files.exists(environment.pluginsFile())) {

@@ -112,12 +112,18 @@ public class PluginManager {
            Files.createDirectory(environment.pluginsFile());
        }

        if (!Files.isWritable(environment.pluginsFile())) {
        if (!Environment.isWritable(environment.pluginsFile())) {
            throw new IOException("plugin directory " + environment.pluginsFile() + " is read only");
        }

        PluginHandle pluginHandle = PluginHandle.parse(name);
        checkForForbiddenName(pluginHandle.name);
        PluginHandle pluginHandle;
        if (name != null) {
            pluginHandle = PluginHandle.parse(name);
            checkForForbiddenName(pluginHandle.name);
        } else {
            // if we have no name but url, use temporary name that will be overwritten later
            pluginHandle = new PluginHandle("temp_name" + new Random().nextInt(), null, null);
        }

        Path pluginFile = download(pluginHandle, terminal);
        extract(pluginHandle, terminal, pluginFile);

@@ -138,7 +144,7 @@ public class PluginManager {

        // first, try directly from the URL provided
        if (url != null) {
            URL pluginUrl = new URL(url);
            URL pluginUrl = url;
            boolean isSecureProcotol = "https".equalsIgnoreCase(pluginUrl.getProtocol());
            boolean isAuthInfoSet = !Strings.isNullOrEmpty(pluginUrl.getUserInfo());
            if (isAuthInfoSet && !isSecureProcotol) {

@@ -163,7 +169,7 @@ public class PluginManager {
                terminal.println("Failed: %s", ExceptionsHelper.detailedMessage(e));
            }
        } else {
            if (PluginHandle.isOfficialPlugin(pluginHandle.repo, pluginHandle.user, pluginHandle.version)) {
            if (PluginHandle.isOfficialPlugin(pluginHandle.name, pluginHandle.user, pluginHandle.version)) {
                checkForOfficialPlugins(pluginHandle.name);
            }
        }

@@ -204,14 +210,10 @@ public class PluginManager {
    }

    private void extract(PluginHandle pluginHandle, Terminal terminal, Path pluginFile) throws IOException {
        final Path extractLocation = pluginHandle.extractedDir(environment);
        if (Files.exists(extractLocation)) {
            throw new IOException("plugin directory " + extractLocation.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + pluginHandle.name + "' command");
        }

        // unzip plugin to a staging temp dir, named for the plugin
        Path tmp = Files.createTempDirectory(environment.tmpFile(), null);
        Path root = tmp.resolve(pluginHandle.name);
        unzipPlugin(pluginFile, root);

        // find the actual root (in case it's unzipped with extra directory wrapping)

@@ -226,6 +228,13 @@ public class PluginManager {
            jarHellCheck(root, info.isIsolated());
        }

        // update name in handle based on 'name' property found in descriptor file
        pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user);
        final Path extractLocation = pluginHandle.extractedDir(environment);
        if (Files.exists(extractLocation)) {
            throw new IOException("plugin directory " + extractLocation.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + pluginHandle.name + "' command");
        }

        // install plugin
        FileSystemUtils.copyDirectoryRecursively(root, extractLocation);
        terminal.println("Installed %s into %s", pluginHandle.name, extractLocation.toAbsolutePath());

@@ -246,7 +255,7 @@ public class PluginManager {
        } catch (IOException e) {
            throw new IOException("Could not move [" + binFile + "] to [" + toLocation + "]", e);
        }
        if (Files.getFileStore(toLocation).supportsFileAttributeView(PosixFileAttributeView.class)) {
        if (Environment.getFileStore(toLocation).supportsFileAttributeView(PosixFileAttributeView.class)) {
            // add read and execute permissions to existing perms, so execution will work.
            // read should generally be set already, but set it anyway: don't rely on umask...
            final Set<PosixFilePermission> executePerms = new HashSet<>();

@@ -334,7 +343,7 @@ public class PluginManager {

    private void unzipPlugin(Path zip, Path target) throws IOException {
        Files.createDirectories(target);

        try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) {
            ZipEntry entry;
            byte[] buffer = new byte[8192];

@@ -395,7 +404,7 @@ public class PluginManager {
        }
    }

    private static void checkForForbiddenName(String name) {
    static void checkForForbiddenName(String name) {
        if (!hasLength(name) || BLACKLIST.contains(name.toLowerCase(Locale.ROOT))) {
            throw new IllegalArgumentException("Illegal plugin name: " + name);
        }

@@ -438,43 +447,41 @@ public class PluginManager {
     */
    static class PluginHandle {

        final String name;
        final String version;
        final String user;
        final String repo;
        final String name;

        PluginHandle(String name, String version, String user, String repo) {
            this.name = name;
        PluginHandle(String name, String version, String user) {
            this.version = version;
            this.user = user;
            this.repo = repo;
            this.name = name;
        }

        List<URL> urls() {
            List<URL> urls = new ArrayList<>();
            if (version != null) {
                // Elasticsearch new download service uses groupId org.elasticsearch.plugins from 2.0.0
                // Elasticsearch new download service uses groupId org.elasticsearch.plugin from 2.0.0
                if (user == null) {
                    // TODO Update to https
                    if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) {
                        addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/elasticsearch-%s-%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", version, Build.CURRENT.hashShort(), repo, version, repo, version));
                        addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip", version, Build.CURRENT.hashShort(), name, version, name, version));
                    }
                    addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", repo, version, repo, version));
                    addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip", name, version, name, version));
                } else {
                    // Elasticsearch old download service
                    // TODO Update to https
                    addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, repo, version));
                    addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, name, version));
                    // Maven central repository
                    addUrl(urls, String.format(Locale.ROOT, "http://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version));
                    addUrl(urls, String.format(Locale.ROOT, "http://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
                    // Sonatype repository
                    addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version));
                    addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
                    // Github repository
                    addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, repo, version));
                    addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, name, version));
                }
            }
            if (user != null) {
                // Github repository for master branch (assume site)
                addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, repo));
                addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, name));
            }
            return urls;
        }

@@ -526,20 +533,11 @@ public class PluginManager {
            }
        }

            String endname = repo;
            if (repo.startsWith("elasticsearch-")) {
                // remove elasticsearch- prefix
                endname = repo.substring("elasticsearch-".length());
            } else if (repo.startsWith("es-")) {
                // remove es- prefix
                endname = repo.substring("es-".length());
            }

            if (isOfficialPlugin(repo, user, version)) {
                return new PluginHandle(endname, Version.CURRENT.number(), null, repo);
                return new PluginHandle(repo, Version.CURRENT.number(), null);
            }

            return new PluginHandle(endname, version, user, repo);
            return new PluginHandle(repo, version, user);
        }

        static boolean isOfficialPlugin(String repo, String user, String version) {

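Editor's note: to make the renamed official-plugin scheme concrete, here is how the release download URL is built under the new `PluginHandle`, using the format string added above. The plugin name and version below are hypothetical example values.

    String name = "analysis-icu";   // official plugins no longer carry the "elasticsearch-" prefix
    String version = "2.1.0";       // hypothetical release version
    String url = String.format(Locale.ROOT,
            "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
            name, version, name, version);
    // -> http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/analysis-icu/2.1.0/analysis-icu-2.1.0.zip
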
@@ -20,6 +20,7 @@
package org.elasticsearch.plugins;

import com.google.common.base.Strings;

import org.apache.commons.cli.CommandLine;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliToolConfig;

@@ -32,7 +33,8 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.PluginManager.OutputMode;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Locale;

import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd;

@@ -166,19 +168,29 @@ public class PluginManagerCliParser extends CliTool {
        private static final String NAME = "install";

        private static final CliToolConfig.Cmd CMD = cmd(NAME, Install.class)
                .options(option("u", "url").required(false).hasArg(true))
                .options(option("t", "timeout").required(false).hasArg(false))
                .build();

        static Command parse(Terminal terminal, CommandLine cli) {
            String[] args = cli.getArgs();

            // install [plugin-name/url]
            if ((args == null) || (args.length == 0)) {
                return exitCmd(ExitStatus.USAGE, terminal, "plugin name is missing (type -h for help)");
                return exitCmd(ExitStatus.USAGE, terminal, "plugin name or url is missing (type -h for help)");
            }
            String name = args[0];

            URL optionalPluginUrl = null;
            // try parsing cli argument as URL
            try {
                optionalPluginUrl = new URL(name);
                name = null;
            } catch (MalformedURLException e) {
                // we tried to parse the cli argument as url and failed
                // continue treating it as a symbolic plugin name like `analysis-icu` etc.
            }

            String name = args[0];
            TimeValue timeout = TimeValue.parseTimeValue(cli.getOptionValue("t"), DEFAULT_TIMEOUT, "cli");
            String url = cli.getOptionValue("u");

            OutputMode outputMode = OutputMode.DEFAULT;
            if (cli.hasOption("s")) {

@@ -188,15 +200,15 @@ public class PluginManagerCliParser extends CliTool {
                outputMode = OutputMode.VERBOSE;
            }

            return new Install(terminal, name, outputMode, url, timeout);
            return new Install(terminal, name, outputMode, optionalPluginUrl, timeout);
        }

        final String name;
        private OutputMode outputMode;
        final String url;
        final URL url;
        final TimeValue timeout;

        Install(Terminal terminal, String name, OutputMode outputMode, String url, TimeValue timeout) {
        Install(Terminal terminal, String name, OutputMode outputMode, URL url, TimeValue timeout) {
            super(terminal);
            this.name = name;
            this.outputMode = outputMode;

@@ -207,7 +219,11 @@ public class PluginManagerCliParser extends CliTool {
        @Override
        public ExitStatus execute(Settings settings, Environment env) throws Exception {
            PluginManager pluginManager = new PluginManager(env, url, outputMode, timeout);
            terminal.println("-> Installing " + Strings.nullToEmpty(name) + "...");
            if (name != null) {
                terminal.println("-> Installing " + Strings.nullToEmpty(name) + "...");
            } else {
                terminal.println("-> Installing from " + url + "...");
            }
            pluginManager.downloadAndExtract(name, terminal);
            return ExitStatus.OK;
        }

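Editor's note: the net effect on the `install` command is that the single positional argument is first tried as a URL and only then treated as a plugin name. A sketch of the two call shapes this enables (argument values hypothetical):

    // bin/plugin install analysis-icu
    //   -> name = "analysis-icu", url = null (resolved against the official download locations)
    // bin/plugin install https://example.org/custom-plugin.zip
    //   -> name = null, url = the parsed URL (downloaded directly; a temporary handle
    //      name is used until the real name is read from the plugin descriptor)
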
@@ -19,18 +19,12 @@

package org.elasticsearch.plugins;

import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.PreProcessModule;
import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings;

import java.util.Collection;
import java.util.List;

import static org.elasticsearch.common.inject.Modules.createModule;

/**
 *
 */

@@ -47,13 +41,7 @@ public class PluginsModule extends AbstractModule implements SpawnModules, PrePr

    @Override
    public Iterable<? extends Module> spawnModules() {
        List<Module> modules = Lists.newArrayList();
        Collection<Class<? extends Module>> modulesClasses = pluginsService.modules();
        for (Class<? extends Module> moduleClass : modulesClasses) {
            modules.add(createModule(moduleClass, settings));
        }
        modules.addAll(pluginsService.modules(settings));
        return modules;
        return pluginsService.nodeModules();
    }

    @Override

@@ -47,7 +47,6 @@ import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;

@@ -184,7 +183,6 @@ public class PluginsService extends AbstractComponent {

    public void processModule(Module module) {
        for (Tuple<PluginInfo, Plugin> plugin : plugins()) {
            plugin.v2().processModule(module);
            // see if there are onModule references
            List<OnModuleReference> references = onModuleReferences.get(plugin.v2());
            if (references != null) {

@@ -202,49 +200,42 @@ public class PluginsService extends AbstractComponent {
    }

    public Settings updatedSettings() {
        Map<String, String> foundSettings = new HashMap<>();
        final Settings.Builder builder = Settings.settingsBuilder();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            builder.put(plugin.v2().additionalSettings());
            Settings settings = plugin.v2().additionalSettings();
            for (String setting : settings.getAsMap().keySet()) {
                String oldPlugin = foundSettings.put(setting, plugin.v1().getName());
                if (oldPlugin != null) {
                    throw new IllegalArgumentException("Cannot have additional setting [" + setting + "] " +
                            "in plugin [" + plugin.v1().getName() + "], already added in plugin [" + oldPlugin + "]");
                }
            }
            builder.put(settings);
        }
        return builder.put(this.settings).build();
    }

    public Collection<Class<? extends Module>> modules() {
        List<Class<? extends Module>> modules = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().modules());
        }
        return modules;
    }

    public Collection<Module> modules(Settings settings) {
    public Collection<Module> nodeModules() {
        List<Module> modules = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().modules(settings));
            modules.addAll(plugin.v2().nodeModules());
        }
        return modules;
    }

    public Collection<Class<? extends LifecycleComponent>> services() {
    public Collection<Class<? extends LifecycleComponent>> nodeServices() {
        List<Class<? extends LifecycleComponent>> services = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            services.addAll(plugin.v2().services());
            services.addAll(plugin.v2().nodeServices());
        }
        return services;
    }

    public Collection<Class<? extends Module>> indexModules() {
        List<Class<? extends Module>> modules = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().indexModules());
        }
        return modules;
    }

    public Collection<Module> indexModules(Settings settings) {
    public Collection<Module> indexModules(Settings indexSettings) {
        List<Module> modules = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().indexModules(settings));
            modules.addAll(plugin.v2().indexModules(indexSettings));
        }
        return modules;
    }

@@ -257,18 +248,10 @@ public class PluginsService extends AbstractComponent {
        return services;
    }

    public Collection<Class<? extends Module>> shardModules() {
        List<Class<? extends Module>> modules = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().shardModules());
        }
        return modules;
    }

    public Collection<Module> shardModules(Settings settings) {
    public Collection<Module> shardModules(Settings indexSettings) {
        List<Module> modules = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().shardModules(settings));
            modules.addAll(plugin.v2().shardModules(indexSettings));
        }
        return modules;
    }

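Editor's note: the reworked `updatedSettings()` above fails fast on conflicting plugin settings instead of letting the last plugin win. A sketch of the behavior, with two hypothetical plugins that both declare `my.setting`:

    Settings fromPluginA = Settings.settingsBuilder().put("my.setting", "a").build();
    Settings fromPluginB = Settings.settingsBuilder().put("my.setting", "b").build();
    // With both plugins installed, updatedSettings() now throws:
    //   IllegalArgumentException: Cannot have additional setting [my.setting]
    //   in plugin [plugin-b], already added in plugin [plugin-a]
    // instead of silently overwriting "a" with "b".
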
@@ -19,18 +19,12 @@

package org.elasticsearch.plugins;

import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.PreProcessModule;
import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings;

import java.util.Collection;
import java.util.List;

import static org.elasticsearch.common.inject.Modules.createModule;

/**
 *
 */

@@ -47,13 +41,7 @@ public class ShardsPluginsModule extends AbstractModule implements SpawnModules,

    @Override
    public Iterable<? extends Module> spawnModules() {
        List<Module> modules = Lists.newArrayList();
        Collection<Class<? extends Module>> modulesClasses = pluginsService.shardModules();
        for (Class<? extends Module> moduleClass : modulesClasses) {
            modules.add(createModule(moduleClass, settings));
        }
        modules.addAll(pluginsService.shardModules(settings));
        return modules;
        return pluginsService.shardModules(settings);
    }

    @Override

@@ -19,16 +19,8 @@

package org.elasticsearch.plugins;

import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;

import java.io.Closeable;
import java.util.Collection;
import java.util.Collections;

/** A site-only plugin, just serves resources */
final class SitePlugin implements Plugin {
final class SitePlugin extends Plugin {
    final String name;
    final String description;

@@ -46,58 +38,4 @@ final class SitePlugin implements Plugin {
    public String description() {
        return description;
    }

    @Override
    public Collection<Class<? extends Module>> modules() {
        return Collections.emptyList();
    }

    @Override
    public Collection<? extends Module> modules(Settings settings) {
        return Collections.emptyList();
    }

    @Override
    public Collection<Class<? extends LifecycleComponent>> services() {
        return Collections.emptyList();
    }

    @Override
    public Collection<Class<? extends Module>> indexModules() {
        return Collections.emptyList();
    }

    @Override
    public Collection<? extends Module> indexModules(Settings settings) {
        return Collections.emptyList();
    }

    @Override
    public Collection<Class<? extends Closeable>> indexServices() {
        return Collections.emptyList();
    }

    @Override
    public Collection<Class<? extends Module>> shardModules() {
        return Collections.emptyList();
    }

    @Override
    public Collection<? extends Module> shardModules(Settings settings) {
        return Collections.emptyList();
    }

    @Override
    public Collection<Class<? extends Closeable>> shardServices() {
        return Collections.emptyList();
    }

    @Override
    public void processModule(Module module) {
    }

    @Override
    public Settings additionalSettings() {
        return Settings.EMPTY;
    }
}

@@ -19,44 +19,33 @@

package org.elasticsearch.repositories;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.elasticsearch.action.admin.cluster.snapshots.status.TransportNodesSnapshotsStatus;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.repositories.fs.FsRepositoryModule;
import org.elasticsearch.repositories.uri.URLRepository;
import org.elasticsearch.repositories.uri.URLRepositoryModule;
import org.elasticsearch.snapshots.RestoreService;
import org.elasticsearch.snapshots.SnapshotsService;
import org.elasticsearch.snapshots.SnapshotShardsService;

import java.util.Map;
import org.elasticsearch.snapshots.SnapshotsService;

/**
 * Module responsible for registering other repositories.
 * <p/>
 * Repositories implemented as plugins should implement {@code onModule(RepositoriesModule module)} method, in which
 * they should register repository using {@link #registerRepository(String, Class)} method.
 * Sets up classes for Snapshot/Restore.
 *
 * Plugins can add custom repository types by calling {@link #registerRepository(String, Class, Class)}.
 */
public class RepositoriesModule extends AbstractModule {

    private Map<String, Class<? extends Module>> repositoryTypes = Maps.newHashMap();
    private final RepositoryTypesRegistry repositoryTypes = new RepositoryTypesRegistry();

    public RepositoriesModule() {
        registerRepository(FsRepository.TYPE, FsRepositoryModule.class);
        registerRepository(URLRepository.TYPE, URLRepositoryModule.class);
        registerRepository(FsRepository.TYPE, FsRepository.class, BlobStoreIndexShardRepository.class);
        registerRepository(URLRepository.TYPE, URLRepository.class, BlobStoreIndexShardRepository.class);
    }

    /**
     * Registers a custom repository type name against a module.
     *
     * @param type The type
     * @param module The module
     */
    public void registerRepository(String type, Class<? extends Module> module) {
        repositoryTypes.put(type, module);
    /** Registers a custom repository type to the given {@link Repository} and {@link IndexShardRepository}. */
    public void registerRepository(String type, Class<? extends Repository> repositoryType, Class<? extends IndexShardRepository> shardRepositoryType) {
        repositoryTypes.registerRepository(type, repositoryType, shardRepositoryType);
    }

    @Override

@@ -66,6 +55,6 @@ public class RepositoriesModule extends AbstractModule {
        bind(SnapshotShardsService.class).asEagerSingleton();
        bind(TransportNodesSnapshotsStatus.class).asEagerSingleton();
        bind(RestoreService.class).asEagerSingleton();
        bind(RepositoryTypesRegistry.class).toInstance(new RepositoryTypesRegistry(ImmutableMap.copyOf(repositoryTypes)));
        bind(RepositoryTypesRegistry.class).toInstance(repositoryTypes);
    }
}

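Editor's note: combined with the `onModule(AnyModule)` convention described in `Plugin`, a repository plugin would hook into this module roughly as follows. The plugin type name and `MyRepository` class are hypothetical; `BlobStoreIndexShardRepository` is the shard-level implementation the built-in types use above.

    public class MyRepositoryPlugin extends Plugin {
        @Override
        public String name() { return "my-repository"; }

        @Override
        public String description() { return "Example repository plugin"; }

        // Invoked reflectively by the plugin infrastructure when
        // RepositoriesModule is being configured.
        public void onModule(RepositoriesModule module) {
            module.registerRepository("my-type", MyRepository.class, BlobStoreIndexShardRepository.class);
        }
    }
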
@@ -19,7 +19,6 @@

package org.elasticsearch.repositories;

import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.Modules;

@@ -29,12 +28,10 @@ import org.elasticsearch.common.settings.Settings;
import java.util.Arrays;
import java.util.Collections;

import static org.elasticsearch.common.Strings.toCamelCase;

/**
 * This module spawns specific repository module
 * Binds repository classes for the specific repository type.
 */
public class RepositoryModule extends AbstractModule implements SpawnModules {
public class RepositoryModule extends AbstractModule {

    private RepositoryName repositoryName;

@@ -59,28 +56,12 @@ public class RepositoryModule extends AbstractModule implements SpawnModules {
        this.typesRegistry = typesRegistry;
    }

    /**
     * Returns repository module.
     * <p/>
     * First repository type is looked up in typesRegistry and if it's not found there, this module tries to
     * load repository by its class name.
     *
     * @return repository module
     */
    @Override
    public Iterable<? extends Module> spawnModules() {
        Class<? extends Module> repoModuleClass = typesRegistry.type(repositoryName.type());
        if (repoModuleClass == null) {
            throw new IllegalArgumentException("Could not find repository type [" + repositoryName.getType() + "] for repository [" + repositoryName.getName() + "]");
        }
        return Collections.unmodifiableList(Arrays.asList(Modules.createModule(repoModuleClass, globalSettings)));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void configure() {
        typesRegistry.bindType(binder(), repositoryName.type());
        bind(RepositorySettings.class).toInstance(new RepositorySettings(globalSettings, settings));
    }
}

@@ -19,31 +19,34 @@

package org.elasticsearch.repositories;

import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.index.snapshots.IndexShardRepository;

/**
 * Map of registered repository types and associated with these types modules
 * A mapping from type name to implementations of {@link Repository} and {@link IndexShardRepository}.
 */
public class RepositoryTypesRegistry {
    private final ImmutableMap<String, Class<? extends Module>> repositoryTypes;
    // invariant: repositories and shardRepositories have the same keyset
    private final ExtensionPoint.SelectedType<Repository> repositoryTypes =
        new ExtensionPoint.SelectedType<>("repository", Repository.class);
    private final ExtensionPoint.SelectedType<IndexShardRepository> shardRepositoryTypes =
        new ExtensionPoint.SelectedType<>("index_repository", IndexShardRepository.class);

    /**
     * Creates new repository with given map of types
     *
     * @param repositoryTypes
     */
    public RepositoryTypesRegistry(ImmutableMap<String, Class<? extends Module>> repositoryTypes) {
        this.repositoryTypes = repositoryTypes;
    /** Adds a new repository type to the registry, bound to the given implementation classes. */
    public void registerRepository(String name, Class<? extends Repository> repositoryType, Class<? extends IndexShardRepository> shardRepositoryType) {
        repositoryTypes.registerExtension(name, repositoryType);
        shardRepositoryTypes.registerExtension(name, shardRepositoryType);
    }

    /**
     * Returns repository module class for the given type
     *
     * @param type repository type
     * @return repository module class or null if type is not found
     * Looks up the given type and binds the implementation into the given binder.
     * Throws an {@link IllegalArgumentException} if the given type does not exist.
     */
    public Class<? extends Module> type(String type) {
        return repositoryTypes.get(type);
    public void bindType(Binder binder, String type) {
        Settings settings = Settings.builder().put("type", type).build();
        repositoryTypes.bindType(binder, settings, "type", null);
        shardRepositoryTypes.bindType(binder, settings, "type", null);
    }
}

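Editor's note: taken together with `RepositoryModule#configure` above, registration and lookup now pair up as sketched below; `"fs"` is one of the built-in types registered by `RepositoriesModule`.

    RepositoryTypesRegistry registry = new RepositoryTypesRegistry();
    registry.registerRepository("fs", FsRepository.class, BlobStoreIndexShardRepository.class);
    // Later, for a repository created with type "fs", RepositoryModule calls:
    //   registry.bindType(binder, "fs");
    // which binds Repository -> FsRepository and
    // IndexShardRepository -> BlobStoreIndexShardRepository for that injector,
    // and throws IllegalArgumentException for an unknown type.
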
@@ -1,46 +0,0 @@
/* [standard Apache License, Version 2.0 header — identical to the one shown earlier in this commit] */

package org.elasticsearch.repositories.fs;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.repositories.Repository;

/**
 * File system repository module
 */
public class FsRepositoryModule extends AbstractModule {

    public FsRepositoryModule() {
        super();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void configure() {
        bind(Repository.class).to(FsRepository.class).asEagerSingleton();
        bind(IndexShardRepository.class).to(BlobStoreIndexShardRepository.class).asEagerSingleton();
    }

}

@@ -156,7 +156,7 @@ public class URLRepository extends BlobStoreRepository {
            logger.warn("cannot parse the specified url [{}]", url);
            throw new RepositoryException(repositoryName, "cannot parse the specified url [" + url + "]");
        }
        // We didn't match white list - try to resolve against repo.path
        // We didn't match white list - try to resolve against path.repo
        URL normalizedUrl = environment.resolveRepoURL(url);
        if (normalizedUrl == null) {
            logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] or by repositories.url.allowed_urls setting: [{}] ", url, environment.repoFiles());

@@ -1,46 +0,0 @@
/* [standard Apache License, Version 2.0 header — identical to the one shown earlier in this commit] */

package org.elasticsearch.repositories.uri;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.repositories.Repository;

/**
 * URL repository module
 */
public class URLRepositoryModule extends AbstractModule {

    public URLRepositoryModule() {
        super();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void configure() {
        bind(Repository.class).to(URLRepository.class).asEagerSingleton();
        bind(IndexShardRepository.class).to(BlobStoreIndexShardRepository.class).asEagerSingleton();
    }

}

@@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableMap;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;

@@ -54,7 +53,6 @@ import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;

@@ -82,7 +80,6 @@ import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.*;

@@ -274,7 +271,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
            throw new IllegalArgumentException("aggregations are not supported with search_type=scan");
        }

        if (context.scroll() == null) {
        if (context.scrollContext() == null || context.scrollContext().scroll == null) {
            throw new ElasticsearchException("Scroll must be provided when scanning...");
        }

@@ -322,7 +319,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        try {
            shortcutDocIdsToLoadForScanning(context);
            fetchPhase.execute(context);
            if (context.scroll() == null || context.fetchResult().hits().hits().length < context.size()) {
            if (context.scrollContext() == null || context.fetchResult().hits().hits().length < context.size()) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);

@@ -365,7 +362,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {

            loadOrExecuteQueryPhase(request, context, queryPhase);

            if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scroll() == null) {
            if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
                freeContext(context.id());
            } else {
                contextProcessedSuccessfully(context);

@@ -412,23 +409,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
    public QuerySearchResult executeQueryPhase(QuerySearchRequest request) {
        final SearchContext context = findContext(request.id());
        contextProcessing(context);
        context.searcher().setAggregatedDfs(request.dfs());
        IndexShard indexShard = context.indexShard();
        try {
            final IndexCache indexCache = indexShard.indexService().cache();
            final QueryCachingPolicy cachingPolicy = indexShard.getQueryCachingPolicy();
            context.searcher().dfSource(new CachedDfSource(context.searcher().getIndexReader(), request.dfs(), context.similarityService().similarity(),
                    indexCache.query(), cachingPolicy));
        } catch (Throwable e) {
            processFailure(context, e);
            cleanContext(context);
            throw new QueryPhaseExecutionException(context, "Failed to set aggregated df", e);
        }
        ShardSearchStats shardSearchStats = indexShard.searchService();
        try {
            shardSearchStats.onPreQueryPhase(context);
            long time = System.nanoTime();
            queryPhase.execute(context);
            if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scroll() == null) {
            if (context.queryResult().topDocs().scoreDocs.length == 0 && context.scrollContext() == null) {
                // no hits, we can release the context since there will be no fetch phase
                freeContext(context.id());
            } else {

@@ -446,6 +434,16 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        }
    }

    private boolean fetchPhaseShouldFreeContext(SearchContext context) {
        if (context.scrollContext() == null) {
            // simple search, no scroll
            return true;
        } else {
            // scroll request, but the scroll was not extended
            return context.scrollContext().scroll == null;
        }
    }

    public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) {
        final SearchContext context = createAndPutContext(request);
        contextProcessing(context);

@@ -465,7 +463,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        try {
            shortcutDocIdsToLoad(context);
            fetchPhase.execute(context);
            if (context.scroll() == null) {
            if (fetchPhaseShouldFreeContext(context)) {
                freeContext(context.id());
            } else {
                contextProcessedSuccessfully(context);

@@ -488,17 +486,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
    public QueryFetchSearchResult executeFetchPhase(QuerySearchRequest request) {
        final SearchContext context = findContext(request.id());
        contextProcessing(context);
        try {
            final IndexShard indexShard = context.indexShard();
            final IndexCache indexCache = indexShard.indexService().cache();
            final QueryCachingPolicy cachingPolicy = indexShard.getQueryCachingPolicy();
            context.searcher().dfSource(new CachedDfSource(context.searcher().getIndexReader(), request.dfs(), context.similarityService().similarity(),
                    indexCache.query(), cachingPolicy));
        } catch (Throwable e) {
            freeContext(context.id());
            cleanContext(context);
            throw new QueryPhaseExecutionException(context, "Failed to set aggregated df", e);
        }
        context.searcher().setAggregatedDfs(request.dfs());
        try {
            ShardSearchStats shardSearchStats = context.indexShard().searchService();
            shardSearchStats.onPreQueryPhase(context);

@@ -515,7 +503,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        try {
            shortcutDocIdsToLoad(context);
            fetchPhase.execute(context);
            if (context.scroll() == null) {
            if (fetchPhaseShouldFreeContext(context)) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);

@@ -555,7 +543,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        try {
            shortcutDocIdsToLoad(context);
            fetchPhase.execute(context);
            if (context.scroll() == null) {
            if (fetchPhaseShouldFreeContext(context)) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);

@@ -581,13 +569,13 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        final ShardSearchStats shardSearchStats = context.indexShard().searchService();
        try {
            if (request.lastEmittedDoc() != null) {
                context.lastEmittedDoc(request.lastEmittedDoc());
                context.scrollContext().lastEmittedDoc = request.lastEmittedDoc();
            }
            context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());
            shardSearchStats.onPreFetchPhase(context);
            long time = System.nanoTime();
            fetchPhase.execute(context);
            if (context.scroll() == null) {
            if (fetchPhaseShouldFreeContext(context)) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);

@@ -642,7 +630,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher, defaultSearchTimeout);
        SearchContext.setCurrent(context);
        try {
            context.scroll(request.scroll());
            if (request.scroll() != null) {
                context.scrollContext(new ScrollContext());
                context.scrollContext().scroll = request.scroll();
            }

            parseTemplate(request, context);
            parseSource(context, request.source());

@@ -695,7 +686,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
        if (context != null) {
            try {
                context.indexShard().searchService().onFreeContext(context);
                if (context.scroll() != null) {
                if (context.scrollContext() != null) {
                    context.indexShard().searchService().onFreeScrollContext(context);
                }
            } finally {

@@ -708,7 +699,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {

    public void freeAllScrollContexts() {
        for (SearchContext searchContext : activeContexts.values()) {
            if (searchContext.scroll() != null) {
            if (searchContext.scrollContext() != null) {
                freeContext(searchContext.id());
            }
        }

@@ -902,7 +893,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
    private void processScroll(InternalScrollSearchRequest request, SearchContext context) {
        // process scroll
        context.from(context.from() + context.size());
        context.scroll(request.scroll());
        context.scrollContext().scroll = request.scroll();
        // update the context keep alive based on the new scroll value
        if (request.scroll() != null && request.scroll().keepAlive() != null) {
            context.keepAlive(request.scroll().keepAlive().millis());

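Editor's note: the scattered `context.scroll()` checks above are replaced by a single `ScrollContext` holder. Its shape, as implied by the call sites in this diff — the actual class lives in org.elasticsearch.search.internal and may carry more state:

    public class ScrollContext {
        public Scroll scroll;            // requested keep-alive; null once the scroll is not extended
        public ScoreDoc lastEmittedDoc;  // last hit returned for this scroll, used to resume the fetch
    }
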
@@ -31,7 +31,7 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;

final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource> {
final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource> {

    private final long precisionThreshold;

@@ -69,14 +69,14 @@ public class ValueFormat {
        public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format(), ValueFormatter.DateTime.DEFAULT, ValueParser.DateMath.DEFAULT);

        public static DateTime format(String format, DateTimeZone timezone) {
            return new DateTime(format, new ValueFormatter.DateTime(format, timezone), new ValueParser.DateMath(format));
            return new DateTime(format, new ValueFormatter.DateTime(format, timezone), new ValueParser.DateMath(format, timezone));
        }

        public static DateTime mapper(DateFieldMapper.DateFieldType fieldType, DateTimeZone timezone) {
            return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType, timezone), ValueParser.DateMath.mapper(fieldType));
            return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType, timezone), ValueParser.DateMath.mapper(fieldType, timezone));
        }

        public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {
        private DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {
            super(pattern, formatter, parser);
        }

@@ -18,6 +18,7 @@
 */
package org.elasticsearch.search.aggregations.support.format;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;

@@ -25,6 +26,7 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;

import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;

@@ -80,16 +82,21 @@ public interface ValueParser {
     */
    static class DateMath implements ValueParser {

        public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER));
        public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER), DateTimeZone.UTC);

        private DateMathParser parser;

        public DateMath(String format) {
            this(new DateMathParser(Joda.forPattern(format)));
        private DateTimeZone timezone = DateTimeZone.UTC;

        public DateMath(String format, DateTimeZone timezone) {
            this(new DateMathParser(Joda.forPattern(format)), timezone);
        }

        public DateMath(DateMathParser parser) {
        public DateMath(DateMathParser parser, @Nullable DateTimeZone timeZone) {
            this.parser = parser;
            if (timeZone != null) {
                this.timezone = timeZone;
            }
        }

        @Override

@@ -100,7 +107,7 @@ public interface ValueParser {
                    return searchContext.nowInMillis();
                }
            };
            return parser.parse(value, now);
            return parser.parse(value, now, false, timezone);
        }

        @Override

@@ -108,8 +115,8 @@ public interface ValueParser {
            return parseLong(value, searchContext);
        }

        public static DateMath mapper(DateFieldMapper.DateFieldType fieldType) {
            return new DateMath(new DateMathParser(fieldType.dateTimeFormatter()));
        public static DateMath mapper(DateFieldMapper.DateFieldType fieldType, @Nullable DateTimeZone timezone) {
            return new DateMath(new DateMathParser(fieldType.dateTimeFormatter()), timezone);
        }
    }

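Editor's note: a sketch of what the timezone-aware `DateMath` changes in practice — date math such as rounding to the start of a month is now resolved in the supplied zone rather than always UTC. The format pattern and zone below are hypothetical example values.

    ValueParser.DateMath parser =
            new ValueParser.DateMath("yyyy-MM-dd", DateTimeZone.forID("Europe/Paris"));
    // Internally this now calls DateMathParser.parse(value, now, false, timezone),
    // so an expression like "2015-07-01||/M" rounds to midnight Paris time
    // instead of midnight UTC.
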
@@ -1,97 +0,0 @@
/* [standard Apache License, Version 2.0 header — identical to the one shown earlier in this commit] */

package org.elasticsearch.search.dfs;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.similarities.Similarity;

import java.io.IOException;
import java.util.List;

/**
 *
 */
public class CachedDfSource extends IndexSearcher {

    private final AggregatedDfs aggregatedDfs;

    private final int maxDoc;

    public CachedDfSource(IndexReader reader, AggregatedDfs aggregatedDfs, Similarity similarity,
            QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) throws IOException {
        super(reader);
        this.aggregatedDfs = aggregatedDfs;
        setSimilarity(similarity);
        setQueryCache(queryCache);
        setQueryCachingPolicy(queryCachingPolicy);
        if (aggregatedDfs.maxDoc() > Integer.MAX_VALUE) {
            maxDoc = Integer.MAX_VALUE;
        } else {
            maxDoc = (int) aggregatedDfs.maxDoc();
        }
    }

    @Override
    public TermStatistics termStatistics(Term term, TermContext context) throws IOException {
        TermStatistics termStatistics = aggregatedDfs.termStatistics().get(term);
        if (termStatistics == null) {
            // we don't have stats for this - this might be a must_not clauses etc. that doesn't allow extract terms on the query
            return super.termStatistics(term, context);
        }
        return termStatistics;
    }

    @Override
    public CollectionStatistics collectionStatistics(String field) throws IOException {
        CollectionStatistics collectionStatistics = aggregatedDfs.fieldStatistics().get(field);
        if (collectionStatistics == null) {
            // we don't have stats for this - this might be a must_not clauses etc. that doesn't allow extract terms on the query
            return super.collectionStatistics(field);
        }
        return collectionStatistics;
    }

    public int maxDoc() {
        return this.maxDoc;
    }

    @Override
    public Document doc(int i) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void doc(int docID, StoredFieldVisitor fieldVisitor) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public Explanation explain(Weight weight, int doc) {
        throw new UnsupportedOperationException();
    }

    @Override
    protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
        throw new UnsupportedOperationException();
    }
}

@@ -20,15 +20,13 @@
package org.elasticsearch.search.internal;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.*;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.internal.SearchContext.Lifetime;

import java.io.IOException;
@@ -46,21 +44,23 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {

private final SearchContext searchContext;

private CachedDfSource dfSource;
private AggregatedDfs aggregatedDfs;

public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
super(searcher.reader());
in = searcher.searcher();
this.searchContext = searchContext;
setSimilarity(searcher.searcher().getSimilarity(true));
setQueryCache(searchContext.indexShard().indexService().cache().query());
setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy());
}

@Override
public void close() {
}

public void dfSource(CachedDfSource dfSource) {
this.dfSource = dfSource;
public void setAggregatedDfs(AggregatedDfs aggregatedDfs) {
this.aggregatedDfs = aggregatedDfs;
}

@Override
@@ -75,10 +75,12 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {

@Override
public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
// During tests we prefer to use the wrapped IndexSearcher, because then we use the AssertingIndexSearcher
// it is hacky, because if we perform a dfs search, we don't use the wrapped IndexSearcher...
try {
// if scores are needed and we have dfs data then use it
if (dfSource != null && needsScores) {
return dfSource.createNormalizedWeight(query, needsScores);
if (aggregatedDfs != null && needsScores) {
return super.createNormalizedWeight(query, needsScores);
}
return in.createNormalizedWeight(query, needsScores);
} catch (Throwable t) {
@@ -104,4 +106,32 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
searchContext.clearReleasables(Lifetime.COLLECTION);
}
}

@Override
public TermStatistics termStatistics(Term term, TermContext context) throws IOException {
if (aggregatedDfs == null) {
// we are either executing the dfs phase or the search_type doesn't include the dfs phase.
return super.termStatistics(term, context);
}
TermStatistics termStatistics = aggregatedDfs.termStatistics().get(term);
if (termStatistics == null) {
// we don't have stats for this term - it may come from a must_not clause or another query whose terms cannot be extracted
return super.termStatistics(term, context);
}
return termStatistics;
}

@Override
public CollectionStatistics collectionStatistics(String field) throws IOException {
if (aggregatedDfs == null) {
// we are either executing the dfs phase or the search_type doesn't include the dfs phase.
return super.collectionStatistics(field);
}
CollectionStatistics collectionStatistics = aggregatedDfs.fieldStatistics().get(field);
if (collectionStatistics == null) {
// we don't have stats for this field - it may come from a must_not clause or another query whose terms cannot be extracted
return super.collectionStatistics(field);
}
return collectionStatistics;
}
}
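The fallback pattern above in isolation, as a hedged sketch against plain Lucene 5.x (class and field names here are invented): cached distributed stats win, and anything missing falls back to the local reader.

import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermStatistics;

class DfsAwareSearcher extends IndexSearcher {
    private final Map<Term, TermStatistics> dfs; // null until a dfs phase has run

    DfsAwareSearcher(IndexReader reader, Map<Term, TermStatistics> dfs) {
        super(reader);
        this.dfs = dfs;
    }

    @Override
    public TermStatistics termStatistics(Term term, TermContext context) throws IOException {
        if (dfs == null) {
            return super.termStatistics(term, context); // dfs phase itself, or a non-dfs search type
        }
        TermStatistics stats = dfs.get(term);
        // terms hidden inside must_not clauses etc. never make it into the dfs map
        return stats != null ? stats : super.termStatistics(term, context);
    }
}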
@@ -49,7 +49,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
@@ -98,7 +97,7 @@ public class DefaultSearchContext extends SearchContext {
// terminate after count
private int terminateAfter = DEFAULT_TERMINATE_AFTER;
private List<String> groupStats;
private Scroll scroll;
private ScrollContext scrollContext;
private boolean explain;
private boolean version = false; // by default, we don't return versions
private List<String> fieldNames;
@@ -290,13 +289,13 @@ public class DefaultSearchContext extends SearchContext {
}

@Override
public Scroll scroll() {
return this.scroll;
public ScrollContext scrollContext() {
return this.scrollContext;
}

@Override
public SearchContext scroll(Scroll scroll) {
this.scroll = scroll;
public SearchContext scrollContext(ScrollContext scrollContext) {
this.scrollContext = scrollContext;
return this;
}

@@ -652,16 +651,6 @@ public class DefaultSearchContext extends SearchContext {
this.keepAlive = keepAlive;
}

@Override
public void lastEmittedDoc(ScoreDoc doc) {
this.lastEmittedDoc = doc;
}

@Override
public ScoreDoc lastEmittedDoc() {
return lastEmittedDoc;
}

@Override
public SearchLookup lookup() {
// TODO: The types should take into account the parsing context in QueryParserContext...
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
@@ -42,7 +41,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
@@ -154,13 +152,13 @@ public abstract class FilteredSearchContext extends SearchContext {
}

@Override
public Scroll scroll() {
return in.scroll();
public ScrollContext scrollContext() {
return in.scrollContext();
}

@Override
public SearchContext scroll(Scroll scroll) {
return in.scroll(scroll);
public SearchContext scrollContext(ScrollContext scroll) {
return in.scrollContext(scroll);
}

@Override
@@ -483,16 +481,6 @@ public abstract class FilteredSearchContext extends SearchContext {
in.keepAlive(keepAlive);
}

@Override
public void lastEmittedDoc(ScoreDoc doc) {
in.lastEmittedDoc(doc);
}

@Override
public ScoreDoc lastEmittedDoc() {
return in.lastEmittedDoc();
}

@Override
public SearchLookup lookup() {
return in.lookup();
@@ -17,24 +17,17 @@
* under the License.
*/

package org.elasticsearch.nodesinfo.plugin.dummy2;
package org.elasticsearch.search.internal;

import org.elasticsearch.plugins.AbstractPlugin;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.search.Scroll;

public class TestNoVersionPlugin extends AbstractPlugin {
/** Wrapper around information that needs to stay around when scrolling. */
public class ScrollContext {

static final public class Fields {
static public final String NAME = "test-no-version-plugin";
static public final String DESCRIPTION = NAME + " description";
}
public int totalHits = -1;
public float maxScore;
public ScoreDoc lastEmittedDoc;
public Scroll scroll;

@Override
public String name() {
return Fields.NAME;
}

@Override
public String description() {
return Fields.DESCRIPTION;
}
}
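To make the role of these four fields concrete, a hedged sketch of the per-round bookkeeping (it mirrors the QueryPhase changes further down; the helper class is invented for illustration):

import org.apache.lucene.search.TopDocs;

final class ScrollBookkeeping {
    /** Illustrative only: fold one round of results into the scroll state. */
    static void update(ScrollContext ctx, TopDocs topDocs) {
        if (ctx.totalHits == -1) {           // first round: pin the snapshot-wide totals
            ctx.totalHits = topDocs.totalHits;
            ctx.maxScore = topDocs.getMaxScore();
        } else {                             // later rounds reuse them unchanged
            topDocs.totalHits = ctx.totalHits;
            topDocs.setMaxScore(ctx.maxScore);
        }
        if (topDocs.scoreDocs.length > 0) {  // cursor for the next round
            ctx.lastEmittedDoc = topDocs.scoreDocs[topDocs.scoreDocs.length - 1];
        }
    }
}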
@@ -24,7 +24,6 @@ import com.google.common.collect.MultimapBuilder;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
@@ -159,9 +158,9 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders

protected abstract long nowInMillisImpl();

public abstract Scroll scroll();
public abstract ScrollContext scrollContext();

public abstract SearchContext scroll(Scroll scroll);
public abstract SearchContext scrollContext(ScrollContext scroll);

public abstract SearchContextAggregations aggregations();

@@ -303,10 +302,6 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders

public abstract void keepAlive(long keepAlive);

public abstract void lastEmittedDoc(ScoreDoc doc);

public abstract ScoreDoc lastEmittedDoc();

public abstract SearchLookup lookup();

public abstract DfsSearchResult dfsResult();

@@ -21,13 +21,10 @@ package org.elasticsearch.search.internal;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
@@ -101,7 +98,7 @@ public class SubSearchContext extends FilteredSearchContext {
}

@Override
public SearchContext scroll(Scroll scroll) {
public SearchContext scrollContext(ScrollContext scrollContext) {
throw new UnsupportedOperationException("Not supported");
}

@@ -304,11 +301,6 @@ public class SubSearchContext extends FilteredSearchContext {
throw new UnsupportedOperationException("Not supported");
}

@Override
public void lastEmittedDoc(ScoreDoc doc) {
throw new UnsupportedOperationException("Not supported");
}

@Override
public QuerySearchResult queryResult() {
return querySearchResult;
@@ -21,12 +21,16 @@ package org.elasticsearch.search.query;

import com.google.common.collect.ImmutableMap;

import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
@@ -43,8 +47,8 @@ import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.aggregations.AggregationPhase;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import org.elasticsearch.search.rescore.RescorePhase;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext.ScanCollector;
@@ -52,7 +56,6 @@ import org.elasticsearch.search.sort.SortParseElement;
import org.elasticsearch.search.sort.TrackScoresParseElement;
import org.elasticsearch.search.suggest.SuggestPhase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -115,6 +118,7 @@ public class QueryPhase implements SearchPhase {

searchContext.queryResult().searchTimedOut(false);

final SearchType searchType = searchContext.searchType();
boolean rescore = false;
try {
searchContext.queryResult().from(searchContext.from());
@@ -138,7 +142,7 @@ public class QueryPhase implements SearchPhase {
return new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
}
};
} else if (searchContext.searchType() == SearchType.SCAN) {
} else if (searchType == SearchType.SCAN) {
query = searchContext.scanContext().wrapQuery(query);
final ScanCollector scanCollector = searchContext.scanContext().collector(searchContext);
collector = scanCollector;
@@ -150,11 +154,32 @@ public class QueryPhase implements SearchPhase {
};
} else {
// Perhaps have a dedicated scroll phase?
final ScrollContext scrollContext = searchContext.scrollContext();
assert (scrollContext != null) == (searchContext.request().scroll() != null);
final TopDocsCollector<?> topDocsCollector;
ScoreDoc lastEmittedDoc;
if (searchContext.request().scroll() != null) {
numDocs = Math.min(searchContext.size(), totalNumDocs);
lastEmittedDoc = searchContext.lastEmittedDoc();
lastEmittedDoc = scrollContext.lastEmittedDoc;

if (Sort.INDEXORDER.equals(searchContext.sort())) {
if (scrollContext.totalHits == -1) {
// first round
assert scrollContext.lastEmittedDoc == null;
// there is not much that we can optimize here since we want to collect all
// documents in order to get the total number of hits
} else {
// now this gets interesting: since we sort in index-order, we can directly
// skip to the desired doc and stop collecting after ${size} matches
if (scrollContext.lastEmittedDoc != null) {
BooleanQuery bq = new BooleanQuery();
bq.add(query, Occur.MUST);
bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), Occur.FILTER);
query = bq;
}
searchContext.terminateAfter(numDocs);
}
}
} else {
lastEmittedDoc = null;
}
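The same skip trick in isolation, as a hedged sketch (the helper name is invented; the pre-Builder `new BooleanQuery()` matches the Lucene 5.x API used in the hunk):

import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;

final class ScrollPaging {
    /** Wraps a query so that every doc up to and including lastDoc is skipped. */
    static Query nextScrollPage(Query base, int lastDoc) {
        BooleanQuery bq = new BooleanQuery();
        bq.add(base, Occur.MUST);                           // scoring clause, unchanged
        bq.add(new MinDocQuery(lastDoc + 1), Occur.FILTER); // non-scoring doc-id cutoff
        return bq;
    }
}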
@@ -177,7 +202,31 @@ public class QueryPhase implements SearchPhase {
topDocsCallable = new Callable<TopDocs>() {
@Override
public TopDocs call() throws Exception {
return topDocsCollector.topDocs();
TopDocs topDocs = topDocsCollector.topDocs();
if (scrollContext != null) {
if (scrollContext.totalHits == -1) {
// first round
scrollContext.totalHits = topDocs.totalHits;
scrollContext.maxScore = topDocs.getMaxScore();
} else {
// subsequent round: the total number of hits and
// the maximum score were computed on the first round
topDocs.totalHits = scrollContext.totalHits;
topDocs.setMaxScore(scrollContext.maxScore);
}
switch (searchType) {
case QUERY_AND_FETCH:
case DFS_QUERY_AND_FETCH:
// for (DFS_)QUERY_AND_FETCH, we already know the last emitted doc
if (topDocs.scoreDocs.length > 0) {
// set the last emitted doc
scrollContext.lastEmittedDoc = topDocs.scoreDocs[topDocs.scoreDocs.length - 1];
}
default:
break;
}
}
return topDocs;
}
};
}
@@ -227,19 +276,7 @@ public class QueryPhase implements SearchPhase {
searchContext.queryResult().terminatedEarly(false);
}

final TopDocs topDocs = topDocsCallable.call();
if (searchContext.request().scroll() != null) {
int size = topDocs.scoreDocs.length;
if (size > 0) {
// In the case of *QUERY_AND_FETCH we don't get back to shards telling them which least
// relevant docs got emitted as hit, we can simply mark the last doc as last emitted
if (searchContext.searchType() == SearchType.QUERY_AND_FETCH ||
searchContext.searchType() == SearchType.DFS_QUERY_AND_FETCH) {
searchContext.lastEmittedDoc(topDocs.scoreDocs[size - 1]);
}
}
}
searchContext.queryResult().topDocs(topDocs);
searchContext.queryResult().topDocs(topDocsCallable.call());
} catch (Throwable e) {
throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e);
}
@@ -20,18 +20,13 @@
package org.elasticsearch.search.scan;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.search.internal.SearchContext;

@@ -118,93 +113,4 @@ public class ScanContext {
}
}

/**
* A filtering query that matches all doc IDs that are not deleted and
* greater than or equal to the configured doc ID.
*/
// pkg-private for testing
static class MinDocQuery extends Query {

private final int minDoc;

MinDocQuery(int minDoc) {
this.minDoc = minDoc;
}

@Override
public int hashCode() {
return 31 * super.hashCode() + minDoc;
}

@Override
public boolean equals(Object obj) {
if (super.equals(obj) == false) {
return false;
}
MinDocQuery that = (MinDocQuery) obj;
return minDoc == that.minDoc;
}

@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new ConstantScoreWeight(this) {
@Override
public Scorer scorer(LeafReaderContext context, final Bits acceptDocs) throws IOException {
final int maxDoc = context.reader().maxDoc();
if (context.docBase + maxDoc <= minDoc) {
return null;
}
final int segmentMinDoc = Math.max(0, minDoc - context.docBase);
final DocIdSetIterator disi = new DocIdSetIterator() {

int doc = -1;

@Override
public int docID() {
return doc;
}

@Override
public int nextDoc() throws IOException {
return advance(doc + 1);
}

@Override
public int advance(int target) throws IOException {
assert target > doc;
if (doc == -1) {
// skip directly to minDoc
doc = Math.max(target, segmentMinDoc);
} else {
doc = target;
}
while (doc < maxDoc) {
if (acceptDocs == null || acceptDocs.get(doc)) {
break;
}
doc += 1;
}
if (doc >= maxDoc) {
doc = NO_MORE_DOCS;
}
return doc;
}

@Override
public long cost() {
return maxDoc - minDoc;
}

};
return new ConstantScoreScorer(this, score(), disi);
}
};
}

@Override
public String toString(String field) {
return "MinDocQuery(minDoc=" + minDoc + ")";
}

}
}
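The heart of the removed inner class (hoisted elsewhere in this commit to org.apache.lucene.queries.MinDocQuery) is translating a global doc-id cutoff into per-segment terms. A sketch of just that arithmetic, with made-up numbers in the comments:

final class SegmentCutoff {
    /** Returns the first in-segment candidate doc, or -1 if the segment can be skipped outright. */
    static int segmentMinDoc(int minDoc, int docBase, int maxDoc) {
        if (docBase + maxDoc <= minDoc) {
            return -1; // whole segment sits below the cutoff; MinDocQuery returns no scorer for it
        }
        return Math.max(0, minDoc - docBase);
    }
}

// e.g. segmentMinDoc(100, 64, 50) == 36: the 37th doc of this segment is the first candidate.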
@@ -404,6 +404,9 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
} catch (IOException e) {
throw new BindTransportException("Failed to resolve host [" + bindHost + "]", e);
}
if (logger.isDebugEnabled()) {
logger.debug("binding server bootstrap to: {}", hostAddresses);
}
for (InetAddress hostAddress : hostAddresses) {
bindServerBootstrap(name, hostAddress, settings);
}

@@ -497,7 +500,6 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
serverBootstrap.setOption("child.receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
serverBootstrap.setOption("reuseAddress", reuseAddress);
serverBootstrap.setOption("child.reuseAddress", reuseAddress);

serverBootstraps.put(name, serverBootstrap);
}

@@ -4,13 +4,13 @@ NAME

SYNOPSIS

plugin install <name>
plugin install <name or url>

DESCRIPTION

This command installs an elasticsearch plugin

<name> can be one of the official plugins, or refer to a github repository, or to one of the official plugins
The argument can be a <name> of one of the official plugins, or refer to a github repository

The notation of just specifying a plugin name downloads an officially supported plugin.

@@ -20,37 +20,41 @@ DESCRIPTION

The notation of 'username/repository' refers to a github repository.

The argument can be a valid <url> which points to a download or file location for the plugin to be loaded from.

EXAMPLES

plugin install elasticsearch-analysis-kuromoji
plugin install analysis-kuromoji

plugin install elasticsearch/shield/latest

plugin install lmenezes/elasticsearch-kopf

plugin install http://download.elasticsearch.org/elasticsearch/elasticsearch-analysis-kuromoji/elasticsearch-analysis-kuromoji-2.7.0.zip

plugin install file:/path/to/plugin/elasticsearch-analysis-kuromoji-2.7.0.zip

OFFICIAL PLUGINS

The following plugins are officially supported and can be installed by just referring to their name

- elasticsearch-analysis-icu
- elasticsearch-analysis-kuromoji
- elasticsearch-analysis-phonetic
- elasticsearch-analysis-smartcn
- elasticsearch-analysis-stempel
- elasticsearch-cloud-aws
- elasticsearch-cloud-azure
- elasticsearch-cloud-gce
- elasticsearch-delete-by-query
- elasticsearch-lang-javascript
- elasticsearch-lang-python
- elasticsearch-mapper-murmur3
- elasticsearch-mapper-size
- analysis-icu
- analysis-kuromoji
- analysis-phonetic
- analysis-smartcn
- analysis-stempel
- cloud-aws
- cloud-azure
- cloud-gce
- delete-by-query
- lang-javascript
- lang-python
- mapper-murmur3
- mapper-size

OPTIONS

-u,--url URL to retrieve the plugin from

-t,--timeout Timeout until the plugin download is aborted

-v,--verbose Verbose output
@@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.lucene.queries;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.store.Directory;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

public class MinDocQueryTests extends ESTestCase {

public void testBasics() {
MinDocQuery query1 = new MinDocQuery(42);
MinDocQuery query2 = new MinDocQuery(42);
MinDocQuery query3 = new MinDocQuery(43);
QueryUtils.check(query1);
QueryUtils.checkEqual(query1, query2);
QueryUtils.checkUnequal(query1, query3);
}

public void testRandom() throws IOException {
final int numDocs = randomIntBetween(10, 200);
final Document doc = new Document();
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(getRandom(), dir);
for (int i = 0; i < numDocs; ++i) {
w.addDocument(doc);
}
final IndexReader reader = w.getReader();
final IndexSearcher searcher = newSearcher(reader);
for (int i = 0; i <= numDocs; ++i) {
assertEquals(numDocs - i, searcher.count(new MinDocQuery(i)));
}
w.close();
reader.close();
dir.close();
}

}
@@ -622,4 +622,20 @@ public class ExceptionSerializationTests extends ESTestCase {
assertEquals(ex.status(), e.status());
assertEquals(RestStatus.UNAUTHORIZED, e.status());
}

public void testInterruptedException() throws IOException {
InterruptedException orig = randomBoolean() ? new InterruptedException("boom") : new InterruptedException();
InterruptedException ex = serialize(orig);
assertEquals(orig.getMessage(), ex.getMessage());
}

public static class UnknownException extends Exception {
public UnknownException(String message) {
super(message);
}

public UnknownException(String message, Throwable cause) {
super(message, cause);
}
}
}
@@ -89,7 +89,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.test.ESIntegTestCase;

@@ -144,7 +144,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", InterceptingTransportService.Plugin.class.getName())
.extendArray("plugin.types", InterceptingTransportService.TestPlugin.class.getName())
.build();
}

@@ -844,7 +844,7 @@ public class IndicesRequestIT extends ESIntegTestCase {

public static class InterceptingTransportService extends TransportService {

public static class Plugin extends AbstractPlugin {
public static class TestPlugin extends Plugin {
@Override
public String name() {
return "intercepting-transport-service";
@@ -19,10 +19,10 @@

package org.elasticsearch.benchmark.scripts.expression;

import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;

public class NativeScriptPlugin extends AbstractPlugin {
public class NativeScriptPlugin extends Plugin {

@Override
public String name() {
@@ -18,11 +18,15 @@
*/
package org.elasticsearch.benchmark.scripts.score.plugin;

import org.elasticsearch.benchmark.scripts.score.script.*;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.benchmark.scripts.score.script.NativeConstantForLoopScoreScript;
import org.elasticsearch.benchmark.scripts.score.script.NativeConstantScoreScript;
import org.elasticsearch.benchmark.scripts.score.script.NativeNaiveTFIDFScoreScript;
import org.elasticsearch.benchmark.scripts.score.script.NativePayloadSumNoRecordScoreScript;
import org.elasticsearch.benchmark.scripts.score.script.NativePayloadSumScoreScript;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;

public class NativeScriptExamplesPlugin extends AbstractPlugin {
public class NativeScriptExamplesPlugin extends Plugin {

@Override
@@ -25,8 +25,6 @@ import com.google.common.collect.Sets;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.recycler.Recycler.V;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.NodeModule;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.threadpool.ThreadPool;
@@ -35,9 +35,17 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportModule;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import org.junit.Test;

import java.util.concurrent.CountDownLatch;

@@ -58,7 +66,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTests {
TransportClient client = TransportClient.builder().settings(Settings.builder()
.put("client.transport.sniff", false)
.put("node.name", "transport_client_" + this.getTestName())
.put("plugin.types", InternalTransportService.Plugin.class.getName())
.put("plugin.types", InternalTransportService.TestPlugin.class.getName())
.put(headersSettings)
.build()).build();

@@ -73,7 +81,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTests {
.put("cluster.name", "cluster1")
.put("node.name", "transport_client_" + this.getTestName() + "_1")
.put("client.transport.nodes_sampler_interval", "1s")
.put("plugin.types", InternalTransportService.Plugin.class.getName())
.put("plugin.types", InternalTransportService.TestPlugin.class.getName())
.put(HEADER_SETTINGS)
.put("path.home", createTempDir().toString())
.build()).build();

@@ -96,7 +104,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTests {

public static class InternalTransportService extends TransportService {

public static class Plugin extends AbstractPlugin {
public static class TestPlugin extends Plugin {
@Override
public String name() {
return "mock-transport-service";
@@ -35,11 +35,14 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.*;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
import org.hamcrest.Matchers;
import org.junit.Test;

@@ -64,7 +67,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
public class ClusterInfoServiceIT extends ESIntegTestCase {

public static class Plugin extends AbstractPlugin {
public static class TestPlugin extends Plugin {

@Override
public String name() {

@@ -143,7 +146,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase {
return Settings.builder()
// manual collection or upon cluster forming.
.put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, "1s")
.putArray("plugin.types", Plugin.class.getName(), MockTransportService.Plugin.class.getName())
.putArray("plugin.types", TestPlugin.class.getName(), MockTransportService.TestPlugin.class.getName())
.build();
}

@@ -20,7 +20,6 @@ package org.elasticsearch.cluster;

import com.google.common.base.Predicate;
import com.google.common.util.concurrent.ListenableFuture;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;

@@ -36,7 +35,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.MockLogAppender;

@@ -44,7 +43,12 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.Test;

import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

@@ -52,7 +56,11 @@ import java.util.concurrent.atomic.AtomicBoolean;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.ESIntegTestCase.Scope;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;

/**
*

@@ -992,7 +1000,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
}
}

public static class TestPlugin extends AbstractPlugin {
public static class TestPlugin extends Plugin {

@Override
public String name() {

@@ -1005,7 +1013,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
}

@Override
public Collection<Class<? extends LifecycleComponent>> services() {
public Collection<Class<? extends LifecycleComponent>> nodeServices() {
List<Class<? extends LifecycleComponent>> services = new ArrayList<>(1);
services.add(MasterAwareService.class);
return services;
@@ -28,7 +28,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.MockDiskUsagesIT;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.concurrent.CountDownLatch;

@@ -39,7 +39,7 @@ import java.util.concurrent.CountDownLatch;
*/
public class MockInternalClusterInfoService extends InternalClusterInfoService {

public static class Plugin extends AbstractPlugin {
public static class TestPlugin extends Plugin {
@Override
public String name() {
return "mock-cluster-info-service";
@@ -27,6 +27,7 @@ import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Test;

@@ -50,7 +51,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
// Use the mock internal cluster info service, which has fake-able disk usages
.extendArray("plugin.types", MockInternalClusterInfoService.Plugin.class.getName())
.extendArray("plugin.types", MockInternalClusterInfoService.TestPlugin.class.getName())
// Update more frequently
.put(InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, "1s")
.build();

@@ -167,7 +168,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes());
paths[0] = path;
FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths);
return new NodeStats(new DiscoveryNode(nodeName, null, Version.V_2_0_0_beta1),
return new NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, Version.CURRENT),
System.currentTimeMillis(),
null, null, null, null, null,
fsInfo,
@@ -25,12 +25,13 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.junit.Test;

import java.util.Collection;
import java.util.Collections;

import static com.google.common.collect.Lists.newArrayList;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;

@@ -49,7 +50,7 @@ public class SettingsFilteringIT extends ESIntegTestCase {
.build();
}

public static class SettingsFilteringPlugin extends AbstractPlugin {
public static class SettingsFilteringPlugin extends Plugin {
/**
* The name of the plugin.
*/

@@ -67,10 +68,8 @@ public class SettingsFilteringIT extends ESIntegTestCase {
}

@Override
public Collection<Class<? extends Module>> indexModules() {
Collection<Class<? extends Module>> modules = newArrayList();
modules.add(SettingsFilteringModule.class);
return modules;
public Collection<Module> indexModules(Settings indexSettings) {
return Collections.<Module>singletonList(new SettingsFilteringModule());
}
}

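The recurring rename in this commit (AbstractPlugin to Plugin, module classes to module instances) follows one idiom; a hedged sketch of a new-style plugin (MyPlugin and MyIndexModule are invented names, while the overridden methods match those used in the hunks):

import java.util.Collection;
import java.util.Collections;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;

public class MyPlugin extends Plugin {
    @Override
    public String name() {
        return "my-plugin"; // illustrative
    }

    @Override
    public String description() {
        return "example only"; // illustrative
    }

    @Override
    public Collection<Module> indexModules(Settings indexSettings) {
        // instances, not classes: the plugin constructs (and can configure) the module itself
        return Collections.<Module>singletonList(new MyIndexModule()); // MyIndexModule is hypothetical
    }
}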
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.common;

import org.apache.commons.lang3.ArrayUtils;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;

@@ -77,7 +76,10 @@ public class ParseFieldTests extends ESTestCase {
String[] deprecated = new String[]{"text", "same_as_text"};
String[] allValues = values;
if (withDeprecatedNames) {
allValues = ArrayUtils.addAll(values, deprecated);
String[] newArray = new String[allValues.length + deprecated.length];
System.arraycopy(allValues, 0, newArray, 0, allValues.length);
System.arraycopy(deprecated, 0, newArray, allValues.length, deprecated.length);
allValues = newArray;
}

ParseField field = new ParseField(randomFrom(values));
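This hunk inlines what commons-lang3's ArrayUtils.addAll did, since that dependency is dropped from the pom earlier in this commit. A self-contained sketch of the same concatenation using only the JDK (the helper class is invented):

import java.util.Arrays;

final class Arrays2 {
    /** Minimal stand-in for ArrayUtils.addAll for String arrays. */
    static String[] concat(String[] a, String[] b) {
        String[] out = Arrays.copyOf(a, a.length + b.length); // copies a, leaves the tail null
        System.arraycopy(b, 0, out, a.length, b.length);      // fills the tail with b
        return out;
    }
}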
@@ -24,14 +24,12 @@ import com.carrotsearch.randomizedtesting.SeedUtils;
import com.google.common.base.Predicate;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.test.ESTestCase;

import java.util.Collection;
@@ -143,7 +143,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
.put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly
.put("http.enabled", false) // just to make test quicker
.put("gateway.local.list_timeout", "10s") // long enough to induce failures, but not so long that the test times out
.put("plugin.types", MockTransportService.Plugin.class.getName())
.put("plugin.types", MockTransportService.TestPlugin.class.getName())
.build();

private void configureCluster(int numberOfNodes, int minimumMasterNode) throws ExecutionException, InterruptedException {
@@ -416,7 +416,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
Path dataPath = createTempDir();
Settings nodeSettings = Settings.builder()
.put("node.add_id_to_custom_path", false)
.put("plugin.types", MockTransportService.Plugin.class.getName())
.put("plugin.types", MockTransportService.TestPlugin.class.getName())
.put("path.shared_data", dataPath)
.build();

@@ -56,7 +56,7 @@ public class TransportIndexFailuresIT extends ESIntegTestCase {
.put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly
.put(DiscoverySettings.PUBLISH_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly
.put("discovery.zen.minimum_master_nodes", 1)
.put("plugin.types", MockTransportService.Plugin.class.getName())
.put("plugin.types", MockTransportService.TestPlugin.class.getName())
.build();

@Override
@@ -83,6 +83,9 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {

FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1");
assertThat(fieldMapper, instanceOf(DateFieldMapper.class));
DateFieldMapper dateFieldMapper = (DateFieldMapper) fieldMapper;
assertEquals("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", dateFieldMapper.fieldType().dateTimeFormatter().format());
assertEquals(1265587200000L, dateFieldMapper.fieldType().dateTimeFormatter().parser().parseMillis("1265587200000"));
fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field2");
assertThat(fieldMapper, instanceOf(DateFieldMapper.class));

@@ -20,33 +20,25 @@
package org.elasticsearch.index.mapper.externalvalues;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;

import java.util.Collection;
import java.util.Collections;

import static com.google.common.collect.Lists.newArrayList;

public class ExternalMapperPlugin extends AbstractPlugin {
/**
* The name of the plugin.
*/
public class ExternalMapperPlugin extends Plugin {
@Override
public String name() {
return "external-mappers";
}

/**
* The description of the plugin.
*/
@Override
public String description() {
return "External Mappers Plugin";
}

@Override
public Collection<Class<? extends Module>> indexModules() {
Collection<Class<? extends Module>> modules = newArrayList();
modules.add(ExternalIndexModule.class);
return modules;
public Collection<Module> indexModules(Settings indexSettings) {
return Collections.<Module>singletonList(new ExternalIndexModule());
}
}
@@ -27,11 +27,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.*;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;

import java.io.IOException;

public class DummyQueryParserPlugin extends AbstractPlugin {
public class DummyQueryParserPlugin extends Plugin {

@Override
public String name() {
@@ -19,16 +19,16 @@
package org.elasticsearch.index.shard;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.engine.MockEngineFactory;
import org.elasticsearch.test.engine.MockEngineSupportModule;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Collections;

// this must exist in the same package as IndexShardModule to allow access to setting the impl
public class MockEngineFactoryPlugin extends AbstractPlugin {
public class MockEngineFactoryPlugin extends Plugin {
@Override
public String name() {
return "mock-engine-factory";

@@ -38,10 +38,8 @@ public class MockEngineFactoryPlugin extends AbstractPlugin {
return "a mock engine factory for testing";
}
@Override
public Collection<Class<? extends Module>> indexModules() {
List<Class<? extends Module>> modules = new ArrayList<>();
modules.add(MockEngineSupportModule.class);
return modules;
public Collection<Module> indexModules(Settings indexSettings) {
return Collections.<Module>singletonList(new MockEngineSupportModule());
}
public void onModule(IndexShardModule module) {
module.engineFactoryImpl = MockEngineFactory.class;
@@ -98,7 +98,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
// we really need local GW here since this also checks for corruption etc.
// and we need to make sure primaries are not just trashed if we don't have replicas
.put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName())
.extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
// speed up recoveries
.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, 10)
.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 10)
@@ -66,7 +66,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase {
// we really need local GW here since this also checks for corruption etc.
// and we need to make sure primaries are not just trashed if we don't have replicas
.put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName()).build();
.extendArray("plugin.types", MockTransportService.TestPlugin.class.getName()).build();
}

@Test
@@ -54,7 +54,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName())
.extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
.build();
}

@@ -19,14 +19,14 @@

package org.elasticsearch.indices.analysis;

import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;

import java.util.Collection;
import java.util.Collections;

public class DummyAnalysisPlugin extends AbstractPlugin {
public class DummyAnalysisPlugin extends Plugin {
/**
* The name of the plugin.
*/

@@ -44,8 +44,8 @@ public class DummyAnalysisPlugin extends AbstractPlugin {
}

@Override
public Collection<Class<? extends Module>> modules() {
return ImmutableList.<Class<? extends Module>>of(DummyIndicesAnalysisModule.class);
public Collection<Module> nodeModules() {
return Collections.<Module>singletonList(new DummyIndicesAnalysisModule());
}

public void onModule(AnalysisModule module) {
@@ -35,7 +35,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;

@@ -107,7 +107,7 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {

Settings.Builder settings = settingsBuilder()
.put(indexSettings())
.extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.Plugin.class.getName())
.extendArray("plugin.types", RandomExceptionDirectoryReaderWrapper.TestPlugin.class.getName())
.put(EXCEPTION_TOP_LEVEL_RATIO_KEY, topLevelRate)
.put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate)
.put(MockEngineSupport.WRAP_READER_RATIO, 1.0d);

@@ -202,7 +202,7 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
// TODO: Generalize this class and add it as a utility
public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {

public static class Plugin extends AbstractPlugin {
public static class TestPlugin extends Plugin {
@Override
public String name() {
return "random-exception-reader-wrapper";
@@ -519,7 +519,7 @@ public class IndexRecoveryIT extends ESIntegTestCase {
final Settings nodeSettings = Settings.builder()
.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, "100ms")
.put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, "1s")
.put("plugin.types", MockTransportService.Plugin.class.getName())
.put("plugin.types", MockTransportService.TestPlugin.class.getName())
.put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) // restarted recoveries will delete temp files and write them again
.build();
// start a master node
@@ -87,7 +87,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
// which is between 1 and 2 sec can cause each of the shard deletion requests to timeout.
// to prevent this we are setting the timeout here to something highish ie. the default in practice
.put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS))
.extendArray("plugin.types", MockTransportService.Plugin.class.getName())
.extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
.build();
}

@@ -27,7 +27,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.plugins.AbstractPlugin;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -80,7 +80,7 @@ public class IndexTemplateFilteringIT extends ESIntegTestCase {
         }
     }

-    public static class TestPlugin extends AbstractPlugin {
+    public static class TestPlugin extends Plugin {
         @Override
         public String name() {
             return "test-plugin";
@@ -20,9 +20,9 @@ package org.elasticsearch.node;

 import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
 import org.elasticsearch.common.util.MockBigArrays;
-import org.elasticsearch.plugins.AbstractPlugin;
+import org.elasticsearch.plugins.Plugin;

-public class NodeMocksPlugin extends AbstractPlugin {
+public class NodeMocksPlugin extends Plugin {

     @Override
     public String name() {
@@ -19,9 +19,9 @@

 package org.elasticsearch.nodesinfo.plugin.dummy1;

-import org.elasticsearch.plugins.AbstractPlugin;
+import org.elasticsearch.plugins.Plugin;

-public class TestPlugin extends AbstractPlugin {
+public class TestPlugin extends Plugin {

     static final public class Fields {
         static public final String NAME = "test-plugin";
@@ -73,7 +73,7 @@ public class PluggableTransportModuleIT extends ESIntegTestCase {
         assertThat("Expected send request counter to be greather than zero", countAfterRequest, is(greaterThan(countBeforeRequest)));
     }

-    public static class CountingSentRequestsPlugin extends AbstractPlugin {
+    public static class CountingSentRequestsPlugin extends Plugin {
        @Override
        public String name() {
            return "counting-pipelines-plugin";
@@ -21,6 +21,7 @@ package org.elasticsearch.plugins;

 import com.google.common.base.Function;
 import com.google.common.collect.Lists;

+import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.node.info.PluginsInfo;
 import org.elasticsearch.test.ESTestCase;
@@ -53,13 +54,14 @@ public class PluginInfoTests extends ESTestCase {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", "my_plugin",
             "version", "1.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
             "jvm", "true",
             "classname", "FakePlugin");
         PluginInfo info = PluginInfo.readFromProperties(pluginDir);
-        assertEquals("fake-plugin", info.getName());
+        assertEquals("my_plugin", info.getName());
         assertEquals("fake desc", info.getDescription());
         assertEquals("1.0", info.getVersion());
         assertEquals("FakePlugin", info.getClassname());
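
Note: the updated assertion pins down that getName() now comes from the descriptor's name property ("my_plugin"), not the plugin directory ("fake-plugin"). A self-contained sketch of that round trip; writeDescriptor stands in for the test's writeProperties helper, and the descriptor filename plugin-descriptor.properties is an assumption:

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;

public class DescriptorNameSketch {
    // Stand-in for the test's writeProperties(Path, String...) helper.
    static void writeDescriptor(Path dir, String... keyValuePairs) throws IOException {
        Properties props = new Properties();
        for (int i = 0; i < keyValuePairs.length; i += 2) {
            props.setProperty(keyValuePairs[i], keyValuePairs[i + 1]);
        }
        Files.createDirectories(dir);
        try (Writer writer = Files.newBufferedWriter(dir.resolve("plugin-descriptor.properties"))) {
            props.store(writer, null);
        }
    }

    public static void main(String[] args) throws IOException {
        Path pluginDir = Files.createTempDirectory("plugins").resolve("fake-plugin");
        writeDescriptor(pluginDir, "name", "my_plugin", "version", "1.0");

        Properties props = new Properties();
        try (Reader reader = Files.newBufferedReader(pluginDir.resolve("plugin-descriptor.properties"))) {
            props.load(reader);
        }
        // The logical name is whatever the descriptor says, not the directory name.
        System.out.println(props.getProperty("name")); // my_plugin
    }
}
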
@@ -69,9 +71,28 @@ public class PluginInfoTests extends ESTestCase {
         assertNull(info.getUrl());
     }

-    public void testReadFromPropertiesDescriptionMissing() throws Exception {
+    public void testReadFromPropertiesNameMissing() throws Exception {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir);
         try {
             PluginInfo.readFromProperties(pluginDir);
+            fail("expected missing name exception");
+        } catch (IllegalArgumentException e) {
+            assertTrue(e.getMessage().contains("Property [name] is missing in"));
+        }
+
+        writeProperties(pluginDir, "name", "");
+        try {
+            PluginInfo.readFromProperties(pluginDir);
+            fail("expected missing name exception");
+        } catch (IllegalArgumentException e) {
+            assertTrue(e.getMessage().contains("Property [name] is missing in"));
+        }
+    }
+
+    public void testReadFromPropertiesDescriptionMissing() throws Exception {
+        Path pluginDir = createTempDir().resolve("fake-plugin");
+        writeProperties(pluginDir, "name", "fake-plugin");
+        try {
+            PluginInfo.readFromProperties(pluginDir);
             fail("expected missing description exception");
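
Note: the new testReadFromPropertiesNameMissing covers both an absent and an empty name, expecting the same "Property [name] is missing in" message for both. A standalone sketch of the validation shape the test implies; the real check lives in PluginInfo.readFromProperties:

import java.util.Properties;

public class RequiredPropertySketch {
    static String requireProperty(Properties props, String key, String descriptor) {
        String value = props.getProperty(key);
        if (value == null || value.isEmpty()) {
            // Empty is treated the same as absent; the test asserts on this fragment.
            throw new IllegalArgumentException("Property [" + key + "] is missing in [" + descriptor + "]");
        }
        return value;
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("name", ""); // present but empty
        try {
            requireProperty(props, "name", "plugin-descriptor.properties");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
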
@@ -82,7 +103,7 @@ public class PluginInfoTests extends ESTestCase {

     public void testReadFromPropertiesVersionMissing() throws Exception {
         Path pluginDir = createTempDir().resolve("fake-plugin");
-        writeProperties(pluginDir, "description", "fake desc");
+        writeProperties(pluginDir, "description", "fake desc", "name", "fake-plugin");
         try {
             PluginInfo.readFromProperties(pluginDir);
             fail("expected missing version exception");
@@ -95,7 +116,8 @@ public class PluginInfoTests extends ESTestCase {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
             "description", "fake desc",
-            "version", "1.0");
+            "version", "1.0",
+            "name", "my_plugin");
         try {
             PluginInfo.readFromProperties(pluginDir);
             fail("expected jvm or site exception");
@@ -108,6 +130,7 @@ public class PluginInfoTests extends ESTestCase {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", "my_plugin",
             "version", "1.0",
             "jvm", "true");
         try {
@@ -122,6 +145,7 @@ public class PluginInfoTests extends ESTestCase {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", "my_plugin",
             "elasticsearch.version", Version.CURRENT.toString(),
             "version", "1.0",
             "jvm", "true");
@@ -134,9 +158,11 @@ public class PluginInfoTests extends ESTestCase {
     }

     public void testReadFromPropertiesJavaVersionIncompatible() throws Exception {
-        Path pluginDir = createTempDir().resolve("fake-plugin");
+        String pluginName = "fake-plugin";
+        Path pluginDir = createTempDir().resolve(pluginName);
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", pluginName,
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", "1000000.0",
             "classname", "FakePlugin",
@@ -146,7 +172,7 @@ public class PluginInfoTests extends ESTestCase {
             PluginInfo.readFromProperties(pluginDir);
             fail("expected incompatible java version exception");
         } catch (IllegalStateException e) {
-            assertTrue(e.getMessage(), e.getMessage().contains("fake-plugin requires Java"));
+            assertTrue(e.getMessage(), e.getMessage().contains(pluginName + " requires Java"));
         }
     }

@@ -156,6 +182,7 @@ public class PluginInfoTests extends ESTestCase {
             "description", "fake desc",
             "version", "1.0",
             "jvm", "true",
+            "name", "my_plugin",
             "elasticsearch.version", "bogus");
         try {
             PluginInfo.readFromProperties(pluginDir);
@@ -169,6 +196,7 @@ public class PluginInfoTests extends ESTestCase {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", "my_plugin",
             "version", "1.0",
             "jvm", "true",
             "elasticsearch.version", Version.V_1_7_0.toString());
@@ -184,6 +212,7 @@ public class PluginInfoTests extends ESTestCase {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", "my_plugin",
             "version", "1.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
@@ -201,6 +230,7 @@ public class PluginInfoTests extends ESTestCase {
         Files.createDirectories(pluginDir.resolve("_site"));
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", "my_plugin",
             "version", "1.0",
             "site", "true");
         PluginInfo info = PluginInfo.readFromProperties(pluginDir);
@@ -208,11 +238,12 @@ public class PluginInfoTests extends ESTestCase {
         assertFalse(info.isJvm());
         assertEquals("NA", info.getClassname());
     }

     public void testReadFromPropertiesSitePluginWithoutSite() throws Exception {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         writeProperties(pluginDir,
             "description", "fake desc",
+            "name", "my_plugin",
             "version", "1.0",
             "site", "true");
         try {
@@ -52,9 +52,7 @@ import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSocketFactory;
-import java.io.BufferedWriter;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.FileVisitResult;
@@ -177,11 +175,13 @@ public class PluginManagerIT extends ESIntegTestCase {
         Path pluginDir = createTempDir().resolve("fake-plugin");
         String pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", "fake-plugin",
             "version", "1.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
             "jvm", "true",
             "classname", "FakePlugin");
-        assertStatus("install --url " + pluginUrl, USAGE);
+        assertStatus("install", USAGE);
     }

     @Test
@@ -196,6 +196,7 @@ public class PluginManagerIT extends ESIntegTestCase {

         String pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", pluginName,
             "version", "1.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
@@ -207,7 +208,7 @@ public class PluginManagerIT extends ESIntegTestCase {
         Path pluginBinDir = binDir.resolve(pluginName);

         Path pluginConfigDir = env.configFile().resolve(pluginName);
-        assertStatusOk("install " + pluginName + " --url " + pluginUrl + " --verbose");
+        assertStatusOk("install " + pluginUrl + " --verbose");

         terminal.getTerminalOutput().clear();
         assertStatusOk("list");
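
Note: across PluginManagerIT, "install <name> --url <url>" becomes "install <url>": the single positional argument is either an official plugin name or a URL. A rough standalone sketch of that resolution order; the exact PluginManager logic is not part of this diff, so the URL template and version below are placeholders:

import java.util.Locale;

public class InstallArgSketch {
    static String resolveDownloadUrl(String arg, String version) {
        if (arg.startsWith("http://") || arg.startsWith("https://") || arg.startsWith("file:")) {
            return arg; // an explicit URL is used verbatim
        }
        // Otherwise treat the argument as a plugin name (placeholder template).
        return String.format(Locale.ROOT,
                "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                arg, version, arg, version);
    }

    public static void main(String[] args) {
        System.out.println(resolveDownloadUrl("analysis-icu", "2.1.0"));
        System.out.println(resolveDownloadUrl("file:/tmp/fake-plugin.zip", "2.1.0"));
    }
}
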
@@ -241,6 +242,7 @@ public class PluginManagerIT extends ESIntegTestCase {

         String pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", pluginName,
             "version", "1.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
@@ -250,7 +252,7 @@ public class PluginManagerIT extends ESIntegTestCase {
         Environment env = initialSettings.v2();
         Path pluginConfigDir = env.configFile().resolve(pluginName);

-        assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl));
+        assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));

         /*
         First time, our plugin contains:
@@ -277,13 +279,14 @@ public class PluginManagerIT extends ESIntegTestCase {
         Files.write(pluginDir.resolve("config").resolve("dir").resolve("subdir").resolve("testsubdir.txt"), "version1".getBytes(StandardCharsets.UTF_8));
         pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", pluginName,
             "version", "2.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
             "jvm", "true",
             "classname", "FakePlugin");

-        assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl));
+        assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));

         assertFileContent(pluginConfigDir, "test.txt", "version1");
         assertFileContent(pluginConfigDir, "test.txt.new", "version2");
@@ -313,13 +316,14 @@ public class PluginManagerIT extends ESIntegTestCase {
         Files.write(pluginDir.resolve("config").resolve("dir").resolve("subdir").resolve("testsubdir.txt"), "version2".getBytes(StandardCharsets.UTF_8));
         pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", pluginName,
             "version", "3.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
             "jvm", "true",
             "classname", "FakePlugin");

-        assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl));
+        assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));

         assertFileContent(pluginConfigDir, "test.txt", "version1");
         assertFileContent(pluginConfigDir, "test2.txt", "version1");
@@ -341,6 +345,7 @@ public class PluginManagerIT extends ESIntegTestCase {
         Files.createFile(pluginDir.resolve("bin").resolve("tool"));;
         String pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", "fake-plugin",
             "version", "1.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
@@ -351,7 +356,7 @@ public class PluginManagerIT extends ESIntegTestCase {
         Path binDir = env.binFile();
         Path pluginBinDir = binDir.resolve(pluginName);

-        assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl));
+        assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
         assertThatPluginIsListed(pluginName);
         assertDirectoryExists(pluginBinDir);
     }
@@ -375,12 +380,13 @@ public class PluginManagerIT extends ESIntegTestCase {
         Path pluginDir = createTempDir().resolve(pluginName);
         String pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", pluginName,
             "version", "1.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
             "jvm", "true",
             "classname", "FakePlugin");
-        assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl));
+        assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
         assertThatPluginIsListed(pluginName);
     }

@@ -391,10 +397,11 @@ public class PluginManagerIT extends ESIntegTestCase {
         Files.createDirectories(pluginDir.resolve("_site"));
         Files.createFile(pluginDir.resolve("_site").resolve("somefile"));
         String pluginUrl = createPlugin(pluginDir,
-                "description", "fake desc",
-                "version", "1.0",
-                "site", "true");
-        assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl));
+                "description", "fake desc",
+                "name", pluginName,
+                "version", "1.0",
+                "site", "true");
+        assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl));
         assertThatPluginIsListed(pluginName);
         // We want to check that Plugin Manager moves content to _site
         assertFileExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site"));
@@ -410,7 +417,7 @@ public class PluginManagerIT extends ESIntegTestCase {
                 "description", "fake desc",
                 "version", "1.0",
                 "site", "true");
-        assertStatus(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginName, pluginUrl),
+        assertStatus(String.format(Locale.ROOT, "install %s --verbose", pluginUrl),
                 ExitStatus.IO_ERROR);
         assertThatPluginIsNotListed(pluginName);
         assertFileNotExists(initialSettings.v2().pluginsFile().resolve(pluginName).resolve("_site"));
@@ -421,7 +428,7 @@ public class PluginManagerIT extends ESIntegTestCase {
         if (pluginCoordinates == null) {
             assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginDescriptor));
         } else {
-            assertStatusOk(String.format(Locale.ROOT, "install %s --url %s --verbose", pluginDescriptor, pluginCoordinates));
+            assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginCoordinates));
         }
         assertThatPluginIsListed(pluginName);

@@ -498,6 +505,7 @@ public class PluginManagerIT extends ESIntegTestCase {
         Path pluginDir = createTempDir().resolve(pluginName);
         String pluginUrl = createPlugin(pluginDir,
             "description", "fake desc",
+            "name", pluginName,
             "version", "1.0.0",
             "elasticsearch.version", Version.CURRENT.toString(),
             "java.version", System.getProperty("java.specification.version"),
@@ -539,18 +547,19 @@ public class PluginManagerIT extends ESIntegTestCase {

     @Test
     public void testOfficialPluginName_ThrowsException() throws IOException {
-        PluginManager.checkForOfficialPlugins("elasticsearch-analysis-icu");
-        PluginManager.checkForOfficialPlugins("elasticsearch-analysis-kuromoji");
-        PluginManager.checkForOfficialPlugins("elasticsearch-analysis-phonetic");
-        PluginManager.checkForOfficialPlugins("elasticsearch-analysis-smartcn");
-        PluginManager.checkForOfficialPlugins("elasticsearch-analysis-stempel");
-        PluginManager.checkForOfficialPlugins("elasticsearch-cloud-aws");
-        PluginManager.checkForOfficialPlugins("elasticsearch-cloud-azure");
-        PluginManager.checkForOfficialPlugins("elasticsearch-cloud-gce");
-        PluginManager.checkForOfficialPlugins("elasticsearch-delete-by-query");
-        PluginManager.checkForOfficialPlugins("elasticsearch-lang-javascript");
-        PluginManager.checkForOfficialPlugins("elasticsearch-lang-python");
-        PluginManager.checkForOfficialPlugins("elasticsearch-mapper-murmur3");
+        PluginManager.checkForOfficialPlugins("analysis-icu");
+        PluginManager.checkForOfficialPlugins("analysis-kuromoji");
+        PluginManager.checkForOfficialPlugins("analysis-phonetic");
+        PluginManager.checkForOfficialPlugins("analysis-smartcn");
+        PluginManager.checkForOfficialPlugins("analysis-stempel");
+        PluginManager.checkForOfficialPlugins("cloud-aws");
+        PluginManager.checkForOfficialPlugins("cloud-azure");
+        PluginManager.checkForOfficialPlugins("cloud-gce");
+        PluginManager.checkForOfficialPlugins("delete-by-query");
+        PluginManager.checkForOfficialPlugins("lang-javascript");
+        PluginManager.checkForOfficialPlugins("lang-python");
+        PluginManager.checkForOfficialPlugins("mapper-murmur3");
+        PluginManager.checkForOfficialPlugins("mapper-size");

         try {
             PluginManager.checkForOfficialPlugins("elasticsearch-mapper-attachment");
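
Note: the official plugin catalogue now uses bare names, and a prefixed name such as elasticsearch-mapper-attachment is rejected. A standalone sketch of the membership check the test exercises; the set contents come from the assertions above, while the exception type and message are assumptions:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class OfficialPluginsSketch {
    static final Set<String> OFFICIAL_PLUGINS = new HashSet<>(Arrays.asList(
            "analysis-icu", "analysis-kuromoji", "analysis-phonetic", "analysis-smartcn",
            "analysis-stempel", "cloud-aws", "cloud-azure", "cloud-gce", "delete-by-query",
            "lang-javascript", "lang-python", "mapper-murmur3", "mapper-size"));

    static void checkForOfficialPlugins(String name) {
        if (!OFFICIAL_PLUGINS.contains(name)) {
            throw new IllegalArgumentException(name + " is not an official plugin");
        }
    }

    public static void main(String[] args) {
        checkForOfficialPlugins("analysis-icu");                         // passes silently
        try {
            checkForOfficialPlugins("elasticsearch-mapper-attachment"); // prefixed: rejected
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
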
@@ -562,7 +571,7 @@ public class PluginManagerIT extends ESIntegTestCase {

     @Test
     public void testThatBasicAuthIsRejectedOnHttp() throws Exception {
-        assertStatus(String.format(Locale.ROOT, "install foo --url http://user:pass@localhost:12345/foo.zip --verbose"), CliTool.ExitStatus.IO_ERROR);
+        assertStatus(String.format(Locale.ROOT, "install http://user:pass@localhost:12345/foo.zip --verbose"), CliTool.ExitStatus.IO_ERROR);
         assertThat(terminal.getTerminalOutput(), hasItem(containsString("Basic auth is only supported for HTTPS!")));
     }

@@ -599,7 +608,7 @@ public class PluginManagerIT extends ESIntegTestCase {
         Channel channel = serverBootstrap.bind(new InetSocketAddress("localhost", 0));
         int port = ((InetSocketAddress) channel.getLocalAddress()).getPort();
         // IO_ERROR because there is no real file delivered...
-        assertStatus(String.format(Locale.ROOT, "install foo --url https://user:pass@localhost:%s/foo.zip --verbose --timeout 1s", port), ExitStatus.IO_ERROR);
+        assertStatus(String.format(Locale.ROOT, "install https://user:pass@localhost:%s/foo.zip --verbose --timeout 1s", port), ExitStatus.IO_ERROR);

         // ensure that we did not try any other data source like download.elastic.co, in case we specified our own local URL
         assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("download.elastic.co"))));
|
@ -62,7 +62,7 @@ public class PluginManagerUnitTests extends ESTestCase {
|
|||
.build();
|
||||
Environment environment = new Environment(settings);
|
||||
|
||||
PluginManager.PluginHandle pluginHandle = new PluginManager.PluginHandle(pluginName, "version", "user", "repo");
|
||||
PluginManager.PluginHandle pluginHandle = new PluginManager.PluginHandle(pluginName, "version", "user");
|
||||
String configDirPath = Files.simplifyPath(pluginHandle.configDir(environment).normalize().toString());
|
||||
String expectedDirPath = Files.simplifyPath(genericConfigFolder.resolve(pluginName).normalize().toString());
|
||||
|
||||
|
@@ -82,12 +82,12 @@ public class PluginManagerUnitTests extends ESTestCase {
         Iterator<URL> iterator = handle.urls().iterator();

         if (supportStagingUrls) {
-            String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/elasticsearch-%s-%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip",
+            String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                     Version.CURRENT.number(), Build.CURRENT.hashShort(), pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number());
             assertThat(iterator.next().toExternalForm(), is(expectedStagingURL));
         }

-        URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-" + pluginName + "/" + Version.CURRENT.number() + "/elasticsearch-" +
+        URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/" + pluginName + "/" + Version.CURRENT.number() + "/" +
                 pluginName + "-" + Version.CURRENT.number() + ".zip");
         assertThat(iterator.next().toExternalForm(), is(expected.toExternalForm()));
@@ -95,10 +95,10 @@ public class PluginManagerUnitTests extends ESTestCase {
     }

     @Test
-    public void testTrimmingElasticsearchFromOfficialPluginName() throws IOException {
-        String randomPluginName = randomFrom(PluginManager.OFFICIAL_PLUGINS.asList()).replaceFirst("elasticsearch-", "");
+    public void testOfficialPluginName() throws IOException {
+        String randomPluginName = randomFrom(PluginManager.OFFICIAL_PLUGINS.asList());
         PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(randomPluginName);
-        assertThat(handle.name, is(randomPluginName.replaceAll("^elasticsearch-", "")));
+        assertThat(handle.name, is(randomPluginName));

         boolean supportStagingUrls = randomBoolean();
         if (supportStagingUrls) {
@@ -108,12 +108,12 @@ public class PluginManagerUnitTests extends ESTestCase {
         Iterator<URL> iterator = handle.urls().iterator();

         if (supportStagingUrls) {
-            String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/elasticsearch-%s-%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip",
+            String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                     Version.CURRENT.number(), Build.CURRENT.hashShort(), randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
             assertThat(iterator.next().toExternalForm(), is(expectedStagingUrl));
         }

-        String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip",
+        String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                 randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
         assertThat(iterator.next().toExternalForm(), is(releaseUrl));
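
Note: both URL assertions encode the same rename: the Maven-style plugin coordinates drop the elasticsearch- prefix from the staging directory, the group path, and the artifact id. The format strings below are copied from the updated tests; the version and build hash values are placeholders:

import java.util.Locale;

public class DownloadUrlSketch {
    static String releaseUrl(String pluginName, String version) {
        return String.format(Locale.ROOT,
                "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                pluginName, version, pluginName, version);
    }

    static String stagingUrl(String pluginName, String version, String buildHash) {
        return String.format(Locale.ROOT,
                "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                version, buildHash, pluginName, version, pluginName, version);
    }

    public static void main(String[] args) {
        System.out.println(releaseUrl("analysis-icu", "2.1.0"));            // no elasticsearch- prefix
        System.out.println(stagingUrl("analysis-icu", "2.1.0", "abc1234")); // hash is a placeholder
    }
}
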
@@ -121,12 +121,11 @@ public class PluginManagerUnitTests extends ESTestCase {
     }

     @Test
-    public void testTrimmingElasticsearchFromGithubPluginName() throws IOException {
+    public void testGithubPluginName() throws IOException {
         String user = randomAsciiOfLength(6);
-        String randomName = randomAsciiOfLength(10);
-        String pluginName = randomFrom("elasticsearch-", "es-") + randomName;
+        String pluginName = randomAsciiOfLength(10);
         PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(user + "/" + pluginName);
-        assertThat(handle.name, is(randomName));
+        assertThat(handle.name, is(pluginName));
         assertThat(handle.urls(), hasSize(1));
         assertThat(handle.urls().get(0).toExternalForm(), is(new URL("https", "github.com", "/" + user + "/" + pluginName + "/" + "archive/master.zip").toExternalForm()));
     }
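
Note: the GitHub path keeps names verbatim too: a user/name descriptor no longer has elasticsearch- or es- trimmed from it, and it still maps to the repository's master archive. A minimal sketch of the URL the test expects, with placeholder values:

import java.net.MalformedURLException;
import java.net.URL;

public class GithubHandleSketch {
    public static void main(String[] args) throws MalformedURLException {
        String user = "someuser";          // placeholder
        String pluginName = "someplugin";  // kept verbatim, no prefix trimming
        URL archive = new URL("https", "github.com",
                "/" + user + "/" + pluginName + "/archive/master.zip");
        System.out.println(archive.toExternalForm());
    }
}
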
@@ -0,0 +1,85 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugins;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.store.IndexStoreModule;
+import org.elasticsearch.test.ESTestCase;
+
+public class PluginsServiceTests extends ESTestCase {
+    public static class AdditionalSettingsPlugin1 extends Plugin {
+        @Override
+        public String name() {
+            return "additional-settings1";
+        }
+        @Override
+        public String description() {
+            return "adds additional setting 'foo.bar'";
+        }
+        @Override
+        public Settings additionalSettings() {
+            return Settings.builder().put("foo.bar", "1").put(IndexStoreModule.STORE_TYPE, IndexStoreModule.Type.MMAPFS.getSettingsKey()).build();
+        }
+    }
+    public static class AdditionalSettingsPlugin2 extends Plugin {
+        @Override
+        public String name() {
+            return "additional-settings2";
+        }
+        @Override
+        public String description() {
+            return "adds additional setting 'foo.bar'";
+        }
+        @Override
+        public Settings additionalSettings() {
+            return Settings.builder().put("foo.bar", "2").build();
+        }
+    }
+
+    public void testAdditionalSettings() {
+        Settings settings = Settings.builder()
+            .put("path.home", createTempDir())
+            .put("my.setting", "test")
+            .put(IndexStoreModule.STORE_TYPE, IndexStoreModule.Type.SIMPLEFS.getSettingsKey())
+            .putArray("plugin.types", AdditionalSettingsPlugin1.class.getName()).build();
+        PluginsService service = new PluginsService(settings, new Environment(settings));
+        Settings newSettings = service.updatedSettings();
+        assertEquals("test", newSettings.get("my.setting")); // previous settings still exist
+        assertEquals("1", newSettings.get("foo.bar")); // added setting exists
+        assertEquals(IndexStoreModule.Type.SIMPLEFS.getSettingsKey(), newSettings.get(IndexStoreModule.STORE_TYPE)); // does not override pre existing settings
+    }
+
+    public void testAdditionalSettingsClash() {
+        Settings settings = Settings.builder()
+            .put("path.home", createTempDir())
+            .putArray("plugin.types", AdditionalSettingsPlugin1.class.getName(), AdditionalSettingsPlugin2.class.getName()).build();
+        PluginsService service = new PluginsService(settings, new Environment(settings));
+        try {
+            service.updatedSettings();
+            fail("Expected exception when building updated settings");
+        } catch (IllegalArgumentException e) {
+            String msg = e.getMessage();
+            assertTrue(msg, msg.contains("Cannot have additional setting [foo.bar]"));
+            assertTrue(msg, msg.contains("plugin [additional-settings1]"));
+            assertTrue(msg, msg.contains("plugin [additional-settings2]"));
+        }
+    }
+}
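
Note: the new PluginsServiceTests pins down the merge rules for plugin-supplied settings: node settings win over a plugin's additionalSettings(), and two plugins supplying the same key is a hard error. A standalone sketch of those rules over plain maps; the plugin names and the error message mirror the test's assertions, everything else is illustrative:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class AdditionalSettingsSketch {
    static Map<String, String> updatedSettings(Map<String, String> nodeSettings,
                                               Map<String, Map<String, String>> settingsByPlugin) {
        Map<String, String> merged = new LinkedHashMap<>();
        Map<String, String> ownerByKey = new HashMap<>();
        for (Map.Entry<String, Map<String, String>> plugin : settingsByPlugin.entrySet()) {
            for (Map.Entry<String, String> setting : plugin.getValue().entrySet()) {
                String previousOwner = ownerByKey.put(setting.getKey(), plugin.getKey());
                if (previousOwner != null) {
                    // Two plugins claiming one key is ambiguous, so fail loudly.
                    throw new IllegalArgumentException("Cannot have additional setting ["
                            + setting.getKey() + "] in plugin [" + previousOwner
                            + "] and plugin [" + plugin.getKey() + "]");
                }
                merged.put(setting.getKey(), setting.getValue());
            }
        }
        merged.putAll(nodeSettings); // pre-existing node settings are never overridden
        return merged;
    }

    public static void main(String[] args) {
        Map<String, String> node = new LinkedHashMap<>();
        node.put("index.store.type", "simplefs");

        Map<String, String> plugin1 = new LinkedHashMap<>();
        plugin1.put("foo.bar", "1");
        plugin1.put("index.store.type", "mmapfs"); // loses to the node setting

        Map<String, Map<String, String>> byPlugin = new LinkedHashMap<>();
        byPlugin.put("additional-settings1", plugin1);

        System.out.println(updatedSettings(node, byPlugin));
        // {foo.bar=1, index.store.type=simplefs}
    }
}
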
@@ -19,9 +19,9 @@

 package org.elasticsearch.plugins.loading.classpath;

-import org.elasticsearch.plugins.AbstractPlugin;
+import org.elasticsearch.plugins.Plugin;

-public class InClassPathPlugin extends AbstractPlugin {
+public class InClassPathPlugin extends Plugin {

     @Override
     public String name() {
@@ -19,10 +19,10 @@

 package org.elasticsearch.plugins.responseheader;

-import org.elasticsearch.plugins.AbstractPlugin;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.rest.RestModule;

-public class TestResponseHeaderPlugin extends AbstractPlugin {
+public class TestResponseHeaderPlugin extends Plugin {

     @Override
     public String name() {
@@ -102,7 +102,7 @@ public class RelocationIT extends ESIntegTestCase {
     @Override
     protected Settings nodeSettings(int nodeOrdinal) {
         return Settings.builder()
-                .put("plugin.types", MockTransportService.Plugin.class.getName()).build();
+                .put("plugin.types", MockTransportService.TestPlugin.class.getName()).build();
     }

@@ -58,7 +58,7 @@ public class TruncatedRecoveryIT extends ESIntegTestCase {
     protected Settings nodeSettings(int nodeOrdinal) {
         Settings.Builder builder = Settings.builder()
                 .put(super.nodeSettings(nodeOrdinal))
-                .extendArray("plugin.types", MockTransportService.Plugin.class.getName())
+                .extendArray("plugin.types", MockTransportService.TestPlugin.class.getName())
                 .put(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, new ByteSizeValue(randomIntBetween(50, 300), ByteSizeUnit.BYTES));
         return builder.build();
     }