Merge branch 'master' into init2
commit 553214d771

@@ -23,6 +23,16 @@ apply plugin: 'groovy'
 
 group = 'org.elasticsearch.gradle'
 
+// TODO: remove this when upgrading to a version that supports ProgressLogger
+// gradle 2.14 made internal apis unavailable to plugins, and gradle considered
+// ProgressLogger to be an internal api. Until this is made available again,
+// we can't upgrade without losing our nice progress logging
+// NOTE that this check duplicates that in BuildPlugin, but we need to check
+// early here before trying to compile the broken classes in buildSrc
+if (GradleVersion.current() != GradleVersion.version('2.13')) {
+  throw new GradleException('Gradle 2.13 is required to build elasticsearch')
+}
+
 if (project == rootProject) {
   // change the build dir used during build init, so that doing a clean
   // won't wipe out the buildscript jar

@@ -41,6 +41,7 @@ import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.tasks.PersistedTaskInfo;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.tasks.TaskInfo;
 import org.elasticsearch.tasks.TaskPersistenceService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.BaseTransportResponseHandler;

@@ -140,6 +141,7 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
     void getRunningTaskFromNode(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
         Task runningTask = taskManager.getTask(request.getTaskId().getId());
         if (runningTask == null) {
+            // Task isn't running, go look in the task index
            getFinishedTaskFromIndex(thisTask, request, listener);
         } else {
             if (request.getWaitForCompletion()) {

@@ -148,9 +150,7 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
                    @Override
                    protected void doRun() throws Exception {
                        taskManager.waitForTaskCompletion(runningTask, waitForCompletionTimeout(request.getTimeout()));
-                        // TODO look up the task's result from the .tasks index now that it is done
-                        listener.onResponse(
-                                new GetTaskResponse(new PersistedTaskInfo(runningTask.taskInfo(clusterService.localNode(), true))));
+                        waitedForCompletion(thisTask, request, runningTask.taskInfo(clusterService.localNode(), true), listener);
                    }
 
                    @Override

@@ -159,15 +159,44 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
                    }
                });
            } else {
-                listener.onResponse(new GetTaskResponse(new PersistedTaskInfo(runningTask.taskInfo(clusterService.localNode(), true))));
+                TaskInfo info = runningTask.taskInfo(clusterService.localNode(), true);
+                listener.onResponse(new GetTaskResponse(new PersistedTaskInfo(false, info)));
            }
        }
    }
 
    /**
-     * Send a {@link GetRequest} to the results index looking for the results of the task. It'll only be found only if the task's result was
-     * persisted. Called on the node that once had the task if that node is part of the cluster or on the coordinating node if the node
-     * wasn't part of the cluster.
+     * Called after waiting for the task to complete. Attempts to load the results of the task from the tasks index. If it isn't in the
+     * index then returns a snapshot of the task taken shortly after completion.
     */
+    void waitedForCompletion(Task thisTask, GetTaskRequest request, TaskInfo snapshotOfRunningTask,
+            ActionListener<GetTaskResponse> listener) {
+        getFinishedTaskFromIndex(thisTask, request, new ActionListener<GetTaskResponse>() {
+            @Override
+            public void onResponse(GetTaskResponse response) {
+                // We were able to load the task from the task index. Let's send that back.
+                listener.onResponse(response);
+            }
+
+            @Override
+            public void onFailure(Throwable e) {
+                /*
+                 * We couldn't load the task from the task index. Instead of 404 we should use the snapshot we took after it finished. If
+                 * the error isn't a 404 then we'll just throw it back to the user.
+                 */
+                if (ExceptionsHelper.unwrap(e, ResourceNotFoundException.class) != null) {
+                    listener.onResponse(new GetTaskResponse(new PersistedTaskInfo(true, snapshotOfRunningTask)));
+                } else {
+                    listener.onFailure(e);
+                }
+            }
+        });
+    }
+
+    /**
+     * Send a {@link GetRequest} to the tasks index looking for a persisted copy of the task completed task. It'll only be found only if the
+     * task's result was persisted. Called on the node that once had the task if that node is still part of the cluster or on the
+     * coordinating node if the node is no longer part of the cluster.
+     */
    void getFinishedTaskFromIndex(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
        GetRequest get = new GetRequest(TaskPersistenceService.TASK_INDEX, TaskPersistenceService.TASK_TYPE,

@@ -202,6 +231,7 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
    void onGetFinishedTaskFromIndex(GetResponse response, ActionListener<GetTaskResponse> listener) throws IOException {
        if (false == response.isExists()) {
            listener.onFailure(new ResourceNotFoundException("task [{}] isn't running or persisted", response.getId()));
+            return;
        }
        if (response.isSourceEmpty()) {
            listener.onFailure(new ElasticsearchException("Stored task status for [{}] didn't contain any source!", response.getId()));

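Note on waitedForCompletion() above: the persisted copy in the tasks index is treated as authoritative when it exists, and the TaskInfo snapshot taken right after completion is only the fallback for the not-found case; any other failure is passed through unchanged. A minimal, self-contained sketch of that fallback shape — every name below is a hypothetical stand-in, not the Elasticsearch API:

    final class FallbackLookup {
        interface Listener<T> {
            void onResponse(T response);
            void onFailure(Throwable e);
        }

        static final class NotFoundException extends RuntimeException {}

        // Wrap a listener so that "not found" answers with a snapshot instead of failing.
        static <T> Listener<T> withSnapshotFallback(T snapshot, Listener<T> delegate) {
            return new Listener<T>() {
                @Override
                public void onResponse(T response) {
                    delegate.onResponse(response);     // persisted copy wins when present
                }

                @Override
                public void onFailure(Throwable e) {
                    if (e instanceof NotFoundException) {
                        delegate.onResponse(snapshot); // fall back to the completion snapshot
                    } else {
                        delegate.onFailure(e);         // anything else is a real failure
                    }
                }
            };
        }
    }
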
@@ -32,6 +32,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.BaseTransportResponseHandler;
 import org.elasticsearch.transport.ConnectTransportException;

@@ -45,7 +46,6 @@ import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
 import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.RejectedExecutionException;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 /**

@@ -203,7 +203,7 @@ public class MasterFaultDetection extends FaultDetection {
                        listener.onMasterFailure(masterNode, cause, reason);
                    }
                });
-            } catch (RejectedExecutionException e) {
+            } catch (EsRejectedExecutionException e) {
                logger.error("master failure notification was rejected, it's highly likely the node is shutting down", e);
            }
            stop("master failure, " + reason);

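The catch-clause fix above swaps java.util.concurrent.RejectedExecutionException for Elasticsearch's own EsRejectedExecutionException — presumably because the Elasticsearch thread pool signals rejection with its own exception type, so a catch of the JDK type never matched and the rejection escaped the notification path. A JDK-only illustration of the submit-during-shutdown situation the log message describes (this sketch deliberately uses plain JDK types, not the ES ones):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.RejectedExecutionException;

    final class NotifyOnShutdown {
        public static void main(String[] args) {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            executor.shutdown();
            try {
                executor.execute(() -> System.out.println("listener notified"));
            } catch (RejectedExecutionException e) {
                // Expected when the executor is shutting down: log and move on rather
                // than letting the failure-notification path itself blow up.
                System.err.println("notification rejected, executor is shutting down");
            }
        }
    }

The rule of thumb the fix illustrates: catch exactly the type the submitting component throws, or the handler never runs.
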
@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
 
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;

@@ -101,9 +100,6 @@ public class TTLFieldMapper extends MetadataFieldMapper {
     public static class TypeParser implements MetadataFieldMapper.TypeParser {
         @Override
         public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) {
-                throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field.");
-            }
             Builder builder = new Builder();
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();

@@ -169,9 +165,6 @@ public class TTLFieldMapper extends MetadataFieldMapper {
     private TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL,
                            Settings indexSettings) {
         super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings);
-        if (enabled.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) {
-            throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field.");
-        }
         this.enabledState = enabled;
         this.defaultTTL = defaultTTL;
     }

@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.IndexOptions;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.TimestampParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;

@@ -127,9 +126,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
     public static class TypeParser implements MetadataFieldMapper.TypeParser {
         @Override
         public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) {
-                throw new IllegalArgumentException("[_timestamp] is removed in 5.0. As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents.");
-            }
             Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings());
             boolean defaultSet = false;
             Boolean ignoreMissing = null;

@@ -204,9 +200,6 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
     private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState,
                                  String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) {
         super(NAME, fieldType, defaultFieldType, indexSettings);
-        if (enabledState.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) {
-            throw new IllegalArgumentException("[_timestamp] is removed in 5.0. As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents.");
-        }
         this.enabledState = enabledState;
         this.defaultTimestamp = defaultTimestamp;
         this.ignoreMissing = ignoreMissing;

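Both mapper hunks touch the same guard pattern: consult the version the index was created with and reject a removed metadata field only when the index is new enough to be affected, so older indexes stay readable. Distilled into a standalone sketch — plain ints stand in for Elasticsearch's Version class, and only the message text is taken from the diff:

    final class RemovedFieldGuard {
        // Reject the removed [_ttl] field for indexes created on or after the cutoff
        // version, while leaving indexes created before it untouched.
        static void checkTtlAllowed(boolean enabled, int indexCreatedVersion, int removalVersion) {
            if (enabled && indexCreatedVersion >= removalVersion) {
                throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, "
                        + "you should use time based indexes or cron a delete-by-query with a range "
                        + "query on a timestamp field.");
            }
        }
    }
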
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.translog;
+
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.nio.file.OpenOption;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+
+/**
+ * only for testing until we have a disk-full FileSystem
+ */
+@FunctionalInterface
+interface ChannelFactory {
+    default FileChannel open(Path path) throws IOException {
+        return open(path, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
+    }
+
+    FileChannel open(Path path, OpenOption... options) throws IOException;
+}

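The javadoc above says the indirection exists only for testing until a disk-full FileSystem is available. Read that way, a test can hand the translog code a factory whose channels stop opening once the simulated disk fills up. A hypothetical sketch — DiskFullChannelFactory is not part of the codebase, and a real test would have to live in org.elasticsearch.index.translog because the interface is package-private:

    import java.io.IOException;
    import java.nio.channels.FileChannel;
    import java.nio.file.OpenOption;
    import java.nio.file.Path;

    // Behaves like a healthy factory for a fixed number of opens, then "runs out of disk".
    final class DiskFullChannelFactory {
        private int remainingOpens;

        DiskFullChannelFactory(int successfulOpens) {
            this.remainingOpens = successfulOpens;
        }

        FileChannel open(Path path, OpenOption... options) throws IOException {
            if (remainingOpens-- <= 0) {
                throw new IOException("no space left on device (simulated)");
            }
            return FileChannel.open(path, options);
        }
    }

Since the signature matches the interface's abstract method, an instance plugs in as a bound method reference: ChannelFactory factory = new DiskFullChannelFactory(2)::open;
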
@@ -82,8 +82,8 @@ class Checkpoint {
         }
     }
 
-    public static void write(Path checkpointFile, Checkpoint checkpoint, OpenOption... options) throws IOException {
-        try (FileChannel channel = FileChannel.open(checkpointFile, options)) {
+    public static void write(ChannelFactory factory, Path checkpointFile, Checkpoint checkpoint, OpenOption... options) throws IOException {
+        try (FileChannel channel = factory.open(checkpointFile, options)) {
             checkpoint.write(channel);
             channel.force(false);
         }

@@ -200,7 +200,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
             Files.createDirectories(location);
             final long generation = 1;
             Checkpoint checkpoint = new Checkpoint(0, 0, generation);
-            Checkpoint.write(location.resolve(CHECKPOINT_FILE_NAME), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
+            Checkpoint.write(getChannelFactory(), location.resolve(CHECKPOINT_FILE_NAME), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
             current = createWriter(generation);
             this.lastCommittedTranslogFileGeneration = NOT_SET_GENERATION;
 

@@ -1313,8 +1313,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
         return outstandingViews.size();
     }
 
-    TranslogWriter.ChannelFactory getChannelFactory() {
-        return TranslogWriter.ChannelFactory.DEFAULT;
+    ChannelFactory getChannelFactory() {
+        return FileChannel::open;
     }
 
     /**

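getChannelFactory() can return FileChannel::open because ChannelFactory is a @FunctionalInterface whose single abstract method, open(Path, OpenOption...), has exactly the shape of the static FileChannel.open(Path, OpenOption...). A compilable demonstration of the same binding (the nested interface is a local copy for the demo, mirroring the one added above):

    import java.io.IOException;
    import java.nio.channels.FileChannel;
    import java.nio.file.OpenOption;
    import java.nio.file.Path;
    import java.nio.file.StandardOpenOption;

    final class MethodRefDemo {
        @FunctionalInterface
        interface ChannelFactory {
            FileChannel open(Path path, OpenOption... options) throws IOException;
        }

        static FileChannel openForWrite(Path path) throws IOException {
            ChannelFactory factory = FileChannel::open; // the production default
            return factory.open(path, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
        }
    }
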
@@ -49,6 +49,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
     public static final int VERSION = VERSION_CHECKPOINTS;
 
     private final ShardId shardId;
+    private final ChannelFactory channelFactory;
     /* the offset in bytes that was written when the file was last synced*/
     private volatile long lastSyncedOffset;
     /* the number of translog operations written to this file */

@@ -64,9 +65,10 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
     // lock order synchronized(syncLock) -> synchronized(this)
     private final Object syncLock = new Object();
 
-    public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
+    public TranslogWriter(ChannelFactory channelFactory, ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
         super(generation, channel, path, channel.position());
         this.shardId = shardId;
+        this.channelFactory = channelFactory;
         this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channel), bufferSize.bytesAsInt());
         this.lastSyncedOffset = channel.position();
         totalOffset = lastSyncedOffset;

@@ -92,8 +94,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
             out.writeInt(ref.length);
             out.writeBytes(ref.bytes, ref.offset, ref.length);
             channel.force(true);
-            writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE);
-            final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, channel, file, bufferSize);
+            writeCheckpoint(channelFactory, headerLength, 0, file.getParent(), fileGeneration);
+            final TranslogWriter writer = new TranslogWriter(channelFactory, shardId, fileGeneration, channel, file, bufferSize);
             return writer;
         } catch (Throwable throwable) {
             // if we fail to bake the file-generation into the checkpoint we stick with the file and once we recover and that

@@ -254,7 +256,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
                 // we can continue writing to the buffer etc.
                 try {
                     channel.force(false);
-                    writeCheckpoint(offsetToSync, opsCounter, path.getParent(), generation, StandardOpenOption.WRITE);
+                    writeCheckpoint(channelFactory, offsetToSync, opsCounter, path.getParent(), generation);
                 } catch (Throwable ex) {
                     closeWithTragicEvent(ex);
                     throw ex;

@@ -286,20 +288,10 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
         Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
     }
 
-    private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException {
+    private static void writeCheckpoint(ChannelFactory channelFactory, long syncPosition, int numOperations, Path translogFile, long generation) throws IOException {
         final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME);
         Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation);
-        Checkpoint.write(checkpointFile, checkpoint, options);
-    }
-
-    static class ChannelFactory {
-
-        static final ChannelFactory DEFAULT = new ChannelFactory();
-
-        // only for testing until we have a disk-full FileSystem
-        public FileChannel open(Path file) throws IOException {
-            return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
-        }
+        Checkpoint.write(channelFactory::open, checkpointFile, checkpoint, StandardOpenOption.WRITE);
     }
 
     protected final void ensureOpen() {

@@ -68,7 +68,6 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase {
             for (int i = 0; i < internalHits.length; i++) {
                 ScoreDoc scoreDoc = topDocs.scoreDocs[i];
                 InternalSearchHit searchHitFields = internalHits[i];
-                searchHitFields.shard(innerHits.shardTarget());
                 searchHitFields.score(scoreDoc.score);
                 if (scoreDoc instanceof FieldDoc) {
                     FieldDoc fieldDoc = (FieldDoc) scoreDoc;

@@ -38,6 +38,7 @@ import java.io.IOException;
 import java.util.Map;
 import java.util.Objects;
 
+import static java.util.Collections.emptyMap;
 import static java.util.Objects.requireNonNull;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

@@ -45,56 +46,61 @@ import static org.elasticsearch.common.xcontent.XContentHelper.convertToMap;
 
 /**
  * Information about a persisted or running task. Running tasks just have a {@link #getTask()} while persisted tasks will have either a
- * {@link #getError()} or {@link #getResult()}.
+ * {@link #getError()} or {@link #getResponse()}.
  */
 public final class PersistedTaskInfo implements Writeable, ToXContent {
+    private final boolean completed;
     private final TaskInfo task;
     @Nullable
     private final BytesReference error;
     @Nullable
-    private final BytesReference result;
+    private final BytesReference response;
 
     /**
-     * Construct a {@linkplain PersistedTaskInfo} for a running task.
+     * Construct a {@linkplain PersistedTaskInfo} for a task for which we don't have a result or error. That usually means that the task
+     * is incomplete, but it could also mean that we waited for the task to complete but it didn't save any error information.
      */
-    public PersistedTaskInfo(TaskInfo task) {
-        this(task, null, null);
+    public PersistedTaskInfo(boolean completed, TaskInfo task) {
+        this(completed, task, null, null);
     }
 
     /**
     * Construct a {@linkplain PersistedTaskInfo} for a task that completed with an error.
     */
     public PersistedTaskInfo(TaskInfo task, Throwable error) throws IOException {
-        this(task, toXContent(error), null);
+        this(true, task, toXContent(error), null);
     }
 
     /**
     * Construct a {@linkplain PersistedTaskInfo} for a task that completed successfully.
     */
-    public PersistedTaskInfo(TaskInfo task, ToXContent result) throws IOException {
-        this(task, null, toXContent(result));
+    public PersistedTaskInfo(TaskInfo task, ToXContent response) throws IOException {
+        this(true, task, null, toXContent(response));
     }
 
-    private PersistedTaskInfo(TaskInfo task, @Nullable BytesReference error, @Nullable BytesReference result) {
+    private PersistedTaskInfo(boolean completed, TaskInfo task, @Nullable BytesReference error, @Nullable BytesReference result) {
+        this.completed = completed;
         this.task = requireNonNull(task, "task is required");
         this.error = error;
-        this.result = result;
+        this.response = result;
     }
 
     /**
     * Read from a stream.
     */
     public PersistedTaskInfo(StreamInput in) throws IOException {
+        completed = in.readBoolean();
         task = new TaskInfo(in);
         error = in.readOptionalBytesReference();
-        result = in.readOptionalBytesReference();
+        response = in.readOptionalBytesReference();
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
+        out.writeBoolean(completed);
         task.writeTo(out);
         out.writeOptionalBytesReference(error);
-        out.writeOptionalBytesReference(result);
+        out.writeOptionalBytesReference(response);
     }
 
     /**

@@ -105,46 +111,45 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
     }
 
     /**
-     * Get the error that finished this task. Will return null if the task didn't finish with an error or it hasn't yet finished.
+     * Get the error that finished this task. Will return null if the task didn't finish with an error, it hasn't yet finished, or didn't
+     * persist its result.
     */
     public BytesReference getError() {
         return error;
     }
 
     /**
-     * Convert {@link #getError()} from XContent to a Map for easy processing. Will return null if the task didn't finish with an error or
-     * hasn't yet finished.
+     * Convert {@link #getError()} from XContent to a Map for easy processing. Will return an empty map if the task didn't finish with an
+     * error, hasn't yet finished, or didn't persist its result.
     */
     public Map<String, Object> getErrorAsMap() {
         if (error == null) {
-            return null;
+            return emptyMap();
         }
         return convertToMap(error, false).v2();
     }
 
     /**
-     * Get the result that this task finished with. Will return null if the task was finished by an error or it hasn't yet finished.
+     * Get the response that this task finished with. Will return null if the task was finished by an error, it hasn't yet finished, or
+     * didn't persist its result.
     */
-    public BytesReference getResult() {
-        return result;
+    public BytesReference getResponse() {
+        return response;
     }
 
     /**
-     * Convert {@link #getResult()} from XContent to a Map for easy processing. Will return null if the task was finished with an error or
-     * hasn't yet finished.
+     * Convert {@link #getResponse()} from XContent to a Map for easy processing. Will return an empty map if the task was finished with an
+     * error, hasn't yet finished, or didn't persist its result.
     */
-    public Map<String, Object> getResultAsMap() {
-        if (result == null) {
-            return null;
+    public Map<String, Object> getResponseAsMap() {
+        if (response == null) {
+            return emptyMap();
        }
-        return convertToMap(result, false).v2();
+        return convertToMap(response, false).v2();
     }
 
     /**
     * Was the task completed before returned?
     */
     public boolean isCompleted() {
-        return error != null || result != null;
+        return completed;
     }
 
     @Override

@@ -159,18 +164,18 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
         if (error != null) {
             XContentHelper.writeRawField("error", error, builder, params);
         }
-        if (result != null) {
-            XContentHelper.writeRawField("result", result, builder, params);
+        if (response != null) {
+            XContentHelper.writeRawField("response", response, builder, params);
         }
         return builder;
     }
 
     public static final ConstructingObjectParser<PersistedTaskInfo, ParseFieldMatcherSupplier> PARSER = new ConstructingObjectParser<>(
-            "persisted_task_info", a -> new PersistedTaskInfo((TaskInfo) a[0], (BytesReference) a[1], (BytesReference) a[2]));
+            "persisted_task_info", a -> new PersistedTaskInfo(true, (TaskInfo) a[0], (BytesReference) a[1], (BytesReference) a[2]));
     static {
         PARSER.declareObject(constructorArg(), TaskInfo.PARSER, new ParseField("task"));
         PARSER.declareRawObject(optionalConstructorArg(), new ParseField("error"));
-        PARSER.declareRawObject(optionalConstructorArg(), new ParseField("result"));
+        PARSER.declareRawObject(optionalConstructorArg(), new ParseField("response"));
     }
 
     @Override

@@ -189,9 +194,10 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
         * Equality of error and result is done by converting them to a map first. Not efficient but ignores field order and spacing
         * differences so perfect for testing.
         */
-        return Objects.equals(task, other.task)
+        return Objects.equals(completed, other.completed)
+                && Objects.equals(task, other.task)
                 && Objects.equals(getErrorAsMap(), other.getErrorAsMap())
-                && Objects.equals(getResultAsMap(), other.getResultAsMap());
+                && Objects.equals(getResponseAsMap(), other.getResponseAsMap());
     }
 
     @Override

@@ -200,7 +206,7 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
         * Hashing of error and result is done by converting them to a map first. Not efficient but ignores field order and spacing
         * differences so perfect for testing.
         */
-        return Objects.hash(task, getErrorAsMap(), getResultAsMap());
+        return Objects.hash(completed, task, getErrorAsMap(), getResponseAsMap());
     }
 
     private static BytesReference toXContent(ToXContent result) throws IOException {

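PersistedTaskInfo adds a completed flag to the wire format, and the hunk is a reminder that StreamInput/StreamOutput serialization is purely positional: the stream constructor must read fields in exactly the order writeTo wrote them, which is why completed comes first in both methods. The same discipline in a reduced, self-contained form (plain DataInput/DataOutput standing in for the Elasticsearch stream classes):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    final class WireExample {
        final boolean completed;
        final String taskId;

        WireExample(boolean completed, String taskId) {
            this.completed = completed;
            this.taskId = taskId;
        }

        // Read order mirrors write order exactly; swapping two lines corrupts the stream.
        WireExample(DataInput in) throws IOException {
            completed = in.readBoolean(); // 1st, matches writeTo
            taskId = in.readUTF();        // 2nd, matches writeTo
        }

        void writeTo(DataOutput out) throws IOException {
            out.writeBoolean(completed);  // 1st
            out.writeUTF(taskId);         // 2nd
        }
    }
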
@@ -37,7 +37,7 @@
         }
       }
     },
-    "result" : {
+    "response" : {
       "type" : "object",
       "enabled" : false
     },

@@ -70,7 +70,6 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.function.Consumer;
 import java.util.function.Function;

@@ -85,6 +84,7 @@ import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.emptyCollectionOf;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.not;
 

@@ -437,8 +437,8 @@ public class TasksIT extends ESIntegTestCase {
     }
 
     public void testListTasksWaitForCompletion() throws Exception {
-        waitForCompletionTestCase(id -> {
-            return client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]")
+        waitForCompletionTestCase(randomBoolean(), id -> {
+            return client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME)
                     .setWaitForCompletion(true).execute();
         }, response -> {
             assertThat(response.getNodeFailures(), empty());

@@ -446,25 +446,39 @@
         });
     }
 
-    public void testGetTaskWaitForCompletion() throws Exception {
-        waitForCompletionTestCase(id -> {
+    public void testGetTaskWaitForCompletionNoPersist() throws Exception {
+        waitForCompletionTestCase(false, id -> {
             return client().admin().cluster().prepareGetTask(id).setWaitForCompletion(true).execute();
         }, response -> {
-            // Really we're just happy we didn't get any exceptions
+            assertNotNull(response.getTask().getTask());
+            assertTrue(response.getTask().isCompleted());
+            // We didn't persist the result so it won't come back when we wait
+            assertNull(response.getTask().getResponse());
         });
     }
 
+    public void testGetTaskWaitForCompletionWithPersist() throws Exception {
+        waitForCompletionTestCase(true, id -> {
+            return client().admin().cluster().prepareGetTask(id).setWaitForCompletion(true).execute();
+        }, response -> {
+            assertNotNull(response.getTask().getTask());
+            assertTrue(response.getTask().isCompleted());
+            // We persisted the task so we should get its results
+            assertEquals(0, response.getTask().getResponseAsMap().get("failure_count"));
+        });
+    }
+
     /**
     * Test wait for completion.
+     * @param persist should the task persist its results
     * @param wait start waiting for a task. Accepts that id of the task to wait for and returns a future waiting for it.
     * @param validator validate the response and return the task ids that were found
     */
-    private <T> void waitForCompletionTestCase(Function<TaskId, ListenableActionFuture<T>> wait, Consumer<T> validator)
+    private <T> void waitForCompletionTestCase(boolean persist, Function<TaskId, ListenableActionFuture<T>> wait, Consumer<T> validator)
             throws Exception {
         // Start blocking test task
         ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
-                .execute();
+                .setShouldPersistResult(persist).execute();
 
         ListenableActionFuture<T> waitResponseFuture;
         TaskId taskId;

@@ -513,7 +527,7 @@
     public void testListTasksWaitForTimeout() throws Exception {
         waitForTimeoutTestCase(id -> {
             ListTasksResponse response = client().admin().cluster().prepareListTasks()
-                    .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(100))
+                    .setActions(TestTaskPlugin.TestTaskAction.NAME).setWaitForCompletion(true).setTimeout(timeValueMillis(100))
                     .get();
             assertThat(response.getNodeFailures(), not(empty()));
             return response.getNodeFailures();

@@ -539,6 +553,9 @@
         try {
             TaskId taskId = waitForTestTaskStartOnAllNodes();
 
+            // Wait for the task to start
+            assertBusy(() -> client().admin().cluster().prepareGetTask(taskId).get());
+
             // Spin up a request that should wait for those tasks to finish
             // It will timeout because we haven't unblocked the tasks
             Iterable<? extends Throwable> failures = wait.apply(taskId);

@@ -554,15 +571,18 @@
         future.get();
     }
 
+    /**
+     * Wait for the test task to be running on all nodes and return the TaskId of the primary task.
+     */
     private TaskId waitForTestTaskStartOnAllNodes() throws Exception {
-        AtomicReference<TaskId> result = new AtomicReference<>();
-        assertBusy(() -> {
-            List<TaskInfo> tasks = client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]")
-                    .get().getTasks();
-            assertEquals(internalCluster().size(), tasks.size());
-            result.set(tasks.get(0).getTaskId());
-        });
-        return result.get();
+        List<TaskInfo> task = client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME).get().getTasks();
+        assertThat(task, hasSize(1));
+        return task.get(0).getTaskId();
     }
 
     public void testTasksListWaitForNoTask() throws Exception {

@@ -626,7 +646,7 @@
         assertEquals(Long.toString(taskInfo.getId()), task.get("id").toString());
 
         @SuppressWarnings("unchecked")
-        Map<String, Object> result = (Map<String, Object>) source.get("result");
+        Map<String, Object> result = (Map<String, Object>) source.get("response");
         assertEquals("0", result.get("failure_count").toString());
 
         assertNull(source.get("failure"));

@@ -647,7 +667,7 @@
         assertEquals(1L, searchResponse.getHits().totalHits());
 
         GetTaskResponse getResponse = expectFinishedTask(taskId);
-        assertEquals(result, getResponse.getTask().getResultAsMap());
+        assertEquals(result, getResponse.getTask().getResponseAsMap());
         assertNull(getResponse.getTask().getError());
     }
 

@@ -688,7 +708,7 @@
         assertNull(source.get("result"));
 
         GetTaskResponse getResponse = expectFinishedTask(failedTaskId);
-        assertNull(getResponse.getTask().getResult());
+        assertNull(getResponse.getTask().getResponse());
         assertEquals(error, getResponse.getTask().getErrorAsMap());
     }
 

@@ -728,7 +748,7 @@
         GetTaskResponse response = expectFinishedTask(new TaskId("fake:1"));
         assertEquals("test", response.getTask().getTask().getAction());
         assertNotNull(response.getTask().getError());
-        assertNull(response.getTask().getResult());
+        assertNull(response.getTask().getResponse());
     }
 
     @Override

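The assertBusy(...) call added above comes from the Elasticsearch test framework and retries its block until the block stops throwing or a timeout elapses — useful here because task registration is asynchronous. A reduced stand-in for the retry idea (simplified: the real helper backs off incrementally rather than sleeping a fixed interval):

    final class RetryUntil {
        static void retryUntil(Runnable check, long timeoutMillis) throws InterruptedException {
            long deadline = System.currentTimeMillis() + timeoutMillis;
            while (true) {
                try {
                    check.run();
                    return; // the check finally passed
                } catch (AssertionError | RuntimeException e) {
                    if (System.currentTimeMillis() >= deadline) {
                        throw e; // out of time: surface the last failure
                    }
                    Thread.sleep(50); // brief pause before the next attempt
                }
            }
        }
    }
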
@@ -1035,13 +1035,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
     }
 
     public void testAliasFilterWithNowInRangeFilterAndQuery() throws Exception {
-        assertAcked(prepareCreate("my-index").addMapping("my-type", "timestamp", "type=date"));
-        assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", rangeQuery("timestamp").from("2016-12-01").to("2016-12-31")));
-        assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", rangeQuery("timestamp").from("2016-01-01").to("2016-12-31")));
+        assertAcked(prepareCreate("my-index").addMapping("my-type", "_timestamp", "enabled=true"));
+        assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", rangeQuery("_timestamp").from("now-1d").to("now")));
+        assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", rangeQuery("_timestamp").from("now-1d").to("now")));
 
         final int numDocs = scaledRandomIntBetween(5, 52);
         for (int i = 1; i <= numDocs; i++) {
-            client().prepareIndex("my-index", "my-type").setCreate(true).setSource("timestamp", "2016-12-12").get();
+            client().prepareIndex("my-index", "my-type").setCreate(true).setSource("{}").get();
             if (i % 2 == 0) {
                 refresh();
                 SearchResponse response = client().prepareSearch("filter1").get();

@@ -30,7 +30,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
 
 import java.io.IOException;
-import java.util.Map;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;

@@ -112,18 +111,16 @@
         internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
 
         createIndex("test");
-        assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("timestamp", "type=date"));
+        assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("_timestamp", "enabled=true"));
 
         MappingMetaData defaultMapping = client().admin().cluster().prepareState().get().getState().getMetaData().getIndices().get("test").getMappings().get("_default_");
-        Map<?,?> properties = (Map<?, ?>) defaultMapping.getSourceAsMap().get("properties");
-        assertThat(properties.get("timestamp"), notNullValue());
+        assertThat(defaultMapping.getSourceAsMap().get("_timestamp"), notNullValue());
 
-        assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("timestamp", "type=date"));
+        assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("_timestamp", "enabled=true"));
 
         assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("foo", "enabled=true"));
         MappingMetaData type1Mapping = client().admin().cluster().prepareState().get().getState().getMetaData().getIndices().get("test").getMappings().get("type1");
-        properties = (Map<?, ?>) type1Mapping.getSourceAsMap().get("properties");
-        assertThat(properties.get("timestamp"), notNullValue());
+        assertThat(type1Mapping.getSourceAsMap().get("_timestamp"), notNullValue());
     }
 
     public void testAliasFilterValidation() throws Exception {

@@ -20,6 +20,7 @@
 package org.elasticsearch.get;
 
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.admin.indices.flush.FlushResponse;

@@ -29,6 +30,7 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.get.MultiGetRequest;
 import org.elasticsearch.action.get.MultiGetRequestBuilder;
 import org.elasticsearch.action.get.MultiGetResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;

@@ -38,11 +40,15 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
+import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
 
 import java.io.IOException;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
 import java.util.Set;
 
 import static java.util.Collections.singleton;

@@ -51,6 +57,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;

@@ -529,7 +536,7 @@
     public void testGetFieldsMetaData() throws Exception {
         assertAcked(prepareCreate("test")
                 .addMapping("parent")
-                .addMapping("my-type1", "_parent", "type=parent")
+                .addMapping("my-type1", "_timestamp", "enabled=true", "_ttl", "enabled=true", "_parent", "type=parent")
                 .addAlias(new Alias("alias"))
                 .setSettings(Settings.builder().put("index.refresh_interval", -1)));
 

@@ -550,6 +557,12 @@
         assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
         assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true));
         assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
+        assertThat(getResponse.getField("_timestamp").isMetadataField(), equalTo(true));
+        assertThat(getResponse.getField("_timestamp").getValue().toString(), equalTo("205097"));
+        assertThat(getResponse.getField("_ttl").isMetadataField(), equalTo(true));
+        // TODO: _ttl should return the original value, but it does not work today because
+        // it would use now() instead of the value of _timestamp to rebase
+        // assertThat(getResponse.getField("_ttl").getValue().toString(), equalTo("10000000205097"));
         assertThat(getResponse.getField("_parent").isMetadataField(), equalTo(true));
         assertThat(getResponse.getField("_parent").getValue().toString(), equalTo("parent_1"));
 

@@ -564,6 +577,12 @@
         assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
         assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true));
         assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
+        assertThat(getResponse.getField("_timestamp").isMetadataField(), equalTo(true));
+        assertThat(getResponse.getField("_timestamp").getValue().toString(), equalTo("205097"));
+        assertThat(getResponse.getField("_ttl").isMetadataField(), equalTo(true));
+        // TODO: _ttl should return the original value, but it does not work today because
+        // it would use now() instead of the value of _timestamp to rebase
+        //assertThat(getResponse.getField("_ttl").getValue().toString(), equalTo("10000000000000"));
         assertThat(getResponse.getField("_parent").isMetadataField(), equalTo(true));
         assertThat(getResponse.getField("_parent").getValue().toString(), equalTo("parent_1"));
     }

@@ -760,10 +779,16 @@
                 "  },\n" +
                 "  \"mappings\": {\n" +
                 "    \"parentdoc\": {\n" +
+                "      \"_ttl\": {\n" +
+                "        \"enabled\": true\n" +
+                "      }\n" +
                 "    },\n" +
                 "    \"doc\": {\n" +
                 "      \"_parent\": {\n" +
                 "        \"type\": \"parentdoc\"\n" +
                 "      },\n" +
+                "      \"_ttl\": {\n" +
+                "        \"enabled\": true\n" +
+                "      }\n" +
                 "    }\n" +
                 "  }\n" +

@@ -773,7 +798,7 @@
 
         client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").setTTL(TimeValue.timeValueHours(1).getMillis()).get();
 
-        String[] fieldsList = {"_parent"};
+        String[] fieldsList = {"_ttl", "_parent"};
         // before refresh - document is only in translog
         assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
         refresh();

@@ -789,6 +814,14 @@
                 "  \"settings\": {\n" +
                 "    \"index.translog.flush_threshold_size\": \"1pb\",\n" +
                 "    \"refresh_interval\": \"-1\"\n" +
                 "  },\n" +
+                "  \"mappings\": {\n" +
+                "    \"parentdoc\": {},\n" +
+                "    \"doc\": {\n" +
+                "      \"_timestamp\": {\n" +
+                "        \"enabled\": true\n" +
+                "      }\n" +
+                "    }\n" +
+                "  }\n" +
                 "}";
 

@@ -798,7 +831,7 @@
                 "  \"text\": \"some text.\"\n" +
                 "}\n";
         client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get();
-        String[] fieldsList = {"_routing"};
+        String[] fieldsList = {"_timestamp", "_routing"};
         // before refresh - document is only in translog
         assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
         refresh();

@@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
+import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;

@@ -49,7 +50,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;

@@ -427,6 +430,23 @@
         parser.parse("test", new CompressedXContent(mapping));
     }
 
+    // issue https://github.com/elastic/elasticsearch/issues/5864
+    public void testMetadataMappersStillWorking() throws MapperParsingException, IOException {
+        String mapping = "{";
+        Map<String, String> rootTypes = new HashMap<>();
+        //just pick some example from DocumentMapperParser.rootTypeParsers
+        rootTypes.put(TimestampFieldMapper.NAME, "{\"enabled\" : true}");
+        rootTypes.put("include_in_all", "true");
+        rootTypes.put("dynamic_date_formats", "[\"yyyy-MM-dd\", \"dd-MM-yyyy\"]");
+        rootTypes.put("numeric_detection", "true");
+        rootTypes.put("dynamic_templates", "[]");
+        for (String key : rootTypes.keySet()) {
+            mapping += "\"" + key + "\"" + ":" + rootTypes.get(key) + ",\n";
+        }
+        mapping += "\"properties\":{}}";
+        createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
+    }
+
     public void testDocValuesNotAllowed() throws IOException {
         String mapping = jsonBuilder().startObject().startObject("type")
                 .startObject("_all")

@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.timestamp;
 import org.apache.lucene.index.IndexOptions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.TimestampParsingException;
-import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;

@@ -32,31 +31,25 @@ import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.joda.Joda;
-import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
-import org.elasticsearch.test.VersionUtils;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.LinkedHashMap;
 
-import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.containsString;

@@ -65,35 +58,19 @@ import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.startsWith;
 
 /**
 */
 public class TimestampMappingTests extends ESSingleNodeTestCase {
 
-    private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
-
     @Override
     protected Collection<Class<? extends Plugin>> getPlugins() {
         return pluginList(InternalSettingsPlugin.class);
     }
 
-    public void testRejectedOn5x() throws IOException {
-        String mapping = XContentFactory.jsonBuilder().startObject()
-                .startObject("type")
-                    .startObject("_timestamp")
-                        .field("enabled", true)
-                    .endObject()
-                .endObject().endObject().string();
-        IndexService index = createIndex("test");
-        IllegalArgumentException expected = expectThrows(IllegalArgumentException.class,
-                () -> index.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false));
-        assertThat(expected.getMessage(), startsWith("[_timestamp] is removed"));
-    }
-
     public void testSimpleDisabled() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         BytesReference source = XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")

@@ -108,7 +85,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_timestamp").field("enabled", "yes").endObject()
                 .endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         BytesReference source = XContentFactory.jsonBuilder()
                 .startObject()
                 .field("field", "value")

@@ -122,7 +99,10 @@
     }
 
     public void testDefaultValues() throws Exception {
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.V_5_0_0_alpha3);
+        Version version;
+        do {
+            version = randomVersion(random());
+        } while (version.before(Version.V_2_0_0_beta1));
         for (String mapping : Arrays.asList(
                 XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(),
                 XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().endObject().string())) {

@@ -140,7 +120,7 @@
         String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_timestamp").field("enabled", true).endObject()
                 .endObject().endObject().string();
-        MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
+        MapperService mapperService = createIndex("test").mapperService();
         DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
 
         String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")

@@ -166,7 +146,7 @@
                 .field("foo", "bar")
                 .endObject();
 
-        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
         MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
 
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);

@@ -192,7 +172,7 @@
                 .endObject();
 
         MetaData metaData = MetaData.builder().build();
-        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
 
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
 

@@ -215,7 +195,7 @@
                 .endObject()
                 .endObject().endObject();
         try {
-            createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
+            createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
             fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
         } catch (TimestampParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));

@@ -232,7 +212,7 @@
                 .endObject().endObject();
 
         try {
-            createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
+            createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
             fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
         } catch (TimestampParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));

@@ -250,7 +230,7 @@
                 .endObject().endObject();
 
         try {
-            createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
+            createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
             fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false");
         } catch (TimestampParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));

@@ -270,7 +250,7 @@
                 .endObject();
 
         MetaData metaData = MetaData.builder().build();
-        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
 
         MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
 

@@ -343,7 +323,7 @@
                 .field("enabled", true)
                 .field("default", "1970-01-01")
                 .endObject().endObject().endObject().string();
-        DocumentMapperParser parser = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser();
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
 
         DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
         docMapper = parser.parse("type", docMapper.mappingSource());

@@ -380,7 +360,7 @@
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject()
                 .endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         try {
             docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()

@@ -395,7 +375,7 @@
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject()
                 .endObject().endObject().string();
-        DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
         MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
 
         XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject();

@@ -412,7 +392,7 @@
                 .endObject().endObject().string();
         BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes();
         // test with 2.x
-        DocumentMapper currentMapper = createIndex("new-index", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+        DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 
         // this works with 2.x
         IndexRequest request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1970-01-01");

|
||||
|
@ -427,54 +407,4 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(e.getMessage(), containsString("failed to parse timestamp [1234567890]"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testSizeTimestampIndexParsing() throws IOException {
|
||||
IndexService indexService = createIndex("test", BW_SETTINGS);
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
|
||||
DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true);
|
||||
assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
|
||||
documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
|
||||
assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
|
||||
}
|
||||
|
||||
public void testDefaultApplied() throws IOException {
|
||||
createIndex("test1", BW_SETTINGS);
|
||||
createIndex("test2", BW_SETTINGS);
|
||||
XContentBuilder defaultMapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject(MapperService.DEFAULT_MAPPING).startObject("_timestamp").field("enabled", true).endObject().endObject()
|
||||
.endObject();
|
||||
client().admin().indices().preparePutMapping().setType(MapperService.DEFAULT_MAPPING).setSource(defaultMapping).get();
|
||||
XContentBuilder typeMapping = XContentFactory.jsonBuilder().startObject()
|
||||
.startObject("type").startObject("_all").field("enabled", false).endObject().endObject()
|
||||
.endObject();
|
||||
client().admin().indices().preparePutMapping("test1").setType("type").setSource(typeMapping).get();
|
||||
client().admin().indices().preparePutMapping("test1", "test2").setType("type").setSource(typeMapping).get();
|
||||
|
||||
GetMappingsResponse response = client().admin().indices().prepareGetMappings("test2").get();
|
||||
assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_all"));
|
||||
assertFalse((Boolean) ((LinkedHashMap) response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")).get("enabled"));
|
||||
assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp"));
|
||||
assertTrue((Boolean)((LinkedHashMap)response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")).get("enabled"));
|
||||
}
|
||||
|
||||
public void testTimestampParsing() throws IOException {
|
||||
IndexService indexService = createIndex("test", BW_SETTINGS);
|
||||
XContentBuilder indexMapping = XContentFactory.jsonBuilder();
|
||||
boolean enabled = randomBoolean();
|
||||
indexMapping.startObject()
|
||||
.startObject("type")
|
||||
.startObject("_timestamp")
|
||||
.field("enabled", enabled)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
|
||||
assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
|
||||
assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
|
||||
assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
|
||||
documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
|
||||
assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
|
||||
assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
|
||||
assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -20,9 +20,7 @@
 package org.elasticsearch.index.mapper.ttl;

 import org.apache.lucene.index.IndexOptions;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;

@@ -32,48 +30,22 @@ import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;

 import java.io.IOException;
 import java.util.Collection;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.startsWith;

 public class TTLMappingTests extends ESSingleNodeTestCase {

-private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
-
 @Override
 protected Collection<Class<? extends Plugin>> getPlugins() {
 return pluginList(InternalSettingsPlugin.class);
 }

 public void testRejectedOn5x() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject()
 .startObject("type")
 .startObject("_ttl")
 .field("enabled", true)
 .endObject()
 .endObject().endObject().string();
 IndexService index = createIndex("test");
 IllegalArgumentException expected = expectThrows(IllegalArgumentException.class,
 () -> index.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false));
 assertThat(expected.getMessage(), startsWith("[_ttl] is removed"));
 }

 public void testSimpleDisabled() throws Exception {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 BytesReference source = XContentFactory.jsonBuilder()
 .startObject()
 .field("field", "value")

@@ -88,7 +60,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("_ttl").field("enabled", "yes").endObject()
 .endObject().endObject().string();
-DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 BytesReference source = XContentFactory.jsonBuilder()
 .startObject()
 .field("field", "value")

@@ -103,7 +75,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {

 public void testDefaultValues() throws Exception {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
-DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
 assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(TTLFieldMapper.Defaults.ENABLED_STATE.enabled));
 assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.stored()));
 assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions()));

@@ -121,7 +93,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject().string();

-MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
+MapperService mapperService = createIndex("test").mapperService();
 DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), MapperService.MergeReason.MAPPING_UPDATE, false);
 DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false);

@@ -144,7 +116,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
 .endObject().endObject().string();

-MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
+MapperService mapperService = createIndex("test").mapperService();
 DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false);
 DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false);

@@ -155,7 +127,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 public void testThatDisablingTTLReportsConflict() throws Exception {
 String mappingWithTtl = getMappingWithTtlEnabled().string();
 String mappingWithTtlDisabled = getMappingWithTtlDisabled().string();
-MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
+MapperService mapperService = createIndex("test").mapperService();
 DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false);

 try {

@@ -171,7 +143,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 public void testThatDisablingTTLReportsConflictOnCluster() throws Exception {
 String mappingWithTtl = getMappingWithTtlEnabled().string();
 String mappingWithTtlDisabled = getMappingWithTtlDisabled().string();
-assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", mappingWithTtl));
+assertAcked(client().admin().indices().prepareCreate("testindex").addMapping("type", mappingWithTtl));
 GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get();
 try {
 client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtlDisabled).setType("type").get();

@@ -186,7 +158,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 public void testThatEnablingTTLAfterFirstDisablingWorks() throws Exception {
 String mappingWithTtl = getMappingWithTtlEnabled().string();
 String withTtlDisabled = getMappingWithTtlDisabled().string();
-assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", withTtlDisabled));
+assertAcked(client().admin().indices().prepareCreate("testindex").addMapping("type", withTtlDisabled));
 GetMappingsResponse mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get();
 assertThat(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").sourceAsMap().get("_ttl").toString(), equalTo("{enabled=false}"));
 client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtl).setType("type").get();

@@ -195,20 +167,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 }

 public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
-IndexService indexService = createIndex("testindex", BW_SETTINGS, "type");
+IndexService indexService = createIndex("testindex", Settings.builder().build(), "type");
 XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
 indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 }

 public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
-IndexService indexService = createIndex("testindex", BW_SETTINGS, "type");
+IndexService indexService = createIndex("testindex", Settings.builder().build(), "type");
 XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
 indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 }

 public void testMergeWithOnlyDefaultSet() throws Exception {
 XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlEnabled);
+IndexService indexService = createIndex("testindex", Settings.builder().build(), "type", mappingWithTtlEnabled);
 XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
 indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
 CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();

@@ -217,7 +189,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {

 public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception {
 XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d");
-IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlEnabled);
+IndexService indexService = createIndex("testindex", Settings.builder().build(), "type", mappingWithTtlEnabled);
 CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
 assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
 XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");

@@ -230,7 +202,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("_ttl").field("enabled", true).endObject()
 .endObject().endObject().string();
-DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));

 try {
 docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()

@@ -31,12 +31,12 @@ import static org.hamcrest.Matchers.equalTo;
 public class ParseMappingTypeLevelTests extends ESSingleNodeTestCase {
 public void testTypeLevel() throws Exception {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("_all").field("enabled", false).endObject()
 .startObject("_timestamp").field("enabled", true).endObject()
 .endObject().endObject().string();

 DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
 DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
 assertThat(mapper.type(), equalTo("type"));
 assertThat(mapper.allFieldMapper().enabled(), equalTo(false));
 assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true));
 }
 }

@@ -139,6 +139,16 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
 compareMappingOnNodes(mappingsBeforeUpdateResponse);
 }

+// checks that the settings for timestamp and size are kept even if disabled
+public void testDisabledSizeTimestampIndexDoNotLooseMappings() throws Exception {
+String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
+prepareCreate(INDEX).addMapping(TYPE, mapping).get();
+GetMappingsResponse mappingsBeforeGreen = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
+ensureGreen(INDEX);
+// make sure all nodes have the same cluster state
+compareMappingOnNodes(mappingsBeforeGreen);
+}
+
 protected void testConflict(String mapping, String mappingUpdate, String... errorMessages) throws InterruptedException {
 assertAcked(prepareCreate(INDEX).setSource(mapping).get());
 ensureGreen(INDEX);

@@ -259,6 +259,56 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
 }
 }

+public void testTimestampParsing() throws IOException {
+IndexService indexService = createIndex("test");
+XContentBuilder indexMapping = XContentFactory.jsonBuilder();
+boolean enabled = randomBoolean();
+indexMapping.startObject()
+.startObject("type")
+.startObject("_timestamp")
+.field("enabled", enabled)
+.endObject()
+.endObject()
+.endObject();
+DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
+assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
+assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
+assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
+documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
+assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
+assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
+assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
+}
+
+public void testSizeTimestampIndexParsing() throws IOException {
+IndexService indexService = createIndex("test", Settings.builder().build());
+String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
+DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true);
+assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
+documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
+assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
+}
+
+public void testDefaultApplied() throws IOException {
+createIndex("test1", Settings.builder().build());
+createIndex("test2", Settings.builder().build());
+XContentBuilder defaultMapping = XContentFactory.jsonBuilder().startObject()
+.startObject(MapperService.DEFAULT_MAPPING).startObject("_timestamp").field("enabled", true).endObject().endObject()
+.endObject();
+client().admin().indices().preparePutMapping().setType(MapperService.DEFAULT_MAPPING).setSource(defaultMapping).get();
+XContentBuilder typeMapping = XContentFactory.jsonBuilder().startObject()
+.startObject("type").startObject("_all").field("enabled", false).endObject().endObject()
+.endObject();
+client().admin().indices().preparePutMapping("test1").setType("type").setSource(typeMapping).get();
+client().admin().indices().preparePutMapping("test1", "test2").setType("type").setSource(typeMapping).get();
+
+GetMappingsResponse response = client().admin().indices().prepareGetMappings("test2").get();
+assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_all"));
+assertFalse((Boolean) ((LinkedHashMap) response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")).get("enabled"));
+assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp"));
+assertTrue((Boolean) ((LinkedHashMap) response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")).get("enabled"));
+}
+
 public void testRejectFieldDefinedTwice() throws IOException {
 String mapping1 = XContentFactory.jsonBuilder().startObject()
 .startObject("type1")

@@ -60,7 +60,9 @@ import java.nio.charset.Charset;
 import java.nio.file.FileAlreadyExistsException;
 import java.nio.file.Files;
 import java.nio.file.InvalidPathException;
+import java.nio.file.OpenOption;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 import java.util.ArrayList;
 import java.util.Collection;

@@ -1125,13 +1127,13 @@
 Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME);
 Checkpoint read = Checkpoint.read(ckp);
 Checkpoint corrupted = new Checkpoint(0, 0, 0);
-Checkpoint.write(config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), corrupted, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
+Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), corrupted, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
 try (Translog translog = new Translog(config, translogGeneration)) {
 fail("corrupted");
 } catch (IllegalStateException ex) {
 assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=2683, numOps=55, translogFileGeneration= 2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration= 0}");
 }
-Checkpoint.write(config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
+Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
 try (Translog translog = new Translog(config, translogGeneration)) {
 assertNotNull(translogGeneration);
 assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration());

@@ -1564,22 +1566,20 @@
 private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean paritalWrites, final boolean throwUnknownException, Translog.TranslogGeneration generation) throws IOException {
 return new Translog(config, generation) {
 @Override
-TranslogWriter.ChannelFactory getChannelFactory() {
-final TranslogWriter.ChannelFactory factory = super.getChannelFactory();
+ChannelFactory getChannelFactory() {
+final ChannelFactory factory = super.getChannelFactory();

-return new TranslogWriter.ChannelFactory() {
-@Override
-public FileChannel open(Path file) throws IOException {
-FileChannel channel = factory.open(file);
-boolean success = false;
-try {
-ThrowingFileChannel throwingFileChannel = new ThrowingFileChannel(fail, paritalWrites, throwUnknownException, channel);
-success = true;
-return throwingFileChannel;
-} finally {
-if (success == false) {
-IOUtils.closeWhileHandlingException(channel);
-}
+return (file, openOption) -> {
+FileChannel channel = factory.open(file, openOption);
+boolean success = false;
+try {
+final boolean isCkpFile = file.getFileName().toString().endsWith(".ckp"); // don't do partial writes for checkpoints; we rely on the fact that the 20 bytes are written as an atomic operation
+ThrowingFileChannel throwingFileChannel = new ThrowingFileChannel(fail, isCkpFile ? false : paritalWrites, throwUnknownException, channel);
+success = true;
+return throwingFileChannel;
+} finally {
+if (success == false) {
+IOUtils.closeWhileHandlingException(channel);
+}
 }
 };

@@ -1840,11 +1840,18 @@
 } catch (IOException ex) {
 assertEquals(ex.getMessage(), "__FAKE__ no space left on device");
 } finally {
+Checkpoint checkpoint = failableTLog.readCheckpoint();
+if (checkpoint.numOps == unsynced.size() + syncedDocs.size()) {
+syncedDocs.addAll(unsynced); // failed in fsync but got fully written
+unsynced.clear();
+}
 generation = failableTLog.getGeneration();
 IOUtils.closeWhileHandlingException(failableTLog);
 }
 } catch (TranslogException | MockDirectoryWrapper.FakeIOException ex) {
 // failed - that's ok, we didn't even create it
+} catch (IOException ex) {
+assertEquals(ex.getMessage(), "__FAKE__ no space left on device");
 }
 // now randomly open this failing tlog again just to make sure we can also recover from failing during recovery
 if (randomBoolean()) {

@@ -1852,9 +1859,12 @@
 IOUtils.close(getFailableTranslog(fail, config, randomBoolean(), false, generation));
 } catch (TranslogException | MockDirectoryWrapper.FakeIOException ex) {
 // failed - that's ok, we didn't even create it
+} catch (IOException ex) {
+assertEquals(ex.getMessage(), "__FAKE__ no space left on device");
 }
 }

 fail.failNever(); // we don't want to fail here, but we might since we write a new checkpoint and create a new tlog file
 try (Translog translog = new Translog(config, generation)) {
 Translog.Snapshot snapshot = translog.newSnapshot();
 assertEquals(syncedDocs.size(), snapshot.totalOperations());

@@ -1866,4 +1876,30 @@
 }
 }
 }

+public void testCheckpointOnDiskFull() throws IOException {
+Checkpoint checkpoint = new Checkpoint(randomLong(), randomInt(), randomLong());
+Path tempDir = createTempDir();
+Checkpoint.write(FileChannel::open, tempDir.resolve("foo.cpk"), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
+Checkpoint checkpoint2 = new Checkpoint(randomLong(), randomInt(), randomLong());
+try {
+Checkpoint.write((p, o) -> {
+if (randomBoolean()) {
+throw new MockDirectoryWrapper.FakeIOException();
+}
+FileChannel open = FileChannel.open(p, o);
+FailSwitch failSwitch = new FailSwitch();
+failSwitch.failNever(); // don't fail in the ctor
+ThrowingFileChannel channel = new ThrowingFileChannel(failSwitch, false, false, open);
+failSwitch.failAlways();
+return channel;
+}, tempDir.resolve("foo.cpk"), checkpoint2, StandardOpenOption.WRITE);
+fail("should have failed earlier");
+} catch (MockDirectoryWrapper.FakeIOException ex) {
+// fine
+}
+Checkpoint read = Checkpoint.read(tempDir.resolve("foo.cpk"));
+assertEquals(read, checkpoint);
+}
 }

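The hunks above replace the anonymous TranslogWriter.ChannelFactory with a (path, options) functional interface, which is what lets production code pass FileChannel::open directly and lets tests hand Checkpoint.write a failure-injecting lambda. A minimal, self-contained sketch of that pattern; the names here are illustrative, not the actual Elasticsearch API:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.FileChannel;
    import java.nio.file.OpenOption;
    import java.nio.file.Path;

    public class ChannelFactorySketch {
        // Single abstract method, so FileChannel::open satisfies it directly.
        interface ChannelFactory {
            FileChannel open(Path path, OpenOption... options) throws IOException;
        }

        // The write path takes the factory as its first argument, mirroring the
        // Checkpoint.write(FileChannel::open, ...) calls in the hunks above.
        static void write(ChannelFactory factory, Path path, byte[] payload, OpenOption... options) throws IOException {
            try (FileChannel channel = factory.open(path, options)) {
                // A checkpoint is small enough to go out in a single write, which is
                // why the test above skips partial-write injection for .ckp files.
                channel.write(ByteBuffer.wrap(payload));
                channel.force(false); // fsync; "disk full" style failures surface here
            }
        }

        static void demo(Path path, byte[] payload, OpenOption... options) throws IOException {
            write(FileChannel::open, path, payload, options); // production-style call
            try {
                // Test-style call: fail before any channel is opened, so the file on
                // disk keeps its previous, intact contents.
                write((p, o) -> { throw new IOException("__FAKE__ no space left on device"); },
                        path, payload, options);
            } catch (IOException expected) {
                // expected: the on-disk contents are unchanged
            }
        }
    }

testCheckpointOnDiskFull exercises exactly this seam: the injected factory either throws outright or returns a channel that fails later, and the test then asserts that the original checkpoint still reads back intact.
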
@@ -65,6 +65,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.core.IsNull.notNullValue;

@@ -106,7 +107,8 @@
 @Override
 public void setupSuiteScopeCluster() throws Exception {
-createIndex("idx", "idx_unmapped");
+assertAcked(prepareCreate("idx").addMapping("type", "_timestamp", "enabled=true"));
+createIndex("idx_unmapped");
 // TODO: would be nice to have more random data here
 assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer"));
 List<IndexRequestBuilder> builders = new ArrayList<>();

@@ -1139,6 +1141,13 @@
 }
 }

+public void testTimestampField() { // see #11692
+SearchResponse response = client().prepareSearch("idx").addAggregation(dateHistogram("histo").field("_timestamp").dateHistogramInterval(randomFrom(DateHistogramInterval.DAY, DateHistogramInterval.MONTH))).get();
+assertSearchResponse(response);
+Histogram histo = response.getAggregations().get("histo");
+assertThat(histo.getBuckets().size(), greaterThan(0));
+}
+
 /**
 * When DST ends, local time turns back one hour, so between 2am and 4am wall time we should have four buckets:
 * "2015-10-25T02:00:00.000+02:00",

@@ -36,6 +36,7 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHitField;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
 import org.hamcrest.Matchers;

@@ -69,8 +70,10 @@ import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.nullValue;

@@ -1337,12 +1340,16 @@ public class FieldSortIT extends ESIntegTestCase {
 }

 public void testSortMetaField() throws Exception {
-createIndex("test");
+XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("_timestamp").field("enabled", true).endObject()
+.endObject().endObject();
+assertAcked(prepareCreate("test")
+.addMapping("type", mapping));
 ensureGreen();
 final int numDocs = randomIntBetween(10, 20);
 IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs];
 for (int i = 0; i < numDocs; ++i) {
-indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i))
+indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setTimestamp(Integer.toString(randomInt(1000)))
 .setSource();
 }
 indexRandom(true, indexReqs);

@@ -1361,6 +1368,37 @@ public class FieldSortIT extends ESIntegTestCase {
 assertThat(previous, order == SortOrder.ASC ? lessThan(uid) : greaterThan(uid));
 previous = uid;
 }

+/*
+searchResponse = client().prepareSearch()
+.setQuery(matchAllQuery())
+.setSize(randomIntBetween(1, numDocs + 5))
+.addSort("_id", order)
+.execute().actionGet();
+assertNoFailures(searchResponse);
+hits = searchResponse.getHits().hits();
+previous = order == SortOrder.ASC ? new BytesRef() : UnicodeUtil.BIG_TERM;
+for (int i = 0; i < hits.length; ++i) {
+final BytesRef id = new BytesRef(Uid.createUid(hits[i].type(), hits[i].id()));
+assertThat(previous, order == SortOrder.ASC ? lessThan(id) : greaterThan(id));
+previous = id;
+}*/
+
+searchResponse = client().prepareSearch()
+.setQuery(matchAllQuery())
+.setSize(randomIntBetween(1, numDocs + 5))
+.addSort("_timestamp", order)
+.addField("_timestamp")
+.execute().actionGet();
+assertNoFailures(searchResponse);
+hits = searchResponse.getHits().hits();
+Long previousTs = order == SortOrder.ASC ? 0 : Long.MAX_VALUE;
+for (int i = 0; i < hits.length; ++i) {
+SearchHitField timestampField = hits[i].getFields().get("_timestamp");
+Long timestamp = timestampField.<Long>getValue();
+assertThat(previousTs, order == SortOrder.ASC ? lessThanOrEqualTo(timestamp) : greaterThanOrEqualTo(timestamp));
+previousTs = timestamp;
+}
+}

 /**

@@ -78,11 +78,11 @@ public class PersistedTaskInfoTests extends ESTestCase {
 private static PersistedTaskInfo randomTaskResult() throws IOException {
 switch (between(0, 2)) {
 case 0:
-return new PersistedTaskInfo(randomTaskInfo());
+return new PersistedTaskInfo(randomBoolean(), randomTaskInfo());
 case 1:
 return new PersistedTaskInfo(randomTaskInfo(), new RuntimeException("error"));
 case 2:
-return new PersistedTaskInfo(randomTaskInfo(), randomTaskActionResult());
+return new PersistedTaskInfo(randomTaskInfo(), randomTaskResponse());
 default:
 throw new UnsupportedOperationException("Unsupported random TaskResult constructor");
 }

@@ -117,7 +117,7 @@ public class PersistedTaskInfoTests extends ESTestCase {
 }
 }

-private static ToXContent randomTaskActionResult() {
+private static ToXContent randomTaskResponse() {
 Map<String, String> result = new TreeMap<>();
 int fields = between(0, 10);
 for (int f = 0; f < fields; f++) {

@@ -126,7 +126,7 @@ public class PersistedTaskInfoTests extends ESTestCase {
 return new ToXContent() {
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-// Results in Elasticsearch never output a leading startObject. There isn't really a good reason, they just don't.
+// Responses in Elasticsearch never output a leading startObject. There isn't really a good reason, they just don't.
 for (Map.Entry<String, String> entry : result.entrySet()) {
 builder.field(entry.getKey(), entry.getValue());
 }

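The renamed randomTaskResponse() above returns a ToXContent fragment that, per the updated comment, never opens its own object: the caller owns the enclosing braces, so the fragment can be embedded under any field. A hedged sketch of that embedding, using only XContentBuilder calls that already appear in this diff; the class and method names here are illustrative:

    import org.elasticsearch.common.xcontent.ToXContent;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    import java.io.IOException;

    class ResponseRenderingSketch {
        // Wraps a brace-less response fragment in an enclosing object before it is
        // handed back to a client, the way a task-result serializer would.
        static String render(ToXContent responseFragment) throws IOException {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.startObject();                                         // caller owns the braces
            responseFragment.toXContent(builder, ToXContent.EMPTY_PARAMS); // fragment adds bare fields
            builder.endObject();
            return builder.string(); // 5.x-era accessor, used throughout this diff
        }
    }
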
@@ -19,20 +19,14 @@

 package org.elasticsearch.timestamp;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.Priority;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;

-import java.util.Collection;
 import java.util.Locale;

 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;

@@ -47,17 +41,8 @@ import static org.hamcrest.Matchers.notNullValue;
 /**
 */
 public class SimpleTimestampIT extends ESIntegTestCase {

-private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
-
-@Override
-protected Collection<Class<? extends Plugin>> nodePlugins() {
-return pluginList(InternalSettingsPlugin.class);
-}
-
 public void testSimpleTimestamp() throws Exception {
 client().admin().indices().prepareCreate("test")
-.setSettings(BW_SETTINGS)
 .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("_timestamp").field("enabled", true).endObject().endObject().endObject())
 .execute().actionGet();
 client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();

@@ -113,7 +98,7 @@ public class SimpleTimestampIT extends ESIntegTestCase {
 String type = "mytype";

 XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject();
-assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder));
+assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

 // check mapping again
 assertTimestampMappingEnabled(index, type, true);

@@ -132,7 +117,7 @@ public class SimpleTimestampIT extends ESIntegTestCase {
 String type = "mytype";

 XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject();
-assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder));
+assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

 // check mapping again
 assertTimestampMappingEnabled(index, type, true);

@@ -19,7 +19,6 @@

 package org.elasticsearch.ttl;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;

@@ -27,19 +26,14 @@ import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.update.UpdateRequestBuilder;
 import org.elasticsearch.action.update.UpdateResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;

 import java.io.IOException;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;

@@ -66,11 +60,6 @@ public class SimpleTTLIT extends ESIntegTestCase {
 return 2;
 }

-@Override
-protected Collection<Class<? extends Plugin>> nodePlugins() {
-return Collections.singleton(InternalSettingsPlugin.class);
-}
-
 @Override
 protected Settings nodeSettings(int nodeOrdinal) {
 return Settings.builder()

@@ -81,7 +70,6 @@ public class SimpleTTLIT extends ESIntegTestCase {

 public void testSimpleTTL() throws Exception {
 assertAcked(prepareCreate("test")
-.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
 .addMapping("type1", XContentFactory.jsonBuilder()
 .startObject()
 .startObject("type1")

@@ -221,7 +209,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
 String type = "mytype";

 XContentBuilder builder = jsonBuilder().startObject().startObject("_ttl").field("enabled", true).endObject().endObject();
-assertAcked(client().admin().indices().prepareCreate(index).setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id).addMapping(type, builder));
+assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));

 // check mapping again
 assertTTLMappingEnabled(index, type);

@@ -244,7 +232,6 @@ public class SimpleTTLIT extends ESIntegTestCase {
 */
 public void testNoopUpdate() throws IOException {
 assertAcked(prepareCreate("test")
-.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
 .addMapping("type1", XContentFactory.jsonBuilder()
 .startObject()
 .startObject("type1")

@@ -1,237 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.update;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.action.admin.indices.alias.Alias;
-import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.update.UpdateResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.engine.DocumentMissingException;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHitField;
-import org.elasticsearch.search.sort.SortOrder;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-
-import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.hamcrest.Matchers.allOf;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-
-public class TimestampTTLBWIT extends ESIntegTestCase {
-
-@Override
-protected Collection<Class<? extends Plugin>> nodePlugins() {
-return Arrays.asList(
-UpdateIT.FieldIncrementScriptPlugin.class,
-UpdateIT.ExtractContextInSourceScriptPlugin.class,
-UpdateIT.PutFieldValuesScriptPlugin.class,
-InternalSettingsPlugin.class
-);
-}
-
-public void testSort() throws Exception {
-XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("_timestamp").field("enabled", true).endObject()
-.endObject().endObject();
-assertAcked(prepareCreate("test")
-.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
-.addMapping("type", mapping));
-ensureGreen();
-final int numDocs = randomIntBetween(10, 20);
-IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs];
-for (int i = 0; i < numDocs; ++i) {
-indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setTimestamp(Integer.toString(randomInt(1000)))
-.setSource();
-}
-indexRandom(true, indexReqs);
-
-SortOrder order = randomFrom(SortOrder.values());
-
-SearchResponse searchResponse = client().prepareSearch()
-.setQuery(matchAllQuery())
-.setSize(randomIntBetween(1, numDocs + 5))
-.addSort("_timestamp", order)
-.addField("_timestamp")
-.execute().actionGet();
-assertNoFailures(searchResponse);
-SearchHit[] hits = searchResponse.getHits().hits();
-Long previousTs = order == SortOrder.ASC ? 0 : Long.MAX_VALUE;
-for (int i = 0; i < hits.length; ++i) {
-SearchHitField timestampField = hits[i].getFields().get("_timestamp");
-Long timestamp = timestampField.<Long>getValue();
-assertThat(previousTs, order == SortOrder.ASC ? lessThanOrEqualTo(timestamp) : greaterThanOrEqualTo(timestamp));
-previousTs = timestamp;
-}
-}
-
-public void testUpdate() throws Exception {
-assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
-.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
-.addMapping("type1", XContentFactory.jsonBuilder()
-.startObject()
-.startObject("type1")
-.startObject("_timestamp").field("enabled", true).endObject()
-.startObject("_ttl").field("enabled", true).endObject()
-.endObject()
-.endObject()));
-
-ensureGreen();
-
-try {
-client().prepareUpdate(indexOrAlias(), "type1", "1")
-.setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
-fail();
-} catch (DocumentMissingException e) {
-// all is well
-}
-
-// check TTL is kept after an update without TTL
-client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get();
-GetResponse getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
-long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-assertThat(ttl, greaterThan(0L));
-client().prepareUpdate(indexOrAlias(), "type1", "2")
-.setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
-getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
-ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-assertThat(ttl, greaterThan(0L));
-
-// check TTL update
-client().prepareUpdate(indexOrAlias(), "type1", "2")
-.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values",
-Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet();
-getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
-ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
-assertThat(ttl, greaterThan(0L));
-assertThat(ttl, lessThanOrEqualTo(3600000L));
-
-// check timestamp update
-client().prepareIndex("test", "type1", "3").setSource("field", 1).setRefreshPolicy(IMMEDIATE).get();
-client().prepareUpdate(indexOrAlias(), "type1", "3")
-.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values",
-Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute()
-.actionGet();
-getResponse = client().prepareGet("test", "type1", "3").setFields("_timestamp").execute().actionGet();
-long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
-assertThat(timestamp, equalTo(1258294332000L));
-}
-
-public void testContextVariables() throws Exception {
-assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
-.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
-.addMapping("type1", XContentFactory.jsonBuilder()
-.startObject()
-.startObject("type1")
-.startObject("_timestamp").field("enabled", true).endObject()
-.startObject("_ttl").field("enabled", true).endObject()
-.endObject()
-.endObject())
-.addMapping("subtype1", XContentFactory.jsonBuilder()
-.startObject()
-.startObject("subtype1")
-.startObject("_parent").field("type", "type1").endObject()
-.startObject("_timestamp").field("enabled", true).endObject()
-.startObject("_ttl").field("enabled", true).endObject()
-.endObject()
-.endObject())
-);
-ensureGreen();
-
-// Index some documents
-long timestamp = System.currentTimeMillis();
-client().prepareIndex()
-.setIndex("test")
-.setType("type1")
-.setId("parentId1")
-.setTimestamp(String.valueOf(timestamp-1))
-.setSource("field1", 0, "content", "bar")
-.execute().actionGet();
-
-long ttl = 10000;
-client().prepareIndex()
-.setIndex("test")
-.setType("subtype1")
-.setId("id1")
-.setParent("parentId1")
-.setRouting("routing1")
-.setTimestamp(String.valueOf(timestamp))
-.setTTL(ttl)
-.setSource("field1", 1, "content", "foo")
-.execute().actionGet();
-
-// Update the first object and note context variables values
-UpdateResponse updateResponse = client().prepareUpdate("test", "subtype1", "id1")
-.setRouting("routing1")
-.setScript(new Script("", ScriptService.ScriptType.INLINE, "extract_ctx", null))
-.execute().actionGet();
-
-assertEquals(2, updateResponse.getVersion());
-
-GetResponse getResponse = client().prepareGet("test", "subtype1", "id1").setRouting("routing1").execute().actionGet();
-Map<String, Object> updateContext = (Map<String, Object>) getResponse.getSourceAsMap().get("update_context");
-assertEquals("test", updateContext.get("_index"));
-assertEquals("subtype1", updateContext.get("_type"));
-assertEquals("id1", updateContext.get("_id"));
-assertEquals(1, updateContext.get("_version"));
-assertEquals("parentId1", updateContext.get("_parent"));
-assertEquals("routing1", updateContext.get("_routing"));
-assertThat(((Integer) updateContext.get("_ttl")).longValue(), allOf(greaterThanOrEqualTo(ttl-3000), lessThanOrEqualTo(ttl)));
-
-// Idem with the second object
-updateResponse = client().prepareUpdate("test", "type1", "parentId1")
-.setScript(new Script("", ScriptService.ScriptType.INLINE, "extract_ctx", null))
-.execute().actionGet();
-
-assertEquals(2, updateResponse.getVersion());
-
-getResponse = client().prepareGet("test", "type1", "parentId1").execute().actionGet();
-updateContext = (Map<String, Object>) getResponse.getSourceAsMap().get("update_context");
-assertEquals("test", updateContext.get("_index"));
-assertEquals("type1", updateContext.get("_type"));
-assertEquals("parentId1", updateContext.get("_id"));
-assertEquals(1, updateContext.get("_version"));
-assertNull(updateContext.get("_parent"));
-assertNull(updateContext.get("_routing"));
-assertNull(updateContext.get("_ttl"));
-}
-
-private static String indexOrAlias() {
-return randomBoolean() ? "test" : "alias";
-}
-}


@ -62,12 +62,14 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;

@ -355,14 +357,21 @@ public class UpdateIT extends ESIntegTestCase {
                FieldIncrementScriptPlugin.class,
                ScriptedUpsertScriptPlugin.class,
                ExtractContextInSourceScriptPlugin.class,
                InternalSettingsPlugin.class
                InternalSettingsPlugin.class // uses index.merge.enabled
        );
    }

    private void createTestIndex() throws Exception {
        logger.info("--> creating index test");

        assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
                .addMapping("type1", XContentFactory.jsonBuilder()
                        .startObject()
                        .startObject("type1")
                        .startObject("_timestamp").field("enabled", true).endObject()
                        .startObject("_ttl").field("enabled", true).endObject()
                        .endObject()
                        .endObject()));
    }

    public void testUpsert() throws Exception {

@ -629,6 +638,34 @@ public class UpdateIT extends ESIntegTestCase {
            assertThat(getResponse.isExists(), equalTo(false));
        }

        // check TTL is kept after an update without TTL
        client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get();
        GetResponse getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
        long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
        assertThat(ttl, greaterThan(0L));
        client().prepareUpdate(indexOrAlias(), "type1", "2")
                .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
        getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
        ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
        assertThat(ttl, greaterThan(0L));

        // check TTL update
        client().prepareUpdate(indexOrAlias(), "type1", "2")
                .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet();
        getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
        ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
        assertThat(ttl, greaterThan(0L));
        assertThat(ttl, lessThanOrEqualTo(3600000L));

        // check timestamp update
        client().prepareIndex("test", "type1", "3").setSource("field", 1).setRefreshPolicy(IMMEDIATE).get();
        client().prepareUpdate(indexOrAlias(), "type1", "3")
                .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute()
                .actionGet();
        getResponse = client().prepareGet("test", "type1", "3").setFields("_timestamp").execute().actionGet();
        long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
        assertThat(timestamp, equalTo(1258294332000L));

        // check fields parameter
        client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")

@ -645,7 +682,7 @@ public class UpdateIT extends ESIntegTestCase {
        client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
        for (int i = 0; i < 5; i++) {
            GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
            getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
            assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1"));
            assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2"));
        }

@ -653,7 +690,7 @@ public class UpdateIT extends ESIntegTestCase {
        // change existing field
        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
        for (int i = 0; i < 5; i++) {
            GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
            getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
            assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3"));
            assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2"));
        }

@ -671,7 +708,7 @@ public class UpdateIT extends ESIntegTestCase {
        client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet();
        updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
        for (int i = 0; i < 5; i++) {
            GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
            getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
            Map map1 = (Map) getResponse.getSourceAsMap().get("map");
            assertThat(map1.size(), equalTo(3));
            assertThat(map1.containsKey("map1"), equalTo(true));

@ -723,12 +760,16 @@ public class UpdateIT extends ESIntegTestCase {
                .addMapping("type1", XContentFactory.jsonBuilder()
                        .startObject()
                        .startObject("type1")
                        .startObject("_timestamp").field("enabled", true).endObject()
                        .startObject("_ttl").field("enabled", true).endObject()
                        .endObject()
                        .endObject())
                .addMapping("subtype1", XContentFactory.jsonBuilder()
                        .startObject()
                        .startObject("subtype1")
                        .startObject("_parent").field("type", "type1").endObject()
                        .startObject("_timestamp").field("enabled", true).endObject()
                        .startObject("_ttl").field("enabled", true).endObject()
                        .endObject()
                        .endObject())
        );

@ -772,6 +813,7 @@ public class UpdateIT extends ESIntegTestCase {
        assertEquals(1, updateContext.get("_version"));
        assertEquals("parentId1", updateContext.get("_parent"));
        assertEquals("routing1", updateContext.get("_routing"));
        assertThat(((Integer) updateContext.get("_ttl")).longValue(), allOf(greaterThanOrEqualTo(ttl - 3000), lessThanOrEqualTo(ttl)));

        // Idem with the second object
        updateResponse = client().prepareUpdate("test", "type1", "parentId1")

@ -862,6 +904,13 @@ public class UpdateIT extends ESIntegTestCase {
    public void testStressUpdateDeleteConcurrency() throws Exception {
        // We create an index with merging disabled so that deletes don't get merged away
        assertAcked(prepareCreate("test")
                .addMapping("type1", XContentFactory.jsonBuilder()
                        .startObject()
                        .startObject("type1")
                        .startObject("_timestamp").field("enabled", true).endObject()
                        .startObject("_ttl").field("enabled", true).endObject()
                        .endObject()
                        .endObject())
                .setSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)));
        ensureGreen();


@ -22,6 +22,7 @@ package org.elasticsearch.messy.tests;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

@ -44,6 +45,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;

@ -86,6 +88,8 @@ public class SearchFieldsTests extends ESIntegTestCase {
        client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                // _timestamp is randomly enabled via templates but we don't want it here to test stored fields behaviour
                .startObject("_timestamp").field("enabled", false).endObject()
                .startObject("properties")
                .startObject("field1").field("type", "text").field("store", true).endObject()
                .startObject("field2").field("type", "text").field("store", false).endObject()

@ -694,7 +698,7 @@ public class SearchFieldsTests extends ESIntegTestCase {
    public void testLoadMetadata() throws Exception {
        assertAcked(prepareCreate("test")
                .addMapping("parent")
                .addMapping("my-type1", "_parent", "type=parent"));
                .addMapping("my-type1", "_timestamp", "enabled=true", "_ttl", "enabled=true", "_parent", "type=parent"));

        indexRandom(true,
                client().prepareIndex("test", "my-type1", "1")

@ -713,6 +717,12 @@ public class SearchFieldsTests extends ESIntegTestCase {
        assertThat(fields.get("field1"), nullValue());
        assertThat(fields.get("_routing").isMetadataField(), equalTo(true));
        assertThat(fields.get("_routing").getValue().toString(), equalTo("1"));
        assertThat(fields.get("_timestamp").isMetadataField(), equalTo(true));
        assertThat(fields.get("_timestamp").getValue().toString(), equalTo("205097"));
        assertThat(fields.get("_ttl").isMetadataField(), equalTo(true));
        // TODO: _ttl should return the original value, but it does not work today because
        // it would use now() instead of the value of _timestamp to rebase
        // assertThat(fields.get("_ttl").getValue().toString(), equalTo("10000000205097"));
        assertThat(fields.get("_parent").isMetadataField(), equalTo(true));
        assertThat(fields.get("_parent").getValue().toString(), equalTo("parent_1"));
    }

@ -0,0 +1,410 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.painless;

import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.ObjIntConsumer;
import java.util.function.Predicate;
import java.util.function.ToDoubleFunction;
import java.util.regex.Matcher;

/** Additional methods added to classes. These must be static methods with receiver as first argument. */
public class Augmentation {

    // static methods only!
    private Augmentation() {}

    /** Exposes List.size() as getLength(), so that .length shortcut works on lists */
    public static <T> int getLength(List<T> receiver) {
        return receiver.size();
    }

    /** Exposes Matcher.group(String) as namedGroup(String), so it doesn't conflict with group(int) */
    public static String namedGroup(Matcher receiver, String name) {
        return receiver.group(name);
    }

    // some groovy methods on iterable
    // see http://docs.groovy-lang.org/latest/html/groovy-jdk/java/lang/Iterable.html

    /** Iterates over the contents of an iterable, and checks whether a predicate is valid for at least one element. */
    public static <T> boolean any(Iterable<T> receiver, Predicate<T> predicate) {
        for (T t : receiver) {
            if (predicate.test(t)) {
                return true;
            }
        }
        return false;
    }

    /** Counts the number of occurrences which satisfy the given predicate from inside this Iterable. */
    public static <T> int count(Iterable<T> receiver, Predicate<T> predicate) {
        int count = 0;
        for (T t : receiver) {
            if (predicate.test(t)) {
                count++;
            }
        }
        return count;
    }

    // instead of covariant overrides for every possibility, we just return receiver as 'def' for now
    // that way if someone chains the calls, everything works.

    /** Iterates through an Iterable, passing each item to the given consumer. */
    public static <T> Object each(Iterable<T> receiver, Consumer<T> consumer) {
        receiver.forEach(consumer);
        return receiver;
    }

    /**
     * Iterates through an iterable type, passing each item and the item's index
     * (a counter starting at zero) to the given consumer.
     */
    public static <T> Object eachWithIndex(Iterable<T> receiver, ObjIntConsumer<T> consumer) {
        int count = 0;
        for (T t : receiver) {
            consumer.accept(t, count++);
        }
        return receiver;
    }

    /**
     * Used to determine if the given predicate is valid (i.e. returns true for all items in this iterable).
     */
    public static <T> boolean every(Iterable<T> receiver, Predicate<T> predicate) {
        for (T t : receiver) {
            if (predicate.test(t) == false) {
                return false;
            }
        }
        return true;
    }

    /**
     * Iterates through the Iterable transforming items using the supplied function and
     * collecting any non-null results.
     */
    public static <T,U> List<U> findResults(Iterable<T> receiver, Function<T,U> filter) {
        List<U> list = new ArrayList<>();
        for (T t : receiver) {
            U result = filter.apply(t);
            if (result != null) {
                list.add(result);
            }
        }
        return list;
    }

    /**
     * Sorts all Iterable members into groups determined by the supplied mapping function.
     */
    public static <T,U> Map<U,List<T>> groupBy(Iterable<T> receiver, Function<T,U> mapper) {
        Map<U,List<T>> map = new LinkedHashMap<>();
        for (T t : receiver) {
            U mapped = mapper.apply(t);
            List<T> results = map.get(mapped);
            if (results == null) {
                results = new ArrayList<>();
                map.put(mapped, results);
            }
            results.add(t);
        }
        return map;
    }

    /**
     * Concatenates the toString() representation of each item in this Iterable,
     * with the given String as a separator between each item.
     */
    public static <T> String join(Iterable<T> receiver, String separator) {
        StringBuilder sb = new StringBuilder();
        for (T t : receiver) {
            if (sb.length() > 0) {
                sb.append(separator);
            }
            sb.append(t);
        }
        return sb.toString();
    }

    /**
     * Sums the result of applying a function to each item of an Iterable.
     */
    public static <T> double sum(Iterable<T> receiver, ToDoubleFunction<T> function) {
        double sum = 0;
        for (T t : receiver) {
            sum += function.applyAsDouble(t);
        }
        return sum;
    }

    // some groovy methods on collection
    // see http://docs.groovy-lang.org/latest/html/groovy-jdk/java/util/Collection.html

    /**
     * Iterates through this collection transforming each entry into a new value using
     * the function, returning a list of transformed values.
     */
    public static <T,U> List<U> collect(Collection<T> receiver, Function<T,U> function) {
        List<U> list = new ArrayList<>();
        for (T t : receiver) {
            list.add(function.apply(t));
        }
        return list;
    }

    /**
     * Iterates through this collection transforming each entry into a new value using
     * the function, adding the values to the specified collection.
     */
    public static <T,U> Object collect(Collection<T> receiver, Collection<U> collection, Function<T,U> function) {
        for (T t : receiver) {
            collection.add(function.apply(t));
        }
        return collection;
    }

    /**
     * Finds the first value matching the predicate, or returns null.
     */
    public static <T> T find(Collection<T> receiver, Predicate<T> predicate) {
        for (T t : receiver) {
            if (predicate.test(t)) {
                return t;
            }
        }
        return null;
    }

    /**
     * Finds all values matching the predicate, returns as a list.
     */
    public static <T> List<T> findAll(Collection<T> receiver, Predicate<T> predicate) {
        List<T> list = new ArrayList<>();
        for (T t : receiver) {
            if (predicate.test(t)) {
                list.add(t);
            }
        }
        return list;
    }

    /**
     * Iterates through the collection calling the given function for each item
     * but stopping once the first non-null result is found and returning that result.
     * If all results are null, null is returned.
     */
    public static <T,U> Object findResult(Collection<T> receiver, Function<T,U> function) {
        return findResult(receiver, null, function);
    }

    /**
     * Iterates through the collection calling the given function for each item
     * but stopping once the first non-null result is found and returning that result.
     * If all results are null, defaultResult is returned.
     */
    public static <T,U> Object findResult(Collection<T> receiver, Object defaultResult, Function<T,U> function) {
        for (T t : receiver) {
            U value = function.apply(t);
            if (value != null) {
                return value;
            }
        }
        return defaultResult;
    }

    /**
     * Splits all items into two collections based on the predicate.
     * The first list contains all items which match the closure expression. The second list all those that don't.
     */
    public static <T> List<List<T>> split(Collection<T> receiver, Predicate<T> predicate) {
        List<T> matched = new ArrayList<>();
        List<T> unmatched = new ArrayList<>();
        List<List<T>> result = new ArrayList<>(2);
        result.add(matched);
        result.add(unmatched);
        for (T t : receiver) {
            if (predicate.test(t)) {
                matched.add(t);
            } else {
                unmatched.add(t);
            }
        }
        return result;
    }

    // some groovy methods on map
    // see http://docs.groovy-lang.org/latest/html/groovy-jdk/java/util/Map.html

    /**
     * Iterates through this map transforming each entry into a new value using
     * the function, returning a list of transformed values.
     */
    public static <K,V,T> List<T> collect(Map<K,V> receiver, BiFunction<K,V,T> function) {
        List<T> list = new ArrayList<>();
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            list.add(function.apply(kvPair.getKey(), kvPair.getValue()));
        }
        return list;
    }

    /**
     * Iterates through this map transforming each entry into a new value using
     * the function, adding the values to the specified collection.
     */
    public static <K,V,T> Object collect(Map<K,V> receiver, Collection<T> collection, BiFunction<K,V,T> function) {
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            collection.add(function.apply(kvPair.getKey(), kvPair.getValue()));
        }
        return collection;
    }

    /** Counts the number of occurrences which satisfy the given predicate from inside this Map */
    public static <K,V> int count(Map<K,V> receiver, BiPredicate<K,V> predicate) {
        int count = 0;
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
                count++;
            }
        }
        return count;
    }

    /** Iterates through a Map, passing each item to the given consumer. */
    public static <K,V> Object each(Map<K,V> receiver, BiConsumer<K,V> consumer) {
        receiver.forEach(consumer);
        return receiver;
    }

    /**
     * Used to determine if the given predicate is valid (i.e. returns true for all items in this map).
     */
    public static <K,V> boolean every(Map<K,V> receiver, BiPredicate<K,V> predicate) {
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            if (predicate.test(kvPair.getKey(), kvPair.getValue()) == false) {
                return false;
            }
        }
        return true;
    }

    /**
     * Finds the first entry matching the predicate, or returns null.
     */
    public static <K,V> Map.Entry<K,V> find(Map<K,V> receiver, BiPredicate<K,V> predicate) {
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
                return kvPair;
            }
        }
        return null;
    }

    /**
     * Finds all values matching the predicate, returns as a map.
     */
    public static <K,V> Map<K,V> findAll(Map<K,V> receiver, BiPredicate<K,V> predicate) {
        // try to preserve some properties of the receiver (see the groovy javadocs)
        final Map<K,V> map;
        if (receiver instanceof TreeMap) {
            map = new TreeMap<>();
        } else {
            map = new LinkedHashMap<>();
        }
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
                map.put(kvPair.getKey(), kvPair.getValue());
            }
        }
        return map;
    }

    /**
     * Iterates through the map calling the given function for each item
     * but stopping once the first non-null result is found and returning that result.
     * If all results are null, null is returned.
     */
    public static <K,V,T> Object findResult(Map<K,V> receiver, BiFunction<K,V,T> function) {
        return findResult(receiver, null, function);
    }

    /**
     * Iterates through the map calling the given function for each item
     * but stopping once the first non-null result is found and returning that result.
     * If all results are null, defaultResult is returned.
     */
    public static <K,V,T> Object findResult(Map<K,V> receiver, Object defaultResult, BiFunction<K,V,T> function) {
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            T value = function.apply(kvPair.getKey(), kvPair.getValue());
            if (value != null) {
                return value;
            }
        }
        return defaultResult;
    }

    /**
     * Iterates through the map transforming items using the supplied function and
     * collecting any non-null results.
     */
    public static <K,V,T> List<T> findResults(Map<K,V> receiver, BiFunction<K,V,T> filter) {
        List<T> list = new ArrayList<>();
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            T result = filter.apply(kvPair.getKey(), kvPair.getValue());
            if (result != null) {
                list.add(result);
            }
        }
        return list;
    }

    /**
     * Sorts all Map members into groups determined by the supplied mapping function.
     */
    public static <K,V,T> Map<T,Map<K,V>> groupBy(Map<K,V> receiver, BiFunction<K,V,T> mapper) {
        Map<T,Map<K,V>> map = new LinkedHashMap<>();
        for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
            T mapped = mapper.apply(kvPair.getKey(), kvPair.getValue());
            Map<K,V> results = map.get(mapped);
            if (results == null) {
                // try to preserve some properties of the receiver (see the groovy javadocs)
                if (receiver instanceof TreeMap) {
                    results = new TreeMap<>();
                } else {
                    results = new LinkedHashMap<>();
                }
                map.put(mapped, results);
            }
            results.put(kvPair.getKey(), kvPair.getValue());
        }
        return map;
    }
}
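
A quick Java sketch (mine, not part of the patch) of what a few of these helpers do, calling the static entry points directly; in a script the same calls are written as instance calls and the compiler supplies the receiver as the first argument:

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;

    List<Integer> nums = Arrays.asList(1, -2, 3);
    // any: true, at least one element is negative
    boolean hasNegative = Augmentation.any(nums, x -> x < 0);
    // join: "1,-2,3"
    String joined = Augmentation.join(nums, ",");
    // groupBy (insertion-ordered): {positive=[1, 3], negative=[-2]}
    Map<String, List<Integer>> grouped =
        Augmentation.groupBy(nums, x -> x < 0 ? "negative" : "positive");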

@ -350,10 +350,10 @@ public final class Def {
            }
            throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments.");
        }
        ref = new FunctionRef(clazz, interfaceMethod, handle, captures);
        ref = new FunctionRef(clazz, interfaceMethod, handle, captures.length);
    } else {
        // whitelist lookup
        ref = new FunctionRef(clazz, type, call, captures);
        ref = new FunctionRef(clazz, type, call, captures.length);
    }
    final CallSite callSite;
    if (ref.needsBridges()) {

@ -186,15 +186,17 @@ public final class Definition {
    public static class Method {
        public final String name;
        public final Struct owner;
        public final boolean augmentation;
        public final Type rtn;
        public final List<Type> arguments;
        public final org.objectweb.asm.commons.Method method;
        public final int modifiers;
        public final MethodHandle handle;

        public Method(String name, Struct owner, Type rtn, List<Type> arguments,
        public Method(String name, Struct owner, boolean augmentation, Type rtn, List<Type> arguments,
                      org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) {
            this.name = name;
            this.augmentation = augmentation;
            this.owner = owner;
            this.rtn = rtn;
            this.arguments = Collections.unmodifiableList(arguments);

@ -217,7 +219,15 @@ public final class Definition {
            // otherwise compute it
            final Class<?> params[];
            final Class<?> returnValue;
            if (Modifier.isStatic(modifiers)) {
            if (augmentation) {
                // static method disguised as virtual/interface method
                params = new Class<?>[1 + arguments.size()];
                params[0] = Augmentation.class;
                for (int i = 0; i < arguments.size(); i++) {
                    params[i + 1] = arguments.get(i).clazz;
                }
                returnValue = rtn.clazz;
            } else if (Modifier.isStatic(modifiers)) {
                // static method: straightforward copy
                params = new Class<?>[arguments.size()];
                for (int i = 0; i < arguments.size(); i++) {

@ -242,6 +252,24 @@ public final class Definition {
            }
            return MethodType.methodType(returnValue, params);
        }

        public void write(MethodWriter writer) {
            final org.objectweb.asm.Type type;
            if (augmentation) {
                assert java.lang.reflect.Modifier.isStatic(modifiers);
                type = WriterConstants.AUGMENTATION_TYPE;
            } else {
                type = owner.type;
            }

            if (java.lang.reflect.Modifier.isStatic(modifiers)) {
                writer.invokeStatic(type, method);
            } else if (java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers())) {
                writer.invokeInterface(type, method);
            } else {
                writer.invokeVirtual(type, method);
            }
        }
    }
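
A hedged sketch of the dispatch that write() chooses between; the augmentation case turns an apparent instance call into a plain static call with the receiver passed as the first argument (reconstructed from the branches above, for illustration only):

    // virtual:       x.size()       -> invokeVirtual   on the owner type
    // interface:     x.size()       -> invokeInterface on the owner type
    // augmentation:  x.getLength()  -> invokeStatic    Augmentation.getLength(x)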

    public static final class Field {

@ -690,7 +718,7 @@ public final class Definition {
            " with arguments " + Arrays.toString(classes) + ".");
        }

        final Method constructor = new Method(name, owner, returnType, Arrays.asList(args), asm, reflect.getModifiers(), handle);
        final Method constructor = new Method(name, owner, false, returnType, Arrays.asList(args), asm, reflect.getModifiers(), handle);

        owner.constructors.put(methodKey, constructor);
    }

@ -734,24 +762,20 @@ public final class Definition {
            }
            addConstructorInternal(className, "<init>", args);
        } else {
            if (methodName.indexOf('/') >= 0) {
                String nameAndAlias[] = methodName.split("/");
                if (nameAndAlias.length != 2) {
                    throw new IllegalArgumentException("Currently only two aliases are allowed!");
                }
                addMethodInternal(className, nameAndAlias[0], nameAndAlias[1], rtn, args);
            if (methodName.indexOf("*") >= 0) {
                addMethodInternal(className, methodName.substring(0, methodName.length() - 1), true, rtn, args);
            } else {
                addMethodInternal(className, methodName, null, rtn, args);
                addMethodInternal(className, methodName, false, rtn, args);
            }
        }
    } else {
        // field
        addFieldInternal(className, elements[1], null, rtn);
        addFieldInternal(className, elements[1], rtn);
    }
}

private final void addMethodInternal(final String struct, final String name, final String alias,
                                     final Type rtn, final Type[] args) {
private final void addMethodInternal(String struct, String name, boolean augmentation,
                                     Type rtn, Type[] args) {
    final Struct owner = structsMap.get(struct);

    if (owner == null) {

@ -777,20 +801,32 @@ public final class Definition {
            "Duplicate method signature [" + methodKey + "] found within the struct [" + owner.name + "].");
    }

    final Class<?>[] classes = new Class<?>[args.length];

    for (int count = 0; count < classes.length; ++count) {
        classes[count] = args[count].clazz;
    final Class<?> implClass;
    final Class<?>[] params;

    if (augmentation == false) {
        implClass = owner.clazz;
        params = new Class<?>[args.length];
        for (int count = 0; count < args.length; ++count) {
            params[count] = args[count].clazz;
        }
    } else {
        implClass = Augmentation.class;
        params = new Class<?>[args.length + 1];
        params[0] = owner.clazz;
        for (int count = 0; count < args.length; ++count) {
            params[count + 1] = args[count].clazz;
        }
    }

    final java.lang.reflect.Method reflect;

    try {
        reflect = owner.clazz.getMethod(alias == null ? name : alias, classes);
    } catch (final NoSuchMethodException exception) {
        throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) +
            "] not found for class [" + owner.clazz.getName() + "]" +
            " with arguments " + Arrays.toString(classes) + ".");
        reflect = implClass.getMethod(name, params);
    } catch (NoSuchMethodException exception) {
        throw new IllegalArgumentException("Method [" + name +
            "] not found for class [" + implClass.getName() + "]" +
            " with arguments " + Arrays.toString(params) + ".");
    }

    if (!reflect.getReturnType().equals(rtn.clazz)) {

@ -805,25 +841,24 @@ public final class Definition {
    MethodHandle handle;

    try {
        handle = MethodHandles.publicLookup().in(owner.clazz).unreflect(reflect);
        handle = MethodHandles.publicLookup().in(implClass).unreflect(reflect);
    } catch (final IllegalAccessException exception) {
        throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) + "]" +
            " not found for class [" + owner.clazz.getName() + "]" +
            " with arguments " + Arrays.toString(classes) + ".");
        throw new IllegalArgumentException("Method [" + name + "]" +
            " not found for class [" + implClass.getName() + "]" +
            " with arguments " + Arrays.toString(params) + ".");
    }

    final int modifiers = reflect.getModifiers();
    final Method method = new Method(name, owner, rtn, Arrays.asList(args), asm, modifiers, handle);
    final Method method = new Method(name, owner, augmentation, rtn, Arrays.asList(args), asm, modifiers, handle);

    if (java.lang.reflect.Modifier.isStatic(modifiers)) {
    if (augmentation == false && java.lang.reflect.Modifier.isStatic(modifiers)) {
        owner.staticMethods.put(methodKey, method);
    } else {
        owner.methods.put(methodKey, method);
    }
}

private final void addFieldInternal(final String struct, final String name, final String alias,
                                    final Type type) {
private final void addFieldInternal(String struct, String name, Type type) {
    final Struct owner = structsMap.get(struct);

    if (owner == null) {

@ -844,9 +879,9 @@ public final class Definition {
    java.lang.reflect.Field reflect;

    try {
        reflect = owner.clazz.getField(alias == null ? name : alias);
        reflect = owner.clazz.getField(name);
    } catch (final NoSuchFieldException exception) {
        throw new IllegalArgumentException("Field [" + (alias == null ? name : alias) + "]" +
        throw new IllegalArgumentException("Field [" + name + "]" +
            " not found for class [" + owner.clazz.getName() + "].");
    }


@ -862,7 +897,7 @@ public final class Definition {
            setter = MethodHandles.publicLookup().unreflectSetter(reflect);
        }
    } catch (final IllegalAccessException exception) {
        throw new IllegalArgumentException("Getter/Setter [" + (alias == null ? name : alias) + "]" +
        throw new IllegalArgumentException("Getter/Setter [" + name + "]" +
            " not found for class [" + owner.clazz.getName() + "].");
    }


@ -875,9 +910,9 @@ public final class Definition {
            " within the struct [" + owner.name + "] is not final.");
        }

        owner.staticMembers.put(alias == null ? name : alias, field);
        owner.staticMembers.put(name, field);
    } else {
        owner.members.put(alias == null ? name : alias, field);
        owner.members.put(name, field);
    }
}


@ -915,11 +950,24 @@ public final class Definition {
            // https://bugs.openjdk.java.net/browse/JDK-8072746
        } else {
            try {
                Class<?> arguments[] = new Class<?>[method.arguments.size()];
                for (int i = 0; i < method.arguments.size(); i++) {
                    arguments[i] = method.arguments.get(i).clazz;
                // TODO: we *have* to remove all these public members and use getter methods to encapsulate!
                final Class<?> impl;
                final Class<?> arguments[];
                if (method.augmentation) {
                    impl = Augmentation.class;
                    arguments = new Class<?>[method.arguments.size() + 1];
                    arguments[0] = method.owner.clazz;
                    for (int i = 0; i < method.arguments.size(); i++) {
                        arguments[i + 1] = method.arguments.get(i).clazz;
                    }
                } else {
                    impl = owner.clazz;
                    arguments = new Class<?>[method.arguments.size()];
                    for (int i = 0; i < method.arguments.size(); i++) {
                        arguments[i] = method.arguments.get(i).clazz;
                    }
                }
                java.lang.reflect.Method m = owner.clazz.getMethod(method.method.getName(), arguments);
                java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
                if (m.getReturnType() != method.rtn.clazz) {
                    throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
                }

@ -53,10 +53,10 @@ public class FunctionRef {
     * @param expected interface type to implement.
     * @param type the left hand side of a method reference expression
     * @param call the right hand side of a method reference expression
     * @param captures captured arguments
     * @param numCaptures number of captured arguments
     */
    public FunctionRef(Definition.Type expected, String type, String call, Class<?>... captures) {
        this(expected, expected.struct.getFunctionalMethod(), lookup(expected, type, call, captures.length > 0), captures);
    public FunctionRef(Definition.Type expected, String type, String call, int numCaptures) {
        this(expected, expected.struct.getFunctionalMethod(), lookup(expected, type, call, numCaptures > 0), numCaptures);
    }

    /**

@ -64,13 +64,16 @@ public class FunctionRef {
     * @param expected interface type to implement
     * @param method functional interface method
     * @param impl implementation method
     * @param captures captured arguments
     * @param numCaptures number of captured arguments
     */
    public FunctionRef(Definition.Type expected, Definition.Method method, Definition.Method impl, Class<?>... captures) {
    public FunctionRef(Definition.Type expected, Definition.Method method, Definition.Method impl, int numCaptures) {
        // e.g. compareTo
        invokedName = method.name;
        // e.g. (Object)Comparator
        invokedType = MethodType.methodType(expected.clazz, captures);
        MethodType implType = impl.getMethodType();
        // only include captured parameters as arguments
        invokedType = MethodType.methodType(expected.clazz,
            implType.dropParameterTypes(numCaptures, implType.parameterCount()));
        // e.g. (Object,Object)int
        interfaceMethodType = method.getMethodType().dropParameterTypes(0, 1);


@ -90,6 +93,9 @@ public class FunctionRef {
        // owner == null: script class itself
        ownerIsInterface = false;
        owner = WriterConstants.CLASS_TYPE.getInternalName();
    } else if (impl.augmentation) {
        ownerIsInterface = false;
        owner = WriterConstants.AUGMENTATION_TYPE.getInternalName();
    } else {
        ownerIsInterface = impl.owner.clazz.isInterface();
        owner = impl.owner.type.getInternalName();

@ -98,7 +104,7 @@ public class FunctionRef {
        implMethod = impl.handle;

        // remove any prepended captured arguments for the 'natural' signature.
        samMethodType = adapt(interfaceMethodType, impl.getMethodType().dropParameterTypes(0, captures.length));
        samMethodType = adapt(interfaceMethodType, impl.getMethodType().dropParameterTypes(0, numCaptures));
    }

    /**

@ -106,11 +112,14 @@ public class FunctionRef {
     * <p>
     * This will <b>not</b> set implMethodASM. It is for runtime use only.
     */
    public FunctionRef(Definition.Type expected, Definition.Method method, MethodHandle impl, Class<?>... captures) {
    public FunctionRef(Definition.Type expected, Definition.Method method, MethodHandle impl, int numCaptures) {
        // e.g. compareTo
        invokedName = method.name;
        // e.g. (Object)Comparator
        invokedType = MethodType.methodType(expected.clazz, captures);
        MethodType implType = impl.type();
        // only include captured parameters as arguments
        invokedType = MethodType.methodType(expected.clazz,
            implType.dropParameterTypes(numCaptures, implType.parameterCount()));
        // e.g. (Object,Object)int
        interfaceMethodType = method.getMethodType().dropParameterTypes(0, 1);


@ -119,7 +128,7 @@ public class FunctionRef {
        implMethodASM = null;

        // remove any prepended captured arguments for the 'natural' signature.
        samMethodType = adapt(interfaceMethodType, impl.type().dropParameterTypes(0, captures.length));
        samMethodType = adapt(interfaceMethodType, impl.type().dropParameterTypes(0, numCaptures));
    }
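
A small self-contained sketch (mine, not from the patch; assumes a Comparator target with one captured String) of the numCaptures arithmetic both constructors now share: the capture count splits the implementation signature into the invocation type and the 'natural' SAM type.

    import java.lang.invoke.MethodType;
    import java.util.Comparator;

    // impl takes the capture first, then the functional-interface arguments
    MethodType implType = MethodType.methodType(int.class, String.class, Object.class, Object.class);
    int numCaptures = 1;
    // invokedType keeps only the captures: (String)Comparator
    MethodType invoked = MethodType.methodType(Comparator.class,
        implType.dropParameterTypes(numCaptures, implType.parameterCount()));
    // the 'natural' SAM signature drops the captures: (Object,Object)int
    MethodType sam = implType.dropParameterTypes(0, numCaptures);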

    /**

@ -72,6 +72,8 @@ public final class WriterConstants {
    public final static Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class);

    public final static Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class);

    public static final Type AUGMENTATION_TYPE = Type.getType(Augmentation.class);

    /**
     * A Method instance for {@linkplain Pattern#compile}. This isn't available from Definition because we intentionally don't add it there

@ -76,7 +76,7 @@ public class ECapturingFunctionRef extends AExpression implements ILambda {
    // static case
    if (captured.type.sort != Definition.Sort.DEF) {
        try {
            ref = new FunctionRef(expected, captured.type.name, call, captured.type.clazz);
            ref = new FunctionRef(expected, captured.type.name, call, 1);
        } catch (IllegalArgumentException e) {
            throw createError(e);
        }

@ -76,10 +76,10 @@ public class EFunctionRef extends AExpression implements ILambda {
            throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
                "to [" + expected.name + "], function not found");
        }
        ref = new FunctionRef(expected, interfaceMethod, implMethod);
        ref = new FunctionRef(expected, interfaceMethod, implMethod, 0);
    } else {
        // whitelist lookup
        ref = new FunctionRef(expected, type, call);
        ref = new FunctionRef(expected, type, call, 0);
    }
} catch (IllegalArgumentException e) {
    throw createError(e);

@ -175,11 +175,7 @@ public class ELambda extends AExpression implements ILambda {
    } else {
        defPointer = null;
        try {
            Class<?> captureClasses[] = new Class<?>[captures.size()];
            for (int i = 0; i < captures.size(); i++) {
                captureClasses[i] = captures.get(i).type.clazz;
            }
            ref = new FunctionRef(expected, interfaceMethod, desugared.method, captureClasses);
            ref = new FunctionRef(expected, interfaceMethod, desugared.method, captures.size());
        } catch (IllegalArgumentException e) {
            throw createError(e);
        }

@ -122,14 +122,8 @@ public final class LCallInvoke extends ALink {
        for (AExpression argument : arguments) {
            argument.write(writer, globals);
        }

        if (java.lang.reflect.Modifier.isStatic(method.modifiers)) {
            writer.invokeStatic(method.owner.type, method.method);
        } else if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) {
            writer.invokeInterface(method.owner.type, method.method);
        } else {
            writer.invokeVirtual(method.owner.type, method.method);
        }

        method.write(writer);
    }

    @Override

@ -92,11 +92,7 @@ final class LListShortcut extends ALink {
    void load(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
            writer.invokeInterface(getter.owner.type, getter.method);
        } else {
            writer.invokeVirtual(getter.owner.type, getter.method);
        }
        getter.write(writer);

        if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
            writer.checkCast(getter.rtn.type);

@ -107,11 +103,7 @@ final class LListShortcut extends ALink {
    void store(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) {
            writer.invokeInterface(setter.owner.type, setter.method);
        } else {
            writer.invokeVirtual(setter.owner.type, setter.method);
        }
        setter.write(writer);

        writer.writePop(setter.rtn.sort.size);
    }

@ -91,11 +91,7 @@ final class LMapShortcut extends ALink {
    void load(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
            writer.invokeInterface(getter.owner.type, getter.method);
        } else {
            writer.invokeVirtual(getter.owner.type, getter.method);
        }
        getter.write(writer);

        if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
            writer.checkCast(getter.rtn.type);

@ -106,11 +102,7 @@ final class LMapShortcut extends ALink {
    void store(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) {
            writer.invokeInterface(setter.owner.type, setter.method);
        } else {
            writer.invokeVirtual(setter.owner.type, setter.method);
        }
        setter.write(writer);

        writer.writePop(setter.rtn.sort.size);
    }

@ -95,11 +95,7 @@ final class LShortcut extends ALink {
    void load(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
            writer.invokeInterface(getter.owner.type, getter.method);
        } else {
            writer.invokeVirtual(getter.owner.type, getter.method);
        }
        getter.write(writer);

        if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
            writer.checkCast(getter.rtn.type);

@ -110,11 +106,7 @@ final class LShortcut extends ALink {
    void store(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) {
            writer.invokeInterface(setter.owner.type, setter.method);
        } else {
            writer.invokeVirtual(setter.owner.type, setter.method);
        }
        setter.write(writer);

        writer.writePop(setter.rtn.sort.size);
    }

@ -206,10 +206,8 @@ public class SEach extends AStatement {
            Type itr = Definition.getType("Iterator");
            org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType(itr.type, Definition.DEF_TYPE.type);
            writer.invokeDefCall("iterator", methodType, DefBootstrap.ITERATOR);
        } else if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) {
            writer.invokeInterface(method.owner.type, method.method);
        } else {
            writer.invokeVirtual(method.owner.type, method.method);
            method.write(writer);
        }

        writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ISTORE), iterator.getSlot());

@ -114,7 +114,7 @@ public class SFunction extends AStatement {

        org.objectweb.asm.commons.Method method =
            new org.objectweb.asm.commons.Method(name, MethodType.methodType(rtnType.clazz, paramClasses).toMethodDescriptorString());
        this.method = new Method(name, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null);
        this.method = new Method(name, null, false, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null);
    }

    @Override

@ -50,6 +50,15 @@ class Iterable -> java.lang.Iterable {
  void forEach(Consumer)
  Iterator iterator()
  Spliterator spliterator()
  # some adaptations of groovy methods
  boolean any*(Predicate)
  def each*(Consumer)
  def eachWithIndex*(ObjIntConsumer)
  boolean every*(Predicate)
  List findResults*(Function)
  Map groupBy*(Function)
  String join*(String)
  double sum*(ToDoubleFunction)
}
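
A note on the whitelist syntax, hedged reading of the parser change in Definition above: a trailing * marks an augmentation, so the entry binds to a static method on Augmentation whose first parameter is the receiver; it replaces the old name/alias spelling. One side-by-side mapping as a sketch:

    # whitelist entry on Iterable (receiver is implicit):
    #   boolean any*(Predicate)
    # resolves to the static implementation:
    #   public static <T> boolean any(Iterable<T> receiver, Predicate<T> predicate)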

# Readable: i/o

@ -42,7 +42,7 @@ class Matcher -> java.util.regex.Matcher extends Object {
  boolean find(int)
  String group()
  String group(int)
  String namedGroup/group(String)
  String namedGroup*(String)
  int groupCount()
  boolean hasAnchoringBounds()
  boolean hasTransparentBounds()
|
@ -39,6 +39,15 @@ class Collection -> java.util.Collection extends Iterable {
|
|||
Stream stream()
|
||||
def[] toArray()
|
||||
def[] toArray(def[])
|
||||
|
||||
# some adaptations of groovy methods
|
||||
List collect*(Function)
|
||||
def collect*(Collection,Function)
|
||||
def find*(Predicate)
|
||||
List findAll*(Predicate)
|
||||
def findResult*(Function)
|
||||
def findResult*(def,Function)
|
||||
List split*(Predicate)
|
||||
}
|
||||
|
||||
class Comparator -> java.util.Comparator {
|
||||
|

@ -114,8 +123,7 @@ class List -> java.util.List extends Collection,Iterable {
  def remove(int)
  void replaceAll(UnaryOperator)
  def set(int,def)
  # TODO: wtf?
  int getLength/size()
  int getLength*()
  void sort(Comparator)
  List subList(int,int)
}

@ -153,6 +161,19 @@ class Map -> java.util.Map {
  void replaceAll(BiFunction)
  int size()
  Collection values()

  # some adaptations of groovy methods
  List collect*(BiFunction)
  def collect*(Collection,BiFunction)
  int count*(BiPredicate)
  def each*(BiConsumer)
  boolean every*(BiPredicate)
  Map.Entry find*(BiPredicate)
  Map findAll*(BiPredicate)
  def findResult*(BiFunction)
  def findResult*(def,BiFunction)
  List findResults*(BiFunction)
  Map groupBy*(BiFunction)
}

class Map.Entry -> java.util.Map$Entry {
@ -0,0 +1,178 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.painless;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class AugmentationTests extends ScriptTestCase {
|
||||
|
||||
public void testStatic() {
|
||||
assertEquals(1, exec("ArrayList l = new ArrayList(); l.add(1); return l.getLength();"));
|
||||
assertEquals(1, exec("ArrayList l = new ArrayList(); l.add(1); return l.length;"));
|
||||
}
|
||||
|
||||
public void testSubclass() {
|
||||
assertEquals(1, exec("List l = new ArrayList(); l.add(1); return l.getLength();"));
|
||||
assertEquals(1, exec("List l = new ArrayList(); l.add(1); return l.length;"));
|
||||
}
|
||||
|
||||
public void testDef() {
|
||||
assertEquals(1, exec("def l = new ArrayList(); l.add(1); return l.getLength();"));
|
||||
assertEquals(1, exec("def l = new ArrayList(); l.add(1); return l.length;"));
|
||||
}
|
||||
|
||||
public void testCapturingReference() {
|
||||
assertEquals(1, exec("int foo(Supplier t) { return t.get() }" +
|
||||
"ArrayList l = new ArrayList(); l.add(1);" +
|
||||
"return foo(l::getLength);"));
|
||||
assertEquals(1, exec("int foo(Supplier t) { return t.get() }" +
|
||||
"List l = new ArrayList(); l.add(1);" +
|
||||
"return foo(l::getLength);"));
|
||||
assertEquals(1, exec("int foo(Supplier t) { return t.get() }" +
|
||||
"def l = new ArrayList(); l.add(1);" +
|
||||
"return foo(l::getLength);"));
|
||||
}
|
||||
|
||||
public void testIterable_Any() {
|
||||
assertEquals(true,
|
||||
exec("List l = new ArrayList(); l.add(1); l.any(x -> x == 1)"));
|
||||
}
|
||||
|
||||
public void testIterable_Each() {
|
||||
assertEquals(1,
|
||||
exec("List l = new ArrayList(); l.add(1); List l2 = new ArrayList(); l.each(l2::add); return l2.size()"));
|
||||
}
|
||||
|
||||
    public void testIterable_EachWithIndex() {
        assertEquals(0,
            exec("List l = new ArrayList(); l.add(2); Map m = new HashMap(); l.eachWithIndex(m::put); return m.get(2)"));
    }

    public void testIterable_Every() {
        assertEquals(false, exec("List l = new ArrayList(); l.add(1); l.add(2); l.every(x -> x == 1)"));
    }

    public void testIterable_FindResults() {
        assertEquals(1,
            exec("List l = new ArrayList(); l.add(1); l.add(2); l.findResults(x -> x == 1 ? x : null).size()"));
    }

    public void testIterable_GroupBy() {
        assertEquals(2,
            exec("List l = new ArrayList(); l.add(1); l.add(-1); l.groupBy(x -> x < 0 ? 'negative' : 'positive').size()"));
    }

    public void testIterable_Join() {
        assertEquals("test,ing",
            exec("List l = new ArrayList(); l.add('test'); l.add('ing'); l.join(',')"));
    }

    public void testIterable_Sum() {
        assertEquals(5.0D,
            exec("List l = new ArrayList(); l.add(1); l.add(2); l.sum(x -> x + 1)"));
    }

    public void testCollection_Collect() {
        assertEquals(Arrays.asList(2, 3),
            exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(x -> x + 1)"));
        assertEquals(asSet(2, 3),
            exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(new HashSet(), x -> x + 1)"));
    }

    public void testCollection_Find() {
        assertEquals(2,
            exec("List l = new ArrayList(); l.add(1); l.add(2); return l.find(x -> x == 2)"));
    }

    public void testCollection_FindAll() {
        assertEquals(Arrays.asList(2),
            exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findAll(x -> x == 2)"));
    }

    public void testCollection_FindResult() {
        assertEquals("found",
            exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult(x -> x > 1 ? 'found' : null)"));
        assertEquals("notfound",
            exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult('notfound', x -> x > 10 ? 'found' : null)"));
    }

    public void testCollection_Split() {
        assertEquals(Arrays.asList(Arrays.asList(2), Arrays.asList(1)),
            exec("List l = new ArrayList(); l.add(1); l.add(2); return l.split(x -> x == 2)"));
    }

    public void testMap_Collect() {
        assertEquals(Arrays.asList("one1", "two2"),
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect((key,value) -> key + value)"));
        assertEquals(asSet("one1", "two2"),
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect(new HashSet(), (key,value) -> key + value)"));
    }

    public void testMap_Count() {
        assertEquals(1,
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.count((key,value) -> value == 2)"));
    }

    public void testMap_Each() {
        assertEquals(2,
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; Map m2 = new TreeMap(); m.each(m2::put); return m2.size()"));
    }

    public void testMap_Every() {
        assertEquals(false,
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.every((key,value) -> value == 2)"));
    }

    public void testMap_Find() {
        assertEquals("two",
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.find((key,value) -> value == 2).key"));
    }

    public void testMap_FindAll() {
        assertEquals(Collections.singletonMap("two", 2),
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findAll((key,value) -> value == 2)"));
    }

    public void testMap_FindResult() {
        assertEquals("found",
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findResult((key,value) -> value == 2 ? 'found' : null)"));
        assertEquals("notfound",
            exec("Map m = new TreeMap(); m.one = 1; m.two = 2; " +
                 "return m.findResult('notfound', (key,value) -> value == 10 ? 'found' : null)"));
    }

    public void testMap_FindResults() {
        assertEquals(Arrays.asList("negative", "positive"),
            exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " +
                 "return m.findResults((key,value) -> value < 0 ? 'negative' : 'positive')"));
    }

    public void testMap_GroupBy() {
        Map<String,Map<String,Integer>> expected = new HashMap<>();
        expected.put("negative", Collections.singletonMap("a", -1));
        expected.put("positive", Collections.singletonMap("b", 1));
        assertEquals(expected,
            exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " +
                 "return m.groupBy((key,value) -> value < 0 ? 'negative' : 'positive')"));
    }
}

@ -170,10 +170,24 @@ public class FunctionRefTests extends ScriptTestCase {
        assertTrue(expected.getMessage().contains("Unknown reference"));
    }

    public void testWrongArityNotEnough() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
        });
        assertTrue(expected.getMessage().contains("Unknown reference"));
    }

    public void testWrongArityDef() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);");
        });
        assertTrue(expected.getMessage().contains("Unknown reference"));
    }

    public void testWrongArityNotEnoughDef() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
        });
        assertTrue(expected.getMessage().contains("Unknown reference"));
    }
}

@ -180,6 +180,22 @@ public class LambdaTests extends ScriptTestCase {
        assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters"));
    }

    public void testWrongArityNotEnough() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(() -> 5).sum();");
        });
        assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
    }

    public void testWrongArityNotEnoughDef() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("def l = new ArrayList(); l.add(1); l.add(1); "
                + "return l.stream().mapToInt(() -> 5).sum();");
        });
        assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
    }

    public void testLambdaInFunction() {
        assertEquals(5, exec("def foo() { Optional.empty().orElseGet(() -> 5) } return foo();"));
    }

@ -76,6 +76,17 @@
  # The task will be in the response even if it finished before we got here
  # because of task persistence.
  - is_true: task
  - is_false: response.timed_out
  - match: {response.deleted: 1}
  - is_false: response.created
  - is_false: response.updated
  - match: {response.version_conflicts: 0}
  - match: {response.batches: 1}
  - match: {response.failures: []}
  - match: {response.noops: 0}
  - match: {response.throttled_millis: 0}
  - gte: { response.took: 0 }
  - is_false: response.task

---
"Response for version conflict":

@ -100,6 +100,15 @@
  # The task will be in the response even if it finished before we got here
  # because of task persistence.
  - is_true: task
  - match: {response.created: 1}
  - match: {response.updated: 0}
  - match: {response.version_conflicts: 0}
  - match: {response.batches: 1}
  - match: {response.failures: []}
  - match: {response.throttled_millis: 0}
  - gte: { response.took: 0 }
  - is_false: response.task
  - is_false: response.deleted

---
"Response format for version conflict":

@ -60,6 +60,18 @@
  # The task will be in the response even if it finished before we got here
  # because of task persistence.
  - is_true: task
  - is_false: response.timed_out
  - match: {response.updated: 1}
  - match: {response.version_conflicts: 0}
  - match: {response.batches: 1}
  - match: {response.failures: []}
  - match: {response.noops: 0}
  - match: {response.throttled_millis: 0}
  - gte: { response.took: 0 }
  # Update by query can't create
  - is_false: response.created
  - is_false: response.task
  - is_false: response.deleted

---
"Response for version conflict":

@ -255,7 +255,7 @@ Supports also regular expressions with flag X for more readability (accepts whit
Compares two numeric values, eg:

....
    - lt: { foo: 10000 }  # the `foo` value is less than 10,000
    - lt: { _ttl: 10000 } # the `_ttl` value is less than 10,000
....
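
(The `gt` assertion takes the same form and asserts that the value is strictly greater; it is exercised by the `_ttl` tests in this commit, e.g. `- gt: { _ttl: 0 }`.)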

=== `lte` and `gte`

@ -263,7 +263,7 @@ Compares two numeric values, eg:
Compares two numeric values, eg:

....
    - lte: { foo: 10000 }  # the `foo` value is less than or equal to 10,000
    - lte: { _ttl: 10000 } # the `_ttl` value is less than or equal to 10,000
....
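
A `gte` example is not shown above; a minimal sketch mirroring the `lte` lines (the field name is a placeholder only):

....
    - gte: { foo: 10000 } # the `foo` value is greater than or equal to 10,000
....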

=== `length`

@ -0,0 +1,80 @@
---
"Timestamp":

  - do:
      indices.create:
        index: test_1
        body:
          mappings:
            test:
              _timestamp:
                enabled: 1
  - do:
      cluster.health:
        wait_for_status: yellow

  # blank timestamp
  - do:
      create:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - is_true: _timestamp

  # milliseconds since epoch

  - do:
      delete:
        index: test_1
        type: test
        id: 1
  - do:
      create:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        timestamp: 1372011280000

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - match: { _timestamp: 1372011280000 }

  # date format

  - do:
      delete:
        index: test_1
        type: test
        id: 1
  - do:
      create:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        timestamp: 2013-06-23T18:14:40

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - match: { _timestamp: 1372011280000 }

@ -0,0 +1,100 @@
---
"TTL":

  - do:
      indices.create:
        index: test_1
        body:
          mappings:
            test:
              _ttl:
                enabled: 1
                default: 10s
  - do:
      cluster.health:
        wait_for_status: yellow

  # blank ttl
  - do:
      create:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 10000 }
  - gt: { _ttl: 0 }

  # milliseconds

  - do:
      delete:
        index: test_1
        type: test
        id: 1
  - do:
      create:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        ttl: 100000ms
  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 100000 }
  - gt: { _ttl: 10000 }

  # duration

  - do:
      delete:
        index: test_1
        type: test
        id: 1
  - do:
      create:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        ttl: 20s

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 20000 }
  - gt: { _ttl: 10000 }

  # with timestamp

  - do:
      delete:
        index: test_1
        type: test
        id: 1
  - do:
      catch: /already_expired_exception/
      create:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        ttl: 20s
        timestamp: 2013-06-23T18:14:40

@ -0,0 +1,70 @@
---
"Timestamp":

  - do:
      indices.create:
        index: test_1
        body:
          mappings:
            test:
              _timestamp:
                enabled: 1
  - do:
      cluster.health:
        wait_for_status: yellow

  # blank timestamp
  - do:
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - is_true: _timestamp

  # milliseconds since epoch

  - do:
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        timestamp: 1372011280000

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - match: { _timestamp: 1372011280000 }

  # date format

  - do:
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        timestamp: 2013-06-23T18:14:40

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - match: { _timestamp: 1372011280000 }

@ -0,0 +1,85 @@
---
"TTL":

  - do:
      indices.create:
        index: test_1
        body:
          mappings:
            test:
              _ttl:
                enabled: 1
                default: 10s
  - do:
      cluster.health:
        wait_for_status: yellow

  # blank ttl
  - do:
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 10000 }
  - gt: { _ttl: 0 }

  # milliseconds

  - do:
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        ttl: 100000ms
  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 100000 }
  - gt: { _ttl: 10000 }

  # duration

  - do:
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        ttl: 20s

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 20000 }
  - gt: { _ttl: 10000 }

  # with timestamp

  - do:
      catch: /already_expired_exception/
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        ttl: 20s
        timestamp: 2013-06-23T18:14:40

@ -78,7 +78,7 @@ setup:
  - match: { hits.hits.0._index: "test" }
  - match: { hits.hits.0._type: "type_2" }
  - match: { hits.hits.0._id: "1" }
  - match: { hits.hits.0.inner_hits.type_3.hits.hits.0._index: "test" }
  - is_false: hits.hits.0.inner_hits.type_3.hits.hits.0._index
  - match: { hits.hits.0.inner_hits.type_3.hits.hits.0._type: "type_3" }
  - match: { hits.hits.0.inner_hits.type_3.hits.hits.0._id: "1" }
  - is_false: hits.hits.0.inner_hits.type_3.hits.hits.0._nested

@ -0,0 +1,76 @@
---
"Timestamp":

  - do:
      indices.create:
        index: test_1
        body:
          mappings:
            test:
              _timestamp:
                enabled: 1
  - do:
      cluster.health:
        wait_for_status: yellow

  # blank timestamp
  - do:
      update:
        index: test_1
        type: test
        id: 1
        body:
          doc: { foo: baz }
          upsert: { foo: bar }

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - is_true: _timestamp

  # milliseconds since epoch

  - do:
      update:
        index: test_1
        type: test
        id: 1
        body:
          doc: { foo: baz }
          upsert: { foo: bar }
        timestamp: 1372011280000

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - match: { _timestamp: 1372011280000 }

  # date format

  - do:
      update:
        index: test_1
        type: test
        id: 1
        body:
          doc: { foo: baz }
          upsert: { foo: bar }
        timestamp: 2013-06-23T18:14:40

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _timestamp

  - match: { _timestamp: 1372011280000 }

@ -0,0 +1,92 @@
---
"TTL":

  - do:
      indices.create:
        index: test_1
        body:
          mappings:
            test:
              _ttl:
                enabled: 1
                default: 10s
  - do:
      cluster.health:
        wait_for_status: yellow

  # blank ttl
  - do:
      update:
        index: test_1
        type: test
        id: 1
        body:
          doc: { foo: baz }
          upsert: { foo: bar }

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 10000 }
  - gt: { _ttl: 0 }

  # milliseconds

  - do:
      update:
        index: test_1
        type: test
        id: 1
        body:
          doc: { foo: baz }
          upsert: { foo: bar }
        ttl: 100000ms

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 100000 }
  - gt: { _ttl: 10000 }

  # seconds

  - do:
      update:
        index: test_1
        type: test
        id: 1
        body:
          doc: { foo: baz }
          upsert: { foo: bar }
          detect_noop: false
        ttl: 20s

  - do:
      get:
        index: test_1
        type: test
        id: 1
        fields: _ttl

  - lte: { _ttl: 20000 }
  - gt: { _ttl: 10000 }

  # with timestamp

  - do:
      catch: /already_expired_exception/
      index:
        index: test_1
        type: test
        id: 1
        body: { foo: bar }
        ttl: 20s
        timestamp: 2013-06-23T18:14:40

@ -12,6 +12,11 @@
          mappings:
            test:
              _parent: { type: "foo" }
              _timestamp:
                enabled: 1
              _ttl:
                enabled: 1
                default: 10s

  - do:
      cluster.health:

@ -23,13 +28,15 @@
        type: test
        id: 1
        parent: 5
        fields: [ _parent, _routing ]
        fields: [ _parent, _routing, _timestamp, _ttl ]
        body:
          doc: { foo: baz }
          upsert: { foo: bar }

  - match: { get._parent: "5" }
  - match: { get._routing: "5" }
  - is_true: get._timestamp
  - is_true: get._ttl

  - do:
      get:

@ -37,6 +44,6 @@
        type: test
        id: 1
        parent: 5
        fields: [ _parent, _routing ]
        fields: [ _parent, _routing, _timestamp, _ttl ]

@ -383,7 +383,13 @@ public abstract class ESIntegTestCase extends ESTestCase {

        XContentBuilder mappings = null;
        if (frequently() && randomDynamicTemplates()) {
            mappings = XContentFactory.jsonBuilder().startObject().startObject("_default_").endObject().endObject();
            mappings = XContentFactory.jsonBuilder().startObject().startObject("_default_");
            if (randomBoolean()) {
                mappings.startObject(TimestampFieldMapper.NAME)
                    .field("enabled", randomBoolean());
                mappings.endObject();
            }
            mappings.endObject().endObject();
        }
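        // A sketch of the result, assuming the randomBoolean() branch above was taken:
        // the builder yields {"_default_": {"_timestamp": {"enabled": <random boolean>}}};
        // otherwise the _default_ mapping is left empty.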

        for (String setting : randomSettingsBuilder.internalMap().keySet()) {