Merge branch 'master' into feature/http_client

javanna 2016-06-22 09:50:07 +02:00 committed by Luca Cavanna
commit 490d9c8cf7
126 changed files with 3479 additions and 2186 deletions

View File

@ -23,6 +23,16 @@ apply plugin: 'groovy'
group = 'org.elasticsearch.gradle'
// TODO: remove this when upgrading to a version that supports ProgressLogger
// gradle 2.14 made internal apis unavailable to plugins, and gradle considered
// ProgressLogger to be an internal api. Until this is made available again,
// we can't upgrade without losing our nice progress logging
// NOTE that this check duplicates that in BuildPlugin, but we need to check
// early here before trying to compile the broken classes in buildSrc
if (GradleVersion.current() != GradleVersion.version('2.13')) {
throw new GradleException('Gradle 2.13 is required to build elasticsearch')
}
if (project == rootProject) {
// change the build dir used during build init, so that doing a clean
// won't wipe out the buildscript jar

View File

@ -19,16 +19,11 @@
package org.elasticsearch;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
@ -47,9 +42,9 @@ public class Build {
final String date;
final boolean isSnapshot;
Path path = getElasticsearchCodebase();
if (path.toString().endsWith(".jar")) {
try (JarInputStream jar = new JarInputStream(Files.newInputStream(path))) {
final URL url = getElasticsearchCodebase();
if (url.toString().endsWith(".jar")) {
try (JarInputStream jar = new JarInputStream(url.openStream())) {
Manifest manifest = jar.getManifest();
shortHash = manifest.getMainAttributes().getValue("Change");
date = manifest.getMainAttributes().getValue("Build-Date");
@ -80,14 +75,8 @@ public class Build {
/**
* Returns the location of the elasticsearch codebase
*/
@SuppressForbidden(reason = "looks up path of elasticsearch.jar directly")
static Path getElasticsearchCodebase() {
URL url = Build.class.getProtectionDomain().getCodeSource().getLocation();
try {
return PathUtils.get(url.toURI());
} catch (URISyntaxException bogus) {
throw new RuntimeException(bogus);
}
static URL getElasticsearchCodebase() {
return Build.class.getProtectionDomain().getCodeSource().getLocation();
}
private String shortHash;
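A note on the change above: a code-source location is a URL that may not map onto the default filesystem (PathUtils.get(url.toURI()) fails for non-file URLs), but it can always be opened as a stream. A minimal self-contained sketch of the resulting pattern, with a hypothetical class name:

import java.io.IOException;
import java.net.URL;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;

class CodebaseInfoDemo {
    static void printBuildInfo(Class<?> clazz) throws IOException {
        // getCodeSource() can be null for system classes; real code should guard for that.
        URL url = clazz.getProtectionDomain().getCodeSource().getLocation();
        if (url.toString().endsWith(".jar")) {
            // Reading via a stream works wherever the jar lives, no Path required.
            try (JarInputStream jar = new JarInputStream(url.openStream())) {
                Manifest manifest = jar.getManifest();
                System.out.println(manifest.getMainAttributes().getValue("Change"));
                System.out.println(manifest.getMainAttributes().getValue("Build-Date"));
            }
        }
    }
}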

View File

@ -41,6 +41,7 @@ import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.tasks.PersistedTaskInfo;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.tasks.TaskPersistenceService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BaseTransportResponseHandler;
@ -140,6 +141,7 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
void getRunningTaskFromNode(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
Task runningTask = taskManager.getTask(request.getTaskId().getId());
if (runningTask == null) {
// Task isn't running, go look in the task index
getFinishedTaskFromIndex(thisTask, request, listener);
} else {
if (request.getWaitForCompletion()) {
@ -148,9 +150,7 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
@Override
protected void doRun() throws Exception {
taskManager.waitForTaskCompletion(runningTask, waitForCompletionTimeout(request.getTimeout()));
// TODO look up the task's result from the .tasks index now that it is done
listener.onResponse(
new GetTaskResponse(new PersistedTaskInfo(runningTask.taskInfo(clusterService.localNode(), true))));
waitedForCompletion(thisTask, request, runningTask.taskInfo(clusterService.localNode(), true), listener);
}
@Override
@ -159,15 +159,44 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
}
});
} else {
listener.onResponse(new GetTaskResponse(new PersistedTaskInfo(runningTask.taskInfo(clusterService.localNode(), true))));
TaskInfo info = runningTask.taskInfo(clusterService.localNode(), true);
listener.onResponse(new GetTaskResponse(new PersistedTaskInfo(false, info)));
}
}
}
/**
* Send a {@link GetRequest} to the results index looking for the results of the task. It'll only be found if the task's result was
* persisted. Called on the node that once had the task if that node is part of the cluster or on the coordinating node if the node
* wasn't part of the cluster.
* Called after waiting for the task to complete. Attempts to load the results of the task from the tasks index. If it isn't in the
* index then returns a snapshot of the task taken shortly after completion.
*/
void waitedForCompletion(Task thisTask, GetTaskRequest request, TaskInfo snapshotOfRunningTask,
ActionListener<GetTaskResponse> listener) {
getFinishedTaskFromIndex(thisTask, request, new ActionListener<GetTaskResponse>() {
@Override
public void onResponse(GetTaskResponse response) {
// We were able to load the task from the task index. Let's send that back.
listener.onResponse(response);
}
@Override
public void onFailure(Throwable e) {
/*
* We couldn't load the task from the task index. Instead of 404 we should use the snapshot we took after it finished. If
* the error isn't a 404 then we'll just throw it back to the user.
*/
if (ExceptionsHelper.unwrap(e, ResourceNotFoundException.class) != null) {
listener.onResponse(new GetTaskResponse(new PersistedTaskInfo(true, snapshotOfRunningTask)));
} else {
listener.onFailure(e);
}
}
});
}
/**
* Send a {@link GetRequest} to the tasks index looking for a persisted copy of the completed task. It'll only be found if the
* task's result was persisted. Called on the node that once had the task if that node is still part of the cluster or on the
* coordinating node if the node is no longer part of the cluster.
*/
void getFinishedTaskFromIndex(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
GetRequest get = new GetRequest(TaskPersistenceService.TASK_INDEX, TaskPersistenceService.TASK_TYPE,
@ -202,6 +231,7 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
void onGetFinishedTaskFromIndex(GetResponse response, ActionListener<GetTaskResponse> listener) throws IOException {
if (false == response.isExists()) {
listener.onFailure(new ResourceNotFoundException("task [{}] isn't running or persisted", response.getId()));
return;
}
if (response.isSourceEmpty()) {
listener.onFailure(new ElasticsearchException("Stored task status for [{}] didn't contain any source!", response.getId()));
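The waitedForCompletion fallback above is a small reusable pattern: wrap the downstream listener so that a not-found from the index answers with the in-memory snapshot instead of surfacing a 404, while every other failure propagates. A hedged, self-contained sketch with stand-in types (none of these names are the real Elasticsearch classes):

import java.util.function.Consumer;

class FallbackDemo {
    /** Hypothetical stand-in for ResourceNotFoundException. */
    static class NotFoundException extends RuntimeException {}

    /**
     * Handle a failure from the index lookup: "not found" just means the
     * task never persisted a result, so answer with the snapshot taken
     * right after completion; anything else goes back to the caller.
     */
    static <T> void onFailure(Throwable e, T snapshot, Consumer<T> respond, Consumer<Throwable> fail) {
        if (unwrapsTo(e, NotFoundException.class)) {
            respond.accept(snapshot);
        } else {
            fail.accept(e);
        }
    }

    /** Walk the cause chain, loosely like ExceptionsHelper.unwrap does. */
    static boolean unwrapsTo(Throwable e, Class<? extends Throwable> type) {
        for (Throwable t = e; t != null; t = t.getCause()) {
            if (type.isInstance(t)) {
                return true;
            }
        }
        return false;
    }
}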

View File

@ -24,6 +24,7 @@ import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
@ -33,15 +34,12 @@ public final class TrackingResultProcessor implements Processor {
private final Processor actualProcessor;
private final List<SimulateProcessorResult> processorResultList;
private final boolean ignoreFailure;
public TrackingResultProcessor(Processor actualProcessor, List<SimulateProcessorResult> processorResultList) {
public TrackingResultProcessor(boolean ignoreFailure, Processor actualProcessor, List<SimulateProcessorResult> processorResultList) {
this.ignoreFailure = ignoreFailure;
this.processorResultList = processorResultList;
if (actualProcessor instanceof CompoundProcessor) {
CompoundProcessor trackedCompoundProcessor = decorate((CompoundProcessor) actualProcessor, processorResultList);
this.actualProcessor = trackedCompoundProcessor;
} else {
this.actualProcessor = actualProcessor;
}
this.actualProcessor = actualProcessor;
}
@Override
@ -50,7 +48,11 @@ public final class TrackingResultProcessor implements Processor {
actualProcessor.execute(ingestDocument);
processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), new IngestDocument(ingestDocument)));
} catch (Exception e) {
processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), e));
if (ignoreFailure) {
processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), new IngestDocument(ingestDocument)));
} else {
processorResultList.add(new SimulateProcessorResult(actualProcessor.getTag(), e));
}
throw e;
}
}
@ -71,7 +73,7 @@ public final class TrackingResultProcessor implements Processor {
if (processor instanceof CompoundProcessor) {
processors.add(decorate((CompoundProcessor) processor, processorResultList));
} else {
processors.add(new TrackingResultProcessor(processor, processorResultList));
processors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList));
}
}
List<Processor> onFailureProcessors = new ArrayList<>(compoundProcessor.getProcessors().size());
@ -79,10 +81,10 @@ public final class TrackingResultProcessor implements Processor {
if (processor instanceof CompoundProcessor) {
onFailureProcessors.add(decorate((CompoundProcessor) processor, processorResultList));
} else {
onFailureProcessors.add(new TrackingResultProcessor(processor, processorResultList));
onFailureProcessors.add(new TrackingResultProcessor(compoundProcessor.isIgnoreFailure(), processor, processorResultList));
}
}
return new CompoundProcessor(false, processors, onFailureProcessors);
return new CompoundProcessor(compoundProcessor.isIgnoreFailure(), processors, onFailureProcessors);
}
}
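The behavioural change above is easiest to see in isolation: on failure, a tracking step with ignore_failure set records a success-shaped result (a document snapshot) instead of the error, then re-throws so the enclosing compound processor keeps control of what happens next. A simplified sketch with stand-in types:

import java.util.List;
import java.util.function.Consumer;

class TrackingStepDemo {
    private final boolean ignoreFailure;
    private final Consumer<String> step;   // stands in for Processor
    private final List<String> results;    // stands in for the SimulateProcessorResult list

    TrackingStepDemo(boolean ignoreFailure, Consumer<String> step, List<String> results) {
        this.ignoreFailure = ignoreFailure;
        this.step = step;
        this.results = results;
    }

    void execute(String doc) {
        try {
            step.accept(doc);
            results.add("success: " + doc);
        } catch (RuntimeException e) {
            // With ignoreFailure the simulation reports the document as it
            // stood, not the error; either way the exception is re-thrown.
            results.add(ignoreFailure ? "success: " + doc : "failure: " + e.getMessage());
            throw e;
        }
    }
}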

View File

@ -32,6 +32,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.ConnectTransportException;
@ -45,7 +46,6 @@ import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
@ -203,7 +203,7 @@ public class MasterFaultDetection extends FaultDetection {
listener.onMasterFailure(masterNode, cause, reason);
}
});
} catch (RejectedExecutionException e) {
} catch (EsRejectedExecutionException e) {
logger.error("master failure notification was rejected, it's highly likely the node is shutting down", e);
}
stop("master failure, " + reason);

View File

@ -192,6 +192,11 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
return (KeywordFieldMapper) super.clone();
}
// pkg-private for testing
Boolean includeInAll() {
return includeInAll;
}
@Override
public KeywordFieldMapper includeInAll(Boolean includeInAll) {
if (includeInAll != null) {

View File

@ -73,12 +73,12 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
"type",
// common keyword parameters, for which the upgrade is straightforward
"index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to",
"fielddata", "ignore_above"));
"fielddata", "include_in_all", "ignore_above"));
private static final Set<String> SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_TEXT = new HashSet<>(Arrays.asList(
"type",
// common text parameters, for which the upgrade is straightforward
"index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to",
"fielddata", "analyzer", "search_analyzer", "search_quote_analyzer"));
"fielddata", "include_in_all", "analyzer", "search_analyzer", "search_quote_analyzer"));
public static class Defaults {
public static double FIELDDATA_MIN_FREQUENCY = 0;

View File

@ -317,6 +317,11 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
return (TextFieldMapper) super.clone();
}
// pkg-private for testing
Boolean includeInAll() {
return includeInAll;
}
@Override
public TextFieldMapper includeInAll(Boolean includeInAll) {
if (includeInAll != null) {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@ -100,6 +101,9 @@ public class TTLFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) {
throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field.");
}
Builder builder = new Builder();
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@ -165,6 +169,9 @@ public class TTLFieldMapper extends MetadataFieldMapper {
private TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL,
Settings indexSettings) {
super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings);
if (enabled.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) {
throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field.");
}
this.enabledState = enabled;
this.defaultTTL = defaultTTL;
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
@ -126,6 +127,9 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) {
throw new IllegalArgumentException("[_timestamp] is removed in 5.0. As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents.");
}
Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings());
boolean defaultSet = false;
Boolean ignoreMissing = null;
@ -200,6 +204,9 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState,
String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) {
super(NAME, fieldType, defaultFieldType, indexSettings);
if (enabledState.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) {
throw new IllegalArgumentException("[_timestamp] is removed in 5.0. As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents.");
}
this.enabledState = enabledState;
this.defaultTimestamp = defaultTimestamp;
this.ignoreMissing = ignoreMissing;
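Both mappers above apply the same guard in two places, once when parsing a mapping and once in the constructor, so the removed metadata field is rejected only for indices created on or after the removal version while pre-existing indices keep working. A schematic sketch of that gate (plain ints stand in for Version):

class RemovedFeatureGateDemo {
    /**
     * Reject a removed feature only when it is actually enabled and the
     * index was created on or after the version that removed it, so old
     * indices still parse after an upgrade.
     */
    static void check(String feature, boolean enabled, int indexCreatedVersion, int removedInVersion) {
        if (enabled && indexCreatedVersion >= removedInVersion) {
            throw new IllegalArgumentException("[" + feature + "] is removed in this version");
        }
    }
}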

View File

@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.translog;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
/**
* only for testing until we have a disk-full FileSystem
*/
@FunctionalInterface
interface ChannelFactory {
default FileChannel open(Path path) throws IOException {
return open(path, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
}
FileChannel open(Path path, OpenOption... options) throws IOException;
}
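This interface is a test seam: production code can supply a plain method reference, while a test substitutes a factory that fails on open, simulating a full disk without a special FileSystem. A small usage sketch (class and field names are illustrative):

import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.OpenOption;
import java.nio.file.Path;

class ChannelFactoryDemo {
    interface ChannelFactory {
        FileChannel open(Path path, OpenOption... options) throws IOException;
    }

    // Production behaviour is just a method reference...
    static final ChannelFactory REAL = FileChannel::open;

    // ...while a test can inject a factory that throws, exercising the
    // tragic-event handling in the translog writer.
    static final ChannelFactory DISK_FULL = (path, options) -> {
        throw new IOException("simulated disk full");
    };
}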

View File

@ -82,8 +82,8 @@ class Checkpoint {
}
}
public static void write(Path checkpointFile, Checkpoint checkpoint, OpenOption... options) throws IOException {
try (FileChannel channel = FileChannel.open(checkpointFile, options)) {
public static void write(ChannelFactory factory, Path checkpointFile, Checkpoint checkpoint, OpenOption... options) throws IOException {
try (FileChannel channel = factory.open(checkpointFile, options)) {
checkpoint.write(channel);
channel.force(false);
}

View File

@ -200,7 +200,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
Files.createDirectories(location);
final long generation = 1;
Checkpoint checkpoint = new Checkpoint(0, 0, generation);
Checkpoint.write(location.resolve(CHECKPOINT_FILE_NAME), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
Checkpoint.write(getChannelFactory(), location.resolve(CHECKPOINT_FILE_NAME), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
current = createWriter(generation);
this.lastCommittedTranslogFileGeneration = NOT_SET_GENERATION;
@ -1313,8 +1313,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
return outstandingViews.size();
}
TranslogWriter.ChannelFactory getChannelFactory() {
return TranslogWriter.ChannelFactory.DEFAULT;
ChannelFactory getChannelFactory() {
return FileChannel::open;
}
/**

View File

@ -49,6 +49,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
public static final int VERSION = VERSION_CHECKPOINTS;
private final ShardId shardId;
private final ChannelFactory channelFactory;
/* the offset in bytes that was written when the file was last synced */
private volatile long lastSyncedOffset;
/* the number of translog operations written to this file */
@ -64,9 +65,10 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
// lock order synchronized(syncLock) -> synchronized(this)
private final Object syncLock = new Object();
public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
public TranslogWriter(ChannelFactory channelFactory, ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
super(generation, channel, path, channel.position());
this.shardId = shardId;
this.channelFactory = channelFactory;
this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channel), bufferSize.bytesAsInt());
this.lastSyncedOffset = channel.position();
totalOffset = lastSyncedOffset;
@ -92,8 +94,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
out.writeInt(ref.length);
out.writeBytes(ref.bytes, ref.offset, ref.length);
channel.force(true);
writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE);
final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, channel, file, bufferSize);
writeCheckpoint(channelFactory, headerLength, 0, file.getParent(), fileGeneration);
final TranslogWriter writer = new TranslogWriter(channelFactory, shardId, fileGeneration, channel, file, bufferSize);
return writer;
} catch (Throwable throwable) {
// if we fail to bake the file-generation into the checkpoint we stick with the file and once we recover and that
@ -254,7 +256,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
// we can continue writing to the buffer etc.
try {
channel.force(false);
writeCheckpoint(offsetToSync, opsCounter, path.getParent(), generation, StandardOpenOption.WRITE);
writeCheckpoint(channelFactory, offsetToSync, opsCounter, path.getParent(), generation);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
@ -286,20 +288,10 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
}
private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException {
private static void writeCheckpoint(ChannelFactory channelFactory, long syncPosition, int numOperations, Path translogFile, long generation) throws IOException {
final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME);
Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation);
Checkpoint.write(checkpointFile, checkpoint, options);
}
static class ChannelFactory {
static final ChannelFactory DEFAULT = new ChannelFactory();
// only for testing until we have a disk-full FileSystem
public FileChannel open(Path file) throws IOException {
return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
}
Checkpoint.write(channelFactory::open, checkpointFile, checkpoint, StandardOpenOption.WRITE);
}
protected final void ensureOpen() {

View File

@ -243,6 +243,7 @@ public final class ConfigurationUtils {
}
}
}
return processors;
}
@ -256,7 +257,12 @@ public final class ConfigurationUtils {
List<Processor> onFailureProcessors = readProcessorConfigs(onFailureProcessorConfigs, processorRegistry);
Processor processor;
processor = factory.create(config);
if (!config.isEmpty()) {
if (onFailureProcessorConfigs != null && onFailureProcessors.isEmpty()) {
throw newConfigurationException(processor.getType(), processor.getTag(), Pipeline.ON_FAILURE_KEY,
"processors list cannot be empty");
}
if (config.isEmpty() == false) {
throw new ElasticsearchParseException("processor [{}] doesn't support one or more provided configuration parameters {}",
type, Arrays.toString(config.keySet().toArray()));
}

View File

@ -109,6 +109,9 @@ public final class Pipeline {
throw new ElasticsearchParseException("pipeline [" + id +
"] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray()));
}
if (onFailureProcessorConfigs != null && onFailureProcessors.isEmpty()) {
throw new ElasticsearchParseException("pipeline [" + id + "] cannot have an empty on_failure option defined");
}
CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.unmodifiableList(processors),
Collections.unmodifiableList(onFailureProcessors));
return new Pipeline(id, description, compoundProcessor);
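Both new checks (in ConfigurationUtils and in Pipeline above) hinge on the same distinction: a null on_failure config means the key was never supplied, which is fine, while a non-null config that parsed to an empty list means the user explicitly wrote "on_failure": [] and should be told so. A minimal sketch of the rule, with hypothetical names:

import java.util.List;

class OnFailureValidationDemo {
    /**
     * Null means the key was absent and is allowed; an explicitly supplied
     * but empty list is a configuration mistake, rejected up front.
     */
    static void validate(Object rawOnFailureConfig, List<?> parsedOnFailure) {
        if (rawOnFailureConfig != null && parsedOnFailure.isEmpty()) {
            throw new IllegalArgumentException("on_failure processors list cannot be empty");
        }
    }
}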

View File

@ -225,7 +225,9 @@ public class Node implements Closeable {
for (final ExecutorBuilder<?> builder : threadPool.builders()) {
additionalSettings.addAll(builder.getRegisteredSettings());
}
final ScriptModule scriptModule = ScriptModule.create(settings, pluginsService.filterPlugins(ScriptPlugin.class));
final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool);
final ScriptModule scriptModule = ScriptModule.create(settings, environment, resourceWatcherService,
pluginsService.filterPlugins(ScriptPlugin.class));
additionalSettings.addAll(scriptModule.getSettings());
// this is as early as we can validate settings at this point. we already pass them to ScriptModule as well as ThreadPool
// so we might be late here already
@ -237,7 +239,6 @@ public class Node implements Closeable {
} catch (IOException ex) {
throw new IllegalStateException("Failed to created node environment", ex);
}
final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool);
resourcesToClose.add(resourceWatcherService);
final NetworkService networkService = new NetworkService(settings);
final ClusterService clusterService = new ClusterService(settings, settingsModule.getClusterSettings(), threadPool);
@ -253,7 +254,6 @@ public class Node implements Closeable {
final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool);
modules.add(new NodeModule(this, monitorService));
modules.add(new NetworkModule(networkService, settings, false, namedWriteableRegistry));
modules.add(scriptModule);
modules.add(new DiscoveryModule(this.settings));
modules.add(new ClusterModule(this.settings, clusterService));
modules.add(new IndicesModule(namedWriteableRegistry));
@ -279,6 +279,7 @@ public class Node implements Closeable {
b.bind(ResourceWatcherService.class).toInstance(resourceWatcherService);
b.bind(CircuitBreakerService.class).toInstance(circuitBreakerService);
b.bind(BigArrays.class).toInstance(bigArrays);
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
}
);
injector = modules.createInjector();

View File

@ -19,12 +19,13 @@
package org.elasticsearch.script;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.util.ArrayList;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -34,57 +35,56 @@ import java.util.function.Function;
import java.util.stream.Collectors;
/**
* An {@link org.elasticsearch.common.inject.Module} which manages {@link ScriptEngineService}s, as well
* as named scripts
* Manages building {@link ScriptService} and {@link ScriptSettings} from a list of plugins.
*/
public class ScriptModule extends AbstractModule {
protected final ScriptContextRegistry scriptContextRegistry;
protected final ScriptEngineRegistry scriptEngineRegistry;
protected final ScriptSettings scriptSettings;
public ScriptModule(ScriptEngineService... services) {
this(Arrays.asList(services), Collections.emptyList());
}
public ScriptModule(List<ScriptEngineService> scriptEngineServices,
List<ScriptContext.Plugin> customScriptContexts) {
this.scriptContextRegistry = new ScriptContextRegistry(customScriptContexts);
this.scriptEngineRegistry = new ScriptEngineRegistry(scriptEngineServices);
this.scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
}
public class ScriptModule {
private final ScriptSettings scriptSettings;
private final ScriptService scriptService;
/**
* This method is called after all modules have been processed but before we actually validate all settings. This allows the
* script extensions to add all their settings.
* Build from {@linkplain ScriptPlugin}s. Convenient for normal use but not great for tests. See
* {@link ScriptModule#ScriptModule(Settings, Environment, ResourceWatcherService, List, List)} for easier use in tests.
*/
public List<Setting<?>> getSettings() {
ArrayList<Setting<?>> settings = new ArrayList<>();
scriptSettings.getScriptTypeSettings().forEach(settings::add);
scriptSettings.getScriptContextSettings().forEach(settings::add);
scriptSettings.getScriptLanguageSettings().forEach(settings::add);
settings.add(scriptSettings.getDefaultScriptLanguageSetting());
return settings;
}
@Override
protected void configure() {
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
bind(ScriptSettings.class).toInstance(scriptSettings);
bind(ScriptService.class).asEagerSingleton();
}
public static ScriptModule create(Settings settings, List<ScriptPlugin> scriptPlugins) {
public static ScriptModule create(Settings settings, Environment environment, ResourceWatcherService resourceWatcherService,
List<ScriptPlugin> scriptPlugins) {
Map<String, NativeScriptFactory> factoryMap = scriptPlugins.stream().flatMap(x -> x.getNativeScripts().stream())
.collect(Collectors.toMap(NativeScriptFactory::getName, Function.identity()));
NativeScriptEngineService nativeScriptEngineService = new NativeScriptEngineService(settings, factoryMap);
List<ScriptEngineService> scriptEngineServices = scriptPlugins.stream().map(x -> x.getScriptEngineService(settings))
.filter(Objects::nonNull).collect(Collectors.toList());
scriptEngineServices.add(nativeScriptEngineService);
return new ScriptModule(scriptEngineServices, scriptPlugins.stream().map(x -> x.getCustomScriptContexts())
.filter(Objects::nonNull).collect(Collectors.toList()));
List<ScriptContext.Plugin> plugins = scriptPlugins.stream().map(x -> x.getCustomScriptContexts()).filter(Objects::nonNull)
.collect(Collectors.toList());
return new ScriptModule(settings, environment, resourceWatcherService, scriptEngineServices, plugins);
}
/**
* Build {@linkplain ScriptEngineService} and {@linkplain ScriptContext.Plugin}.
*/
public ScriptModule(Settings settings, Environment environment, ResourceWatcherService resourceWatcherService,
List<ScriptEngineService> scriptEngineServices, List<ScriptContext.Plugin> customScriptContexts) {
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customScriptContexts);
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngineServices);
scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
try {
scriptService = new ScriptService(settings, environment, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry,
scriptSettings);
} catch (IOException e) {
throw new RuntimeException("Couldn't setup ScriptService", e);
}
}
/**
* Extra settings for scripts.
*/
public List<Setting<?>> getSettings() {
return scriptSettings.getSettings();
}
/**
* Service responsible for managing scripts.
*/
public ScriptService getScriptService() {
return scriptService;
}
}
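The shape of this refactor: ScriptModule stops being a Guice AbstractModule that binds ScriptService asEagerSingleton and becomes a plain holder that constructs the service in its own constructor, so Node can build it before any injector exists and simply bind the finished instance. A schematic sketch with stand-in types:

class EagerModuleDemo {
    static class Config {}
    static class Service {
        Service(Config config) {}   // may throw; failures now surface at construction time
    }

    private final Service service;

    EagerModuleDemo(Config config) {
        // Construct eagerly instead of asking an injector to do it lazily;
        // dependencies arrive as explicit constructor arguments.
        this.service = new Service(config);
    }

    Service getService() {
        return service;   // Node binds this instance into the injector
    }
}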

View File

@ -41,7 +41,6 @@ import org.elasticsearch.common.cache.RemovalListener;
import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -72,7 +71,6 @@ import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import static java.util.Collections.unmodifiableMap;
@ -132,7 +130,6 @@ public class ScriptService extends AbstractComponent implements Closeable {
@Deprecated
public static final ParseField SCRIPT_INLINE = new ParseField("script");
@Inject
public ScriptService(Settings settings, Environment env,
ResourceWatcherService resourceWatcherService, ScriptEngineRegistry scriptEngineRegistry,
ScriptContextRegistry scriptContextRegistry, ScriptSettings scriptSettings) throws IOException {

View File

@ -142,12 +142,13 @@ public class ScriptSettings {
return scriptModeSettings;
}
public Iterable<Setting<Boolean>> getScriptTypeSettings() {
return Collections.unmodifiableCollection(SCRIPT_TYPE_SETTING_MAP.values());
}
public Iterable<Setting<Boolean>> getScriptContextSettings() {
return Collections.unmodifiableCollection(scriptContextSettingMap.values());
public List<Setting<?>> getSettings() {
List<Setting<?>> settings = new ArrayList<>();
settings.addAll(SCRIPT_TYPE_SETTING_MAP.values());
settings.addAll(scriptContextSettingMap.values());
settings.addAll(scriptLanguageSettings);
settings.add(defaultScriptLanguageSetting);
return settings;
}
public Iterable<Setting<Boolean>> getScriptLanguageSettings() {

View File

@ -68,7 +68,6 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase {
for (int i = 0; i < internalHits.length; i++) {
ScoreDoc scoreDoc = topDocs.scoreDocs[i];
InternalSearchHit searchHitFields = internalHits[i];
searchHitFields.shard(innerHits.shardTarget());
searchHitFields.score(scoreDoc.score);
if (scoreDoc instanceof FieldDoc) {
FieldDoc fieldDoc = (FieldDoc) scoreDoc;

View File

@ -38,6 +38,7 @@ import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.emptyMap;
import static java.util.Objects.requireNonNull;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
@ -45,56 +46,61 @@ import static org.elasticsearch.common.xcontent.XContentHelper.convertToMap;
/**
* Information about a persisted or running task. Running tasks just have a {@link #getTask()} while persisted tasks will have either a
* {@link #getError()} or {@link #getResult()}.
* {@link #getError()} or {@link #getResponse()}.
*/
public final class PersistedTaskInfo implements Writeable, ToXContent {
private final boolean completed;
private final TaskInfo task;
@Nullable
private final BytesReference error;
@Nullable
private final BytesReference result;
private final BytesReference response;
/**
* Construct a {@linkplain PersistedTaskInfo} for a running task.
* Construct a {@linkplain PersistedTaskInfo} for a task for which we don't have a result or error. That usually means that the task
* is incomplete, but it could also mean that we waited for the task to complete but it didn't save any error information.
*/
public PersistedTaskInfo(TaskInfo task) {
this(task, null, null);
public PersistedTaskInfo(boolean completed, TaskInfo task) {
this(completed, task, null, null);
}
/**
* Construct a {@linkplain PersistedTaskInfo} for a task that completed with an error.
*/
public PersistedTaskInfo(TaskInfo task, Throwable error) throws IOException {
this(task, toXContent(error), null);
this(true, task, toXContent(error), null);
}
/**
* Construct a {@linkplain PersistedTaskInfo} for a task that completed successfully.
*/
public PersistedTaskInfo(TaskInfo task, ToXContent result) throws IOException {
this(task, null, toXContent(result));
public PersistedTaskInfo(TaskInfo task, ToXContent response) throws IOException {
this(true, task, null, toXContent(response));
}
private PersistedTaskInfo(TaskInfo task, @Nullable BytesReference error, @Nullable BytesReference result) {
private PersistedTaskInfo(boolean completed, TaskInfo task, @Nullable BytesReference error, @Nullable BytesReference result) {
this.completed = completed;
this.task = requireNonNull(task, "task is required");
this.error = error;
this.result = result;
this.response = result;
}
/**
* Read from a stream.
*/
public PersistedTaskInfo(StreamInput in) throws IOException {
completed = in.readBoolean();
task = new TaskInfo(in);
error = in.readOptionalBytesReference();
result = in.readOptionalBytesReference();
response = in.readOptionalBytesReference();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(completed);
task.writeTo(out);
out.writeOptionalBytesReference(error);
out.writeOptionalBytesReference(result);
out.writeOptionalBytesReference(response);
}
/**
@ -105,46 +111,45 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
}
/**
* Get the error that finished this task. Will return null if the task didn't finish with an error or it hasn't yet finished.
* Get the error that finished this task. Will return null if the task didn't finish with an error, it hasn't yet finished, or didn't
* persist its result.
*/
public BytesReference getError() {
return error;
}
/**
* Convert {@link #getError()} from XContent to a Map for easy processing. Will return null if the task didn't finish with an error or
* hasn't yet finished.
* Convert {@link #getError()} from XContent to a Map for easy processing. Will return an empty map if the task didn't finish with an
* error, hasn't yet finished, or didn't persist its result.
*/
public Map<String, Object> getErrorAsMap() {
if (error == null) {
return null;
return emptyMap();
}
return convertToMap(error, false).v2();
}
/**
* Get the result that this task finished with. Will return null if the task was finished by an error or it hasn't yet finished.
* Get the response that this task finished with. Will return null if the task was finished by an error, it hasn't yet finished, or
* didn't persist its result.
*/
public BytesReference getResult() {
return result;
public BytesReference getResponse() {
return response;
}
/**
* Convert {@link #getResult()} from XContent to a Map for easy processing. Will return null if the task was finished with an error or
* hasn't yet finished.
* Convert {@link #getResponse()} from XContent to a Map for easy processing. Will return an empty map if the task was finished with an
* error, hasn't yet finished, or didn't persist its result.
*/
public Map<String, Object> getResultAsMap() {
if (result == null) {
return null;
public Map<String, Object> getResponseAsMap() {
if (response == null) {
return emptyMap();
}
return convertToMap(result, false).v2();
return convertToMap(response, false).v2();
}
/**
* Was the task completed before this response was returned?
*/
public boolean isCompleted() {
return error != null || result != null;
return completed;
}
@Override
@ -159,18 +164,18 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
if (error != null) {
XContentHelper.writeRawField("error", error, builder, params);
}
if (result != null) {
XContentHelper.writeRawField("result", result, builder, params);
if (response != null) {
XContentHelper.writeRawField("response", response, builder, params);
}
return builder;
}
public static final ConstructingObjectParser<PersistedTaskInfo, ParseFieldMatcherSupplier> PARSER = new ConstructingObjectParser<>(
"persisted_task_info", a -> new PersistedTaskInfo((TaskInfo) a[0], (BytesReference) a[1], (BytesReference) a[2]));
"persisted_task_info", a -> new PersistedTaskInfo(true, (TaskInfo) a[0], (BytesReference) a[1], (BytesReference) a[2]));
static {
PARSER.declareObject(constructorArg(), TaskInfo.PARSER, new ParseField("task"));
PARSER.declareRawObject(optionalConstructorArg(), new ParseField("error"));
PARSER.declareRawObject(optionalConstructorArg(), new ParseField("result"));
PARSER.declareRawObject(optionalConstructorArg(), new ParseField("response"));
}
@Override
@ -189,9 +194,10 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
* Equality of error and result is done by converting them to a map first. Not efficient but ignores field order and spacing
* differences so perfect for testing.
*/
return Objects.equals(task, other.task)
return Objects.equals(completed, other.completed)
&& Objects.equals(task, other.task)
&& Objects.equals(getErrorAsMap(), other.getErrorAsMap())
&& Objects.equals(getResultAsMap(), other.getResultAsMap());
&& Objects.equals(getResponseAsMap(), other.getResponseAsMap());
}
@Override
@ -200,7 +206,7 @@ public final class PersistedTaskInfo implements Writeable, ToXContent {
* Hashing of error and result is done by converting them to a map first. Not efficient but ignores field order and spacing
* differences so perfect for testing.
*/
return Objects.hash(task, getErrorAsMap(), getResultAsMap());
return Objects.hash(completed, task, getErrorAsMap(), getResponseAsMap());
}
private static BytesReference toXContent(ToXContent result) throws IOException {
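The wire-format change above deserves a note: isCompleted() used to be derived as error != null || result != null, which misreports a completed task that never persisted anything, so completion is now an explicit boolean written first. A plain-java.io sketch of the resulting layout (DataOutputStream stands in for StreamOutput; the optional-bytes convention is approximated):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class TaskWireDemo {
    static byte[] serialize(boolean completed, byte[] error, byte[] response) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeBoolean(completed);   // the new explicit flag, written first
            writeOptional(out, error);     // optional error payload
            writeOptional(out, response);  // optional response payload
        }
        return bytes.toByteArray();
    }

    // A presence flag followed by a length-prefixed payload when present.
    private static void writeOptional(DataOutputStream out, byte[] value) throws IOException {
        out.writeBoolean(value != null);
        if (value != null) {
            out.writeInt(value.length);
            out.write(value);
        }
    }
}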

View File

@ -37,7 +37,7 @@
}
}
},
"result" : {
"response" : {
"type" : "object",
"enabled" : false
},

View File

@ -22,16 +22,16 @@ package org.elasticsearch;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.nio.file.AccessMode;
import java.nio.file.Path;
import java.io.InputStream;
import java.net.URL;
public class BuildTests extends ESTestCase {
/** Asking for the jar metadata should not throw an exception in tests, no matter how configured */
public void testJarMetadata() throws IOException {
Path path = Build.getElasticsearchCodebase();
URL url = Build.getElasticsearchCodebase();
// throws an exception if it does not exist or we cannot access it
path.getFileSystem().provider().checkAccess(path, AccessMode.READ);
try (InputStream ignored = url.openStream()) {}
// these should never be null
assertNotNull(Build.CURRENT.date());
assertNotNull(Build.CURRENT.shortHash());

View File

@ -70,7 +70,6 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Consumer;
import java.util.function.Function;
@ -85,6 +84,7 @@ import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyCollectionOf;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
@ -437,8 +437,8 @@ public class TasksIT extends ESIntegTestCase {
}
public void testListTasksWaitForCompletion() throws Exception {
waitForCompletionTestCase(id -> {
return client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]")
waitForCompletionTestCase(randomBoolean(), id -> {
return client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME)
.setWaitForCompletion(true).execute();
}, response -> {
assertThat(response.getNodeFailures(), empty());
@ -446,25 +446,39 @@ public class TasksIT extends ESIntegTestCase {
});
}
public void testGetTaskWaitForCompletion() throws Exception {
waitForCompletionTestCase(id -> {
public void testGetTaskWaitForCompletionNoPersist() throws Exception {
waitForCompletionTestCase(false, id -> {
return client().admin().cluster().prepareGetTask(id).setWaitForCompletion(true).execute();
}, response -> {
// Really we're just happy we didn't get any exceptions
assertNotNull(response.getTask().getTask());
assertTrue(response.getTask().isCompleted());
// We didn't persist the result so it won't come back when we wait
assertNull(response.getTask().getResponse());
});
}
public void testGetTaskWaitForCompletionWithPersist() throws Exception {
waitForCompletionTestCase(true, id -> {
return client().admin().cluster().prepareGetTask(id).setWaitForCompletion(true).execute();
}, response -> {
assertNotNull(response.getTask().getTask());
assertTrue(response.getTask().isCompleted());
// We persisted the task so we should get its results
assertEquals(0, response.getTask().getResponseAsMap().get("failure_count"));
});
}
/**
* Test wait for completion.
* @param persist should the task persist its results
* @param wait start waiting for a task. Accepts the id of the task to wait for and returns a future waiting for it.
* @param validator validate the response and return the task ids that were found
*/
private <T> void waitForCompletionTestCase(Function<TaskId, ListenableActionFuture<T>> wait, Consumer<T> validator)
private <T> void waitForCompletionTestCase(boolean persist, Function<TaskId, ListenableActionFuture<T>> wait, Consumer<T> validator)
throws Exception {
// Start blocking test task
ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
.execute();
.setShouldPersistResult(persist).execute();
ListenableActionFuture<T> waitResponseFuture;
TaskId taskId;
@ -513,7 +527,7 @@ public class TasksIT extends ESIntegTestCase {
public void testListTasksWaitForTimeout() throws Exception {
waitForTimeoutTestCase(id -> {
ListTasksResponse response = client().admin().cluster().prepareListTasks()
.setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(100))
.setActions(TestTaskPlugin.TestTaskAction.NAME).setWaitForCompletion(true).setTimeout(timeValueMillis(100))
.get();
assertThat(response.getNodeFailures(), not(empty()));
return response.getNodeFailures();
@ -539,6 +553,9 @@ public class TasksIT extends ESIntegTestCase {
try {
TaskId taskId = waitForTestTaskStartOnAllNodes();
// Wait for the task to start
assertBusy(() -> client().admin().cluster().prepareGetTask(taskId).get());
// Spin up a request that should wait for those tasks to finish
// It will timeout because we haven't unblocked the tasks
Iterable<? extends Throwable> failures = wait.apply(taskId);
@ -554,15 +571,18 @@ public class TasksIT extends ESIntegTestCase {
future.get();
}
/**
* Wait for the test task to be running on all nodes and return the TaskId of the primary task.
*/
private TaskId waitForTestTaskStartOnAllNodes() throws Exception {
AtomicReference<TaskId> result = new AtomicReference<>();
assertBusy(() -> {
List<TaskInfo> tasks = client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]")
.get().getTasks();
assertEquals(internalCluster().size(), tasks.size());
result.set(tasks.get(0).getTaskId());
});
return result.get();
List<TaskInfo> task = client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME).get().getTasks();
assertThat(task, hasSize(1));
return task.get(0).getTaskId();
}
public void testTasksListWaitForNoTask() throws Exception {
@ -626,7 +646,7 @@ public class TasksIT extends ESIntegTestCase {
assertEquals(Long.toString(taskInfo.getId()), task.get("id").toString());
@SuppressWarnings("unchecked")
Map<String, Object> result = (Map<String, Object>) source.get("result");
Map<String, Object> result = (Map<String, Object>) source.get("response");
assertEquals("0", result.get("failure_count").toString());
assertNull(source.get("failure"));
@ -647,7 +667,7 @@ public class TasksIT extends ESIntegTestCase {
assertEquals(1L, searchResponse.getHits().totalHits());
GetTaskResponse getResponse = expectFinishedTask(taskId);
assertEquals(result, getResponse.getTask().getResultAsMap());
assertEquals(result, getResponse.getTask().getResponseAsMap());
assertNull(getResponse.getTask().getError());
}
@ -688,7 +708,7 @@ public class TasksIT extends ESIntegTestCase {
assertNull(source.get("result"));
GetTaskResponse getResponse = expectFinishedTask(failedTaskId);
assertNull(getResponse.getTask().getResult());
assertNull(getResponse.getTask().getResponse());
assertEquals(error, getResponse.getTask().getErrorAsMap());
}
@ -728,7 +748,7 @@ public class TasksIT extends ESIntegTestCase {
GetTaskResponse response = expectFinishedTask(new TaskId("fake:1"));
assertEquals("test", response.getTask().getTask().getAction());
assertNotNull(response.getTask().getError());
assertNull(response.getTask().getResult());
assertNull(response.getTask().getResponse());
}
@Override

View File

@ -159,6 +159,22 @@ public class SimulateExecutionServiceTests extends ESTestCase {
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(2).getFailure(), nullValue());
}
public void testExecuteVerboseItemExceptionWithIgnoreFailure() throws Exception {
TestProcessor testProcessor = new TestProcessor("processor_0", "mock", ingestDocument -> { throw new RuntimeException("processor failed"); });
CompoundProcessor processor = new CompoundProcessor(true, Collections.singletonList(testProcessor), Collections.emptyList());
Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor));
SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true);
assertThat(testProcessor.getInvokedCounter(), equalTo(1));
assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class));
SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse;
assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(1));
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorTag(), equalTo("processor_0"));
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), nullValue());
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), not(sameInstance(ingestDocument)));
assertIngestDocument(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), ingestDocument);
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata())));
}
public void testExecuteItemWithFailure() throws Exception {
TestProcessor processor = new TestProcessor(ingestDocument -> { throw new RuntimeException("processor failed"); });
Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor));

View File

@ -28,6 +28,7 @@ import org.junit.Before;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -52,7 +53,7 @@ public class TrackingResultProcessorTests extends ESTestCase {
public void testActualProcessor() throws Exception {
TestProcessor actualProcessor = new TestProcessor(ingestDocument -> {});
TrackingResultProcessor trackingProcessor = new TrackingResultProcessor(actualProcessor, resultList);
TrackingResultProcessor trackingProcessor = new TrackingResultProcessor(false, actualProcessor, resultList);
trackingProcessor.execute(ingestDocument);
SimulateProcessorResult expectedResult = new SimulateProcessorResult(actualProcessor.getTag(), ingestDocument);
@ -127,4 +128,21 @@ public class TrackingResultProcessorTests extends ESTestCase {
assertThat(resultList.get(3).getFailure(), nullValue());
assertThat(resultList.get(3).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag()));
}
public void testActualCompoundProcessorWithIgnoreFailure() throws Exception {
RuntimeException exception = new RuntimeException("processor failed");
TestProcessor testProcessor = new TestProcessor(ingestDocument -> { throw exception; });
CompoundProcessor actualProcessor = new CompoundProcessor(true, Collections.singletonList(testProcessor),
Collections.emptyList());
CompoundProcessor trackingProcessor = decorate(actualProcessor, resultList);
trackingProcessor.execute(ingestDocument);
SimulateProcessorResult expectedResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument);
assertThat(testProcessor.getInvokedCounter(), equalTo(1));
assertThat(resultList.size(), equalTo(1));
assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument()));
assertThat(resultList.get(0).getFailure(), nullValue());
assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag()));
}
}

View File

@ -1035,13 +1035,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
}
public void testAliasFilterWithNowInRangeFilterAndQuery() throws Exception {
assertAcked(prepareCreate("my-index").addMapping("my-type", "_timestamp", "enabled=true"));
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", rangeQuery("_timestamp").from("now-1d").to("now")));
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", rangeQuery("_timestamp").from("now-1d").to("now")));
assertAcked(prepareCreate("my-index").addMapping("my-type", "timestamp", "type=date"));
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", rangeQuery("timestamp").from("2016-12-01").to("2016-12-31")));
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", rangeQuery("timestamp").from("2016-01-01").to("2016-12-31")));
final int numDocs = scaledRandomIntBetween(5, 52);
for (int i = 1; i <= numDocs; i++) {
client().prepareIndex("my-index", "my-type").setCreate(true).setSource("{}").get();
client().prepareIndex("my-index", "my-type").setCreate(true).setSource("timestamp", "2016-12-12").get();
if (i % 2 == 0) {
refresh();
SearchResponse response = client().prepareSearch("filter1").get();

View File

@ -30,6 +30,7 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
@ -111,16 +112,18 @@ public class SpecificMasterNodesIT extends ESIntegTestCase {
internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
createIndex("test");
assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("_timestamp", "enabled=true"));
assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("timestamp", "type=date"));
MappingMetaData defaultMapping = client().admin().cluster().prepareState().get().getState().getMetaData().getIndices().get("test").getMappings().get("_default_");
assertThat(defaultMapping.getSourceAsMap().get("_timestamp"), notNullValue());
Map<?,?> properties = (Map<?, ?>) defaultMapping.getSourceAsMap().get("properties");
assertThat(properties.get("timestamp"), notNullValue());
assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("_timestamp", "enabled=true"));
assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("timestamp", "type=date"));
assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("foo", "enabled=true"));
MappingMetaData type1Mapping = client().admin().cluster().prepareState().get().getState().getMetaData().getIndices().get("test").getMappings().get("type1");
assertThat(type1Mapping.getSourceAsMap().get("_timestamp"), notNullValue());
properties = (Map<?, ?>) type1Mapping.getSourceAsMap().get("properties");
assertThat(properties.get("timestamp"), notNullValue());
}
public void testAliasFilterValidation() throws Exception {

View File

@ -20,7 +20,6 @@
package org.elasticsearch.get;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
@ -30,7 +29,6 @@ import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
@ -40,15 +38,11 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.singleton;
@ -57,7 +51,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ -536,7 +529,7 @@ public class GetActionIT extends ESIntegTestCase {
public void testGetFieldsMetaData() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("my-type1", "_timestamp", "enabled=true", "_ttl", "enabled=true", "_parent", "type=parent")
.addMapping("my-type1", "_parent", "type=parent")
.addAlias(new Alias("alias"))
.setSettings(Settings.builder().put("index.refresh_interval", -1)));
@ -557,12 +550,6 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true));
assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
assertThat(getResponse.getField("_timestamp").isMetadataField(), equalTo(true));
assertThat(getResponse.getField("_timestamp").getValue().toString(), equalTo("205097"));
assertThat(getResponse.getField("_ttl").isMetadataField(), equalTo(true));
// TODO: _ttl should return the original value, but it does not work today because
// it would use now() instead of the value of _timestamp to rebase
// assertThat(getResponse.getField("_ttl").getValue().toString(), equalTo("10000000205097"));
assertThat(getResponse.getField("_parent").isMetadataField(), equalTo(true));
assertThat(getResponse.getField("_parent").getValue().toString(), equalTo("parent_1"));
@ -577,12 +564,6 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true));
assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
assertThat(getResponse.getField("_timestamp").isMetadataField(), equalTo(true));
assertThat(getResponse.getField("_timestamp").getValue().toString(), equalTo("205097"));
assertThat(getResponse.getField("_ttl").isMetadataField(), equalTo(true));
// TODO: _ttl should return the original value, but it does not work today because
// it would use now() instead of the value of _timestamp to rebase
//assertThat(getResponse.getField("_ttl").getValue().toString(), equalTo("10000000000000"));
assertThat(getResponse.getField("_parent").isMetadataField(), equalTo(true));
assertThat(getResponse.getField("_parent").getValue().toString(), equalTo("parent_1"));
}
@ -779,16 +760,10 @@ public class GetActionIT extends ESIntegTestCase {
" },\n" +
" \"mappings\": {\n" +
" \"parentdoc\": {\n" +
" \"_ttl\": {\n" +
" \"enabled\": true\n" +
" }\n" +
" },\n" +
" \"doc\": {\n" +
" \"_parent\": {\n" +
" \"type\": \"parentdoc\"\n" +
" },\n" +
" \"_ttl\": {\n" +
" \"enabled\": true\n" +
" }\n" +
" }\n" +
" }\n" +
@ -798,7 +773,7 @@ public class GetActionIT extends ESIntegTestCase {
client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").setTTL(TimeValue.timeValueHours(1).getMillis()).get();
String[] fieldsList = {"_ttl", "_parent"};
String[] fieldsList = {"_parent"};
// before refresh - document is only in translog
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
refresh();
@ -814,14 +789,6 @@ public class GetActionIT extends ESIntegTestCase {
" \"settings\": {\n" +
" \"index.translog.flush_threshold_size\": \"1pb\",\n" +
" \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
" \"parentdoc\": {},\n" +
" \"doc\": {\n" +
" \"_timestamp\": {\n" +
" \"enabled\": true\n" +
" }\n" +
" }\n" +
" }\n" +
"}";
@ -831,7 +798,7 @@ public class GetActionIT extends ESIntegTestCase {
" \"text\": \"some text.\"\n" +
"}\n";
client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get();
String[] fieldsList = {"_timestamp", "_routing"};
String[] fieldsList = {"_routing"};
// before refresh - document is only in translog
assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
refresh();
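// A conceptual sketch of what "works before refresh" relies on (plain Java with a
// hypothetical helper; the real logic lives in the engine): a realtime GET consults
// the translog first, so a document and its fields are retrievable before the first
// refresh makes it visible to search.
static String realtimeGet(Map<String, String> translog, Map<String, String> searchableIndex, String id) {
    String doc = translog.get(id);                       // before refresh: only present here
    return doc != null ? doc : searchableIndex.get(id);  // after refresh: visible here too
}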

View File

@ -40,7 +40,6 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
@ -50,9 +49,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
@ -430,23 +427,6 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase {
parser.parse("test", new CompressedXContent(mapping));
}
// issue https://github.com/elastic/elasticsearch/issues/5864
public void testMetadataMappersStillWorking() throws MapperParsingException, IOException {
String mapping = "{";
Map<String, String> rootTypes = new HashMap<>();
// just pick some examples from DocumentMapperParser.rootTypeParsers
rootTypes.put(TimestampFieldMapper.NAME, "{\"enabled\" : true}");
rootTypes.put("include_in_all", "true");
rootTypes.put("dynamic_date_formats", "[\"yyyy-MM-dd\", \"dd-MM-yyyy\"]");
rootTypes.put("numeric_detection", "true");
rootTypes.put("dynamic_templates", "[]");
for (String key : rootTypes.keySet()) {
mapping += "\"" + key+ "\"" + ":" + rootTypes.get(key) + ",\n";
}
mapping += "\"properties\":{}}" ;
createIndex("test").mapperService().documentMapperParser().parse("test", new CompressedXContent(mapping));
}
public void testDocValuesNotAllowed() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.startObject("_all")

View File

@ -198,6 +198,32 @@ public class StringMappingUpgradeTests extends ESSingleNodeTestCase {
assertEquals("keyword", field.fieldType().searchQuoteAnalyzer().name());
}
public void testUpgradeTextIncludeInAll() throws IOException {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string")
.field("include_in_all", false).endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
FieldMapper field = mapper.mappers().getMapper("field");
assertThat(field, instanceOf(TextFieldMapper.class));
assertFalse(((TextFieldMapper) field).includeInAll());
}
public void testUpgradeKeywordIncludeInAll() throws IOException {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string")
.field("index", "not_analyzed").field("include_in_all", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
FieldMapper field = mapper.mappers().getMapper("field");
assertThat(field, instanceOf(KeywordFieldMapper.class));
assertTrue(((KeywordFieldMapper) field).includeInAll());
}
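// The upgrade rule the two tests above pin down, distilled into a hypothetical
// helper (not the real DocumentMapperParser code): a legacy "string" field becomes
// "keyword" when it was not_analyzed (or not indexed) and "text" otherwise, while
// settings such as include_in_all are expected to carry over unchanged.
static String upgradedType(Map<String, Object> stringFieldMapping) {
    Object index = stringFieldMapping.get("index");
    return "not_analyzed".equals(index) || "no".equals(index) ? "keyword" : "text";
}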
public void testUpgradeRandomMapping() throws IOException {
final int iters = 20;
for (int i = 0; i < iters; ++i) {

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.timestamp;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
@ -31,25 +32,31 @@ import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
@ -58,19 +65,35 @@ import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
/**
*/
public class TimestampMappingTests extends ESSingleNodeTestCase {
private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
}
public void testRejectedOn5x() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("_timestamp")
.field("enabled", true)
.endObject()
.endObject().endObject().string();
IndexService index = createIndex("test");
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class,
() -> index.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false));
assertThat(expected.getMessage(), startsWith("[_timestamp] is removed"));
}
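// The version gate this test pins down, distilled into a hypothetical helper (the
// real check sits in the metadata mapper's parser): _timestamp is only accepted on
// indices created before 5.0, which is why the remaining tests in this file create
// their indices with BW_SETTINGS (version created = 2.3.0).
static void ensureTimestampSupported(Version indexCreatedVersion) {
    if (indexCreatedVersion.onOrAfter(Version.V_5_0_0_alpha1)) {
        throw new IllegalArgumentException("[_timestamp] is removed in 5.0 indices");
    }
}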
public void testSimpleDisabled() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -85,7 +108,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", "yes").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -99,10 +122,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
}
public void testDefaultValues() throws Exception {
Version version;
do {
version = randomVersion(random());
} while (version.before(Version.V_2_0_0_beta1));
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.V_5_0_0_alpha3);
for (String mapping : Arrays.asList(
XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(),
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().endObject().string())) {
@ -120,7 +140,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).endObject()
.endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@ -146,7 +166,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.field("foo", "bar")
.endObject();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -172,7 +192,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -195,7 +215,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject();
try {
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
@ -212,7 +232,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject().endObject();
try {
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
@ -230,7 +250,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject().endObject();
try {
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));
@ -250,7 +270,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject();
MetaData metaData = MetaData.builder().build();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
@ -323,7 +343,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.field("enabled", true)
.field("default", "1970-01-01")
.endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapperParser parser = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping));
docMapper = parser.parse("type", docMapper.mappingSource());
@ -360,7 +380,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
@ -375,7 +395,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject();
@ -392,7 +412,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
.endObject().endObject().string();
BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes();
// test with 2.x
DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper currentMapper = createIndex("new-index", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
// this works with 2.x
IndexRequest request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1970-01-01");
@ -407,4 +427,54 @@ public class TimestampMappingTests extends ESSingleNodeTestCase {
assertThat(e.getMessage(), containsString("failed to parse timestamp [1234567890]"));
}
}
public void testSizeTimestampIndexParsing() throws IOException {
IndexService indexService = createIndex("test", BW_SETTINGS);
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true);
assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
}
public void testDefaultApplied() throws IOException {
createIndex("test1", BW_SETTINGS);
createIndex("test2", BW_SETTINGS);
XContentBuilder defaultMapping = XContentFactory.jsonBuilder().startObject()
.startObject(MapperService.DEFAULT_MAPPING).startObject("_timestamp").field("enabled", true).endObject().endObject()
.endObject();
client().admin().indices().preparePutMapping().setType(MapperService.DEFAULT_MAPPING).setSource(defaultMapping).get();
XContentBuilder typeMapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").startObject("_all").field("enabled", false).endObject().endObject()
.endObject();
client().admin().indices().preparePutMapping("test1").setType("type").setSource(typeMapping).get();
client().admin().indices().preparePutMapping("test1", "test2").setType("type").setSource(typeMapping).get();
GetMappingsResponse response = client().admin().indices().prepareGetMappings("test2").get();
assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_all"));
assertFalse((Boolean) ((LinkedHashMap) response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")).get("enabled"));
assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp"));
assertTrue((Boolean)((LinkedHashMap)response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")).get("enabled"));
}
public void testTimestampParsing() throws IOException {
IndexService indexService = createIndex("test", BW_SETTINGS);
XContentBuilder indexMapping = XContentFactory.jsonBuilder();
boolean enabled = randomBoolean();
indexMapping.startObject()
.startObject("type")
.startObject("_timestamp")
.field("enabled", enabled)
.endObject()
.endObject()
.endObject();
DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
}
}

View File

@ -20,7 +20,9 @@
package org.elasticsearch.index.mapper.ttl;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
@ -30,22 +32,48 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
import java.util.Collection;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
public class TTLMappingTests extends ESSingleNodeTestCase {
private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
}
public void testRejectedOn5x() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("_ttl")
.field("enabled", true)
.endObject()
.endObject().endObject().string();
IndexService index = createIndex("test");
IllegalArgumentException expected = expectThrows(IllegalArgumentException.class,
() -> index.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false));
assertThat(expected.getMessage(), startsWith("[_ttl] is removed"));
}
public void testSimpleDisabled() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -60,7 +88,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_ttl").field("enabled", "yes").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
@ -75,7 +103,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testDefaultValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(TTLFieldMapper.Defaults.ENABLED_STATE.enabled));
assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.stored()));
assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions()));
@ -93,7 +121,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), MapperService.MergeReason.MAPPING_UPDATE, false);
DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false);
@ -116,7 +144,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
.startObject("properties").field("field").startObject().field("type", "text").endObject().endObject()
.endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false);
DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false);
@ -127,7 +155,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testThatDisablingTTLReportsConflict() throws Exception {
String mappingWithTtl = getMappingWithTtlEnabled().string();
String mappingWithTtlDisabled = getMappingWithTtlDisabled().string();
MapperService mapperService = createIndex("test").mapperService();
MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService();
DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false);
try {
@ -143,7 +171,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testThatDisablingTTLReportsConflictOnCluster() throws Exception {
String mappingWithTtl = getMappingWithTtlEnabled().string();
String mappingWithTtlDisabled = getMappingWithTtlDisabled().string();
assertAcked(client().admin().indices().prepareCreate("testindex").addMapping("type", mappingWithTtl));
assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", mappingWithTtl));
GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get();
try {
client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtlDisabled).setType("type").get();
@ -158,7 +186,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testThatEnablingTTLAfterFirstDisablingWorks() throws Exception {
String mappingWithTtl = getMappingWithTtlEnabled().string();
String withTtlDisabled = getMappingWithTtlDisabled().string();
assertAcked(client().admin().indices().prepareCreate("testindex").addMapping("type", withTtlDisabled));
assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", withTtlDisabled));
GetMappingsResponse mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get();
assertThat(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").sourceAsMap().get("_ttl").toString(), equalTo("{enabled=false}"));
client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtl).setType("type").get();
@ -167,20 +195,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
}
public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.builder().build(), "type");
IndexService indexService = createIndex("testindex", BW_SETTINGS, "type");
XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
}
public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.builder().build(), "type");
IndexService indexService = createIndex("testindex", BW_SETTINGS, "type");
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
}
public void testMergeWithOnlyDefaultSet() throws Exception {
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
IndexService indexService = createIndex("testindex", Settings.builder().build(), "type", mappingWithTtlEnabled);
IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlEnabled);
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false);
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@ -189,7 +217,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception {
XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
IndexService indexService = createIndex("testindex", Settings.builder().build(), "type", mappingWithTtlDisabled);
IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlDisabled);
CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"text\"}}}}")));
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
@ -202,7 +230,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_ttl").field("enabled", true).endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try {
docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()

View File

@ -31,12 +31,12 @@ import static org.hamcrest.Matchers.equalTo;
public class ParseMappingTypeLevelTests extends ESSingleNodeTestCase {
public void testTypeLevel() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_all").field("enabled", false).endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertThat(mapper.type(), equalTo("type"));
assertThat(mapper.timestampFieldMapper().enabled(), equalTo(true));
assertThat(mapper.allFieldMapper().enabled(), equalTo(false));
}
}

View File

@ -139,16 +139,6 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
compareMappingOnNodes(mappingsBeforeUpdateResponse);
}
// checks that the settings for timestamp and size are kept even if disabled
public void testDisabledSizeTimestampIndexDoNotLooseMappings() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
prepareCreate(INDEX).addMapping(TYPE, mapping).get();
GetMappingsResponse mappingsBeforeGreen = client().admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).get();
ensureGreen(INDEX);
// make sure all nodes have same cluster state
compareMappingOnNodes(mappingsBeforeGreen);
}
protected void testConflict(String mapping, String mappingUpdate, String... errorMessages) throws InterruptedException {
assertAcked(prepareCreate(INDEX).setSource(mapping).get());
ensureGreen(INDEX);

View File

@ -259,56 +259,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
}
}
public void testTimestampParsing() throws IOException {
IndexService indexService = createIndex("test");
XContentBuilder indexMapping = XContentFactory.jsonBuilder();
boolean enabled = randomBoolean();
indexMapping.startObject()
.startObject("type")
.startObject("_timestamp")
.field("enabled", enabled)
.endObject()
.endObject()
.endObject();
DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true);
assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled));
assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues());
assertTrue(documentMapper.timestampFieldMapper().fieldType().stored());
}
public void testSizeTimestampIndexParsing() throws IOException {
IndexService indexService = createIndex("test", Settings.builder().build());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json");
DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true);
assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true);
assertThat(documentMapper.mappingSource().string(), equalTo(mapping));
}
public void testDefaultApplied() throws IOException {
createIndex("test1", Settings.builder().build());
createIndex("test2", Settings.builder().build());
XContentBuilder defaultMapping = XContentFactory.jsonBuilder().startObject()
.startObject(MapperService.DEFAULT_MAPPING).startObject("_timestamp").field("enabled", true).endObject().endObject()
.endObject();
client().admin().indices().preparePutMapping().setType(MapperService.DEFAULT_MAPPING).setSource(defaultMapping).get();
XContentBuilder typeMapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").startObject("_all").field("enabled", false).endObject().endObject()
.endObject();
client().admin().indices().preparePutMapping("test1").setType("type").setSource(typeMapping).get();
client().admin().indices().preparePutMapping("test1", "test2").setType("type").setSource(typeMapping).get();
GetMappingsResponse response = client().admin().indices().prepareGetMappings("test2").get();
assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_all"));
assertFalse((Boolean) ((LinkedHashMap) response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")).get("enabled"));
assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp"));
assertTrue((Boolean)((LinkedHashMap)response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")).get("enabled"));
}
public void testRejectFieldDefinedTwice() throws IOException {
String mapping1 = XContentFactory.jsonBuilder().startObject()
.startObject("type1")

View File

@ -60,7 +60,9 @@ import java.nio.charset.Charset;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Collection;
@ -1125,13 +1127,13 @@ public class TranslogTests extends ESTestCase {
Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME);
Checkpoint read = Checkpoint.read(ckp);
Checkpoint corrupted = new Checkpoint(0, 0, 0);
Checkpoint.write(config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), corrupted, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), corrupted, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
try (Translog translog = new Translog(config, translogGeneration)) {
fail("corrupted");
} catch (IllegalStateException ex) {
assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=2683, numOps=55, translogFileGeneration= 2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration= 0}");
}
Checkpoint.write(config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);
try (Translog translog = new Translog(config, translogGeneration)) {
assertNotNull(translogGeneration);
assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration());
@ -1564,22 +1566,20 @@ public class TranslogTests extends ESTestCase {
private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, final boolean throwUnknownException, Translog.TranslogGeneration generation) throws IOException {
return new Translog(config, generation) {
@Override
TranslogWriter.ChannelFactory getChannelFactory() {
final TranslogWriter.ChannelFactory factory = super.getChannelFactory();
ChannelFactory getChannelFactory() {
final ChannelFactory factory = super.getChannelFactory();
return new TranslogWriter.ChannelFactory() {
@Override
public FileChannel open(Path file) throws IOException {
FileChannel channel = factory.open(file);
boolean success = false;
try {
ThrowingFileChannel throwingFileChannel = new ThrowingFileChannel(fail, partialWrites, throwUnknownException, channel);
success = true;
return throwingFileChannel;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(channel);
}
return (file, openOption) -> {
FileChannel channel = factory.open(file, openOption);
boolean success = false;
try {
final boolean isCkpFile = file.getFileName().toString().endsWith(".ckp"); // don't do partial writes for checkpoints; we rely on the fact that the 20 bytes are written as an atomic operation
ThrowingFileChannel throwingFileChannel = new ThrowingFileChannel(fail, isCkpFile ? false : partialWrites, throwUnknownException, channel);
success = true;
return throwingFileChannel;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(channel);
}
}
};
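// Shape of the refactor above (assumed signature of the now top-level functional
// interface; the real one lives alongside TranslogWriter): with a single abstract
// method, production code can pass FileChannel::open as a method reference to
// Checkpoint.write, and tests can decorate the default factory, as done here, to
// inject failures.
@FunctionalInterface
interface ChannelFactory {
    FileChannel open(Path file, OpenOption... openOptions) throws IOException;
}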
@ -1840,11 +1840,18 @@ public class TranslogTests extends ESTestCase {
} catch (IOException ex) {
assertEquals(ex.getMessage(), "__FAKE__ no space left on device");
} finally {
Checkpoint checkpoint = failableTLog.readCheckpoint();
if (checkpoint.numOps == unsynced.size() + syncedDocs.size()) {
syncedDocs.addAll(unsynced); // failed in fsync but got fully written
unsynced.clear();
}
generation = failableTLog.getGeneration();
IOUtils.closeWhileHandlingException(failableTLog);
}
} catch (TranslogException | MockDirectoryWrapper.FakeIOException ex) {
// failed - that's ok, we didn't even create it
} catch (IOException ex) {
assertEquals(ex.getMessage(), "__FAKE__ no space left on device");
}
// now randomly open this failing tlog again just to make sure we can also recover from failing during recovery
if (randomBoolean()) {
@ -1852,9 +1859,12 @@ public class TranslogTests extends ESTestCase {
IOUtils.close(getFailableTranslog(fail, config, randomBoolean(), false, generation));
} catch (TranslogException | MockDirectoryWrapper.FakeIOException ex) {
// failed - that's ok, we didn't even create it
} catch (IOException ex) {
assertEquals(ex.getMessage(), "__FAKE__ no space left on device");
}
}
fail.failNever(); // we don't want to fail here, but we might, since we write a new checkpoint and create a new tlog file
try (Translog translog = new Translog(config, generation)) {
Translog.Snapshot snapshot = translog.newSnapshot();
assertEquals(syncedDocs.size(), snapshot.totalOperations());
@ -1866,4 +1876,30 @@ public class TranslogTests extends ESTestCase {
}
}
}
public void testCheckpointOnDiskFull() throws IOException {
Checkpoint checkpoint = new Checkpoint(randomLong(), randomInt(), randomLong());
Path tempDir = createTempDir();
Checkpoint.write(FileChannel::open, tempDir.resolve("foo.cpk"), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
Checkpoint checkpoint2 = new Checkpoint(randomLong(), randomInt(), randomLong());
try {
Checkpoint.write((p, o) -> {
if (randomBoolean()) {
throw new MockDirectoryWrapper.FakeIOException();
}
FileChannel open = FileChannel.open(p, o);
FailSwitch failSwitch = new FailSwitch();
failSwitch.failNever(); // don't fail in the ctor
ThrowingFileChannel channel = new ThrowingFileChannel(failSwitch, false, false, open);
failSwitch.failAlways();
return channel;
}, tempDir.resolve("foo.cpk"), checkpoint2, StandardOpenOption.WRITE);
fail("should have failed earlier");
} catch (MockDirectoryWrapper.FakeIOException ex) {
//fine
}
Checkpoint read = Checkpoint.read(tempDir.resolve("foo.cpk"));
assertEquals(read, checkpoint);
}
}

View File

@ -86,6 +86,30 @@ public class PipelineFactoryTests extends ESTestCase {
assertThat(pipeline.getOnFailureProcessors().get(0).getType(), equalTo("test-processor"));
}
public void testCreateWithPipelineEmptyOnFailure() throws Exception {
Map<String, Object> processorConfig = new HashMap<>();
Map<String, Object> pipelineConfig = new HashMap<>();
pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
pipelineConfig.put(Pipeline.ON_FAILURE_KEY, Collections.emptyList());
Pipeline.Factory factory = new Pipeline.Factory();
ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create("_id", pipelineConfig, processorRegistry));
assertThat(e.getMessage(), equalTo("pipeline [_id] cannot have an empty on_failure option defined"));
}
public void testCreateWithPipelineEmptyOnFailureInProcessor() throws Exception {
Map<String, Object> processorConfig = new HashMap<>();
processorConfig.put(Pipeline.ON_FAILURE_KEY, Collections.emptyList());
Map<String, Object> pipelineConfig = new HashMap<>();
pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
Pipeline.Factory factory = new Pipeline.Factory();
ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create("_id", pipelineConfig, processorRegistry));
assertThat(e.getMessage(), equalTo("[on_failure] processors list cannot be empty"));
}
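// The validation both new tests pin down, distilled into a hypothetical helper
// (not the real Pipeline.Factory internals): on_failure may be omitted entirely,
// but if the key is present its processor list must not be empty, both at the
// pipeline level and on an individual processor.
static void validateOnFailure(String pipelineId, List<?> onFailureProcessors) {
    if (onFailureProcessors != null && onFailureProcessors.isEmpty()) {
        throw new ElasticsearchParseException("pipeline [{}] cannot have an empty on_failure option defined", pipelineId);
    }
}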
public void testCreateWithPipelineIgnoreFailure() throws Exception {
Map<String, Object> processorConfig = new HashMap<>();
processorConfig.put("ignore_failure", true);
@ -116,11 +140,8 @@ public class PipelineFactoryTests extends ESTestCase {
pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
Pipeline.Factory factory = new Pipeline.Factory();
ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
try {
factory.create("_id", pipelineConfig, processorRegistry);
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));
}
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create("_id", pipelineConfig, processorRegistry));
assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));
}
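// For reference, a minimal stand-in for the expectThrows helper adopted above (the
// real one comes from LuceneTestCase via ESTestCase): unlike the try/catch/assert
// pattern it replaces, it also fails when no exception is thrown at all.
interface ThrowingRunnable {
    void run() throws Throwable;
}
static <T extends Throwable> T expectThrows(Class<T> expectedType, ThrowingRunnable runnable) {
    try {
        runnable.run();
    } catch (Throwable t) {
        if (expectedType.isInstance(t)) {
            return expectedType.cast(t);
        }
        throw new AssertionError("unexpected exception type: " + t.getClass().getName(), t);
    }
    throw new AssertionError("expected " + expectedType.getSimpleName() + " to be thrown, but nothing was thrown");
}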
public void testCreateProcessorsWithOnFailureProperties() throws Exception {

View File

@ -22,16 +22,12 @@ package org.elasticsearch.script;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.IOException;
@ -41,6 +37,9 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@ -49,28 +48,18 @@ public class NativeScriptTests extends ESTestCase {
Settings settings = Settings.builder()
.put("node.name", "testNativeScript")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
.build();
ScriptModule scriptModule = new ScriptModule(new NativeScriptEngineService(settings,
Collections.singletonMap("my", new MyNativeScriptFactory())));
ScriptModule scriptModule = new ScriptModule(settings, new Environment(settings), null,
singletonList(new NativeScriptEngineService(settings, singletonMap("my", new MyNativeScriptFactory()))), emptyList());
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
final ThreadPool threadPool = new ThreadPool(settings);
Injector injector = new ModulesBuilder().add(
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
},
new SettingsModule(settings),
scriptModule).createInjector();
ScriptService scriptService = injector.getInstance(ScriptService.class);
ClusterState state = ClusterState.builder(new ClusterName("_name")).build();
ExecutableScript executable = scriptService.executable(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null),
ScriptContext.Standard.SEARCH, Collections.emptyMap(), state);
ExecutableScript executable = scriptModule.getScriptService().executable(
new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), ScriptContext.Standard.SEARCH,
Collections.emptyMap(), state);
assertThat(executable.run().toString(), equalTo("test"));
terminate(injector.getInstance(ThreadPool.class));
}
public void testFineGrainedSettingsDontAffectNativeScripts() throws IOException {

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@ -112,28 +111,29 @@ public class AggregatorParsingTests extends ESTestCase {
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
}, settingsModule
, scriptModule, new IndicesModule(namedWriteableRegistry) {
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
},
settingsModule,
new IndicesModule(namedWriteableRegistry) {
@Override
protected void configure() {
bindMapperExtension();
}
}, new SearchModule(settings, namedWriteableRegistry) {
@Override
protected void configureSearch() {
// Skip me
}
}, new IndexSettingsModule(index, settings),
@Override
protected void configure() {
bindMapperExtension();
}
}, new SearchModule(settings, namedWriteableRegistry) {
@Override
protected void configureSearch() {
// Skip me
}
}, new IndexSettingsModule(index, settings),
new AbstractModule() {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of(clusterService));
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
}
}).createInjector();
new AbstractModule() {
@Override
protected void configure() {
bind(ClusterService.class).toInstance(clusterService);
bind(CircuitBreakerService.class).toInstance(new NoneCircuitBreakerService());
bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
}
}).createInjector();
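// Note on the wiring change above, as a sketch with a hypothetical Service type
// (using the bundled inject API): a pre-built object is now bound with
// toInstance(...) rather than toProvider(Providers.of(...)); both inject the same
// value, but toInstance is simpler and rejects null at bind time.
new AbstractModule() {
    @Override
    protected void configure() {
        bind(Service.class).toInstance(service); // was: toProvider(Providers.of(service))
    }
};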
aggParsers = injector.getInstance(AggregatorParsers.class);
// create some random type with some default field, those types will
// stick around for all of the subclasses

View File

@ -25,7 +25,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.util.Providers;
@ -140,31 +139,26 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
b.bind(ClusterService.class).toProvider(Providers.of(clusterService));
b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
},
settingsModule,
scriptModule,
new IndicesModule(namedWriteableRegistry) {
@Override
protected void configure() {
bindMapperExtension();
}
}, new SearchModule(settings, namedWriteableRegistry) {
},
new SearchModule(settings, namedWriteableRegistry) {
@Override
protected void configureSearch() {
// Skip me
}
},
new IndexSettingsModule(index, settings),
new AbstractModule() {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of(clusterService));
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
}
}
new IndexSettingsModule(index, settings)
).createInjector();
}

View File

@ -65,7 +65,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
@ -107,8 +106,7 @@ public class DateHistogramIT extends ESIntegTestCase {
@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("idx").addMapping("type", "_timestamp", "enabled=true"));
createIndex("idx_unmapped");
createIndex("idx", "idx_unmapped");
// TODO: would be nice to have more random data here
assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer"));
List<IndexRequestBuilder> builders = new ArrayList<>();
@ -1141,13 +1139,6 @@ public class DateHistogramIT extends ESIntegTestCase {
}
}
public void testTimestampField() { // see #11692
SearchResponse response = client().prepareSearch("idx").addAggregation(dateHistogram("histo").field("_timestamp").dateHistogramInterval(randomFrom(DateHistogramInterval.DAY, DateHistogramInterval.MONTH))).get();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), greaterThan(0));
}
/**
* When DST ends, local time turns back one hour, so between 2am and 4am wall time we should have four buckets:
* "2015-10-25T02:00:00.000+02:00",

View File

@ -134,13 +134,16 @@ public class SearchSourceBuilderTests extends ESTestCase {
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
}, settingsModule,
scriptModule, new IndicesModule(namedWriteableRegistry) {
b.bind(ScriptService.class).toInstance(scriptModule.getScriptService());
},
settingsModule,
new IndicesModule(namedWriteableRegistry) {
@Override
protected void configure() {
bindMapperExtension();
}
}, new SearchModule(settings, namedWriteableRegistry) {
},
new SearchModule(settings, namedWriteableRegistry) {
@Override
protected void configureSearch() {
// Skip me

View File

@ -36,7 +36,6 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.hamcrest.Matchers;
@ -70,10 +69,8 @@ import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
@ -1340,16 +1337,12 @@ public class FieldSortIT extends ESIntegTestCase {
}
public void testSortMetaField() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).endObject()
.endObject().endObject();
assertAcked(prepareCreate("test")
.addMapping("type", mapping));
createIndex("test");
ensureGreen();
final int numDocs = randomIntBetween(10, 20);
IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs];
for (int i = 0; i < numDocs; ++i) {
indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setTimestamp(Integer.toString(randomInt(1000)))
indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i))
.setSource();
}
indexRandom(true, indexReqs);
@ -1368,37 +1361,6 @@ public class FieldSortIT extends ESIntegTestCase {
assertThat(previous, order == SortOrder.ASC ? lessThan(uid) : greaterThan(uid));
previous = uid;
}
/*
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.setSize(randomIntBetween(1, numDocs + 5))
.addSort("_id", order)
.execute().actionGet();
assertNoFailures(searchResponse);
hits = searchResponse.getHits().hits();
previous = order == SortOrder.ASC ? new BytesRef() : UnicodeUtil.BIG_TERM;
for (int i = 0; i < hits.length; ++i) {
final BytesRef id = new BytesRef(Uid.createUid(hits[i].type(), hits[i].id()));
assertThat(previous, order == SortOrder.ASC ? lessThan(id) : greaterThan(id));
previous = id;
}*/
searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.setSize(randomIntBetween(1, numDocs + 5))
.addSort("_timestamp", order)
.addField("_timestamp")
.execute().actionGet();
assertNoFailures(searchResponse);
hits = searchResponse.getHits().hits();
Long previousTs = order == SortOrder.ASC ? 0 : Long.MAX_VALUE;
for (int i = 0; i < hits.length; ++i) {
SearchHitField timestampField = hits[i].getFields().get("_timestamp");
Long timestamp = timestampField.<Long>getValue();
assertThat(previousTs, order == SortOrder.ASC ? lessThanOrEqualTo(timestamp) : greaterThanOrEqualTo(timestamp));
previousTs = timestamp;
}
}
/**

View File

@ -78,11 +78,11 @@ public class PersistedTaskInfoTests extends ESTestCase {
private static PersistedTaskInfo randomTaskResult() throws IOException {
switch (between(0, 2)) {
case 0:
return new PersistedTaskInfo(randomTaskInfo());
return new PersistedTaskInfo(randomBoolean(), randomTaskInfo());
case 1:
return new PersistedTaskInfo(randomTaskInfo(), new RuntimeException("error"));
case 2:
return new PersistedTaskInfo(randomTaskInfo(), randomTaskActionResult());
return new PersistedTaskInfo(randomTaskInfo(), randomTaskResponse());
default:
throw new UnsupportedOperationException("Unsupported random TaskResult constructor");
}
@ -117,7 +117,7 @@ public class PersistedTaskInfoTests extends ESTestCase {
}
}
private static ToXContent randomTaskActionResult() {
private static ToXContent randomTaskResponse() {
Map<String, String> result = new TreeMap<>();
int fields = between(0, 10);
for (int f = 0; f < fields; f++) {
@ -126,7 +126,7 @@ public class PersistedTaskInfoTests extends ESTestCase {
return new ToXContent() {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
// Results in Elasticsearch never output a leading startObject. There isn't really a good reason, they just don't.
// Responses in Elasticsearch never output a leading startObject. There isn't really a good reason, they just don't.
for (Map.Entry<String, String> entry : result.entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}

View File

@ -19,14 +19,20 @@
package org.elasticsearch.timestamp;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.util.Collection;
import java.util.Locale;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
@ -41,8 +47,17 @@ import static org.hamcrest.Matchers.notNullValue;
/**
*/
public class SimpleTimestampIT extends ESIntegTestCase {
private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build();
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(InternalSettingsPlugin.class);
}
public void testSimpleTimestamp() throws Exception {
client().admin().indices().prepareCreate("test")
.setSettings(BW_SETTINGS)
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("_timestamp").field("enabled", true).endObject().endObject().endObject())
.execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
@ -98,7 +113,7 @@ public class SimpleTimestampIT extends ESIntegTestCase {
String type = "mytype";
XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject();
assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));
assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder));
// check mapping again
assertTimestampMappingEnabled(index, type, true);
@ -117,7 +132,7 @@ public class SimpleTimestampIT extends ESIntegTestCase {
String type = "mytype";
XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject();
assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));
assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder));
// check mapping again
assertTimestampMappingEnabled(index, type, true);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.ttl;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
@ -26,14 +27,19 @@ import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -60,6 +66,11 @@ public class SimpleTTLIT extends ESIntegTestCase {
return 2;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(InternalSettingsPlugin.class);
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
@ -70,6 +81,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
public void testSimpleTTL() throws Exception {
assertAcked(prepareCreate("test")
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
@ -209,7 +221,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
String type = "mytype";
XContentBuilder builder = jsonBuilder().startObject().startObject("_ttl").field("enabled", true).endObject().endObject();
assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder));
assertAcked(client().admin().indices().prepareCreate(index).setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id).addMapping(type, builder));
// check mapping again
assertTTLMappingEnabled(index, type);
@ -232,6 +244,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
*/
public void testNoopUpdate() throws IOException {
assertAcked(prepareCreate("test")
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")

View File

@ -0,0 +1,237 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.update;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class TimestampTTLBWIT extends ESIntegTestCase {
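// Backwards-compatibility tests for the removed _timestamp and _ttl metadata fields;
// indices are created as version 2.3 (via SETTING_VERSION_CREATED below) because these
// fields are only supported on indices created before 5.0.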
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(
UpdateIT.FieldIncrementScriptPlugin.class,
UpdateIT.ExtractContextInSourceScriptPlugin.class,
UpdateIT.PutFieldValuesScriptPlugin.class,
InternalSettingsPlugin.class
);
}
public void testSort() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).endObject()
.endObject().endObject();
assertAcked(prepareCreate("test")
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
.addMapping("type", mapping));
ensureGreen();
final int numDocs = randomIntBetween(10, 20);
IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs];
for (int i = 0; i < numDocs; ++i) {
indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setTimestamp(Integer.toString(randomInt(1000)))
.setSource();
}
indexRandom(true, indexReqs);
SortOrder order = randomFrom(SortOrder.values());
SearchResponse searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.setSize(randomIntBetween(1, numDocs + 5))
.addSort("_timestamp", order)
.addField("_timestamp")
.execute().actionGet();
assertNoFailures(searchResponse);
SearchHit[] hits = searchResponse.getHits().hits();
Long previousTs = order == SortOrder.ASC ? 0 : Long.MAX_VALUE;
for (int i = 0; i < hits.length; ++i) {
SearchHitField timestampField = hits[i].getFields().get("_timestamp");
Long timestamp = timestampField.<Long>getValue();
assertThat(previousTs, order == SortOrder.ASC ? lessThanOrEqualTo(timestamp) : greaterThanOrEqualTo(timestamp));
previousTs = timestamp;
}
}
public void testUpdate() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_ttl").field("enabled", true).endObject()
.endObject()
.endObject()));
ensureGreen();
try {
client().prepareUpdate(indexOrAlias(), "type1", "1")
.setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
fail();
} catch (DocumentMissingException e) {
// all is well
}
// check TTL is kept after an update without TTL
client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get();
GetResponse getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L));
client().prepareUpdate(indexOrAlias(), "type1", "2")
.setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L));
// check TTL update
client().prepareUpdate(indexOrAlias(), "type1", "2")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values",
Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet();
getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L));
assertThat(ttl, lessThanOrEqualTo(3600000L));
// check timestamp update
client().prepareIndex("test", "type1", "3").setSource("field", 1).setRefreshPolicy(IMMEDIATE).get();
client().prepareUpdate(indexOrAlias(), "type1", "3")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values",
Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute()
.actionGet();
getResponse = client().prepareGet("test", "type1", "3").setFields("_timestamp").execute().actionGet();
long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
assertThat(timestamp, equalTo(1258294332000L));
}
public void testContextVariables() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id)
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_ttl").field("enabled", true).endObject()
.endObject()
.endObject())
.addMapping("subtype1", XContentFactory.jsonBuilder()
.startObject()
.startObject("subtype1")
.startObject("_parent").field("type", "type1").endObject()
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_ttl").field("enabled", true).endObject()
.endObject()
.endObject())
);
ensureGreen();
// Index some documents
long timestamp = System.currentTimeMillis();
client().prepareIndex()
.setIndex("test")
.setType("type1")
.setId("parentId1")
.setTimestamp(String.valueOf(timestamp-1))
.setSource("field1", 0, "content", "bar")
.execute().actionGet();
long ttl = 10000;
client().prepareIndex()
.setIndex("test")
.setType("subtype1")
.setId("id1")
.setParent("parentId1")
.setRouting("routing1")
.setTimestamp(String.valueOf(timestamp))
.setTTL(ttl)
.setSource("field1", 1, "content", "foo")
.execute().actionGet();
// Update the first object and note context variables values
UpdateResponse updateResponse = client().prepareUpdate("test", "subtype1", "id1")
.setRouting("routing1")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "extract_ctx", null))
.execute().actionGet();
assertEquals(2, updateResponse.getVersion());
GetResponse getResponse = client().prepareGet("test", "subtype1", "id1").setRouting("routing1").execute().actionGet();
Map<String, Object> updateContext = (Map<String, Object>) getResponse.getSourceAsMap().get("update_context");
assertEquals("test", updateContext.get("_index"));
assertEquals("subtype1", updateContext.get("_type"));
assertEquals("id1", updateContext.get("_id"));
assertEquals(1, updateContext.get("_version"));
assertEquals("parentId1", updateContext.get("_parent"));
assertEquals("routing1", updateContext.get("_routing"));
assertThat(((Integer) updateContext.get("_ttl")).longValue(), allOf(greaterThanOrEqualTo(ttl-3000), lessThanOrEqualTo(ttl)));
// Idem with the second object
updateResponse = client().prepareUpdate("test", "type1", "parentId1")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "extract_ctx", null))
.execute().actionGet();
assertEquals(2, updateResponse.getVersion());
getResponse = client().prepareGet("test", "type1", "parentId1").execute().actionGet();
updateContext = (Map<String, Object>) getResponse.getSourceAsMap().get("update_context");
assertEquals("test", updateContext.get("_index"));
assertEquals("type1", updateContext.get("_type"));
assertEquals("parentId1", updateContext.get("_id"));
assertEquals(1, updateContext.get("_version"));
assertNull(updateContext.get("_parent"));
assertNull(updateContext.get("_routing"));
assertNull(updateContext.get("_ttl"));
}
private static String indexOrAlias() {
return randomBoolean() ? "test" : "alias";
}
}

View File

@ -62,14 +62,12 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
@ -357,21 +355,14 @@ public class UpdateIT extends ESIntegTestCase {
FieldIncrementScriptPlugin.class,
ScriptedUpsertScriptPlugin.class,
ExtractContextInSourceScriptPlugin.class,
InternalSettingsPlugin.class // uses index.merge.enabled
InternalSettingsPlugin.class
);
}
private void createTestIndex() throws Exception {
logger.info("--> creating index test");
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_ttl").field("enabled", true).endObject()
.endObject()
.endObject()));
assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
}
public void testUpsert() throws Exception {
@ -638,34 +629,6 @@ public class UpdateIT extends ESIntegTestCase {
assertThat(getResponse.isExists(), equalTo(false));
}
// check TTL is kept after an update without TTL
client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get();
GetResponse getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L));
client().prepareUpdate(indexOrAlias(), "type1", "2")
.setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L));
// check TTL update
client().prepareUpdate(indexOrAlias(), "type1", "2")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet();
getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet();
ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L));
assertThat(ttl, lessThanOrEqualTo(3600000L));
// check timestamp update
client().prepareIndex("test", "type1", "3").setSource("field", 1).setRefreshPolicy(IMMEDIATE).get();
client().prepareUpdate(indexOrAlias(), "type1", "3")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute()
.actionGet();
getResponse = client().prepareGet("test", "type1", "3").setFields("_timestamp").execute().actionGet();
long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
assertThat(timestamp, equalTo(1258294332000L));
// check fields parameter
client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
@ -682,7 +645,7 @@ public class UpdateIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
for (int i = 0; i < 5; i++) {
getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1"));
assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2"));
}
@ -690,7 +653,7 @@ public class UpdateIT extends ESIntegTestCase {
// change existing field
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
for (int i = 0; i < 5; i++) {
getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3"));
assertThat(getResponse.getSourceAsMap().get("field2").toString(), equalTo("2"));
}
@ -708,7 +671,7 @@ public class UpdateIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
for (int i = 0; i < 5; i++) {
getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
Map map1 = (Map) getResponse.getSourceAsMap().get("map");
assertThat(map1.size(), equalTo(3));
assertThat(map1.containsKey("map1"), equalTo(true));
@ -760,16 +723,12 @@ public class UpdateIT extends ESIntegTestCase {
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_ttl").field("enabled", true).endObject()
.endObject()
.endObject())
.addMapping("subtype1", XContentFactory.jsonBuilder()
.startObject()
.startObject("subtype1")
.startObject("_parent").field("type", "type1").endObject()
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_ttl").field("enabled", true).endObject()
.endObject()
.endObject())
);
@ -813,7 +772,6 @@ public class UpdateIT extends ESIntegTestCase {
assertEquals(1, updateContext.get("_version"));
assertEquals("parentId1", updateContext.get("_parent"));
assertEquals("routing1", updateContext.get("_routing"));
assertThat(((Integer) updateContext.get("_ttl")).longValue(), allOf(greaterThanOrEqualTo(ttl-3000), lessThanOrEqualTo(ttl)));
// Idem with the second object
updateResponse = client().prepareUpdate("test", "type1", "parentId1")
@ -904,13 +862,6 @@ public class UpdateIT extends ESIntegTestCase {
public void testStressUpdateDeleteConcurrency() throws Exception {
//We create an index with merging disabled so that deletes don't get merged away
assertAcked(prepareCreate("test")
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("_timestamp").field("enabled", true).endObject()
.startObject("_ttl").field("enabled", true).endObject()
.endObject()
.endObject())
.setSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)));
ensureGreen();

View File

@ -127,26 +127,6 @@ Each bulk item can include the parent value using the `_parent`/`parent`
field. It automatically follows the behavior of the index / delete
operation based on the `_parent` / `_routing` mapping.
[float]
[[bulk-timestamp]]
=== Timestamp
deprecated[2.0.0,The `_timestamp` field is deprecated. Instead, use a normal <<date,`date`>> field and set its value explicitly]
Each bulk item can include the timestamp value using the
`_timestamp`/`timestamp` field. It automatically follows the behavior of
the index operation based on the `_timestamp` mapping.
[float]
[[bulk-ttl]]
=== TTL
deprecated[2.0.0,The current `_ttl` implementation is deprecated and will be replaced with a different implementation in a future version]
Each bulk item can include the ttl value using the `_ttl`/`ttl` field.
It automatically follows the behavior of the index operation based on
the `_ttl` mapping.
[float]
[[bulk-consistency]]
=== Write Consistency

View File

@ -290,70 +290,6 @@ When indexing a child document, the routing value is automatically set
to be the same as its parent, unless the routing value is explicitly
specified using the `routing` parameter.
[float]
[[index-timestamp]]
=== Timestamp
deprecated[2.0.0,The `_timestamp` field is deprecated. Instead, use a normal <<date,`date`>> field and set its value explicitly]
A document can be indexed with a `timestamp` associated with it. The
`timestamp` value of a document can be set using the `timestamp`
parameter. For example:
[source,js]
--------------------------------------------------
PUT twitter/tweet/1?timestamp=2009-11-15T14:12:12
{
"user" : "kimchy",
"message" : "trying out Elasticsearch"
}
--------------------------------------------------
// CONSOLE
If the `timestamp` value is not provided externally or in the `_source`,
the `timestamp` will be automatically set to the date the document was
processed by the indexing chain. More information can be found on the
<<mapping-timestamp-field,_timestamp mapping
page>>.
[float]
[[index-ttl]]
=== TTL
deprecated[2.0.0,The current `_ttl` implementation is deprecated and will be replaced with a different implementation in a future version]
A document can be indexed with a `ttl` (time to live) associated with
it. Expired documents will be expunged automatically. The expiration
date that will be set for a document with a provided `ttl` is relative
to the `timestamp` of the document, meaning it can be based on the time
of indexing or on any time provided. The provided `ttl` must be strictly
positive and can be a number (in milliseconds) or any valid time value
as shown in the following examples:
[source,js]
--------------------------------------------------
PUT twitter/tweet/1?ttl=86400000ms
{
"user": "kimchy",
"message": "Trying out elasticsearch, so far so good?"
}
--------------------------------------------------
// CONSOLE
[source,js]
--------------------------------------------------
PUT twitter/tweet/1?ttl=1d
{
"user": "kimchy",
"message": "Trying out elasticsearch, so far so good?"
}
--------------------------------------------------
// CONSOLE
More information can be found on the
<<mapping-ttl-field,_ttl mapping page>>.
[float]
[[index-distributed]]
=== Distributed

View File

@ -272,8 +272,6 @@ change:
* `_version`
* `_routing`
* `_parent`
* `_timestamp`
* `_ttl`
Setting `_version` to `null` or clearing it from the `ctx` map is just like not
sending the version in an indexing request. It will cause that document to be

View File

@ -57,7 +57,7 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
In addition to `_source`, the following variables are available through
the `ctx` map: `_index`, `_type`, `_id`, `_version`, `_routing`,
`_parent`, `_timestamp`, `_ttl`.
`_parent`.
We can also add a new field to the document:

View File

@ -131,11 +131,6 @@ specific index module:
Set to `true` to disable index metadata reads and writes.
`index.ttl.disable_purge`::
experimental[] Disables the purge of <<mapping-ttl-field,expired docs>> on
the current index.
`index.max_refresh_listeners`::
Maximum number of refresh listeners available on each shard of the index.

View File

@ -439,8 +439,7 @@ The following example sets the `_id` metadata field of a document to `1`:
}
--------------------------------------------------
The following metadata fields are accessible by a processor: `_index`, `_type`, `_id`, `_routing`, `_parent`,
`_timestamp`, and `_ttl`.
The following metadata fields are accessible by a processor: `_index`, `_type`, `_id`, `_routing`, `_parent`.
[float]
[[accessing-ingest-metadata]]

View File

@ -48,14 +48,6 @@ can be customised when a mapping type is created.
All fields in the document which contain non-null values.
<<mapping-timestamp-field,`_timestamp`>>::
A timestamp associated with the document, either specified manually or auto-generated.
<<mapping-ttl-field,`_ttl`>>::
How long a document should live before it is automatically deleted.
[float]
=== Routing meta-fields
@ -92,10 +84,6 @@ include::fields/routing-field.asciidoc[]
include::fields/source-field.asciidoc[]
include::fields/timestamp-field.asciidoc[]
include::fields/ttl-field.asciidoc[]
include::fields/type-field.asciidoc[]
include::fields/uid-field.asciidoc[]

View File

@ -1,97 +0,0 @@
[[mapping-timestamp-field]]
=== `_timestamp` field
deprecated[2.0.0,The `_timestamp` field is deprecated. Instead, use a normal <<date,`date`>> field and set its value explicitly]
The `_timestamp` field, when enabled, allows a timestamp to be indexed and
stored with a document. The timestamp may be specified manually, generated
automatically, or set to a default value:
[source,js]
------------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"_timestamp": { <1>
"enabled": true
}
}
}
}
PUT my_index/my_type/1?timestamp=2015-01-01 <2>
{ "text": "Timestamp as a formatted date" }
PUT my_index/my_type/2?timestamp=1420070400000 <3>
{ "text": "Timestamp as milliseconds since the epoch" }
PUT my_index/my_type/3 <4>
{ "text": "Autogenerated timestamp set to now()" }
------------------------------------
// CONSOLE
<1> Enable the `_timestamp` field with default settings.
<2> Set the timestamp manually with a formatted date.
<3> Set the timestamp with milliseconds since the epoch.
<4> Auto-generates a timestamp with <<date-math,now()>>.
The behaviour of the `_timestamp` field can be configured with the following parameters:
`default`::
A default value to be used if none is provided. Defaults to <<date-math,now()>>.
`format`::
The <<mapping-date-format,date format>> (or formats) to use when parsing timestamps. Defaults to `epoch_millis||strictDateOptionalTime`.
`ignore_missing`::
If `true` (default), replace missing timestamps with the `default` value. If `false`, throw an exception.
The value of the `_timestamp` field is accessible in queries, aggregations, scripts,
and when sorting:
[source,js]
--------------------------
GET my_index/_search
{
"query": {
"range": {
"_timestamp": { <1>
"gte": "2015-01-01"
}
}
},
"aggs": {
"Timestamps": {
"terms": {
"field": "_timestamp", <2>
"size": 10
}
}
},
"sort": [
{
"_timestamp": { <3>
"order": "desc"
}
}
],
"script_fields": {
"Timestamp": {
"script": "doc['_timestamp']" <4>
}
}
}
--------------------------
// CONSOLE
// TEST[continued]
<1> Querying on the `_timestamp` field
<2> Aggregating on the `_timestamp` field
<3> Sorting on the `_timestamp` field
<4> Accessing the `_timestamp` field in scripts (inline scripts must be <<enable-dynamic-scripting,enabled>> for this example to work)

View File

@ -1,112 +0,0 @@
[[mapping-ttl-field]]
=== `_ttl` field
deprecated[2.0.0,The current `_ttl` implementation is deprecated and will be replaced with a different implementation in a future version]
Some types of documents, such as session data or special offers, come with an
expiration date. The `_ttl` field allows you to specify the minimum time a
document should live, after which time the document is deleted automatically.
[TIP]
.Prefer index-per-timeframe to TTL
======================================================
With TTL, expired documents first have to be marked as deleted and later
purged from the index when segments are merged. For append-only time-based
data such as log events, it is much more efficient to use an index-per-day /
week / month instead of TTLs. Old log data can be removed by simply deleting
old indices.
======================================================
The `_ttl` field may be enabled as follows:
[source,js]
-------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"_ttl": {
"enabled": true
}
}
}
}
PUT my_index/my_type/1?ttl=10m <1>
{
"text": "Will expire in 10 minutes"
}
PUT my_index/my_type/2 <2>
{
"text": "Will not expire"
}
-------------------------------
// CONSOLE
<1> This document will expire 10 minutes after being indexed.
<2> This document has no TTL set and will not expire.
The expiry time is calculated as the value of the
<<mapping-timestamp-field,`_timestamp`>> field (or `now()` if the `_timestamp`
is not enabled) plus the `ttl` specified in the indexing request.
==== Default TTL
You can provide a default `_ttl`, which will be applied to indexing requests where the `ttl` is not specified:
[source,js]
-------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"_ttl": {
"enabled": true,
"default": "5m"
}
}
}
}
PUT my_index/my_type/1?ttl=10m <1>
{
"text": "Will expire in 10 minutes"
}
PUT my_index/my_type/2 <2>
{
"text": "Will expire in 5 minutes"
}
-------------------------------
// CONSOLE
<1> This document will expire 10 minutes after being indexed.
<2> This document has no TTL set and so will expire after the default 5 minutes.
The `default` value can use <<time-units,time units>> like `d` for days, and
will use `ms` as the default unit if no time unit is provided.
You can dynamically update the `default` value using the put mapping
API. It won't change the `_ttl` of already indexed documents but will be
used for future documents.
==== Note on documents expiration
Expired documents will be automatically deleted periodically. The following
settings control the expiry process:
`indices.ttl.interval`::
How often the purge process should run. Defaults to `60s`. Expired documents
may still be retrieved before they are purged.
`indices.ttl.bulk_size`::
How many deletions are handled by a single <<docs-bulk,`bulk`>> request. The
default value is `10000`.
==== Note on `detect_noop`
If an update tries to update just the `_ttl` without changing the `_source` of
the document, its expiration time won't be updated if `detect_noop` is `true`.
In 2.1 `detect_noop` defaults to `true`.

View File

@ -7,13 +7,9 @@ your application to Elasticsearch 5.0.
[float]
=== Indices created before 5.0
Elasticsearch 5.0 can read indices created in version 2.0 and above. If any
of your indices were created before 2.0 you will need to upgrade to the
latest 2.x version of Elasticsearch first, in order to upgrade your indices or
to delete the old indices. Elasticsearch will not start in the presence of old
indices. To upgrade 2.x indices, first start a node which has access to all
the data folders and let it upgrade all the indices before starting up the
rest of the cluster.
Elasticsearch 5.0 can read indices created in version 2.0 or above. An
Elasticsearch 5.0 node will not start in the presence of indices created in a
version of Elasticsearch before 2.0.
[IMPORTANT]
.Reindex indices from Elasticsearch 1.x or before
@ -26,6 +22,12 @@ way to do this is to upgrade to Elasticsearch 2.3 or later and to use the
=========================================
The first time Elasticsearch 5.0 starts, it will automatically rename index
folders to use the index UUID instead of the index name. If you are using
<<indices-shadow-replicas,shadow replicas>> with shared data folders, first
start a single node with access to all data folders, and let it rename all
index folders before starting other nodes in the cluster.
[float]
=== Also see:

View File

@ -300,12 +300,12 @@ requests can now be validated at call time which results in much clearer errors.
The `setQuery(BytesReference)` method has been removed in favor of using `setQuery(QueryBuilder)`
=== ClusterStatsResponse
==== ClusterStatsResponse
Removed the `getMemoryAvailable` method from `OsStats`, which could previously be accessed by calling
`clusterStatsResponse.getNodesStats().getOs().getMemoryAvailable()`.
=== setRefresh(boolean) has been removed
==== setRefresh(boolean) has been removed
`setRefresh(boolean)` has been removed in favor of `setRefreshPolicy(RefreshPolicy)` because there
are now three options (NONE, IMMEDIATE, and WAIT_FOR). `setRefresh(IMMEDIATE)` has the same behavior

View File

@ -76,6 +76,14 @@ PUT my_index
Also the `precision_step` parameter is now irrelevant and will be rejected on
indices that are created on or after 5.0.
==== `_timestamp` and `_ttl`
The `_timestamp` and `_ttl` fields were deprecated and are now removed. As a
replacement for `_timestamp`, you should populate a regular date field with the
current timestamp on the application side. For `_ttl`, you should either use
time-based indices when applicable, or schedule (e.g. via cron) a
delete-by-query with a range query on a timestamp field.
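For example, a minimal sketch in which the index, field name, and retention
window are illustrative:
[source,js]
--------------------------------------------------
PUT my_index/my_type/1
{
  "text": "some document",
  "created_at": "2016-06-22T10:00:00Z" <1>
}

POST my_index/_delete_by_query <2>
{
  "query": {
    "range": {
      "created_at": { "lt": "now-30d" }
    }
  }
}
--------------------------------------------------
<1> A regular `date` field populated by the application, replacing `_timestamp`.
<2> A periodically scheduled delete-by-query, replacing `_ttl`.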
==== `index` property
On all field datatypes (except for the deprecated `string` field), the `index`

View File

@ -183,6 +183,9 @@ This is now consistent for source filtering on other places in the search API.
* Nested inner hits will no longer include `_index`, `_type` and `_id` keys. For nested inner hits these values
are always the same as the `_index`, `_type` and `_id` keys of the root search hit.
* Parent/child inner hits will no longer include the `_index` key. For parent/child inner hits the `_index` key is
always the same as that of the parent search hit.
==== Query Profiler
In the response for profiling queries, the `query_type` has been renamed to `type` and `lucene` has been renamed to

View File

@ -30,10 +30,6 @@ Available settings include:
Control the resource limits on the shard recovery process.
<<indices-ttl,TTL interval>>::
Control how expired documents are removed.
include::indices/circuit_breaker.asciidoc[]
include::indices/fielddata.asciidoc[]
@ -46,5 +42,3 @@ include::indices/request_cache.asciidoc[]
include::indices/recovery.asciidoc[]
include::indices/ttl_interval.asciidoc[]

View File

@ -1,16 +0,0 @@
[[indices-ttl]]
=== TTL interval
Documents that have a <<mapping-ttl-field,`ttl`>> value set need to be deleted
once they have expired. How and how often they are deleted is controlled by
the following dynamic cluster settings:
`indices.ttl.interval`::
How often the deletion process runs. Defaults to `60s`.
`indices.ttl.bulk_size`::
The deletions are processed with a <<docs-bulk,bulk request>>.
The number of deletions processed can be configured with
this setting. Defaults to `10000`.

View File

@ -107,6 +107,61 @@
- match: { _source.foofield: "exists" }
- match: { _source.foofield2: "ran" }
---
"Test pipeline with empty on_failure in a processor":
- do:
catch: request
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"fail" : {
"tag" : "emptyfail",
"message" : "_message",
"on_failure": []
}
}
],
"on_failure": [
{
"set" : {
"field": "on_failure_executed",
"value": true
}
}
]
}
- match: { error.root_cause.0.type: "parse_exception" }
- match: { error.root_cause.0.reason: "[on_failure] processors list cannot be empty" }
- match: { error.root_cause.0.header.processor_type: "fail" }
- match: { error.root_cause.0.header.processor_tag: "emptyfail" }
- match: { error.root_cause.0.header.property_name: "on_failure" }
---
"Test pipeline with empty on_failure in pipeline":
- do:
catch: request
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"set" : {
"field" : "foo",
"value" : "_message"
}
}
],
"on_failure": []
}
- match: { error.root_cause.0.type: "parse_exception" }
- match: { error.root_cause.0.reason: "pipeline [my_pipeline] cannot have an empty on_failure option defined" }
---
"Test pipeline with ignore_failure in a processor":
- do:

View File

@ -479,3 +479,73 @@
- match: { docs.0.processor_results.4.doc._source.foofield2: "ran" }
- match: { docs.0.processor_results.4.doc._source.field1: "123.42 400 <foo>" }
- match: { docs.0.processor_results.4.doc._source.status: 200 }
---
"Test verbose simulate with ignore_failure":
- do:
ingest.simulate:
verbose: true
body: >
{
"pipeline" : {
"description": "_description",
"processors": [
{
"set" : {
"tag" : "setstatus-1",
"field" : "status",
"value" : 200
}
},
{
"rename" : {
"tag" : "rename-1",
"field" : "foofield",
"target_field" : "field1",
"ignore_failure": true,
"on_failure" : [
{
"set" : {
"tag" : "set on_failure rename",
"field" : "foofield",
"value" : "exists"
}
},
{
"rename" : {
"field" : "foofield2",
"target_field" : "field1",
"on_failure" : [
{
"set" : {
"field" : "foofield2",
"value" : "ran"
}
}
]
}
}
]
}
}
]
},
"docs": [
{
"_index": "index",
"_type": "type",
"_id": "id",
"_source": {
"field1": "123.42 400 <foo>"
}
}
]
}
- length: { docs: 1 }
- length: { docs.0.processor_results: 2 }
- match: { docs.0.processor_results.0.tag: "setstatus-1" }
- match: { docs.0.processor_results.0.doc._source.field1: "123.42 400 <foo>" }
- match: { docs.0.processor_results.0.doc._source.status: 200 }
- match: { docs.0.processor_results.1.tag: "rename-1" }
- match: { docs.0.processor_results.1.doc._source.field1: "123.42 400 <foo>" }
- match: { docs.0.processor_results.1.doc._source.status: 200 }

View File

@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -45,7 +44,6 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
@ -88,8 +86,6 @@ public class SearchFieldsTests extends ESIntegTestCase {
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
// _timestamp is randomly enabled via templates but we don't want it here to test stored fields behaviour
.startObject("_timestamp").field("enabled", false).endObject()
.startObject("properties")
.startObject("field1").field("type", "text").field("store", true).endObject()
.startObject("field2").field("type", "text").field("store", false).endObject()
@ -698,7 +694,7 @@ public class SearchFieldsTests extends ESIntegTestCase {
public void testLoadMetadata() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("my-type1", "_timestamp", "enabled=true", "_ttl", "enabled=true", "_parent", "type=parent"));
.addMapping("my-type1", "_parent", "type=parent"));
indexRandom(true,
client().prepareIndex("test", "my-type1", "1")
@ -717,12 +713,6 @@ public class SearchFieldsTests extends ESIntegTestCase {
assertThat(fields.get("field1"), nullValue());
assertThat(fields.get("_routing").isMetadataField(), equalTo(true));
assertThat(fields.get("_routing").getValue().toString(), equalTo("1"));
assertThat(fields.get("_timestamp").isMetadataField(), equalTo(true));
assertThat(fields.get("_timestamp").getValue().toString(), equalTo("205097"));
assertThat(fields.get("_ttl").isMetadataField(), equalTo(true));
// TODO: _ttl should return the original value, but it does not work today because
// it would use now() instead of the value of _timestamp to rebase
// assertThat(fields.get("_ttl").getValue().toString(), equalTo("10000000205097"));
assertThat(fields.get("_parent").isMetadataField(), equalTo(true));
assertThat(fields.get("_parent").getValue().toString(), equalTo("parent_1"));
}

View File

@ -75,6 +75,8 @@ import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.containsString;
/**
@ -93,7 +95,9 @@ public class TemplateQueryParserTests extends ESTestCase {
.put(Environment.PATH_CONF_SETTING.getKey(), this.getDataPath("config"))
.put("node.name", getClass().getName())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
.build();
Environment environment = new Environment(settings);
final Client proxy = (Client) Proxy.newProxyInstance(
Client.class.getClassLoader(),
new Class<?>[]{Client.class}, (proxy1, method, args) -> {
@ -102,15 +106,18 @@ public class TemplateQueryParserTests extends ESTestCase {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
Index index = idxSettings.getIndex();
// TODO: make this use a mock engine instead of mustache and it will no longer be messy!
ScriptModule scriptModule = new ScriptModule(new MustacheScriptEngineService(settings));
ScriptModule scriptModule = new ScriptModule(settings, environment, null, singletonList(new MustacheScriptEngineService(settings)),
emptyList());
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
final ThreadPool threadPool = new ThreadPool(settings);
injector = new ModulesBuilder().add(
(b) -> {
b.bind(Environment.class).toInstance(new Environment(settings));
b.bind(ThreadPool.class).toInstance(threadPool);
b.bind(ThreadPool.class).toInstance(new ThreadPool(settings));
b.bind(Client.class).toInstance(proxy); // not needed here
Multibinder.newSetBinder(b, ScoreFunctionParser.class);
b.bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
},
settingsModule,
new SearchModule(settings, new NamedWriteableRegistry()) {
@ -119,21 +126,10 @@ public class TemplateQueryParserTests extends ESTestCase {
// skip so we don't need transport
}
},
scriptModule,
new IndexSettingsModule(index, settings),
new AbstractModule() {
@Override
protected void configure() {
bind(Client.class).toInstance(proxy); // not needed here
Multibinder.newSetBinder(binder(), ScoreFunctionParser.class);
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
}
}
new IndexSettingsModule(index, settings)
).createInjector();
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
ScriptService scriptService = injector.getInstance(ScriptService.class);
AnalysisService analysisService = new AnalysisRegistry(null, environment).build(idxSettings);
SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
MapperRegistry mapperRegistry = new IndicesModule(new NamedWriteableRegistry()).getMapperRegistry();
MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () ->
@ -153,7 +149,7 @@ public class TemplateQueryParserTests extends ESTestCase {
});
IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
contextFactory = () -> new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService,
similarityService, scriptService, indicesQueriesRegistry, proxy, null, null);
similarityService, scriptModule.getScriptService(), indicesQueriesRegistry, proxy, null, null);
}
@Override

View File

@ -126,21 +126,23 @@ expression returns [boolean s = true]
// processing a variable/method chain. This prevents the chain
// from being applied to rules where it wouldn't be allowed.
unary[boolean c] returns [boolean s = true]
: { !$c }? ( INCR | DECR ) chain[true] # pre
| { !$c }? chain[true] (INCR | DECR ) # post
| { !$c }? chain[false] # read
| { !$c }? ( OCTAL | HEX | INTEGER | DECIMAL ) { $s = false; } # numeric
| { !$c }? TRUE { $s = false; } # true
| { !$c }? FALSE { $s = false; } # false
| { !$c }? NULL { $s = false; } # null
| { !$c }? ( BOOLNOT | BWNOT | ADD | SUB ) unary[false] # operator
| LP decltype RP unary[$c] # cast
: { !$c }? ( INCR | DECR ) chain[true] # pre
| { !$c }? chain[true] (INCR | DECR ) # post
| { !$c }? chain[false] # read
| { !$c }? ( OCTAL | HEX | INTEGER | DECIMAL ) { $s = false; } # numeric
| { !$c }? TRUE { $s = false; } # true
| { !$c }? FALSE { $s = false; } # false
| { !$c }? NULL { $s = false; } # null
| { !$c }? listinitializer { $s = false; } # listinit
| { !$c }? mapinitializer { $s = false; } # mapinit
| { !$c }? ( BOOLNOT | BWNOT | ADD | SUB ) unary[false] # operator
| LP decltype RP unary[$c] # cast
;
chain[boolean c]
: p = primary[$c] secondary[$p.s]* # dynamic
| decltype dot secondary[true]* # static
| NEW TYPE (LBRACE expression RBRACE)+ (dot secondary[true]*)? # newarray
: p = primary[$c] secondary[$p.s]* # dynamic
| decltype dot secondary[true]* # static
| arrayinitializer # newarray
;
primary[boolean c] returns [boolean s = true]
@ -213,3 +215,22 @@ capturingFuncref
localFuncref
: THIS REF ID
;
arrayinitializer
: NEW TYPE (LBRACE expression RBRACE)+ (dot secondary[true]*)? # newstandardarray
| NEW TYPE LBRACE RBRACE LBRACK ( expression ( COMMA expression )* )? SEMICOLON? RBRACK # newinitializedarray
;
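// arrayinitializer: e.g. 'new int[5]' (newstandardarray) or 'new int[] { 1, 2, 3 }' (newinitializedarray)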
listinitializer
: LBRACE expression ( COMMA expression)* RBRACE
| LBRACE RBRACE
;
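// listinitializer: e.g. '[1, 2, 3]' or the empty list '[]' (LBRACE/RBRACE are '[' and ']' in this grammar)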
mapinitializer
: LBRACE maptoken ( COMMA maptoken )* RBRACE
| LBRACE COLON RBRACE
;
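// mapinitializer: e.g. [ 'a' : 1, 'b' : 2 ] or the empty map '[:]'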
maptoken
: expression COLON expression
;

View File

@ -0,0 +1,410 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.BiPredicate;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.ObjIntConsumer;
import java.util.function.Predicate;
import java.util.function.ToDoubleFunction;
import java.util.regex.Matcher;
/** Additional methods added to classes. These must be static methods with receiver as first argument */
public class Augmentation {
// static methods only!
private Augmentation() {}
/** Exposes List.size() as getLength(), so that .length shortcut works on lists */
public static <T> int getLength(List<T> receiver) {
return receiver.size();
}
/** Exposes Matcher.group(String) as namedGroup(String), so it doesn't conflict with group(int) */
public static String namedGroup(Matcher receiver, String name) {
return receiver.group(name);
}
// some groovy methods on iterable
// see http://docs.groovy-lang.org/latest/html/groovy-jdk/java/lang/Iterable.html
/** Iterates over the contents of an iterable, and checks whether a predicate is valid for at least one element. */
public static <T> boolean any(Iterable<T> receiver, Predicate<T> predicate) {
for (T t : receiver) {
if (predicate.test(t)) {
return true;
}
}
return false;
}
/** Counts the number of occurrences which satisfy the given predicate from inside this Iterable. */
public static <T> int count(Iterable<T> receiver, Predicate<T> predicate) {
int count = 0;
for (T t : receiver) {
if (predicate.test(t)) {
count++;
}
}
return count;
}
// instead of covariant overrides for every possibility, we just return receiver as 'def' for now
// that way if someone chains the calls, everything works.
/** Iterates through an Iterable, passing each item to the given consumer. */
public static <T> Object each(Iterable<T> receiver, Consumer<T> consumer) {
receiver.forEach(consumer);
return receiver;
}
/**
* Iterates through an iterable type, passing each item and the item's index
* (a counter starting at zero) to the given consumer.
*/
public static <T> Object eachWithIndex(Iterable<T> receiver, ObjIntConsumer<T> consumer) {
int count = 0;
for (T t : receiver) {
consumer.accept(t, count++);
}
return receiver;
}
/**
* Used to determine if the given predicate is valid (i.e. returns true for all items in this iterable).
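* For example, every([2, 4, 6], t -> t % 2 == 0) returns true.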
*/
public static <T> boolean every(Iterable<T> receiver, Predicate<T> predicate) {
for (T t : receiver) {
if (predicate.test(t) == false) {
return false;
}
}
return true;
}
/**
* Iterates through the Iterable transforming items using the supplied function and
* collecting any non-null results.
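* For example, findResults([1, 2, 3], t -> t > 1 ? t * 10 : null) returns [20, 30].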
*/
public static <T,U> List<U> findResults(Iterable<T> receiver, Function<T,U> filter) {
List<U> list = new ArrayList<>();
for (T t: receiver) {
U result = filter.apply(t);
if (result != null) {
list.add(result);
}
}
return list;
}
/**
* Sorts all Iterable members into groups determined by the supplied mapping function.
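* For example, groupBy([1, 2, 3, 4], t -> t % 2) returns {1=[1, 3], 0=[2, 4]}.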
*/
public static <T,U> Map<U,List<T>> groupBy(Iterable<T> receiver, Function<T,U> mapper) {
Map<U,List<T>> map = new LinkedHashMap<>();
for (T t : receiver) {
U mapped = mapper.apply(t);
List<T> results = map.get(mapped);
if (results == null) {
results = new ArrayList<>();
map.put(mapped, results);
}
results.add(t);
}
return map;
}
/**
* Concatenates the toString() representation of each item in this Iterable,
* with the given String as a separator between each item.
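* For example, joining ["a", "b"] with separator "," yields "a,b".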
*/
public static <T> String join(Iterable<T> receiver, String separator) {
StringBuilder sb = new StringBuilder();
for (T t : receiver) {
if (sb.length() > 0) {
sb.append(separator);
}
sb.append(t);
}
return sb.toString();
}
/**
* Sums the result of applying a function to each item of an Iterable.
*/
public static <T> double sum(Iterable<T> receiver, ToDoubleFunction<T> function) {
double sum = 0;
for (T t : receiver) {
sum += function.applyAsDouble(t);
}
return sum;
}
// some groovy methods on collection
// see http://docs.groovy-lang.org/latest/html/groovy-jdk/java/util/Collection.html
/**
* Iterates through this collection transforming each entry into a new value using
* the function, returning a list of transformed values.
*/
public static <T,U> List<U> collect(Collection<T> receiver, Function<T,U> function) {
List<U> list = new ArrayList<>();
for (T t : receiver) {
list.add(function.apply(t));
}
return list;
}
/**
* Iterates through this collection transforming each entry into a new value using
* the function, adding the values to the specified collection.
*/
public static <T,U> Object collect(Collection<T> receiver, Collection<U> collection, Function<T,U> function) {
for (T t : receiver) {
collection.add(function.apply(t));
}
return collection;
}
/**
* Finds the first value matching the predicate, or returns null.
*/
public static <T> T find(Collection<T> receiver, Predicate<T> predicate) {
for (T t : receiver) {
if (predicate.test(t)) {
return t;
}
}
return null;
}
/**
* Finds all values matching the predicate and returns them as a list.
*/
public static <T> List<T> findAll(Collection<T> receiver, Predicate<T> predicate) {
List<T> list = new ArrayList<>();
for (T t : receiver) {
if (predicate.test(t)) {
list.add(t);
}
}
return list;
}
/**
* Iterates through the collection calling the given function for each item
* but stopping once the first non-null result is found and returning that result.
* If all results are null, null is returned.
*/
public static <T,U> Object findResult(Collection<T> receiver, Function<T,U> function) {
return findResult(receiver, null, function);
}
/**
* Iterates through the collection calling the given function for each item
* but stopping once the first non-null result is found and returning that result.
* If all results are null, defaultResult is returned.
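* For example, findResult([1, 2], "none", x -> x > 10 ? x : null) returns "none".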
*/
public static <T,U> Object findResult(Collection<T> receiver, Object defaultResult, Function<T,U> function) {
for (T t : receiver) {
U value = function.apply(t);
if (value != null) {
return value;
}
}
return defaultResult;
}
/**
* Splits all items into two collections based on the predicate.
* The first list contains all items that match the predicate; the second contains those that don't.
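* For example, split([1, 2, 3], x -> x % 2 == 0) returns [[2], [1, 3]].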
*/
public static <T> List<List<T>> split(Collection<T> receiver, Predicate<T> predicate) {
List<T> matched = new ArrayList<>();
List<T> unmatched = new ArrayList<>();
List<List<T>> result = new ArrayList<>(2);
result.add(matched);
result.add(unmatched);
for (T t : receiver) {
if (predicate.test(t)) {
matched.add(t);
} else {
unmatched.add(t);
}
}
return result;
}
// some groovy methods on map
// see http://docs.groovy-lang.org/latest/html/groovy-jdk/java/util/Map.html
/**
* Iterates through this map transforming each entry into a new value using
* the function, returning a list of transformed values.
*/
public static <K,V,T> List<T> collect(Map<K,V> receiver, BiFunction<K,V,T> function) {
List<T> list = new ArrayList<>();
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
list.add(function.apply(kvPair.getKey(), kvPair.getValue()));
}
return list;
}
/**
* Iterates through this map transforming each entry into a new value using
* the function, adding the values to the specified collection.
*/
public static <K,V,T> Object collect(Map<K,V> receiver, Collection<T> collection, BiFunction<K,V,T> function) {
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
collection.add(function.apply(kvPair.getKey(), kvPair.getValue()));
}
return collection;
}
/** Counts the number of entries in this Map that satisfy the given predicate. */
public static <K,V> int count(Map<K,V> receiver, BiPredicate<K,V> predicate) {
int count = 0;
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
count++;
}
}
return count;
}
/** Iterates through a Map, passing each item to the given consumer. */
public static <K,V> Object each(Map<K,V> receiver, BiConsumer<K,V> consumer) {
receiver.forEach(consumer);
return receiver;
}
/**
* Determines whether the given predicate returns true for all entries in this map.
*/
public static <K,V> boolean every(Map<K,V> receiver, BiPredicate<K,V> predicate) {
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
if (predicate.test(kvPair.getKey(), kvPair.getValue()) == false) {
return false;
}
}
return true;
}
/**
* Finds the first entry matching the predicate, or returns null.
*/
public static <K,V> Map.Entry<K,V> find(Map<K,V> receiver, BiPredicate<K,V> predicate) {
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
return kvPair;
}
}
return null;
}
/**
* Finds all entries matching the predicate and returns them as a map.
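* For example, findAll({a=1, b=2}, (k, v) -> v == 2) returns {b=2}.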
*/
public static <K,V> Map<K,V> findAll(Map<K,V> receiver, BiPredicate<K,V> predicate) {
// try to preserve some properties of the receiver (see the groovy javadocs)
final Map<K,V> map;
if (receiver instanceof TreeMap) {
map = new TreeMap<>();
} else {
map = new LinkedHashMap<>();
}
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
if (predicate.test(kvPair.getKey(), kvPair.getValue())) {
map.put(kvPair.getKey(), kvPair.getValue());
}
}
return map;
}
/**
* Iterates through the map calling the given function for each item
* but stopping once the first non-null result is found and returning that result.
* If all results are null, null is returned.
*/
public static <K,V,T> Object findResult(Map<K,V> receiver, BiFunction<K,V,T> function) {
return findResult(receiver, null, function);
}
/**
* Iterates through the map calling the given function for each item
* but stopping once the first non-null result is found and returning that result.
* If all results are null, defaultResult is returned.
*/
public static <K,V,T> Object findResult(Map<K,V> receiver, Object defaultResult, BiFunction<K,V,T> function) {
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
T value = function.apply(kvPair.getKey(), kvPair.getValue());
if (value != null) {
return value;
}
}
return defaultResult;
}
/**
* Iterates through the map transforming items using the supplied function and
* collecting any non-null results.
*/
public static <K,V,T> List<T> findResults(Map<K,V> receiver, BiFunction<K,V,T> filter) {
List<T> list = new ArrayList<>();
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
T result = filter.apply(kvPair.getKey(), kvPair.getValue());
if (result != null) {
list.add(result);
}
}
return list;
}
/**
* Sorts all Map members into groups determined by the supplied mapping function.
*/
public static <K,V,T> Map<T,Map<K,V>> groupBy(Map<K,V> receiver, BiFunction<K,V,T> mapper) {
Map<T,Map<K,V>> map = new LinkedHashMap<>();
for (Map.Entry<K,V> kvPair : receiver.entrySet()) {
T mapped = mapper.apply(kvPair.getKey(), kvPair.getValue());
Map<K,V> results = map.get(mapped);
if (results == null) {
// try to preserve some properties of the receiver (see the groovy javadocs)
if (receiver instanceof TreeMap) {
results = new TreeMap<>();
} else {
results = new LinkedHashMap<>();
}
map.put(mapped, results);
}
results.put(kvPair.getKey(), kvPair.getValue());
}
return map;
}
}
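For reference, a minimal plain-Java sketch (not part of this commit) of the receiver-as-first-argument convention above; in Painless scripts the same methods are invoked as instance methods on the receiver:

package org.elasticsearch.painless;

import java.util.Arrays;
import java.util.List;

public class AugmentationSketch { // sketch only, not from the commit
    public static void main(String[] args) {
        List<Integer> nums = Arrays.asList(1, -2, 3);
        // the receiver is passed explicitly as the first argument
        System.out.println(Augmentation.any(nums, x -> x < 0));     // true
        System.out.println(Augmentation.join(nums, ","));           // 1,-2,3
        System.out.println(Augmentation.sum(nums, x -> x * 2.0));   // 4.0
        System.out.println(Augmentation.groupBy(nums, x -> x < 0)); // {false=[1, 3], true=[-2]}
    }
}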

View File

@ -350,10 +350,10 @@ public final class Def {
}
throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments.");
}
ref = new FunctionRef(clazz, interfaceMethod, handle, captures);
ref = new FunctionRef(clazz, interfaceMethod, handle, captures.length);
} else {
// whitelist lookup
ref = new FunctionRef(clazz, type, call, captures);
ref = new FunctionRef(clazz, type, call, captures.length);
}
final CallSite callSite;
if (ref.needsBridges()) {

View File

@ -186,15 +186,17 @@ public final class Definition {
public static class Method {
public final String name;
public final Struct owner;
public final boolean augmentation;
public final Type rtn;
public final List<Type> arguments;
public final org.objectweb.asm.commons.Method method;
public final int modifiers;
public final MethodHandle handle;
public Method(String name, Struct owner, Type rtn, List<Type> arguments,
public Method(String name, Struct owner, boolean augmentation, Type rtn, List<Type> arguments,
org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) {
this.name = name;
this.augmentation = augmentation;
this.owner = owner;
this.rtn = rtn;
this.arguments = Collections.unmodifiableList(arguments);
@ -217,7 +219,15 @@ public final class Definition {
// otherwise compute it
final Class<?> params[];
final Class<?> returnValue;
if (Modifier.isStatic(modifiers)) {
if (augmentation) {
// static method disguised as virtual/interface method
params = new Class<?>[1 + arguments.size()];
params[0] = Augmentation.class;
for (int i = 0; i < arguments.size(); i++) {
params[i + 1] = arguments.get(i).clazz;
}
returnValue = rtn.clazz;
} else if (Modifier.isStatic(modifiers)) {
// static method: straightforward copy
params = new Class<?>[arguments.size()];
for (int i = 0; i < arguments.size(); i++) {
@ -242,6 +252,24 @@ public final class Definition {
}
return MethodType.methodType(returnValue, params);
}
public void write(MethodWriter writer) {
final org.objectweb.asm.Type type;
if (augmentation) {
assert java.lang.reflect.Modifier.isStatic(modifiers);
type = WriterConstants.AUGMENTATION_TYPE;
} else {
type = owner.type;
}
if (java.lang.reflect.Modifier.isStatic(modifiers)) {
writer.invokeStatic(type, method);
} else if (java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers())) {
writer.invokeInterface(type, method);
} else {
writer.invokeVirtual(type, method);
}
}
}
public static final class Field {
@ -690,7 +718,7 @@ public final class Definition {
" with arguments " + Arrays.toString(classes) + ".");
}
final Method constructor = new Method(name, owner, returnType, Arrays.asList(args), asm, reflect.getModifiers(), handle);
final Method constructor = new Method(name, owner, false, returnType, Arrays.asList(args), asm, reflect.getModifiers(), handle);
owner.constructors.put(methodKey, constructor);
}
@ -734,24 +762,20 @@ public final class Definition {
}
addConstructorInternal(className, "<init>", args);
} else {
if (methodName.indexOf('/') >= 0) {
String nameAndAlias[] = methodName.split("/");
if (nameAndAlias.length != 2) {
throw new IllegalArgumentException("Currently only two aliases are allowed!");
}
addMethodInternal(className, nameAndAlias[0], nameAndAlias[1], rtn, args);
if (methodName.indexOf("*") >= 0) {
addMethodInternal(className, methodName.substring(0, methodName.length() - 1), true, rtn, args);
} else {
addMethodInternal(className, methodName, null, rtn, args);
addMethodInternal(className, methodName, false, rtn, args);
}
}
} else {
// field
addFieldInternal(className, elements[1], null, rtn);
addFieldInternal(className, elements[1], rtn);
}
}
private final void addMethodInternal(final String struct, final String name, final String alias,
final Type rtn, final Type[] args) {
private final void addMethodInternal(String struct, String name, boolean augmentation,
Type rtn, Type[] args) {
final Struct owner = structsMap.get(struct);
if (owner == null) {
@ -777,20 +801,32 @@ public final class Definition {
"Duplicate method signature [" + methodKey + "] found within the struct [" + owner.name + "].");
}
final Class<?>[] classes = new Class<?>[args.length];
for (int count = 0; count < classes.length; ++count) {
classes[count] = args[count].clazz;
final Class<?> implClass;
final Class<?>[] params;
if (augmentation == false) {
implClass = owner.clazz;
params = new Class<?>[args.length];
for (int count = 0; count < args.length; ++count) {
params[count] = args[count].clazz;
}
} else {
implClass = Augmentation.class;
params = new Class<?>[args.length + 1];
params[0] = owner.clazz;
for (int count = 0; count < args.length; ++count) {
params[count+1] = args[count].clazz;
}
}
final java.lang.reflect.Method reflect;
try {
reflect = owner.clazz.getMethod(alias == null ? name : alias, classes);
} catch (final NoSuchMethodException exception) {
throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) +
"] not found for class [" + owner.clazz.getName() + "]" +
" with arguments " + Arrays.toString(classes) + ".");
reflect = implClass.getMethod(name, params);
} catch (NoSuchMethodException exception) {
throw new IllegalArgumentException("Method [" + name +
"] not found for class [" + implClass.getName() + "]" +
" with arguments " + Arrays.toString(params) + ".");
}
if (!reflect.getReturnType().equals(rtn.clazz)) {
@ -805,25 +841,24 @@ public final class Definition {
MethodHandle handle;
try {
handle = MethodHandles.publicLookup().in(owner.clazz).unreflect(reflect);
handle = MethodHandles.publicLookup().in(implClass).unreflect(reflect);
} catch (final IllegalAccessException exception) {
throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) + "]" +
" not found for class [" + owner.clazz.getName() + "]" +
" with arguments " + Arrays.toString(classes) + ".");
throw new IllegalArgumentException("Method [" + name + "]" +
" not found for class [" + implClass.getName() + "]" +
" with arguments " + Arrays.toString(params) + ".");
}
final int modifiers = reflect.getModifiers();
final Method method = new Method(name, owner, rtn, Arrays.asList(args), asm, modifiers, handle);
final Method method = new Method(name, owner, augmentation, rtn, Arrays.asList(args), asm, modifiers, handle);
if (java.lang.reflect.Modifier.isStatic(modifiers)) {
if (augmentation == false && java.lang.reflect.Modifier.isStatic(modifiers)) {
owner.staticMethods.put(methodKey, method);
} else {
owner.methods.put(methodKey, method);
}
}
private final void addFieldInternal(final String struct, final String name, final String alias,
final Type type) {
private final void addFieldInternal(String struct, String name, Type type) {
final Struct owner = structsMap.get(struct);
if (owner == null) {
@ -844,9 +879,9 @@ public final class Definition {
java.lang.reflect.Field reflect;
try {
reflect = owner.clazz.getField(alias == null ? name : alias);
reflect = owner.clazz.getField(name);
} catch (final NoSuchFieldException exception) {
throw new IllegalArgumentException("Field [" + (alias == null ? name : alias) + "]" +
throw new IllegalArgumentException("Field [" + name + "]" +
" not found for class [" + owner.clazz.getName() + "].");
}
@ -862,7 +897,7 @@ public final class Definition {
setter = MethodHandles.publicLookup().unreflectSetter(reflect);
}
} catch (final IllegalAccessException exception) {
throw new IllegalArgumentException("Getter/Setter [" + (alias == null ? name : alias) + "]" +
throw new IllegalArgumentException("Getter/Setter [" + name + "]" +
" not found for class [" + owner.clazz.getName() + "].");
}
@ -875,9 +910,9 @@ public final class Definition {
" within the struct [" + owner.name + "] is not final.");
}
owner.staticMembers.put(alias == null ? name : alias, field);
owner.staticMembers.put(name, field);
} else {
owner.members.put(alias == null ? name : alias, field);
owner.members.put(name, field);
}
}
@ -915,11 +950,24 @@ public final class Definition {
// https://bugs.openjdk.java.net/browse/JDK-8072746
} else {
try {
Class<?> arguments[] = new Class<?>[method.arguments.size()];
for (int i = 0; i < method.arguments.size(); i++) {
arguments[i] = method.arguments.get(i).clazz;
// TODO: we *have* to remove all these public members and use getter methods to encapsulate!
final Class<?> impl;
final Class<?> arguments[];
if (method.augmentation) {
impl = Augmentation.class;
arguments = new Class<?>[method.arguments.size() + 1];
arguments[0] = method.owner.clazz;
for (int i = 0; i < method.arguments.size(); i++) {
arguments[i + 1] = method.arguments.get(i).clazz;
}
} else {
impl = owner.clazz;
arguments = new Class<?>[method.arguments.size()];
for (int i = 0; i < method.arguments.size(); i++) {
arguments[i] = method.arguments.get(i).clazz;
}
}
java.lang.reflect.Method m = owner.clazz.getMethod(method.method.getName(), arguments);
java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
if (m.getReturnType() != method.rtn.clazz) {
throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
}

View File

@ -53,10 +53,10 @@ public class FunctionRef {
* @param expected interface type to implement.
* @param type the left hand side of a method reference expression
* @param call the right hand side of a method reference expression
* @param captures captured arguments
* @param numCaptures number of captured arguments
*/
public FunctionRef(Definition.Type expected, String type, String call, Class<?>... captures) {
this(expected, expected.struct.getFunctionalMethod(), lookup(expected, type, call, captures.length > 0), captures);
public FunctionRef(Definition.Type expected, String type, String call, int numCaptures) {
this(expected, expected.struct.getFunctionalMethod(), lookup(expected, type, call, numCaptures > 0), numCaptures);
}
/**
@ -64,13 +64,16 @@ public class FunctionRef {
* @param expected interface type to implement
* @param method functional interface method
* @param impl implementation method
* @param captures captured arguments
* @param numCaptures number of captured arguments
*/
public FunctionRef(Definition.Type expected, Definition.Method method, Definition.Method impl, Class<?>... captures) {
public FunctionRef(Definition.Type expected, Definition.Method method, Definition.Method impl, int numCaptures) {
// e.g. compareTo
invokedName = method.name;
// e.g. (Object)Comparator
invokedType = MethodType.methodType(expected.clazz, captures);
MethodType implType = impl.getMethodType();
// only include captured parameters as arguments
invokedType = MethodType.methodType(expected.clazz,
implType.dropParameterTypes(numCaptures, implType.parameterCount()));
// e.g. (Object,Object)int
interfaceMethodType = method.getMethodType().dropParameterTypes(0, 1);
@ -90,6 +93,9 @@ public class FunctionRef {
// owner == null: script class itself
ownerIsInterface = false;
owner = WriterConstants.CLASS_TYPE.getInternalName();
} else if (impl.augmentation) {
ownerIsInterface = false;
owner = WriterConstants.AUGMENTATION_TYPE.getInternalName();
} else {
ownerIsInterface = impl.owner.clazz.isInterface();
owner = impl.owner.type.getInternalName();
@ -98,7 +104,7 @@ public class FunctionRef {
implMethod = impl.handle;
// remove any prepended captured arguments for the 'natural' signature.
samMethodType = adapt(interfaceMethodType, impl.getMethodType().dropParameterTypes(0, captures.length));
samMethodType = adapt(interfaceMethodType, impl.getMethodType().dropParameterTypes(0, numCaptures));
}
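As a sanity check on the MethodType arithmetic above, a standalone sketch (hypothetical implementation signature: one captured String, followed by the two arguments of the functional interface method):

import java.lang.invoke.MethodType;

public class MethodTypeSketch { // sketch only, not from the commit
    public static void main(String[] args) {
        // hypothetical impl signature (String,Object,Object)int with numCaptures == 1
        MethodType implType = MethodType.methodType(int.class, String.class, Object.class, Object.class);
        int numCaptures = 1;
        // invokedType keeps only the captured leading parameters:
        MethodType captures = implType.dropParameterTypes(numCaptures, implType.parameterCount());
        System.out.println(captures); // (String)int
        // samMethodType drops the captures to recover the 'natural' signature:
        MethodType natural = implType.dropParameterTypes(0, numCaptures);
        System.out.println(natural);  // (Object,Object)int
    }
}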
/**
@ -106,11 +112,14 @@ public class FunctionRef {
* <p>
* This will <b>not</b> set implMethodASM. It is for runtime use only.
*/
public FunctionRef(Definition.Type expected, Definition.Method method, MethodHandle impl, Class<?>... captures) {
public FunctionRef(Definition.Type expected, Definition.Method method, MethodHandle impl, int numCaptures) {
// e.g. compareTo
invokedName = method.name;
// e.g. (Object)Comparator
invokedType = MethodType.methodType(expected.clazz, captures);
MethodType implType = impl.type();
// only include captured parameters as arguments
invokedType = MethodType.methodType(expected.clazz,
implType.dropParameterTypes(numCaptures, implType.parameterCount()));
// e.g. (Object,Object)int
interfaceMethodType = method.getMethodType().dropParameterTypes(0, 1);
@ -119,7 +128,7 @@ public class FunctionRef {
implMethodASM = null;
// remove any prepended captured arguments for the 'natural' signature.
samMethodType = adapt(interfaceMethodType, impl.type().dropParameterTypes(0, captures.length));
samMethodType = adapt(interfaceMethodType, impl.type().dropParameterTypes(0, numCaptures));
}
/**

View File

@ -72,6 +72,8 @@ public final class WriterConstants {
public final static Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class);
public final static Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class);
public static final Type AUGMENTATION_TYPE = Type.getType(Augmentation.class);
/**
* A Method instance for {@linkplain Pattern#compile}. This isn't available from Definition because we intentionally don't add it there

View File

@ -277,6 +277,20 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNull(PainlessParser.NullContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitListinit(PainlessParser.ListinitContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMapinit(PainlessParser.MapinitContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@ -452,4 +466,39 @@ class PainlessParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implement
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLocalFuncref(PainlessParser.LocalFuncrefContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNewstandardarray(PainlessParser.NewstandardarrayContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitNewinitializedarray(PainlessParser.NewinitializedarrayContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitListinitializer(PainlessParser.ListinitializerContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMapinitializer(PainlessParser.MapinitializerContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitMaptoken(PainlessParser.MaptokenContext ctx) { return visitChildren(ctx); }
}

View File

@ -263,6 +263,20 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitNull(PainlessParser.NullContext ctx);
/**
* Visit a parse tree produced by the {@code listinit}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitListinit(PainlessParser.ListinitContext ctx);
/**
* Visit a parse tree produced by the {@code mapinit}
* labeled alternative in {@link PainlessParser#unary}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitMapinit(PainlessParser.MapinitContext ctx);
/**
* Visit a parse tree produced by the {@code operator}
* labeled alternative in {@link PainlessParser#unary}.
@ -428,4 +442,36 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitLocalFuncref(PainlessParser.LocalFuncrefContext ctx);
/**
* Visit a parse tree produced by the {@code newstandardarray}
* labeled alternative in {@link PainlessParser#arrayinitializer}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitNewstandardarray(PainlessParser.NewstandardarrayContext ctx);
/**
* Visit a parse tree produced by the {@code newinitializedarray}
* labeled alternative in {@link PainlessParser#arrayinitializer}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitNewinitializedarray(PainlessParser.NewinitializedarrayContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#listinitializer}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitListinitializer(PainlessParser.ListinitializerContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#mapinitializer}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitMapinitializer(PainlessParser.MapinitializerContext ctx);
/**
* Visit a parse tree produced by {@link PainlessParser#maptoken}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitMaptoken(PainlessParser.MaptokenContext ctx);
}

View File

@ -30,6 +30,15 @@ import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.elasticsearch.painless.CompilerSettings;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.antlr.PainlessParser.ListinitContext;
import org.elasticsearch.painless.antlr.PainlessParser.ListinitializerContext;
import org.elasticsearch.painless.antlr.PainlessParser.MapinitContext;
import org.elasticsearch.painless.antlr.PainlessParser.MapinitializerContext;
import org.elasticsearch.painless.antlr.PainlessParser.MaptokenContext;
import org.elasticsearch.painless.antlr.PainlessParser.NewinitializedarrayContext;
import org.elasticsearch.painless.antlr.PainlessParser.NewstandardarrayContext;
import org.elasticsearch.painless.node.EListInit;
import org.elasticsearch.painless.node.EMapInit;
import org.elasticsearch.painless.node.SFunction.Reserved;
import org.elasticsearch.painless.node.SSource.MainMethodReserved;
import org.elasticsearch.painless.node.SFunction.FunctionReserved;
@ -241,7 +250,7 @@ public final class Walker extends PainlessParserBaseVisitor<Object> {
statements.add((AStatement)visit(statement));
}
return new SSource(settings, sourceName, sourceText, debugStream, (MainMethodReserved)reserved.pop(),
return new SSource(settings, sourceName, sourceText, debugStream, (MainMethodReserved)reserved.pop(),
location(ctx), functions, globals, statements);
}
@ -267,7 +276,7 @@ public final class Walker extends PainlessParserBaseVisitor<Object> {
statements.add((AStatement)visit(statement));
}
return new SFunction((FunctionReserved)reserved.pop(), location(ctx), rtnType, name,
return new SFunction((FunctionReserved)reserved.pop(), location(ctx), rtnType, name,
paramTypes, paramNames, statements, false);
}
@ -720,6 +729,16 @@ public final class Walker extends PainlessParserBaseVisitor<Object> {
return new ENull(location(ctx));
}
@Override
public Object visitListinit(ListinitContext ctx) {
return visit(ctx.listinitializer());
}
@Override
public Object visitMapinit(MapinitContext ctx) {
return visit(ctx.mapinitializer());
}
@Override
public Object visitOperator(OperatorContext ctx) {
if (ctx.SUB() != null && ctx.unary() instanceof NumericContext) {
@ -797,27 +816,7 @@ public final class Walker extends PainlessParserBaseVisitor<Object> {
@Override
public Object visitNewarray(NewarrayContext ctx) {
String type = ctx.TYPE().getText();
List<AExpression> expressions = new ArrayList<>();
for (ExpressionContext expression : ctx.expression()) {
expressions.add((AExpression)visitExpression(expression));
}
List<ALink> links = new ArrayList<>();
links.add(new LNewArray(location(ctx), type, expressions));
if (ctx.dot() != null) {
links.add((ALink)visit(ctx.dot()));
for (SecondaryContext secondary : ctx.secondary()) {
links.add((ALink)visit(secondary));
}
} else if (!ctx.secondary().isEmpty()) {
throw location(ctx).createError(new IllegalStateException("Illegal tree structure."));
}
return links;
return visit(ctx.arrayinitializer());
}
@Override
@ -978,9 +977,9 @@ public final class Walker extends PainlessParserBaseVisitor<Object> {
statements.add((AStatement)visit(statement));
}
}
String name = nextLambda();
return new ELambda(name, (FunctionReserved)reserved.pop(), location(ctx),
return new ELambda(name, (FunctionReserved)reserved.pop(), location(ctx),
paramTypes, paramNames, statements);
}
@ -1016,13 +1015,13 @@ public final class Walker extends PainlessParserBaseVisitor<Object> {
// taking integer as argument and returning a new instance, and return a ref to that.
Location location = location(ctx);
String arrayType = ctx.decltype().getText();
SReturn code = new SReturn(location,
SReturn code = new SReturn(location,
new EChain(location,
new LNewArray(location, arrayType, Arrays.asList(
new EChain(location,
new LVariable(location, "size"))))));
new EChain(location,
new LVariable(location, "size"))), false)));
String name = nextLambda();
globals.addSyntheticMethod(new SFunction(new FunctionReserved(), location, arrayType, name,
globals.addSyntheticMethod(new SFunction(new FunctionReserved(), location, arrayType, name,
Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true));
return new EFunctionRef(location(ctx), "this", name);
}
@ -1038,7 +1037,76 @@ public final class Walker extends PainlessParserBaseVisitor<Object> {
public Object visitLocalFuncref(LocalFuncrefContext ctx) {
return new EFunctionRef(location(ctx), ctx.THIS().getText(), ctx.ID().getText());
}
@Override
public Object visitNewstandardarray(NewstandardarrayContext ctx) {
String type = ctx.TYPE().getText();
List<AExpression> expressions = new ArrayList<>();
for (ExpressionContext expression : ctx.expression()) {
expressions.add((AExpression)visitExpression(expression));
}
List<ALink> links = new ArrayList<>();
links.add(new LNewArray(location(ctx), type, expressions, false));
if (ctx.dot() != null) {
links.add((ALink)visit(ctx.dot()));
for (SecondaryContext secondary : ctx.secondary()) {
links.add((ALink)visit(secondary));
}
} else if (!ctx.secondary().isEmpty()) {
throw location(ctx).createError(new IllegalStateException("Illegal tree structure."));
}
return links;
}
@Override
public Object visitNewinitializedarray(NewinitializedarrayContext ctx) {
String type = ctx.TYPE().getText();
List<AExpression> expressions = new ArrayList<>();
for (ExpressionContext expression : ctx.expression()) {
expressions.add((AExpression)visitExpression(expression));
}
List<ALink> links = new ArrayList<>();
links.add(new LNewArray(location(ctx), type, expressions, true));
return links;
}
@Override
public Object visitListinitializer(ListinitializerContext ctx) {
List<AExpression> values = new ArrayList<>();
for (ExpressionContext expression : ctx.expression()) {
values.add((AExpression)visitExpression(expression));
}
return new EListInit(location(ctx), values);
}
@Override
public Object visitMapinitializer(MapinitializerContext ctx) {
List<AExpression> keys = new ArrayList<>();
List<AExpression> values = new ArrayList<>();
for (MaptokenContext maptoken : ctx.maptoken()) {
keys.add((AExpression)visitExpression(maptoken.expression(0)));
values.add((AExpression)visitExpression(maptoken.expression(1)));
}
return new EMapInit(location(ctx), keys, values);
}
@Override
public Object visitMaptoken(MaptokenContext ctx) {
throw location(ctx).createError(new IllegalStateException("Illegal tree structure."));
}
/** Returns name of next lambda */
private String nextLambda() {
return "lambda$" + syntheticCounter++;

View File

@ -76,7 +76,7 @@ public class ECapturingFunctionRef extends AExpression implements ILambda {
// static case
if (captured.type.sort != Definition.Sort.DEF) {
try {
ref = new FunctionRef(expected, captured.type.name, call, captured.type.clazz);
ref = new FunctionRef(expected, captured.type.name, call, 1);
} catch (IllegalArgumentException e) {
throw createError(e);
}

View File

@ -76,10 +76,10 @@ public class EFunctionRef extends AExpression implements ILambda {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + expected.name + "], function not found");
}
ref = new FunctionRef(expected, interfaceMethod, implMethod);
ref = new FunctionRef(expected, interfaceMethod, implMethod, 0);
} else {
// whitelist lookup
ref = new FunctionRef(expected, type, call);
ref = new FunctionRef(expected, type, call, 0);
}
} catch (IllegalArgumentException e) {
throw createError(e);

View File

@ -175,11 +175,7 @@ public class ELambda extends AExpression implements ILambda {
} else {
defPointer = null;
try {
Class<?> captureClasses[] = new Class<?>[captures.size()];
for (int i = 0; i < captures.size(); i++) {
captureClasses[i] = captures.get(i).type.clazz;
}
ref = new FunctionRef(expected, interfaceMethod, desugared.method, captureClasses);
ref = new FunctionRef(expected, interfaceMethod, desugared.method, captures.size());
} catch (IllegalArgumentException e) {
throw createError(e);
}

View File

@ -0,0 +1,100 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.MethodKey;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import java.util.List;
import java.util.Set;
/**
* Represents a list initialization shortcut.
*/
public class EListInit extends AExpression {
final List<AExpression> values;
Method constructor = null;
Method method = null;
public EListInit(Location location, List<AExpression> values) {
super(location);
this.values = values;
}
@Override
void extractVariables(Set<String> variables) {
for (AExpression value : values) {
value.extractVariables(variables);
}
}
@Override
void analyze(Locals locals) {
try {
actual = Definition.getType("ArrayList");
} catch (IllegalArgumentException exception) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
constructor = actual.struct.constructors.get(new MethodKey("<init>", 0));
if (constructor == null) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
method = actual.struct.methods.get(new MethodKey("add", 1));
if (method == null) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
for (int index = 0; index < values.size(); ++index) {
AExpression expression = values.get(index);
expression.expected = Definition.DEF_TYPE;
expression.internal = true;
expression.analyze(locals);
values.set(index, expression.cast(locals));
}
}
@Override
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.newInstance(actual.type);
writer.dup();
writer.invokeConstructor(constructor.owner.type, constructor.method);
for (AExpression value : values) {
writer.dup();
value.write(writer, globals);
method.write(writer);
writer.pop();
}
}
}
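A sketch (not part of this commit) of the Java shape the write() above corresponds to, using the hypothetical Painless list literal [5, 7]:

import java.util.ArrayList;
import java.util.List;

class ListInitLowering { // sketch of the emitted bytecode, not from the commit
    static Object lower() {
        List<Object> list = new ArrayList<>(); // newInstance + dup + invokeConstructor
        list.add(5); // dup receiver, write value, invoke add, pop the boolean result
        list.add(7);
        return list; // the list is left on the stack as the expression value
    }
}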

View File

@ -0,0 +1,123 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.MethodKey;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import java.util.List;
import java.util.Set;
/**
* Represents a map initialization shortcut.
*/
public class EMapInit extends AExpression {
final List<AExpression> keys;
final List<AExpression> values;
Method constructor = null;
Method method = null;
public EMapInit(Location location, List<AExpression> keys, List<AExpression> values) {
super(location);
this.keys = keys;
this.values = values;
}
@Override
void extractVariables(Set<String> variables) {
for (AExpression key : keys) {
key.extractVariables(variables);
}
for (AExpression value : values) {
value.extractVariables(variables);
}
}
@Override
void analyze(Locals locals) {
try {
actual = Definition.getType("HashMap");
} catch (IllegalArgumentException exception) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
constructor = actual.struct.constructors.get(new MethodKey("<init>", 0));
if (constructor == null) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
method = actual.struct.methods.get(new MethodKey("put", 2));
if (method == null) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
if (keys.size() != values.size()) {
throw createError(new IllegalStateException("Illegal tree structure."));
}
for (int index = 0; index < keys.size(); ++index) {
AExpression expression = keys.get(index);
expression.expected = Definition.DEF_TYPE;
expression.internal = true;
expression.analyze(locals);
keys.set(index, expression.cast(locals));
}
for (int index = 0; index < values.size(); ++index) {
AExpression expression = values.get(index);
expression.expected = Definition.DEF_TYPE;
expression.internal = true;
expression.analyze(locals);
values.set(index, expression.cast(locals));
}
}
@Override
void write(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
writer.newInstance(actual.type);
writer.dup();
writer.invokeConstructor(constructor.owner.type, constructor.method);
for (int index = 0; index < keys.size(); ++index) {
AExpression key = keys.get(index);
AExpression value = values.get(index);
writer.dup();
key.write(writer, globals);
value.write(writer, globals);
method.write(writer);
writer.pop();
}
}
}
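A sketch (not part of this commit) of the Java shape the write() above corresponds to, using the hypothetical Painless map literal ['a':1, 'b':2]:

import java.util.HashMap;
import java.util.Map;

class MapInitLowering { // sketch of the emitted bytecode, not from the commit
    static Object lower() {
        Map<Object, Object> map = new HashMap<>(); // newInstance + dup + invokeConstructor
        map.put("a", 1); // dup receiver, write key and value, invoke put, pop the previous-value result
        map.put("b", 2);
        return map; // the map is left on the stack as the expression value
    }
}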

View File

@ -122,14 +122,8 @@ public final class LCallInvoke extends ALink {
for (AExpression argument : arguments) {
argument.write(writer, globals);
}
if (java.lang.reflect.Modifier.isStatic(method.modifiers)) {
writer.invokeStatic(method.owner.type, method.method);
} else if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) {
writer.invokeInterface(method.owner.type, method.method);
} else {
writer.invokeVirtual(method.owner.type, method.method);
}
method.write(writer);
}
@Override

View File

@ -92,11 +92,7 @@ final class LListShortcut extends ALink {
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
writer.invokeInterface(getter.owner.type, getter.method);
} else {
writer.invokeVirtual(getter.owner.type, getter.method);
}
getter.write(writer);
if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
writer.checkCast(getter.rtn.type);
@ -107,11 +103,7 @@ final class LListShortcut extends ALink {
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) {
writer.invokeInterface(setter.owner.type, setter.method);
} else {
writer.invokeVirtual(setter.owner.type, setter.method);
}
setter.write(writer);
writer.writePop(setter.rtn.sort.size);
}

View File

@ -91,11 +91,7 @@ final class LMapShortcut extends ALink {
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
writer.invokeInterface(getter.owner.type, getter.method);
} else {
writer.invokeVirtual(getter.owner.type, getter.method);
}
getter.write(writer);
if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
writer.checkCast(getter.rtn.type);
@ -106,11 +102,7 @@ final class LMapShortcut extends ALink {
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) {
writer.invokeInterface(setter.owner.type, setter.method);
} else {
writer.invokeVirtual(setter.owner.type, setter.method);
}
setter.write(writer);
writer.writePop(setter.rtn.sort.size);
}

View File

@ -37,14 +37,16 @@ public final class LNewArray extends ALink {
final String type;
final List<AExpression> arguments;
final boolean initialize;
public LNewArray(Location location, String type, List<AExpression> arguments) {
public LNewArray(Location location, String type, List<AExpression> arguments, boolean initialize) {
super(location, -1);
this.type = Objects.requireNonNull(type);
this.arguments = Objects.requireNonNull(arguments);
this.initialize = initialize;
}
@Override
void extractVariables(Set<String> variables) {
for (AExpression argument : arguments) {
@ -73,12 +75,13 @@ public final class LNewArray extends ALink {
for (int argument = 0; argument < arguments.size(); ++argument) {
AExpression expression = arguments.get(argument);
expression.expected = Definition.INT_TYPE;
expression.expected = initialize ? Definition.getType(type.struct, 0) : Definition.INT_TYPE;
expression.internal = true;
expression.analyze(locals);
arguments.set(argument, expression.cast(locals));
}
after = Definition.getType(type.struct, arguments.size());
after = Definition.getType(type.struct, initialize ? 1 : arguments.size());
return this;
}
@ -92,14 +95,28 @@ public final class LNewArray extends ALink {
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
for (AExpression argument : arguments) {
argument.write(writer, globals);
}
if (arguments.size() > 1) {
writer.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions());
} else {
if (initialize) {
writer.push(arguments.size());
writer.newArray(Definition.getType(after.struct, 0).type);
for (int index = 0; index < arguments.size(); ++index) {
AExpression argument = arguments.get(index);
writer.dup();
writer.push(index);
argument.write(writer, globals);
writer.arrayStore(Definition.getType(after.struct, 0).type);
}
} else {
for (AExpression argument : arguments) {
argument.write(writer, globals);
}
if (arguments.size() > 1) {
writer.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions());
} else {
writer.newArray(Definition.getType(after.struct, 0).type);
}
}
}
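A sketch (not part of this commit) of the Java shape the initialize branch above corresponds to, using the hypothetical initializer new int[] {5, 7}:

class NewArrayLowering { // sketch of the emitted bytecode, not from the commit
    static int[] lower() {
        int[] a = new int[2]; // push(arguments.size()) + newArray(element type)
        a[0] = 5;             // dup, push index, write value, arrayStore
        a[1] = 7;
        return a;
    }
}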

View File

@ -95,11 +95,7 @@ final class LShortcut extends ALink {
void load(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) {
writer.invokeInterface(getter.owner.type, getter.method);
} else {
writer.invokeVirtual(getter.owner.type, getter.method);
}
getter.write(writer);
if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) {
writer.checkCast(getter.rtn.type);
@ -110,11 +106,7 @@ final class LShortcut extends ALink {
void store(MethodWriter writer, Globals globals) {
writer.writeDebugInfo(location);
if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) {
writer.invokeInterface(setter.owner.type, setter.method);
} else {
writer.invokeVirtual(setter.owner.type, setter.method);
}
setter.write(writer);
writer.writePop(setter.rtn.sort.size);
}

View File

@ -206,10 +206,8 @@ public class SEach extends AStatement {
Type itr = Definition.getType("Iterator");
org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType(itr.type, Definition.DEF_TYPE.type);
writer.invokeDefCall("iterator", methodType, DefBootstrap.ITERATOR);
} else if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) {
writer.invokeInterface(method.owner.type, method.method);
} else {
writer.invokeVirtual(method.owner.type, method.method);
method.write(writer);
}
writer.visitVarInsn(iterator.type.type.getOpcode(Opcodes.ISTORE), iterator.getSlot());

View File

@ -114,7 +114,7 @@ public class SFunction extends AStatement {
org.objectweb.asm.commons.Method method =
new org.objectweb.asm.commons.Method(name, MethodType.methodType(rtnType.clazz, paramClasses).toMethodDescriptorString());
this.method = new Method(name, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null);
this.method = new Method(name, null, false, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null);
}
@Override

View File

@ -44,6 +44,8 @@
* {@link org.elasticsearch.painless.node.EDecimal} - Represents a decimal constant.
* {@link org.elasticsearch.painless.node.EExplicit} - Represents an explicit cast.
* {@link org.elasticsearch.painless.node.EFunctionRef} - Represents a function reference (non-capturing).
* {@link org.elasticsearch.painless.node.EListInit} - Represents a list initialization shortcut.
* {@link org.elasticsearch.painless.node.EMapInit} - Represents a map initialization shortcut.
* {@link org.elasticsearch.painless.node.ENull} - Represents a null constant.
* {@link org.elasticsearch.painless.node.ENumeric} - Represents a non-decimal numeric constant.
* {@link org.elasticsearch.painless.node.EUnary} - Represents a unary math expression.

View File

@ -50,6 +50,15 @@ class Iterable -> java.lang.Iterable {
void forEach(Consumer)
Iterator iterator()
Spliterator spliterator()
# some adaptations of groovy methods
boolean any*(Predicate)
def each*(Consumer)
def eachWithIndex*(ObjIntConsumer)
boolean every*(Predicate)
List findResults*(Function)
Map groupBy*(Function)
String join*(String)
double sum*(ToDoubleFunction)
}
# Readable: i/o

View File

@ -42,7 +42,7 @@ class Matcher -> java.util.regex.Matcher extends Object {
boolean find(int)
String group()
String group(int)
String namedGroup/group(String)
String namedGroup*(String)
int groupCount()
boolean hasAnchoringBounds()
boolean hasTransparentBounds()

View File

@ -39,6 +39,15 @@ class Collection -> java.util.Collection extends Iterable {
Stream stream()
def[] toArray()
def[] toArray(def[])
# some adaptations of groovy methods
List collect*(Function)
def collect*(Collection,Function)
def find*(Predicate)
List findAll*(Predicate)
def findResult*(Function)
def findResult*(def,Function)
List split*(Predicate)
}
class Comparator -> java.util.Comparator {
@ -114,8 +123,7 @@ class List -> java.util.List extends Collection,Iterable {
def remove(int)
void replaceAll(UnaryOperator)
def set(int,def)
# TODO: wtf?
int getLength/size()
int getLength*()
void sort(Comparator)
List subList(int,int)
}
@ -153,6 +161,19 @@ class Map -> java.util.Map {
void replaceAll(BiFunction)
int size()
Collection values()
# some adaptations of groovy methods
List collect*(BiFunction)
def collect*(Collection,BiFunction)
int count*(BiPredicate)
def each*(BiConsumer)
boolean every*(BiPredicate)
Map.Entry find*(BiPredicate)
Map findAll*(BiPredicate)
def findResult*(BiFunction)
def findResult*(def,BiFunction)
List findResults*(BiFunction)
Map groupBy*(BiFunction)
}
class Map.Entry -> java.util.Map$Entry {

View File

@ -0,0 +1,178 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class AugmentationTests extends ScriptTestCase {
public void testStatic() {
assertEquals(1, exec("ArrayList l = new ArrayList(); l.add(1); return l.getLength();"));
assertEquals(1, exec("ArrayList l = new ArrayList(); l.add(1); return l.length;"));
}
public void testSubclass() {
assertEquals(1, exec("List l = new ArrayList(); l.add(1); return l.getLength();"));
assertEquals(1, exec("List l = new ArrayList(); l.add(1); return l.length;"));
}
public void testDef() {
assertEquals(1, exec("def l = new ArrayList(); l.add(1); return l.getLength();"));
assertEquals(1, exec("def l = new ArrayList(); l.add(1); return l.length;"));
}
public void testCapturingReference() {
assertEquals(1, exec("int foo(Supplier t) { return t.get() }" +
"ArrayList l = new ArrayList(); l.add(1);" +
"return foo(l::getLength);"));
assertEquals(1, exec("int foo(Supplier t) { return t.get() }" +
"List l = new ArrayList(); l.add(1);" +
"return foo(l::getLength);"));
assertEquals(1, exec("int foo(Supplier t) { return t.get() }" +
"def l = new ArrayList(); l.add(1);" +
"return foo(l::getLength);"));
}
public void testIterable_Any() {
assertEquals(true,
exec("List l = new ArrayList(); l.add(1); l.any(x -> x == 1)"));
}
public void testIterable_Each() {
assertEquals(1,
exec("List l = new ArrayList(); l.add(1); List l2 = new ArrayList(); l.each(l2::add); return l2.size()"));
}
public void testIterable_EachWithIndex() {
assertEquals(0,
exec("List l = new ArrayList(); l.add(2); Map m = new HashMap(); l.eachWithIndex(m::put); return m.get(2)"));
}
public void testIterable_Every() {
assertEquals(false, exec("List l = new ArrayList(); l.add(1); l.add(2); l.every(x -> x == 1)"));
}
public void testIterable_FindResults() {
assertEquals(1,
exec("List l = new ArrayList(); l.add(1); l.add(2); l.findResults(x -> x == 1 ? x : null).size()"));
}
public void testIterable_GroupBy() {
assertEquals(2,
exec("List l = new ArrayList(); l.add(1); l.add(-1); l.groupBy(x -> x < 0 ? 'negative' : 'positive').size()"));
}
public void testIterable_Join() {
assertEquals("test,ing",
exec("List l = new ArrayList(); l.add('test'); l.add('ing'); l.join(',')"));
}
public void testIterable_Sum() {
assertEquals(5.0D,
exec("List l = new ArrayList(); l.add(1); l.add(2); l.sum(x -> x + 1)"));
}
public void testCollection_Collect() {
assertEquals(Arrays.asList(2, 3),
exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(x -> x + 1)"));
assertEquals(asSet(2, 3),
exec("List l = new ArrayList(); l.add(1); l.add(2); l.collect(new HashSet(), x -> x + 1)"));
}
public void testCollection_Find() {
assertEquals(2,
exec("List l = new ArrayList(); l.add(1); l.add(2); return l.find(x -> x == 2)"));
}
public void testCollection_FindAll() {
assertEquals(Arrays.asList(2),
exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findAll(x -> x == 2)"));
}
public void testCollection_FindResult() {
assertEquals("found",
exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult(x -> x > 1 ? 'found' : null)"));
assertEquals("notfound",
exec("List l = new ArrayList(); l.add(1); l.add(2); return l.findResult('notfound', x -> x > 10 ? 'found' : null)"));
}
public void testCollection_Split() {
assertEquals(Arrays.asList(Arrays.asList(2), Arrays.asList(1)),
exec("List l = new ArrayList(); l.add(1); l.add(2); return l.split(x -> x == 2)"));
}
public void testMap_Collect() {
assertEquals(Arrays.asList("one1", "two2"),
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect((key,value) -> key + value)"));
assertEquals(asSet("one1", "two2"),
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.collect(new HashSet(), (key,value) -> key + value)"));
}
public void testMap_Count() {
assertEquals(1,
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.count((key,value) -> value == 2)"));
}
public void testMap_Each() {
assertEquals(2,
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; Map m2 = new TreeMap(); m.each(m2::put); return m2.size()"));
}
public void testMap_Every() {
assertEquals(false,
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; m.every((key,value) -> value == 2)"));
}
public void testMap_Find() {
assertEquals("two",
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.find((key,value) -> value == 2).key"));
}
public void testMap_FindAll() {
assertEquals(Collections.singletonMap("two", 2),
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findAll((key,value) -> value == 2)"));
}
public void testMap_FindResult() {
assertEquals("found",
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; return m.findResult((key,value) -> value == 2 ? 'found' : null)"));
assertEquals("notfound",
exec("Map m = new TreeMap(); m.one = 1; m.two = 2; " +
"return m.findResult('notfound', (key,value) -> value == 10 ? 'found' : null)"));
}
public void testMap_FindResults() {
assertEquals(Arrays.asList("negative", "positive"),
exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " +
"return m.findResults((key,value) -> value < 0 ? 'negative' : 'positive')"));
}
public void testMap_GroupBy() {
Map<String,Map<String,Integer>> expected = new HashMap<>();
expected.put("negative", Collections.singletonMap("a", -1));
expected.put("positive", Collections.singletonMap("b", 1));
assertEquals(expected,
exec("Map m = new TreeMap(); m.a = -1; m.b = 1; " +
"return m.groupBy((key,value) -> value < 0 ? 'negative' : 'positive')"));
}
}

View File

@ -170,10 +170,24 @@ public class FunctionRefTests extends ScriptTestCase {
assertTrue(expected.getMessage().contains("Unknown reference"));
}
public void testWrongArityNotEnough() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
});
assertTrue(expected.getMessage().contains("Unknown reference"));
}
public void testWrongArityDef() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);");
});
assertTrue(expected.getMessage().contains("Unknown reference"));
}
public void testWrongArityNotEnoughDef() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
});
assertTrue(expected.getMessage().contains("Unknown reference"));
}
}

View File

@ -0,0 +1,146 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class InitializerTests extends ScriptTestCase {
@SuppressWarnings({"unchecked", "rawtypes"})
public void testArrayInitializers() {
int[] ints = (int[])exec("new int[] {}");
assertEquals(0, ints.length);
ints = (int[])exec("new int[] {5, 7, -1, 14}");
assertEquals(4, ints.length);
assertEquals(5, ints[0]);
assertEquals(7, ints[1]);
assertEquals(-1, ints[2]);
assertEquals(14, ints[3]);
ints = (int[])exec("int y = 2; int z = 3; int[] x = new int[] {y*z, y + z, y - z, y, z}; return x;");
assertEquals(5, ints.length);
assertEquals(6, ints[0]);
assertEquals(5, ints[1]);
assertEquals(-1, ints[2]);
assertEquals(2, ints[3]);
assertEquals(3, ints[4]);
Object[] objects = (Object[])exec("int y = 2; List z = new ArrayList(); String s = 'aaa';" +
"Object[] x = new Object[] {y, z, 1 + s, s + 'aaa'}; return x;");
assertEquals(4, objects.length);
assertEquals(new Integer(2), objects[0]);
assertEquals(new ArrayList(), objects[1]);
assertEquals("1aaa", objects[2]);
assertEquals("aaaaaa", objects[3]);
}
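// List literals: [] is empty and [e1, e2, ...] accepts both constants and arbitrary expressions.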
@SuppressWarnings({"unchecked", "rawtypes"})
public void testListInitializers() {
List list = (List)exec("[]");
assertEquals(0, list.size());
list = (List)exec("[5, 7, -1, 14]");
assertEquals(4, list.size());
assertEquals(5, list.get(0));
assertEquals(7, list.get(1));
assertEquals(-1, list.get(2));
assertEquals(14, list.get(3));
list = (List)exec("int y = 2; int z = 3; def x = [y*z, y + z, y - z, y, z]; return x;");
assertEquals(5, list.size());
assertEquals(6, list.get(0));
assertEquals(5, list.get(1));
assertEquals(-1, list.get(2));
assertEquals(2, list.get(3));
assertEquals(3, list.get(4));
list = (List)exec("int y = 2; List z = new ArrayList(); String s = 'aaa'; List x = [y, z, 1 + s, s + 'aaa']; return x;");
assertEquals(4, list.size());
assertEquals(new Integer(2), list.get(0));
assertEquals(new ArrayList(), list.get(1));
assertEquals("1aaa", list.get(2));
assertEquals("aaaaaa", list.get(3));
}
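// Map literals: [:] is the empty map and [k : v, ...] evaluates expression keys and values.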
@SuppressWarnings({"unchecked", "rawtypes"})
public void testMapInitializers() {
Map map = (Map)exec("[:]");
assertEquals(0, map.size());
map = (Map)exec("[5 : 7, -1 : 14]");
assertEquals(2, map.size());
assertEquals(new Integer(7), map.get(5));
assertEquals(new Integer(14), map.get(-1));
map = (Map)exec("int y = 2; int z = 3; Map x = [y*z : y + z, y - z : y, z : z]; return x;");
assertEquals(3, map.size());
assertEquals(new Integer(5), map.get(6));
assertEquals(new Integer(2), map.get(-1));
assertEquals(new Integer(3), map.get(3));
map = (Map)exec("int y = 2; List z = new ArrayList(); String s = 'aaa';" +
"def x = [y : z, 1 + s : s + 'aaa']; return x;");
assertEquals(2, map.size());
assertEquals(new ArrayList(), map.get(2));
assertEquals("aaaaaa", map.get("1aaa"));
}
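// Deeply nested list and map literals inside one initializer; the expected structure is assembled by hand below for comparison.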
@SuppressWarnings({"unchecked", "rawtypes"})
public void testCrazyInitializer() {
Map map = (Map)exec("int y = 2; int z = 3; Map x = [y*z : y + z, 's' : [y, [y : [[z], [], [:]]]], z : [z, 9]]; return x;");
List list0 = new ArrayList();
list0.add(3);
List list1 = new ArrayList();
list1.add(list0);
list1.add(new ArrayList());
list1.add(new HashMap());
Map map0 = new HashMap();
map0.put(2, list1);
List list2 = new ArrayList();
list2.add(2);
list2.add(map0);
List list3 = new ArrayList();
list3.add(3);
list3.add(9);
assertEquals(3, map.size());
assertEquals(new Integer(5), map.get(6));
assertEquals(list2, map.get("s"));
assertEquals(list3, map.get(3));
}
}

View File

@ -180,6 +180,22 @@ public class LambdaTests extends ScriptTestCase {
assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters"));
}
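// mapToInt takes a one-argument function, so a zero-argument lambda triggers the parameter-count check.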
public void testWrongArityNotEnough() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("List l = new ArrayList(); l.add(1); l.add(1); "
+ "return l.stream().mapToInt(() -> 5).sum();");
});
assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
}
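// The same zero-argument lambda rejected when the call goes through a def receiver.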
public void testWrongArityNotEnoughDef() {
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("def l = new ArrayList(); l.add(1); l.add(1); "
+ "return l.stream().mapToInt(() -> 5).sum();");
});
assertTrue(expected.getMessage().contains("Incorrect number of parameters"));
}
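// Lambdas are also usable inside user-declared functions.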
public void testLambdaInFunction() {
assertEquals(5, exec("def foo() { Optional.empty().orElseGet(() -> 5) } return foo();"));
}

View File

@ -21,8 +21,6 @@ package org.elasticsearch.percolator;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
/**
*/
public class MultiPercolateAction extends Action<MultiPercolateRequest, MultiPercolateResponse, MultiPercolateRequestBuilder> {
public static final MultiPercolateAction INSTANCE = new MultiPercolateAction();

Some files were not shown because too many files have changed in this diff.