Merge branch 'master' into index-lifecycle

Commit: 50368656ee

@@ -601,7 +601,6 @@ class BuildPlugin implements Plugin<Project> {
             } else {
                 options.fork = true
                 options.forkOptions.javaHome = compilerJavaHomeFile
-                options.forkOptions.memoryMaximumSize = "512m"
             }
             if (targetCompatibilityVersion == JavaVersion.VERSION_1_8) {
                 // compile with compact 3 profile by default

@@ -1,2 +1,3 @@
 org.gradle.daemon=false
 org.gradle.jvmargs=-Xmx2g
+options.forkOptions.memoryMaximumSize=2g

@@ -22,6 +22,7 @@ import org.elasticsearch.gradle.precommit.ForbiddenApisCliTask
 esplugin {
   description 'The ICU Analysis plugin integrates Lucene ICU module into elasticsearch, adding ICU relates analysis components.'
   classname 'org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin'
   hasClientJar = true
 }

+tasks.withType(ForbiddenApisCliTask) {

@@ -33,7 +33,7 @@ import java.util.Objects;
 public abstract class ShardOperationFailedException implements Streamable, ToXContent {

     protected String index;
-    protected int shardId;
+    protected int shardId = -1;
     protected String reason;
     protected RestStatus status;
     protected Throwable cause;

@@ -54,8 +54,7 @@ public class ShardSearchFailure extends ShardOperationFailedException {

    private SearchShardTarget shardTarget;

-   private ShardSearchFailure() {
+   ShardSearchFailure() {
    }

    public ShardSearchFailure(Exception e) {

@@ -101,6 +100,8 @@ public class ShardSearchFailure extends ShardOperationFailedException {
    public void readFrom(StreamInput in) throws IOException {
        if (in.readBoolean()) {
            shardTarget = new SearchShardTarget(in);
+           index = shardTarget.getFullyQualifiedIndexName();
+           shardId = shardTarget.getShardId().getId();
        }
        reason = in.readString();
        status = RestStatus.readFrom(in);

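Note: `SearchShardTarget#getFullyQualifiedIndexName()` prefixes the index name with the cluster alias (as `alias:index`) when the target shard lives on a remote cluster, so populating the inherited `index` field here lets cross-cluster search failures identify their index unambiguously.
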
@@ -271,7 +271,7 @@ public class ReplicationResponse extends ActionResponse {
        public void readFrom(StreamInput in) throws IOException {
            shardId = ShardId.readShardId(in);
            super.shardId = shardId.getId();
-           super.index = shardId.getIndexName();
+           index = shardId.getIndexName();
            nodeId = in.readOptionalString();
            cause = in.readException();
            status = RestStatus.readFrom(in);

@@ -284,7 +284,7 @@ public class ClusterState implements ToXContentFragment, Diffable<ClusterState> {
        final String TAB = "   ";
        for (IndexMetaData indexMetaData : metaData) {
            sb.append(TAB).append(indexMetaData.getIndex());
-           sb.append(": v[").append(indexMetaData.getVersion()).append("]\n");
+           sb.append(": v[").append(indexMetaData.getVersion()).append("], mv[").append(indexMetaData.getMappingVersion()).append("]\n");
            for (int shard = 0; shard < indexMetaData.getNumberOfShards(); shard++) {
                sb.append(TAB).append(TAB).append(shard).append(": ");
                sb.append("p_term [").append(indexMetaData.primaryTerm(shard)).append("], ");

@@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

+import org.elasticsearch.Assertions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
 import org.elasticsearch.action.support.ActiveShardCount;

@@ -291,6 +292,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {

    public static final String KEY_IN_SYNC_ALLOCATIONS = "in_sync_allocations";
    static final String KEY_VERSION = "version";
+   static final String KEY_MAPPING_VERSION = "mapping_version";
    static final String KEY_ROUTING_NUM_SHARDS = "routing_num_shards";
    static final String KEY_SETTINGS = "settings";
    static final String KEY_STATE = "state";

@@ -309,6 +311,9 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {

    private final Index index;
    private final long version;
+
+   private final long mappingVersion;
+
    private final long[] primaryTerms;

    private final State state;

@@ -336,7 +341,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
    private final ActiveShardCount waitForActiveShards;
    private final ImmutableOpenMap<String, RolloverInfo> rolloverInfos;

-   private IndexMetaData(Index index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings,
+   private IndexMetaData(Index index, long version, long mappingVersion, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings,
                          ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases,
                          ImmutableOpenMap<String, Custom> customs, ImmutableOpenIntMap<Set<String>> inSyncAllocationIds,
                          DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters initialRecoveryFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters,

@@ -345,6 +350,8 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {

        this.index = index;
        this.version = version;
+       assert mappingVersion >= 0 : mappingVersion;
+       this.mappingVersion = mappingVersion;
        this.primaryTerms = primaryTerms;
        assert primaryTerms.length == numberOfShards;
        this.state = state;

@@ -394,6 +401,9 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
        return this.version;
    }

+   public long getMappingVersion() {
+       return mappingVersion;
+   }
+
    /**
     * The term of the current selected primary. This is a non-negative number incremented when

@@ -644,6 +654,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
        private final String index;
        private final int routingNumShards;
        private final long version;
+       private final long mappingVersion;
        private final long[] primaryTerms;
        private final State state;
        private final Settings settings;

@@ -656,6 +667,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
        IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) {
            index = after.index.getName();
            version = after.version;
+           mappingVersion = after.mappingVersion;
            routingNumShards = after.routingNumShards;
            state = after.state;
            settings = after.settings;

@@ -672,6 +684,11 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
            index = in.readString();
            routingNumShards = in.readInt();
            version = in.readLong();
+           if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
+               mappingVersion = in.readVLong();
+           } else {
+               mappingVersion = 1;
+           }
            state = State.fromId(in.readByte());
            settings = Settings.readSettingsFromStream(in);
            primaryTerms = in.readVLongArray();

@@ -707,6 +724,9 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
            out.writeString(index);
            out.writeInt(routingNumShards);
            out.writeLong(version);
+           if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
+               out.writeVLong(mappingVersion);
+           }
            out.writeByte(state.id);
            Settings.writeSettingsToStream(settings, out);
            out.writeVLongArray(primaryTerms);

@@ -723,6 +743,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
        public IndexMetaData apply(IndexMetaData part) {
            Builder builder = builder(index);
            builder.version(version);
+           builder.mappingVersion(mappingVersion);
            builder.setRoutingNumShards(routingNumShards);
            builder.state(state);
            builder.settings(settings);

@@ -739,6 +760,11 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
    public static IndexMetaData readFrom(StreamInput in) throws IOException {
        Builder builder = new Builder(in.readString());
        builder.version(in.readLong());
+       if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
+           builder.mappingVersion(in.readVLong());
+       } else {
+           builder.mappingVersion(1);
+       }
        builder.setRoutingNumShards(in.readInt());
        builder.state(State.fromId(in.readByte()));
        builder.settings(readSettingsFromStream(in));

@@ -778,6 +804,9 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(index.getName()); // uuid will come as part of settings
        out.writeLong(version);
+       if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
+           out.writeVLong(mappingVersion);
+       }
        out.writeInt(routingNumShards);
        out.writeByte(state.id());
        writeSettingsToStream(settings, out);

@@ -821,6 +850,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
        private String index;
        private State state = State.OPEN;
        private long version = 1;
+       private long mappingVersion = 1;
        private long[] primaryTerms = null;
        private Settings settings = Settings.Builder.EMPTY_SETTINGS;
        private final ImmutableOpenMap.Builder<String, MappingMetaData> mappings;

@@ -843,6 +873,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
            this.index = indexMetaData.getIndex().getName();
            this.state = indexMetaData.state;
            this.version = indexMetaData.version;
+           this.mappingVersion = indexMetaData.mappingVersion;
            this.settings = indexMetaData.getSettings();
            this.primaryTerms = indexMetaData.primaryTerms.clone();
            this.mappings = ImmutableOpenMap.builder(indexMetaData.mappings);

@@ -1009,6 +1040,15 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
            return this;
        }

+       public long mappingVersion() {
+           return mappingVersion;
+       }
+
+       public Builder mappingVersion(final long mappingVersion) {
+           this.mappingVersion = mappingVersion;
+           return this;
+       }
+
        /**
         * returns the primary term for the given shard.
         * See {@link IndexMetaData#primaryTerm(int)} for more information.

@@ -1136,7 +1176,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {

        final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE);

-       return new IndexMetaData(new Index(index, uuid), version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(),
+       return new IndexMetaData(new Index(index, uuid), version, mappingVersion, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(),
            tmpAliases.build(), customs.build(), filledInSyncAllocationIds.build(), requireFilters, initialRecoveryFilters, includeFilters, excludeFilters,
            indexCreatedVersion, indexUpgradedVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards, rolloverInfos.build());
    }

@@ -1145,6 +1185,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
        builder.startObject(indexMetaData.getIndex().getName());

        builder.field(KEY_VERSION, indexMetaData.getVersion());
+       builder.field(KEY_MAPPING_VERSION, indexMetaData.getMappingVersion());
        builder.field(KEY_ROUTING_NUM_SHARDS, indexMetaData.getRoutingNumShards());
        builder.field(KEY_STATE, indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH));

@@ -1218,6 +1259,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
        if (token != XContentParser.Token.START_OBJECT) {
            throw new IllegalArgumentException("expected object but got a " + token);
        }
+       boolean mappingVersion = false;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();

@@ -1316,6 +1358,9 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
                    builder.state(State.fromString(parser.text()));
                } else if (KEY_VERSION.equals(currentFieldName)) {
                    builder.version(parser.longValue());
+               } else if (KEY_MAPPING_VERSION.equals(currentFieldName)) {
+                   mappingVersion = true;
+                   builder.mappingVersion(parser.longValue());
                } else if (KEY_ROUTING_NUM_SHARDS.equals(currentFieldName)) {
                    builder.setRoutingNumShards(parser.intValue());
                } else {

@@ -1325,6 +1370,9 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
                throw new IllegalArgumentException("Unexpected token " + token);
            }
        }
+       if (Assertions.ENABLED && Version.indexCreated(builder.settings).onOrAfter(Version.V_6_5_0)) {
+           assert mappingVersion : "mapping version should be present for indices created on or after 6.5.0";
+       }
        return builder.build();
    }
}

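The serialization hunks above all follow the same wire-compatibility idiom: a field added in 6.5.0 is written and read only when the other side of the stream is also on 6.5.0 or later, and the reader substitutes a fixed default when talking to an older node. A condensed sketch of the pattern, assuming a hypothetical class with the same `version`/`mappingVersion` fields (`StreamInput`, `StreamOutput`, and `Version` are the Elasticsearch wire primitives used in the hunks):

    // Sketch only: both sides must gate on the same version constant,
    // otherwise the byte streams desynchronize.
    public void writeTo(StreamOutput out) throws IOException {
        out.writeLong(version);
        if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
            out.writeVLong(mappingVersion); // new field, sent only to 6.5.0+ peers
        }
    }

    public void readFrom(StreamInput in) throws IOException {
        version = in.readLong();
        if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
            mappingVersion = in.readVLong();
        } else {
            mappingVersion = 1; // default when the sender predates the field
        }
    }
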
@@ -287,6 +287,7 @@ public class MetaDataMappingService extends AbstractComponent {
        MetaData.Builder builder = MetaData.builder(metaData);
        boolean updated = false;
        for (IndexMetaData indexMetaData : updateList) {
+           boolean updatedMapping = false;
            // do the actual merge here on the master, and update the mapping source
            // we use the exact same indexService and metadata we used to validate above here to actually apply the update
            final Index index = indexMetaData.getIndex();

@@ -303,7 +304,7 @@ public class MetaDataMappingService extends AbstractComponent {
                    if (existingSource.equals(updatedSource)) {
                        // same source, no changes, ignore it
                    } else {
-                       updated = true;
+                       updatedMapping = true;
                        // use the merged mapping source
                        if (logger.isDebugEnabled()) {
                            logger.debug("{} update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource);

@@ -313,7 +314,7 @@ public class MetaDataMappingService extends AbstractComponent {

                    }
                } else {
-                   updated = true;
+                   updatedMapping = true;
                    if (logger.isDebugEnabled()) {
                        logger.debug("{} create_mapping [{}] with source [{}]", index, mappingType, updatedSource);
                    } else if (logger.isInfoEnabled()) {

@@ -329,7 +330,16 @@ public class MetaDataMappingService extends AbstractComponent {
                    indexMetaDataBuilder.putMapping(new MappingMetaData(mapper.mappingSource()));
                }
            }
+           if (updatedMapping) {
+               indexMetaDataBuilder.mappingVersion(1 + indexMetaDataBuilder.mappingVersion());
+           }
+           /*
+            * This implicitly increments the index metadata version and builds the index metadata. This means that we need to have
+            * already incremented the mapping version if necessary. Therefore, the mapping version increment must remain before this
+            * statement.
+            */
            builder.put(indexMetaDataBuilder);
+           updated |= updatedMapping;
        }
        if (updated) {
            return ClusterState.builder(currentState).metaData(builder).build();

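The comment in the last hunk encodes a real ordering constraint: `MetaData.Builder#put(IndexMetaData.Builder)` builds the `IndexMetaData` (implicitly bumping the index metadata version) at the moment it is called, so any builder mutation after that call is lost. Schematically, with the names from the hunk:

    if (updatedMapping) {
        // bump first: put(...) below snapshots the builder into an immutable IndexMetaData
        indexMetaDataBuilder.mappingVersion(1 + indexMetaDataBuilder.mappingVersion());
    }
    builder.put(indexMetaDataBuilder); // builds and freezes the metadata
    updated |= updatedMapping;         // republish cluster state only if something changed
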
@@ -522,8 +522,8 @@ public class IndexService extends AbstractIndexComponent implements IndicesClusterStateService.AllocatedIndex<IndexShard> {
    }

    @Override
-   public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
-       return mapperService().updateMapping(indexMetaData);
+   public boolean updateMapping(final IndexMetaData currentIndexMetaData, final IndexMetaData newIndexMetaData) throws IOException {
+       return mapperService().updateMapping(currentIndexMetaData, newIndexMetaData);
    }

    private class StoreCloseListener implements Store.OnClose {

@@ -802,7 +802,7 @@ public class InternalEngine extends Engine {
                location = translog.add(new Translog.Index(index, indexResult));
            } else if (indexResult.getSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) {
                // if we have document failure, record it as a no-op in the translog with the generated seq_no
-               location = translog.add(new Translog.NoOp(indexResult.getSeqNo(), index.primaryTerm(), indexResult.getFailure().getMessage()));
+               location = translog.add(new Translog.NoOp(indexResult.getSeqNo(), index.primaryTerm(), indexResult.getFailure().toString()));
            } else {
                location = null;
            }

@@ -1111,7 +1111,7 @@ public class InternalEngine extends Engine {
                location = translog.add(new Translog.Delete(delete, deleteResult));
            } else if (deleteResult.getSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) {
                location = translog.add(new Translog.NoOp(deleteResult.getSeqNo(),
-                   delete.primaryTerm(), deleteResult.getFailure().getMessage()));
+                   delete.primaryTerm(), deleteResult.getFailure().toString()));
            } else {
                location = null;
            }

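Switching the recorded no-op reason from `getMessage()` to `toString()` guards against exceptions created without a message, for which `getMessage()` returns null; `toString()` always includes at least the exception class name. For instance:

    IOException bare = new IOException();          // no message
    System.out.println(bare.getMessage());         // null
    System.out.println(bare.toString());           // java.io.IOException

    IOException described = new IOException("simulated indexing failure");
    System.out.println(described);                 // java.io.IOException: simulated indexing failure
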
@@ -25,6 +25,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.apache.lucene.index.Term;
+import org.elasticsearch.Assertions;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -192,8 +193,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
    /**
     * Update mapping by only merging the metadata that is different between received and stored entries
     */
-   public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
-       assert indexMetaData.getIndex().equals(index()) : "index mismatch: expected " + index() + " but was " + indexMetaData.getIndex();
+   public boolean updateMapping(final IndexMetaData currentIndexMetaData, final IndexMetaData newIndexMetaData) throws IOException {
+       assert newIndexMetaData.getIndex().equals(index()) : "index mismatch: expected " + index() + " but was " + newIndexMetaData.getIndex();
        // go over and add the relevant mappings (or update them)
        Set<String> existingMappers = new HashSet<>();
        if (mapper != null) {

@@ -205,7 +206,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
        final Map<String, DocumentMapper> updatedEntries;
        try {
            // only update entries if needed
-           updatedEntries = internalMerge(indexMetaData, MergeReason.MAPPING_RECOVERY, true);
+           updatedEntries = internalMerge(newIndexMetaData, MergeReason.MAPPING_RECOVERY, true);
        } catch (Exception e) {
            logger.warn(() -> new ParameterizedMessage("[{}] failed to apply mappings", index()), e);
            throw e;

@@ -213,9 +214,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable {

        boolean requireRefresh = false;

+       assertMappingVersion(currentIndexMetaData, newIndexMetaData, updatedEntries);
+
        for (DocumentMapper documentMapper : updatedEntries.values()) {
            String mappingType = documentMapper.type();
-           CompressedXContent incomingMappingSource = indexMetaData.mapping(mappingType).source();
+           CompressedXContent incomingMappingSource = newIndexMetaData.mapping(mappingType).source();

            String op = existingMappers.contains(mappingType) ? "updated" : "added";
            if (logger.isDebugEnabled() && incomingMappingSource.compressed().length < 512) {

@@ -240,6 +243,45 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
        return requireRefresh;
    }

+   private void assertMappingVersion(
+           final IndexMetaData currentIndexMetaData,
+           final IndexMetaData newIndexMetaData,
+           final Map<String, DocumentMapper> updatedEntries) {
+       if (Assertions.ENABLED
+               && currentIndexMetaData != null
+               && currentIndexMetaData.getCreationVersion().onOrAfter(Version.V_6_5_0)) {
+           if (currentIndexMetaData.getMappingVersion() == newIndexMetaData.getMappingVersion()) {
+               // if the mapping version is unchanged, then there should not be any updates and all mappings should be the same
+               assert updatedEntries.isEmpty() : updatedEntries;
+               for (final ObjectCursor<MappingMetaData> mapping : newIndexMetaData.getMappings().values()) {
+                   final CompressedXContent currentSource = currentIndexMetaData.mapping(mapping.value.type()).source();
+                   final CompressedXContent newSource = mapping.value.source();
+                   assert currentSource.equals(newSource) :
+                           "expected current mapping [" + currentSource + "] for type [" + mapping.value.type() + "] "
+                                   + "to be the same as new mapping [" + newSource + "]";
+               }
+           } else {
+               // if the mapping version is changed, it should increase, there should be updates, and the mapping should be different
+               final long currentMappingVersion = currentIndexMetaData.getMappingVersion();
+               final long newMappingVersion = newIndexMetaData.getMappingVersion();
+               assert currentMappingVersion < newMappingVersion :
+                       "expected current mapping version [" + currentMappingVersion + "] "
+                               + "to be less than new mapping version [" + newMappingVersion + "]";
+               assert updatedEntries.isEmpty() == false;
+               for (final DocumentMapper documentMapper : updatedEntries.values()) {
+                   final MappingMetaData currentMapping = currentIndexMetaData.mapping(documentMapper.type());
+                   if (currentMapping != null) {
+                       final CompressedXContent currentSource = currentMapping.source();
+                       final CompressedXContent newSource = documentMapper.mappingSource();
+                       assert currentSource.equals(newSource) == false :
+                               "expected current mapping [" + currentSource + "] for type [" + documentMapper.type() + "] " +
+                                       "to be different than new mapping";
+                   }
+               }
+           }
+       }
+   }
+
    public void merge(Map<String, Map<String, Object>> mappings, MergeReason reason) {
        Map<String, CompressedXContent> mappingSourcesCompressed = new LinkedHashMap<>(mappings.size());
        for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {

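`assertMappingVersion` is wrapped in an explicit `Assertions.ENABLED` check (rather than relying on `assert` alone) so the verification loops over all mappings are skipped entirely in production; bare `assert` statements would skip only the final comparisons, not the work of walking the mappings and extracting their sources. A schematic of the idiom, with hypothetical names:

    // Assumed shape: Assertions.ENABLED is a static final boolean derived from
    // whether -ea is in effect, letting the JIT drop the whole block in production.
    if (Assertions.ENABLED) {
        for (MappingMetaData mapping : mappings) { // potentially expensive walk
            assert invariantHoldsFor(mapping) : mapping;
        }
    }
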
@@ -456,7 +456,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent implements ClusterStateApplier {
        AllocatedIndex<? extends Shard> indexService = null;
        try {
            indexService = indicesService.createIndex(indexMetaData, buildInIndexListener);
-           if (indexService.updateMapping(indexMetaData) && sendRefreshMapping) {
+           if (indexService.updateMapping(null, indexMetaData) && sendRefreshMapping) {
                nodeMappingRefreshAction.nodeMappingRefresh(state.nodes().getMasterNode(),
                    new NodeMappingRefreshAction.NodeMappingRefreshRequest(indexMetaData.getIndex().getName(),
                        indexMetaData.getIndexUUID(), state.nodes().getLocalNodeId())

@@ -490,7 +490,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent implements ClusterStateApplier {
        if (ClusterChangedEvent.indexMetaDataChanged(currentIndexMetaData, newIndexMetaData)) {
            indexService.updateMetaData(newIndexMetaData);
            try {
-               if (indexService.updateMapping(newIndexMetaData) && sendRefreshMapping) {
+               if (indexService.updateMapping(currentIndexMetaData, newIndexMetaData) && sendRefreshMapping) {
                    nodeMappingRefreshAction.nodeMappingRefresh(state.nodes().getMasterNode(),
                        new NodeMappingRefreshAction.NodeMappingRefreshRequest(newIndexMetaData.getIndex().getName(),
                            newIndexMetaData.getIndexUUID(), state.nodes().getLocalNodeId())

@@ -778,7 +778,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent implements ClusterStateApplier {
        /**
         * Checks if index requires refresh from master.
         */
-       boolean updateMapping(IndexMetaData indexMetaData) throws IOException;
+       boolean updateMapping(IndexMetaData currentIndexMetaData, IndexMetaData newIndexMetaData) throws IOException;

        /**
         * Returns shard with given id.

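Passing `null` as the current index metadata in the index-creation path is deliberate: it tells `MapperService#updateMapping` (and its `assertMappingVersion` check, which null-checks its first argument) that there is no previous metadata to compare against.
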
@@ -292,6 +292,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateApplier {
                    // Index exists and it's closed - open it in metadata and start recovery
                    IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN);
                    indexMdBuilder.version(Math.max(snapshotIndexMetaData.getVersion(), currentIndexMetaData.getVersion() + 1));
+                   indexMdBuilder.mappingVersion(Math.max(snapshotIndexMetaData.getMappingVersion(), currentIndexMetaData.getMappingVersion() + 1));
                    if (!request.includeAliases()) {
                        // Remove all snapshot aliases
                        if (!snapshotIndexMetaData.getAliases().isEmpty()) {

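As with the index metadata version on the line above it, the `Math.max` ensures the restored mapping version is both at least the snapshot's value and strictly greater than the closed index's current value, so no node can observe the mapping version moving backwards after a restore.
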
@@ -102,7 +102,7 @@ public class SnapshotShardFailure extends ShardOperationFailedException {
        nodeId = in.readOptionalString();
        shardId = ShardId.readShardId(in);
        super.shardId = shardId.getId();
-       super.index = shardId.getIndexName();
+       index = shardId.getIndexName();
        reason = in.readString();
        status = RestStatus.readFrom(in);
    }

@@ -19,6 +19,7 @@

 package org.elasticsearch.action.search;

+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;

@@ -180,7 +181,7 @@ public class SearchResponseTests extends ESTestCase {
        int numFailures = randomIntBetween(1, 5);
        ShardSearchFailure[] failures = new ShardSearchFailure[numFailures];
        for (int i = 0; i < failures.length; i++) {
-           failures[i] = ShardSearchFailureTests.createTestItem();
+           failures[i] = ShardSearchFailureTests.createTestItem(IndexMetaData.INDEX_UUID_NA_VALUE);
        }
        SearchResponse response = createTestItem(failures);
        XContentType xcontentType = randomFrom(XContentType.values());

@@ -30,6 +30,7 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;

 import java.io.IOException;

@@ -38,7 +39,7 @@ import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;

 public class ShardSearchFailureTests extends ESTestCase {

-   public static ShardSearchFailure createTestItem() {
+   public static ShardSearchFailure createTestItem(String indexUuid) {
        String randomMessage = randomAlphaOfLengthBetween(3, 20);
        Exception ex = new ParsingException(0, 0, randomMessage , new IllegalArgumentException("some bad argument"));
        SearchShardTarget searchShardTarget = null;

@@ -47,7 +48,7 @@ public class ShardSearchFailureTests extends ESTestCase {
            String indexName = randomAlphaOfLengthBetween(5, 10);
            String clusterAlias = randomBoolean() ? randomAlphaOfLengthBetween(5, 10) : null;
            searchShardTarget = new SearchShardTarget(nodeId,
-               new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), randomInt()), clusterAlias, OriginalIndices.NONE);
+               new ShardId(new Index(indexName, indexUuid), randomInt()), clusterAlias, OriginalIndices.NONE);
        }
        return new ShardSearchFailure(ex, searchShardTarget);
    }

@@ -66,7 +67,7 @@ public class ShardSearchFailureTests extends ESTestCase {
    }

    private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
-       ShardSearchFailure response = createTestItem();
+       ShardSearchFailure response = createTestItem(IndexMetaData.INDEX_UUID_NA_VALUE);
        XContentType xContentType = randomFrom(XContentType.values());
        boolean humanReadable = randomBoolean();
        BytesReference originalBytes = toShuffledXContent(response, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);

@@ -134,4 +135,15 @@ public class ShardSearchFailureTests extends ESTestCase {
                + "}",
            xContent.utf8ToString());
    }
+
+   public void testSerialization() throws IOException {
+       ShardSearchFailure testItem = createTestItem(randomAlphaOfLength(12));
+       ShardSearchFailure deserializedInstance = copyStreamable(testItem, writableRegistry(),
+           ShardSearchFailure::new, VersionUtils.randomVersion(random()));
+       assertEquals(testItem.index(), deserializedInstance.index());
+       assertEquals(testItem.shard(), deserializedInstance.shard());
+       assertEquals(testItem.shardId(), deserializedInstance.shardId());
+       assertEquals(testItem.reason(), deserializedInstance.reason());
+       assertEquals(testItem.status(), deserializedInstance.status());
+   }
 }

@@ -84,4 +84,34 @@ public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {
        assertSame(result, result2);
    }
+
+   public void testMappingVersion() throws Exception {
+       final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+       final long previousVersion = indexService.getMetaData().getMappingVersion();
+       final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class);
+       final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+       final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type");
+       request.indices(new Index[] {indexService.index()});
+       request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}");
+       final ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result =
+           mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
+       assertThat(result.executionResults.size(), equalTo(1));
+       assertTrue(result.executionResults.values().iterator().next().isSuccess());
+       assertThat(result.resultingState.metaData().index("test").getMappingVersion(), equalTo(1 + previousVersion));
+   }
+
+   public void testMappingVersionUnchanged() throws Exception {
+       final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+       final long previousVersion = indexService.getMetaData().getMappingVersion();
+       final MetaDataMappingService mappingService = getInstanceFromNode(MetaDataMappingService.class);
+       final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+       final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type");
+       request.indices(new Index[] {indexService.index()});
+       request.source("{ \"properties\": {}}");
+       final ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result =
+           mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
+       assertThat(result.executionResults.size(), equalTo(1));
+       assertTrue(result.executionResults.values().iterator().next().isSuccess());
+       assertThat(result.resultingState.metaData().index("test").getMappingVersion(), equalTo(previousVersion));
+   }
+
 }

@@ -267,6 +267,7 @@ public class MetaDataStateFormatTests extends ESTestCase {
        IndexMetaData deserialized = indices.get(original.getIndex().getName());
        assertThat(deserialized, notNullValue());
        assertThat(deserialized.getVersion(), equalTo(original.getVersion()));
+       assertThat(deserialized.getMappingVersion(), equalTo(original.getMappingVersion()));
        assertThat(deserialized.getNumberOfReplicas(), equalTo(original.getNumberOfReplicas()));
        assertThat(deserialized.getNumberOfShards(), equalTo(original.getNumberOfShards()));
    }

@@ -22,6 +22,7 @@ import org.apache.lucene.index.IndexOptions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;

@@ -741,4 +742,13 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
        client().prepareIndex("test", "type", "1").setSource("foo", "abc").get();
        assertThat(index.mapperService().fullName("foo"), instanceOf(KeywordFieldMapper.KeywordFieldType.class));
    }
+
+   public void testMappingVersionAfterDynamicMappingUpdate() {
+       createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+       final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+       final long previousVersion = clusterService.state().metaData().index("test").getMappingVersion();
+       client().prepareIndex("test", "type", "1").setSource("field", "text").get();
+       assertThat(clusterService.state().metaData().index("test").getMappingVersion(), equalTo(1 + previousVersion));
+   }
+
 }

@@ -19,6 +19,8 @@

 package org.elasticsearch.index.mapper;

+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;

@@ -30,6 +32,7 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
+import org.hamcrest.Matchers;

 import java.io.IOException;
 import java.util.Collection;

@@ -188,4 +191,30 @@ public class UpdateMappingTests extends ESSingleNodeTestCase {
            () -> mapperService2.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE));
        assertThat(e.getMessage(), equalTo("mapper [foo] of different type, current_type [long], merged_type [ObjectMapper]"));
    }
+
+   public void testMappingVersion() {
+       createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
+       final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
+       {
+           final long previousVersion = clusterService.state().metaData().index("test").getMappingVersion();
+           final PutMappingRequest request = new PutMappingRequest();
+           request.indices("test");
+           request.type("type");
+           request.source("field", "type=text");
+           client().admin().indices().putMapping(request).actionGet();
+           assertThat(clusterService.state().metaData().index("test").getMappingVersion(), Matchers.equalTo(1 + previousVersion));
+       }
+
+       {
+           final long previousVersion = clusterService.state().metaData().index("test").getMappingVersion();
+           final PutMappingRequest request = new PutMappingRequest();
+           request.indices("test");
+           request.type("type");
+           request.source("field", "type=text");
+           client().admin().indices().putMapping(request).actionGet();
+           // the version should be unchanged after putting the same mapping again
+           assertThat(clusterService.state().metaData().index("test").getMappingVersion(), Matchers.equalTo(previousVersion));
+       }
+   }
+
 }

@@ -36,7 +36,6 @@ import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.engine.EngineConfig;
 import org.elasticsearch.index.engine.EngineFactory;
 import org.elasticsearch.index.engine.InternalEngine;
 import org.elasticsearch.index.engine.InternalEngineTests;

@@ -47,6 +46,7 @@ import org.elasticsearch.index.seqno.SequenceNumbers;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.IndexShardTests;
 import org.elasticsearch.index.store.Store;
+import org.elasticsearch.index.translog.SnapshotMatchers;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.indices.recovery.RecoveryTarget;
 import org.elasticsearch.threadpool.TestThreadPool;

@@ -54,6 +54,7 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.hamcrest.Matcher;

 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;

@@ -338,38 +339,73 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase {
     * for primary and replica shards
     */
    public void testDocumentFailureReplication() throws Exception {
-       final String failureMessage = "simulated document failure";
-       final ThrowingDocumentFailureEngineFactory throwingDocumentFailureEngineFactory =
-           new ThrowingDocumentFailureEngineFactory(failureMessage);
+       final IOException indexException = new IOException("simulated indexing failure");
+       final IOException deleteException = new IOException("simulated deleting failure");
+       final EngineFactory engineFactory = config -> InternalEngineTests.createInternalEngine((dir, iwc) ->
+           new IndexWriter(dir, iwc) {
+               final AtomicBoolean throwAfterIndexedOneDoc = new AtomicBoolean(); // need one document to trigger delete in IW.
+               @Override
+               public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
+                   if (throwAfterIndexedOneDoc.getAndSet(true)) {
+                       throw indexException;
+                   } else {
+                       return super.addDocument(doc);
+                   }
+               }
+               @Override
+               public long deleteDocuments(Term... terms) throws IOException {
+                   throw deleteException;
+               }
+           }, null, null, config);
        try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetaData(0)) {
            @Override
-           protected EngineFactory getEngineFactory(ShardRouting routing) {
-               return throwingDocumentFailureEngineFactory;
-           }}) {
+           protected EngineFactory getEngineFactory(ShardRouting routing) { return engineFactory; }}) {

-           // test only primary
+           // start with the primary only so two first failures are replicated to replicas via recovery from the translog of the primary.
            shards.startPrimary();
-           BulkItemResponse response = shards.index(
-               new IndexRequest(index.getName(), "type", "1")
-                   .source("{}", XContentType.JSON)
-           );
-           assertTrue(response.isFailed());
-           assertNoOpTranslogOperationForDocumentFailure(shards, 1, shards.getPrimary().getPendingPrimaryTerm(), failureMessage);
-           shards.assertAllEqual(0);
+           long primaryTerm = shards.getPrimary().getPendingPrimaryTerm();
+           List<Translog.Operation> expectedTranslogOps = new ArrayList<>();
+           BulkItemResponse indexResp = shards.index(new IndexRequest(index.getName(), "type", "1").source("{}", XContentType.JSON));
+           assertThat(indexResp.isFailed(), equalTo(false));
+           expectedTranslogOps.add(new Translog.Index("type", "1", 0, primaryTerm, 1, "{}".getBytes(StandardCharsets.UTF_8), null, -1));
+           try (Translog.Snapshot snapshot = getTranslog(shards.getPrimary()).newSnapshot()) {
+               assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps));
+           }
+
+           indexResp = shards.index(new IndexRequest(index.getName(), "type", "any").source("{}", XContentType.JSON));
+           assertThat(indexResp.getFailure().getCause(), equalTo(indexException));
+           expectedTranslogOps.add(new Translog.NoOp(1, primaryTerm, indexException.toString()));
+
+           BulkItemResponse deleteResp = shards.delete(new DeleteRequest(index.getName(), "type", "1"));
+           assertThat(deleteResp.getFailure().getCause(), equalTo(deleteException));
+           expectedTranslogOps.add(new Translog.NoOp(2, primaryTerm, deleteException.toString()));
+           shards.assertAllEqual(1);

            // add some replicas
            int nReplica = randomIntBetween(1, 3);
            for (int i = 0; i < nReplica; i++) {
                shards.addReplica();
            }
            shards.startReplicas(nReplica);
-           response = shards.index(
-               new IndexRequest(index.getName(), "type", "1")
-                   .source("{}", XContentType.JSON)
-           );
-           assertTrue(response.isFailed());
-           assertNoOpTranslogOperationForDocumentFailure(shards, 2, shards.getPrimary().getPendingPrimaryTerm(), failureMessage);
-           shards.assertAllEqual(0);
+           for (IndexShard shard : shards) {
+               try (Translog.Snapshot snapshot = getTranslog(shard).newSnapshot()) {
+                   assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps));
+               }
+           }
+           // unlike previous failures, these two failures replicated directly from the replication channel.
+           indexResp = shards.index(new IndexRequest(index.getName(), "type", "any").source("{}", XContentType.JSON));
+           assertThat(indexResp.getFailure().getCause(), equalTo(indexException));
+           expectedTranslogOps.add(new Translog.NoOp(3, primaryTerm, indexException.toString()));
+
+           deleteResp = shards.delete(new DeleteRequest(index.getName(), "type", "1"));
+           assertThat(deleteResp.getFailure().getCause(), equalTo(deleteException));
+           expectedTranslogOps.add(new Translog.NoOp(4, primaryTerm, deleteException.toString()));
+
+           for (IndexShard shard : shards) {
+               try (Translog.Snapshot snapshot = getTranslog(shard).newSnapshot()) {
+                   assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps));
+               }
+           }
+           shards.assertAllEqual(1);
        }
    }

@@ -541,47 +577,4 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase {
            shards.assertAllEqual(0);
        }
    }
-
-   /** Throws <code>documentFailure</code> on every indexing operation */
-   static class ThrowingDocumentFailureEngineFactory implements EngineFactory {
-       final String documentFailureMessage;
-
-       ThrowingDocumentFailureEngineFactory(String documentFailureMessage) {
-           this.documentFailureMessage = documentFailureMessage;
-       }
-
-       @Override
-       public Engine newReadWriteEngine(EngineConfig config) {
-           return InternalEngineTests.createInternalEngine((directory, writerConfig) ->
-               new IndexWriter(directory, writerConfig) {
-                   @Override
-                   public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
-                       assert documentFailureMessage != null;
-                       throw new IOException(documentFailureMessage);
-                   }
-               }, null, null, config);
-       }
-   }
-
-   private static void assertNoOpTranslogOperationForDocumentFailure(
-       Iterable<IndexShard> replicationGroup,
-       int expectedOperation,
-       long expectedPrimaryTerm,
-       String failureMessage) throws IOException {
-       for (IndexShard indexShard : replicationGroup) {
-           try(Translog.Snapshot snapshot = getTranslog(indexShard).newSnapshot()) {
-               assertThat(snapshot.totalOperations(), equalTo(expectedOperation));
-               long expectedSeqNo = 0L;
-               Translog.Operation op = snapshot.next();
-               do {
-                   assertThat(op.opType(), equalTo(Translog.Operation.Type.NO_OP));
-                   assertThat(op.seqNo(), equalTo(expectedSeqNo));
-                   assertThat(op.primaryTerm(), equalTo(expectedPrimaryTerm));
-                   assertThat(((Translog.NoOp) op).reason(), containsString(failureMessage));
-                   op = snapshot.next();
-                   expectedSeqNo++;
-               } while (op != null);
-           }
-       }
-   }
 }

@@ -273,7 +273,7 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestCase {
        }

        @Override
-       public boolean updateMapping(IndexMetaData indexMetaData) throws IOException {
+       public boolean updateMapping(final IndexMetaData currentIndexMetaData, final IndexMetaData newIndexMetaData) throws IOException {
            failRandomly();
            return false;
        }

@@ -37,6 +37,7 @@ import org.elasticsearch.action.resync.ResyncReplicationRequest;
 import org.elasticsearch.action.resync.ResyncReplicationResponse;
 import org.elasticsearch.action.resync.TransportResyncReplicationAction;
 import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.replication.ReplicatedWriteRequest;
 import org.elasticsearch.action.support.replication.ReplicationOperation;
 import org.elasticsearch.action.support.replication.ReplicationRequest;

@@ -193,14 +194,23 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase {
        }

        public BulkItemResponse index(IndexRequest indexRequest) throws Exception {
+           return executeWriteRequest(indexRequest, indexRequest.getRefreshPolicy());
+       }
+
+       public BulkItemResponse delete(DeleteRequest deleteRequest) throws Exception {
+           return executeWriteRequest(deleteRequest, deleteRequest.getRefreshPolicy());
+       }
+
+       private BulkItemResponse executeWriteRequest(
+           DocWriteRequest<?> writeRequest, WriteRequest.RefreshPolicy refreshPolicy) throws Exception {
            PlainActionFuture<BulkItemResponse> listener = new PlainActionFuture<>();
            final ActionListener<BulkShardResponse> wrapBulkListener = ActionListener.wrap(
-                   bulkShardResponse -> listener.onResponse(bulkShardResponse.getResponses()[0]),
-                   listener::onFailure);
+               bulkShardResponse -> listener.onResponse(bulkShardResponse.getResponses()[0]),
+               listener::onFailure);
            BulkItemRequest[] items = new BulkItemRequest[1];
-           items[0] = new BulkItemRequest(0, indexRequest);
-           BulkShardRequest request = new BulkShardRequest(shardId, indexRequest.getRefreshPolicy(), items);
-           new IndexingAction(request, wrapBulkListener, this).execute();
+           items[0] = new BulkItemRequest(0, writeRequest);
+           BulkShardRequest request = new BulkShardRequest(shardId, refreshPolicy, items);
+           new WriteReplicationAction(request, wrapBulkListener, this).execute();
            return listener.get();
        }

@@ -598,9 +608,9 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase {

    }

-   class IndexingAction extends ReplicationAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {
+   class WriteReplicationAction extends ReplicationAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {

-       IndexingAction(BulkShardRequest request, ActionListener<BulkShardResponse> listener, ReplicationGroup replicationGroup) {
+       WriteReplicationAction(BulkShardRequest request, ActionListener<BulkShardResponse> listener, ReplicationGroup replicationGroup) {
            super(request, listener, replicationGroup, "indexing");
        }

@@ -38,16 +38,19 @@ The following parameters can be specified in the body of a POST request and
 pertain to creating a token:

 `grant_type`::
-(string) The type of grant. Valid grant types are: `password` and `refresh_token`.
+(string) The type of grant. Supported grant types are: `password`,
+`client_credentials` and `refresh_token`.

 `password`::
 (string) The user's password. If you specify the `password` grant type, this
-parameter is required.
+parameter is required. This parameter is not valid with any other supported
+grant type.

 `refresh_token`::
 (string) If you specify the `refresh_token` grant type, this parameter is
 required. It contains the string that was returned when you created the token
-and enables you to extend its life.
+and enables you to extend its life. This parameter is not valid with any other
+supported grant type.

 `scope`::
 (string) The scope of the token. Currently tokens are only issued for a scope of

@@ -55,11 +58,48 @@ and enables you to extend its life.

 `username`::
 (string) The username that identifies the user. If you specify the `password`
-grant type, this parameter is required.
+grant type, this parameter is required. This parameter is not valid with any
+other supported grant type.

 ==== Examples

-The following example obtains a token for the `test_admin` user:
+The following example obtains a token using the `client_credentials` grant type,
+which simply creates a token as the authenticated user:
+
+[source,js]
+--------------------------------------------------
+POST /_xpack/security/oauth2/token
+{
+  "grant_type" : "client_credentials"
+}
+--------------------------------------------------
+// CONSOLE
+
+The following example output contains the access token, the amount of time (in
+seconds) that the token expires in, and the type:
+
+[source,js]
+--------------------------------------------------
+{
+  "access_token" : "dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
+  "type" : "Bearer",
+  "expires_in" : 1200
+}
+--------------------------------------------------
+// TESTRESPONSE[s/dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==/$body.access_token/]
+
+The token returned by this API can be used by sending a request with a
+`Authorization` header with a value having the prefix `Bearer ` followed
+by the value of the `access_token`.
+
+[source,shell]
+--------------------------------------------------
+curl -H "Authorization: Bearer dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==" http://localhost:9200/_cluster/health
+--------------------------------------------------
+// NOTCONSOLE
+
+The following example obtains a token for the `test_admin` user using the
+`password` grant type:

 [source,js]
 --------------------------------------------------

@@ -73,7 +113,7 @@ POST /_xpack/security/oauth2/token
 // CONSOLE

 The following example output contains the access token, the amount of time (in
-seconds) that the token expires in, and the type:
+seconds) that the token expires in, the type, and the refresh token:

 [source,js]
 --------------------------------------------------

@@ -87,19 +127,10 @@ seconds) that the token expires in, and the type:
 // TESTRESPONSE[s/dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==/$body.access_token/]
 // TESTRESPONSE[s/vLBPvmAB6KvwvJZr27cS/$body.refresh_token/]

-The token returned by this API can be used by sending a request with a
-`Authorization` header with a value having the prefix `Bearer ` followed
-by the value of the `access_token`.
-
-[source,shell]
---------------------------------------------------
-curl -H "Authorization: Bearer dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==" http://localhost:9200/_cluster/health
---------------------------------------------------
-// NOTCONSOLE
-
 [[security-api-refresh-token]]
-To extend the life of an existing token, you can call the API again with the
-refresh token within 24 hours of the token's creation. For example:
+To extend the life of an existing token obtained using the `password` grant type,
+you can call the API again with the refresh token within 24 hours of the token's
+creation. For example:

 [source,js]
 --------------------------------------------------

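For reference, the refresh call that the rewritten paragraph introduces takes the following shape (a sketch assembled from the parameters documented above; the actual snippet continues past the end of this hunk):

[source,js]
--------------------------------------------------
POST /_xpack/security/oauth2/token
{
  "grant_type" : "refresh_token",
  "refresh_token" : "vLBPvmAB6KvwvJZr27cS"
}
--------------------------------------------------
// NOTCONSOLE
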
@ -19,6 +19,10 @@ import org.elasticsearch.common.CharArrays;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.action.ValidateActions.addValidationError;
|
||||
|
||||
|
@ -29,6 +33,37 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
|
|||
*/
|
||||
public final class CreateTokenRequest extends ActionRequest {
|
||||
|
||||
public enum GrantType {
|
||||
PASSWORD("password"),
|
||||
REFRESH_TOKEN("refresh_token"),
|
||||
AUTHORIZATION_CODE("authorization_code"),
|
||||
CLIENT_CREDENTIALS("client_credentials");
|
||||
|
||||
private final String value;
|
||||
|
||||
GrantType(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public static GrantType fromString(String grantType) {
|
||||
if (grantType != null) {
|
||||
for (GrantType type : values()) {
|
||||
if (type.getValue().equals(grantType)) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static final Set<GrantType> SUPPORTED_GRANT_TYPES = Collections.unmodifiableSet(
|
||||
EnumSet.of(GrantType.PASSWORD, GrantType.REFRESH_TOKEN, GrantType.CLIENT_CREDENTIALS));
|
||||
|
||||
private String grantType;
|
||||
private String username;
|
||||
private SecureString password;
|
||||
|
@@ -49,33 +84,58 @@ public final class CreateTokenRequest extends ActionRequest {
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = null;
        if ("password".equals(grantType)) {
            if (Strings.isNullOrEmpty(username)) {
                validationException = addValidationError("username is missing", validationException);
            }
            if (password == null || password.getChars() == null || password.getChars().length == 0) {
                validationException = addValidationError("password is missing", validationException);
            }
            if (refreshToken != null) {
                validationException =
                    addValidationError("refresh_token is not supported with the password grant_type", validationException);
            }
        } else if ("refresh_token".equals(grantType)) {
            if (username != null) {
                validationException =
                    addValidationError("username is not supported with the refresh_token grant_type", validationException);
            }
            if (password != null) {
                validationException =
                    addValidationError("password is not supported with the refresh_token grant_type", validationException);
            }
            if (refreshToken == null) {
                validationException = addValidationError("refresh_token is missing", validationException);
        GrantType type = GrantType.fromString(grantType);
        if (type != null) {
            switch (type) {
                case PASSWORD:
                    if (Strings.isNullOrEmpty(username)) {
                        validationException = addValidationError("username is missing", validationException);
                    }
                    if (password == null || password.getChars() == null || password.getChars().length == 0) {
                        validationException = addValidationError("password is missing", validationException);
                    }
                    if (refreshToken != null) {
                        validationException =
                            addValidationError("refresh_token is not supported with the password grant_type", validationException);
                    }
                    break;
                case REFRESH_TOKEN:
                    if (username != null) {
                        validationException =
                            addValidationError("username is not supported with the refresh_token grant_type", validationException);
                    }
                    if (password != null) {
                        validationException =
                            addValidationError("password is not supported with the refresh_token grant_type", validationException);
                    }
                    if (refreshToken == null) {
                        validationException = addValidationError("refresh_token is missing", validationException);
                    }
                    break;
                case CLIENT_CREDENTIALS:
                    if (username != null) {
                        validationException =
                            addValidationError("username is not supported with the client_credentials grant_type", validationException);
                    }
                    if (password != null) {
                        validationException =
                            addValidationError("password is not supported with the client_credentials grant_type", validationException);
                    }
                    if (refreshToken != null) {
                        validationException = addValidationError("refresh_token is not supported with the client_credentials grant_type",
                            validationException);
                    }
                    break;
                default:
                    validationException = addValidationError("grant_type only supports the values: [" +
                        SUPPORTED_GRANT_TYPES.stream().map(GrantType::getValue).collect(Collectors.joining(", ")) + "]",
                        validationException);
            }
        } else {
            validationException = addValidationError("grant_type only supports the values: [password, refresh_token]", validationException);
            validationException = addValidationError("grant_type only supports the values: [" +
                SUPPORTED_GRANT_TYPES.stream().map(GrantType::getValue).collect(Collectors.joining(", ")) + "]",
                validationException);
        }

        return validationException;
    }
@@ -126,6 +186,11 @@ public final class CreateTokenRequest extends ActionRequest {
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        if (out.getVersion().before(Version.V_7_0_0_alpha1) && GrantType.CLIENT_CREDENTIALS.getValue().equals(grantType)) {
            throw new IllegalArgumentException("a request with the client_credentials grant_type cannot be sent to version [" +
                out.getVersion() + "]");
        }

        out.writeString(grantType);
        if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
            out.writeOptionalString(username);
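A hedged sketch of how the guard above surfaces at a call site: serializing a client_credentials request toward a pre-7.0 stream fails fast rather than emitting a grant type the old node cannot parse. This is a fragment, not part of the commit; imports are as in the diffed files, and the 6.4 constant only stands in for any pre-7.0 wire version:

CreateTokenRequest request = new CreateTokenRequest();
request.setGrantType("client_credentials");
try (BytesStreamOutput out = new BytesStreamOutput()) {
    out.setVersion(Version.V_6_4_0);  // simulate an older destination node
    request.writeTo(out);             // throws IllegalArgumentException per the guard above
} catch (IllegalArgumentException e) {
    // "a request with the client_credentials grant_type cannot be sent to version [6.4.0]"
}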
@@ -59,8 +59,14 @@ public final class CreateTokenResponse extends ActionResponse implements ToXCont
        out.writeString(tokenString);
        out.writeTimeValue(expiresIn);
        out.writeOptionalString(scope);
        if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
            out.writeString(refreshToken);
        if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { // TODO change to V_6_5_0 after backport
            out.writeOptionalString(refreshToken);
        } else if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
            if (refreshToken == null) {
                out.writeString("");
            } else {
                out.writeString(refreshToken);
            }
        }
    }
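The else-branch exists because pre-6.5 readers call readString() unconditionally, so a null refresh token has to degrade to an empty string on those wires instead of corrupting the stream. A hedged round-trip fragment illustrating that behavior (imports as in the diffed files; it mirrors testSerializationToPost62Pre65Version further down):

CreateTokenResponse response = new CreateTokenResponse("access-token", TimeValue.timeValueMinutes(20L), null, null);
try (BytesStreamOutput out = new BytesStreamOutput()) {
    out.setVersion(Version.V_6_4_0);          // a post-6.2, pre-6.5 destination
    response.writeTo(out);                    // the null refresh token is written as ""
    try (StreamInput in = out.bytes().streamInput()) {
        in.setVersion(Version.V_6_4_0);
        CreateTokenResponse read = new CreateTokenResponse();
        read.readFrom(in);
        assert "".equals(read.getRefreshToken());  // null degraded to empty string
    }
}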
@@ -70,7 +76,9 @@ public final class CreateTokenResponse extends ActionResponse implements ToXCont
        tokenString = in.readString();
        expiresIn = in.readTimeValue();
        scope = in.readOptionalString();
        if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
        if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { // TODO change to V_6_5_0 after backport
            refreshToken = in.readOptionalString();
        } else if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
            refreshToken = in.readString();
        }
    }
@@ -90,4 +98,20 @@ public final class CreateTokenResponse extends ActionResponse implements ToXCont
        }
        return builder.endObject();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        CreateTokenResponse that = (CreateTokenResponse) o;
        return Objects.equals(tokenString, that.tokenString) &&
            Objects.equals(expiresIn, that.expiresIn) &&
            Objects.equals(scope, that.scope) &&
            Objects.equals(refreshToken, that.refreshToken);
    }

    @Override
    public int hashCode() {
        return Objects.hash(tokenString, expiresIn, scope, refreshToken);
    }
}
@@ -3,7 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.security.action.token;
package org.elasticsearch.xpack.core.security.action.token;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.settings.SecureString;

@@ -20,7 +20,7 @@ public class CreateTokenRequestTests extends ESTestCase {
        ActionRequestValidationException ve = request.validate();
        assertNotNull(ve);
        assertEquals(1, ve.validationErrors().size());
        assertThat(ve.validationErrors().get(0), containsString("[password, refresh_token]"));
        assertThat(ve.validationErrors().get(0), containsString("[password, refresh_token, client_credentials]"));
        assertThat(ve.validationErrors().get(0), containsString("grant_type"));

        request.setGrantType("password");

@@ -72,5 +72,19 @@ public class CreateTokenRequestTests extends ESTestCase {
        assertNotNull(ve);
        assertEquals(1, ve.validationErrors().size());
        assertThat(ve.validationErrors(), hasItem("refresh_token is missing"));

        request.setGrantType("client_credentials");
        ve = request.validate();
        assertNull(ve);

        request.setUsername(randomAlphaOfLengthBetween(1, 32));
        request.setPassword(new SecureString(randomAlphaOfLengthBetween(1, 32).toCharArray()));
        request.setRefreshToken(randomAlphaOfLengthBetween(1, 32));
        ve = request.validate();
        assertNotNull(ve);
        assertEquals(3, ve.validationErrors().size());
        assertThat(ve.validationErrors(), hasItem(containsString("username is not supported")));
        assertThat(ve.validationErrors(), hasItem(containsString("password is not supported")));
        assertThat(ve.validationErrors(), hasItem(containsString("refresh_token is not supported")));
    }
}
@@ -0,0 +1,92 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.core.security.action.token;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

public class CreateTokenResponseTests extends ESTestCase {

    public void testSerialization() throws Exception {
        CreateTokenResponse response = new CreateTokenResponse(randomAlphaOfLengthBetween(1, 10), TimeValue.timeValueMinutes(20L),
            randomBoolean() ? null : "FULL", randomAlphaOfLengthBetween(1, 10));
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            response.writeTo(output);
            try (StreamInput input = output.bytes().streamInput()) {
                CreateTokenResponse serialized = new CreateTokenResponse();
                serialized.readFrom(input);
                assertEquals(response, serialized);
            }
        }

        response = new CreateTokenResponse(randomAlphaOfLengthBetween(1, 10), TimeValue.timeValueMinutes(20L),
            randomBoolean() ? null : "FULL", null);
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            response.writeTo(output);
            try (StreamInput input = output.bytes().streamInput()) {
                CreateTokenResponse serialized = new CreateTokenResponse();
                serialized.readFrom(input);
                assertEquals(response, serialized);
            }
        }
    }

    public void testSerializationToPre62Version() throws Exception {
        CreateTokenResponse response = new CreateTokenResponse(randomAlphaOfLengthBetween(1, 10), TimeValue.timeValueMinutes(20L),
            randomBoolean() ? null : "FULL", randomBoolean() ? null : randomAlphaOfLengthBetween(1, 10));
        final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_1_4);
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            output.setVersion(version);
            response.writeTo(output);
            try (StreamInput input = output.bytes().streamInput()) {
                input.setVersion(version);
                CreateTokenResponse serialized = new CreateTokenResponse();
                serialized.readFrom(input);
                assertNull(serialized.getRefreshToken());
                assertEquals(response.getTokenString(), serialized.getTokenString());
                assertEquals(response.getExpiresIn(), serialized.getExpiresIn());
                assertEquals(response.getScope(), serialized.getScope());
            }
        }
    }

    public void testSerializationToPost62Pre65Version() throws Exception {
        CreateTokenResponse response = new CreateTokenResponse(randomAlphaOfLengthBetween(1, 10), TimeValue.timeValueMinutes(20L),
            randomBoolean() ? null : "FULL", randomAlphaOfLengthBetween(1, 10));
        final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_2_0, Version.V_6_4_0);
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            output.setVersion(version);
            response.writeTo(output);
            try (StreamInput input = output.bytes().streamInput()) {
                input.setVersion(version);
                CreateTokenResponse serialized = new CreateTokenResponse();
                serialized.readFrom(input);
                assertEquals(response, serialized);
            }
        }

        // no refresh token
        response = new CreateTokenResponse(randomAlphaOfLengthBetween(1, 10), TimeValue.timeValueMinutes(20L),
            randomBoolean() ? null : "FULL", null);
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            output.setVersion(version);
            response.writeTo(output);
            try (StreamInput input = output.bytes().streamInput()) {
                input.setVersion(version);
                CreateTokenResponse serialized = new CreateTokenResponse();
                serialized.readFrom(input);
                assertEquals("", serialized.getRefreshToken());
                assertEquals(response.getTokenString(), serialized.getTokenString());
                assertEquals(response.getExpiresIn(), serialized.getExpiresIn());
                assertEquals(response.getScope(), serialized.getScope());
            }
        }
    }
}
@@ -61,7 +61,7 @@ public final class TransportSamlAuthenticateAction extends HandledTransportActio
            final TimeValue expiresIn = tokenService.getExpirationDelay();
            listener.onResponse(
                new SamlAuthenticateResponse(authentication.getUser().principal(), tokenString, tuple.v2(), expiresIn));
        }, listener::onFailure), tokenMeta);
        }, listener::onFailure), tokenMeta, true);
    }, e -> {
        logger.debug(() -> new ParameterizedMessage("SamlToken [{}] could not be authenticated", saml), e);
        listener.onFailure(e);
@@ -22,6 +22,7 @@ import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authc.TokenService;
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;

import java.io.IOException;
import java.util.Collections;

/**

@@ -48,29 +49,52 @@ public final class TransportCreateTokenAction extends HandledTransportAction<Cre

    @Override
    protected void doExecute(Task task, CreateTokenRequest request, ActionListener<CreateTokenResponse> listener) {
        CreateTokenRequest.GrantType type = CreateTokenRequest.GrantType.fromString(request.getGrantType());
        assert type != null : "type should have been validated in the action";
        switch (type) {
            case PASSWORD:
                authenticateAndCreateToken(request, listener);
                break;
            case CLIENT_CREDENTIALS:
                Authentication authentication = Authentication.getAuthentication(threadPool.getThreadContext());
                createToken(request, authentication, authentication, false, listener);
                break;
            default:
                listener.onFailure(new IllegalStateException("grant_type [" + request.getGrantType() +
                    "] is not supported by the create token action"));
                break;
        }
    }

    private void authenticateAndCreateToken(CreateTokenRequest request, ActionListener<CreateTokenResponse> listener) {
        Authentication originatingAuthentication = Authentication.getAuthentication(threadPool.getThreadContext());
        try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) {
            final UsernamePasswordToken authToken = new UsernamePasswordToken(request.getUsername(), request.getPassword());
            authenticationService.authenticate(CreateTokenAction.NAME, request, authToken,
                ActionListener.wrap(authentication -> {
                    request.getPassword().close();
                    tokenService.createUserToken(authentication, originatingAuthentication, ActionListener.wrap(tuple -> {
                        final String tokenStr = tokenService.getUserTokenString(tuple.v1());
                        final String scope = getResponseScopeValue(request.getScope());
                ActionListener.wrap(authentication -> {
                    request.getPassword().close();
                    createToken(request, authentication, originatingAuthentication, true, listener);
                }, e -> {
                    // clear the request password
                    request.getPassword().close();
                    listener.onFailure(e);
                }));
        }
    }

                        final CreateTokenResponse response =
                            new CreateTokenResponse(tokenStr, tokenService.getExpirationDelay(), scope, tuple.v2());
                        listener.onResponse(response);
                    }, e -> {
                        // clear the request password
                        request.getPassword().close();
                        listener.onFailure(e);
                    }), Collections.emptyMap());
                }, e -> {
                    // clear the request password
                    request.getPassword().close();
                    listener.onFailure(e);
                }));
    private void createToken(CreateTokenRequest request, Authentication authentication, Authentication originatingAuth,
                             boolean includeRefreshToken, ActionListener<CreateTokenResponse> listener) {
        try {
            tokenService.createUserToken(authentication, originatingAuth, ActionListener.wrap(tuple -> {
                final String tokenStr = tokenService.getUserTokenString(tuple.v1());
                final String scope = getResponseScopeValue(request.getScope());

                final CreateTokenResponse response =
                    new CreateTokenResponse(tokenStr, tokenService.getExpirationDelay(), scope, tuple.v2());
                listener.onResponse(response);
            }, listener::onFailure), Collections.emptyMap(), includeRefreshToken);
        } catch (IOException e) {
            listener.onFailure(e);
        }
    }
@@ -212,7 +212,8 @@ public final class TokenService extends AbstractComponent {
     * The created token will be stored in the security index.
     */
    public void createUserToken(Authentication authentication, Authentication originatingClientAuth,
                                ActionListener<Tuple<UserToken, String>> listener, Map<String, Object> metadata) throws IOException {
                                ActionListener<Tuple<UserToken, String>> listener, Map<String, Object> metadata,
                                boolean includeRefreshToken) throws IOException {
        ensureEnabled();
        if (authentication == null) {
            listener.onFailure(new IllegalArgumentException("authentication must be provided"));

@@ -226,13 +227,14 @@
            new Authentication(authentication.getUser(), authentication.getAuthenticatedBy(), authentication.getLookedUpBy(),
                version);
        final UserToken userToken = new UserToken(version, matchingVersionAuth, expiration, metadata);
        final String refreshToken = UUIDs.randomBase64UUID();
        final String refreshToken = includeRefreshToken ? UUIDs.randomBase64UUID() : null;

        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            builder.startObject();
            builder.field("doc_type", "token");
            builder.field("creation_time", created.toEpochMilli());
            builder.startObject("refresh_token")
            if (includeRefreshToken) {
                builder.startObject("refresh_token")
                    .field("token", refreshToken)
                    .field("invalidated", false)
                    .field("refreshed", false)

@@ -242,6 +244,7 @@
                    .field("realm", originatingClientAuth.getAuthenticatedBy().getName())
                    .endObject()
                    .endObject();
            }
            builder.startObject("access_token")
                .field("invalidated", false)
                .field("user_token", userToken)

@@ -734,7 +737,7 @@
            .request();
        executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest,
            ActionListener.<UpdateResponse>wrap(
                updateResponse -> createUserToken(authentication, userAuth, listener, metadata),
                updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true),
                e -> {
                    Throwable cause = ExceptionsHelper.unwrapCause(e);
                    if (cause instanceof VersionConflictEngineException ||
@@ -316,7 +316,7 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase {
            new RealmRef("native", NativeRealmSettings.TYPE, "node01"), null);
        final Map<String, Object> metadata = samlRealm.createTokenMetadata(nameId, session);
        final PlainActionFuture<Tuple<UserToken, String>> future = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, future, metadata);
        tokenService.createUserToken(authentication, authentication, future, metadata, true);
        return future.actionGet();
    }

@@ -222,7 +222,7 @@ public class TransportSamlLogoutActionTests extends SamlTestCase {
            new SamlNameId(NameID.TRANSIENT, nameId, null, null, null), session);

        final PlainActionFuture<Tuple<UserToken, String>> future = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, future, tokenMetaData);
        tokenService.createUserToken(authentication, authentication, future, tokenMetaData, true);
        final UserToken userToken = future.actionGet().v1();
        mockGetTokenFromId(userToken, client);
        final String tokenString = tokenService.getUserTokenString(userToken);
@@ -0,0 +1,195 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.security.action.token;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetAction;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.protocol.xpack.security.User;
import org.elasticsearch.test.ClusterServiceUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction;
import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest;
import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.authc.AuthenticationService;
import org.elasticsearch.xpack.security.authc.TokenService;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
import org.junit.After;
import org.junit.Before;

import java.time.Clock;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class TransportCreateTokenActionTests extends ESTestCase {

    private static final Settings SETTINGS = Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "TokenServiceTests")
        .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true).build();

    private ThreadPool threadPool;
    private Client client;
    private SecurityIndexManager securityIndex;
    private ClusterService clusterService;
    private AtomicReference<IndexRequest> idxReqReference;
    private AuthenticationService authenticationService;

    @Before
    public void setupClient() {
        threadPool = new TestThreadPool(getTestName());
        client = mock(Client.class);
        idxReqReference = new AtomicReference<>();
        authenticationService = mock(AuthenticationService.class);
        when(client.threadPool()).thenReturn(threadPool);
        when(client.settings()).thenReturn(SETTINGS);
        doAnswer(invocationOnMock -> {
            GetRequestBuilder builder = new GetRequestBuilder(client, GetAction.INSTANCE);
            builder.setIndex((String) invocationOnMock.getArguments()[0])
                .setType((String) invocationOnMock.getArguments()[1])
                .setId((String) invocationOnMock.getArguments()[2]);
            return builder;
        }).when(client).prepareGet(anyString(), anyString(), anyString());
        when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE));
        doAnswer(invocationOnMock -> {
            ActionListener<MultiGetResponse> listener = (ActionListener<MultiGetResponse>) invocationOnMock.getArguments()[1];
            MultiGetResponse response = mock(MultiGetResponse.class);
            MultiGetItemResponse[] responses = new MultiGetItemResponse[2];
            when(response.getResponses()).thenReturn(responses);

            GetResponse oldGetResponse = mock(GetResponse.class);
            when(oldGetResponse.isExists()).thenReturn(false);
            responses[0] = new MultiGetItemResponse(oldGetResponse, null);

            GetResponse getResponse = mock(GetResponse.class);
            responses[1] = new MultiGetItemResponse(getResponse, null);
            when(getResponse.isExists()).thenReturn(false);
            listener.onResponse(response);
            return Void.TYPE;
        }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class));
        when(client.prepareIndex(any(String.class), any(String.class), any(String.class)))
            .thenReturn(new IndexRequestBuilder(client, IndexAction.INSTANCE));
        when(client.prepareUpdate(any(String.class), any(String.class), any(String.class)))
            .thenReturn(new UpdateRequestBuilder(client, UpdateAction.INSTANCE));
        doAnswer(invocationOnMock -> {
            idxReqReference.set((IndexRequest) invocationOnMock.getArguments()[1]);
            ActionListener<IndexResponse> responseActionListener = (ActionListener<IndexResponse>) invocationOnMock.getArguments()[2];
            responseActionListener.onResponse(new IndexResponse());
            return null;
        }).when(client).execute(eq(IndexAction.INSTANCE), any(IndexRequest.class), any(ActionListener.class));

        // setup lifecycle service
        securityIndex = mock(SecurityIndexManager.class);
        doAnswer(invocationOnMock -> {
            Runnable runnable = (Runnable) invocationOnMock.getArguments()[1];
            runnable.run();
            return null;
        }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class));

        doAnswer(invocationOnMock -> {
            UsernamePasswordToken token = (UsernamePasswordToken) invocationOnMock.getArguments()[2];
            User user = new User(token.principal());
            Authentication authentication = new Authentication(user, new Authentication.RealmRef("fake", "mock", "n1"), null);
            authentication.writeToContext(threadPool.getThreadContext());
            ActionListener<Authentication> authListener = (ActionListener<Authentication>) invocationOnMock.getArguments()[3];
            authListener.onResponse(authentication);
            return Void.TYPE;
        }).when(authenticationService).authenticate(eq(CreateTokenAction.NAME), any(CreateTokenRequest.class),
            any(UsernamePasswordToken.class), any(ActionListener.class));

        this.clusterService = ClusterServiceUtils.createClusterService(threadPool);
    }

    @After
    public void stopThreadPool() throws Exception {
        if (threadPool != null) {
            terminate(threadPool);
        }
    }

    public void testClientCredentialsCreatesWithoutRefreshToken() throws Exception {
        final TokenService tokenService = new TokenService(SETTINGS, Clock.systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe"), new Authentication.RealmRef("realm", "type", "node"), null);
        authentication.writeToContext(threadPool.getThreadContext());

        final TransportCreateTokenAction action = new TransportCreateTokenAction(SETTINGS, threadPool,
            mock(TransportService.class), new ActionFilters(Collections.emptySet()), tokenService,
            authenticationService);
        final CreateTokenRequest createTokenRequest = new CreateTokenRequest();
        createTokenRequest.setGrantType("client_credentials");

        PlainActionFuture<CreateTokenResponse> tokenResponseFuture = new PlainActionFuture<>();
        action.doExecute(null, createTokenRequest, tokenResponseFuture);
        CreateTokenResponse createTokenResponse = tokenResponseFuture.get();
        assertNull(createTokenResponse.getRefreshToken());
        assertNotNull(createTokenResponse.getTokenString());

        assertNotNull(idxReqReference.get());
        Map<String, Object> sourceMap = idxReqReference.get().sourceAsMap();
        assertNotNull(sourceMap);
        assertNotNull(sourceMap.get("access_token"));
        assertNull(sourceMap.get("refresh_token"));
    }

    public void testPasswordGrantTypeCreatesWithRefreshToken() throws Exception {
        final TokenService tokenService = new TokenService(SETTINGS, Clock.systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe"), new Authentication.RealmRef("realm", "type", "node"), null);
        authentication.writeToContext(threadPool.getThreadContext());

        final TransportCreateTokenAction action = new TransportCreateTokenAction(SETTINGS, threadPool,
            mock(TransportService.class), new ActionFilters(Collections.emptySet()), tokenService,
            authenticationService);
        final CreateTokenRequest createTokenRequest = new CreateTokenRequest();
        createTokenRequest.setGrantType("password");
        createTokenRequest.setUsername("user");
        createTokenRequest.setPassword(new SecureString("password".toCharArray()));

        PlainActionFuture<CreateTokenResponse> tokenResponseFuture = new PlainActionFuture<>();
        action.doExecute(null, createTokenRequest, tokenResponseFuture);
        CreateTokenResponse createTokenResponse = tokenResponseFuture.get();
        assertNotNull(createTokenResponse.getRefreshToken());
        assertNotNull(createTokenResponse.getTokenString());

        assertNotNull(idxReqReference.get());
        Map<String, Object> sourceMap = idxReqReference.get().sourceAsMap();
        assertNotNull(sourceMap);
        assertNotNull(sourceMap.get("access_token"));
        assertNotNull(sourceMap.get("refresh_token"));
    }
}
@@ -896,7 +896,7 @@ public class AuthenticationServiceTests extends ESTestCase {
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        try (ThreadContext.StoredContext ctx = threadContext.stashContext()) {
            Authentication originatingAuth = new Authentication(new User("creator"), new RealmRef("test", "test", "test"), null);
            tokenService.createUserToken(expected, originatingAuth, tokenFuture, Collections.emptyMap());
            tokenService.createUserToken(expected, originatingAuth, tokenFuture, Collections.emptyMap(), true);
        }
        String token = tokenService.getUserTokenString(tokenFuture.get().v1());
        mockGetTokenFromId(tokenFuture.get().v1(), client);

@@ -975,7 +975,7 @@
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        try (ThreadContext.StoredContext ctx = threadContext.stashContext()) {
            Authentication originatingAuth = new Authentication(new User("creator"), new RealmRef("test", "test", "test"), null);
            tokenService.createUserToken(expected, originatingAuth, tokenFuture, Collections.emptyMap());
            tokenService.createUserToken(expected, originatingAuth, tokenFuture, Collections.emptyMap(), true);
        }
        String token = tokenService.getUserTokenString(tokenFuture.get().v1());
        mockGetTokenFromId(tokenFuture.get().v1(), client);
@@ -341,6 +341,39 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase {
        assertEquals(SecuritySettingsSource.TEST_USER_NAME, response.user().principal());
    }

    public void testClientCredentialsGrant() throws Exception {
        Client client = client().filterWithHeader(Collections.singletonMap("Authorization",
            UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
                SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
        SecurityClient securityClient = new SecurityClient(client);
        CreateTokenResponse createTokenResponse = securityClient.prepareCreateToken()
            .setGrantType("client_credentials")
            .get();
        assertNull(createTokenResponse.getRefreshToken());

        AuthenticateRequest request = new AuthenticateRequest();
        request.username(SecuritySettingsSource.TEST_SUPERUSER);
        PlainActionFuture<AuthenticateResponse> authFuture = new PlainActionFuture<>();
        client.filterWithHeader(Collections.singletonMap("Authorization", "Bearer " + createTokenResponse.getTokenString()))
            .execute(AuthenticateAction.INSTANCE, request, authFuture);
        AuthenticateResponse response = authFuture.get();
        assertEquals(SecuritySettingsSource.TEST_SUPERUSER, response.user().principal());

        // invalidate
        PlainActionFuture<InvalidateTokenResponse> invalidateResponseFuture = new PlainActionFuture<>();
        InvalidateTokenRequest invalidateTokenRequest =
            new InvalidateTokenRequest(createTokenResponse.getTokenString(), InvalidateTokenRequest.Type.ACCESS_TOKEN);
        securityClient.invalidateToken(invalidateTokenRequest, invalidateResponseFuture);
        assertTrue(invalidateResponseFuture.get().isCreated());

        ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> {
            PlainActionFuture<AuthenticateResponse> responseFuture = new PlainActionFuture<>();
            client.filterWithHeader(Collections.singletonMap("Authorization", "Bearer " + createTokenResponse.getTokenString()))
                .execute(AuthenticateAction.INSTANCE, request, responseFuture);
            responseFuture.actionGet();
        });
    }

    @Before
    public void waitForSecurityIndexWritable() throws Exception {
        assertSecurityIndexActive();
@@ -157,7 +157,7 @@ public class TokenServiceTests extends ESTestCase {
        TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        assertNotNull(token);
        mockGetTokenFromId(token);

@@ -203,7 +203,7 @@ public class TokenServiceTests extends ESTestCase {
        TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        assertNotNull(token);
        mockGetTokenFromId(token);

@@ -227,7 +227,7 @@ public class TokenServiceTests extends ESTestCase {
        }

        PlainActionFuture<Tuple<UserToken, String>> newTokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, newTokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, newTokenFuture, Collections.emptyMap(), true);
        final UserToken newToken = newTokenFuture.get().v1();
        assertNotNull(newToken);
        assertNotEquals(tokenService.getUserTokenString(newToken), tokenService.getUserTokenString(token));

@@ -262,7 +262,7 @@ public class TokenServiceTests extends ESTestCase {
        otherTokenService.refreshMetaData(tokenService.getTokenMetaData());
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        assertNotNull(token);
        mockGetTokenFromId(token);

@@ -292,7 +292,7 @@ public class TokenServiceTests extends ESTestCase {
        TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        assertNotNull(token);
        mockGetTokenFromId(token);

@@ -322,7 +322,7 @@ public class TokenServiceTests extends ESTestCase {
        }

        PlainActionFuture<Tuple<UserToken, String>> newTokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, newTokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, newTokenFuture, Collections.emptyMap(), true);
        final UserToken newToken = newTokenFuture.get().v1();
        assertNotNull(newToken);
        assertNotEquals(tokenService.getUserTokenString(newToken), tokenService.getUserTokenString(token));

@@ -353,7 +353,7 @@ public class TokenServiceTests extends ESTestCase {
        TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        assertNotNull(token);
        mockGetTokenFromId(token);

@@ -383,7 +383,7 @@ public class TokenServiceTests extends ESTestCase {
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);

        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        UserToken token = tokenFuture.get().v1();
        assertThat(tokenService.getUserTokenString(token), notNullValue());

@@ -397,7 +397,7 @@ public class TokenServiceTests extends ESTestCase {
            new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        assertNotNull(token);
        doAnswer(invocationOnMock -> {

@@ -451,7 +451,7 @@ public class TokenServiceTests extends ESTestCase {
        TokenService tokenService = new TokenService(tokenServiceEnabledSettings, clock, client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        mockGetTokenFromId(token);

@@ -501,7 +501,8 @@ public class TokenServiceTests extends ESTestCase {
                .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), false)
                .build(),
            Clock.systemUTC(), client, securityIndex, clusterService);
        IllegalStateException e = expectThrows(IllegalStateException.class, () -> tokenService.createUserToken(null, null, null, null));
        IllegalStateException e = expectThrows(IllegalStateException.class,
            () -> tokenService.createUserToken(null, null, null, null, true));
        assertEquals("tokens are not enabled", e.getMessage());

        PlainActionFuture<UserToken> future = new PlainActionFuture<>();

@@ -559,7 +560,7 @@ public class TokenServiceTests extends ESTestCase {
            new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService);
        Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null);
        PlainActionFuture<Tuple<UserToken, String>> tokenFuture = new PlainActionFuture<>();
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap());
        tokenService.createUserToken(authentication, authentication, tokenFuture, Collections.emptyMap(), true);
        final UserToken token = tokenFuture.get().v1();
        assertNotNull(token);
        mockGetTokenFromId(token);
@@ -158,6 +158,7 @@ subprojects {
            } else {
                String systemKeyFile = version.before('6.3.0') ? 'x-pack/system_key' : 'system_key'
                extraConfigFile systemKeyFile, "${mainProject.projectDir}/src/test/resources/system_key"
                keystoreSetting 'xpack.security.authc.token.passphrase', 'token passphrase'
            }
            setting 'xpack.watcher.encrypt_sensitive_data', 'true'
        }

@@ -199,6 +200,9 @@
                setting 'xpack.watcher.encrypt_sensitive_data', 'true'
                keystoreFile 'xpack.watcher.encryption_key', "${mainProject.projectDir}/src/test/resources/system_key"
            }
            if (version.before('6.0.0')) {
                keystoreSetting 'xpack.security.authc.token.passphrase', 'token passphrase'
            }
        }
    }