Add a tool to migrate users/roles from file to native realm

This adds the `bin/shield/migrate` tool that allows migrating users and
roles from the file-based realm to the native (API-based) realm.

It looks like this:

```
λ bin/shield/migrate native -U http://localhost:9200 -u test_user -p changeme -n lee,foo -r role1,role2,role3,role4,foo
starting migration of users and roles...
importing users from [/home/hinmanm/scratch/elasticsearch-2.4.0-SNAPSHOT/config/shield/users]...
found existing users: [test_user, joe3, joe2]
migrating user [lee]
{"user":{"created":true}}
no user [foo] found, skipping
importing roles from [/home/hinmanm/scratch/elasticsearch-2.4.0-SNAPSHOT/config/shield/roles.yml]...
found existing roles: [marvel_user, role_query_fields, admin_role, role3, admin, remote_marvel_agent, power_user, role_new_format_name_array, role_run_as, logstash, role_fields, role_run_as1, role_new_format, kibana4_server, user, transport_client, role1.ab, role_query]
migrating role [role1]
{"role":{"created":true}}
migrating role [role2]
{"role":{"created":true}}
role [role3] already exists, skipping
migrating role [role4]
failed to migrate role [role4] with body: {"indices":[{"names":["idx2"]},{"names":["idx2"]},{"names":["idx1"]}]}
java.io.IOException: {"error":{"root_cause":[{"type":"parse_exception","reason":"failed to parse indices privileges for role [role4]. missing required [privileges] field"}],"type":"parse_exception","reason":"failed to parse indices privileges for role [role4]. missing required [privileges] field"},"status":400}
  at org.elasticsearch.shield.authc.esusers.tool.ESNativeRealmMigrateTool$MigrateUserOrRoles.postURL(ESNativeRealmMigrateTool.java:206)
  at org.elasticsearch.shield.authc.esusers.tool.ESNativeRealmMigrateTool$MigrateUserOrRoles.importRoles(ESNativeRealmMigrateTool.java:389)
  at org.elasticsearch.shield.authc.esusers.tool.ESNativeRealmMigrateTool$MigrateUserOrRoles.execute(ESNativeRealmMigrateTool.java:171)
  at org.elasticsearch.common.cli.CliTool.execute(CliTool.java:153)
  at org.elasticsearch.shield.authc.esusers.tool.ESNativeRealmMigrateTool.main(ESNativeRealmMigrateTool.java:91)
Caused by: java.io.IOException: Server returned HTTP response code: 400 for URL: http://localhost:9200/_shield/role/role4
  at sun.net.www.protocol.http.HttpURLConnection.getInputStream0(HttpURLConnection.java:1840)
  at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1441)
  at org.elasticsearch.shield.authc.esusers.tool.ESNativeRealmMigrateTool$MigrateUserOrRoles.postURL(ESNativeRealmMigrateTool.java:192)
  ... 4 more

no role [foo] found, skipping
users and roles imported.
```
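For context, the tool consumes the standard file-realm files under the Shield config directory shown in the log above (`users`, `users_roles`, and `roles.yml`). A rough sketch of those inputs, with placeholder names and hashes rather than anything taken from this commit:

```
# users: one "username:bcrypt_hash" entry per line (normally managed with the users script)
$ cat config/shield/users
lee:$2a$10$<bcrypt hash>

# users_roles: "role:user1,user2,..." mappings
$ cat config/shield/users_roles
role1:lee

# roles.yml: role definitions, e.g. the test fixture further down in this change
```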

Original commit: elastic/x-pack-elasticsearch@3ce47c0ffd
Lee Hinman 2016-06-02 11:50:21 -06:00
parent a673c44036
commit a289fbd168
14 changed files with 1179 additions and 15 deletions

View File

@ -0,0 +1,32 @@
apply plugin: 'elasticsearch.rest-test'

dependencies {
  testCompile project(path: ':x-plugins:elasticsearch:x-pack', configuration: 'runtime')
}

integTest {
  cluster {
    setting 'script.inline', 'true'
    plugin 'x-pack', project(':x-plugins:elasticsearch:x-pack')
    extraConfigFile 'x-pack/roles.yml', 'roles.yml'
    [
      test_admin: 'superuser',
      transport_user: 'superuser',
      existing: 'superuser',
      bob: 'actual_role'
    ].each { String user, String role ->
      setupCommand 'setupUser#' + user,
          'bin/x-pack/users', 'useradd', user, '-p', 'changeme', '-r', role
    }
    waitCondition = { node, ant ->
      File tmpFile = new File(node.cwd, 'wait.success')
      ant.get(src: "http://${node.httpUri()}",
          dest: tmpFile.toString(),
          username: 'test_admin',
          password: 'changeme',
          ignoreerrors: true,
          retries: 10)
      return tmpFile.exists()
    }
  }
}

View File

@ -0,0 +1,17 @@
actual_role:
  run_as: [ "joe" ]
  cluster:
    - monitor
  indices:
    - names: [ "index1", "index2" ]
      privileges: [ "read", "write", "create_index", "indices:admin/refresh" ]
      fields:
        - foo
        - bar
      query:
        bool:
          must_not:
            match:
              hidden: true
    - names: "*"
      privileges: [ "read" ]

View File

@ -0,0 +1,125 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.security.SecurityTemplateService;
import org.elasticsearch.xpack.security.action.role.GetRolesResponse;
import org.elasticsearch.xpack.security.action.user.GetUsersResponse;
import org.elasticsearch.xpack.security.action.user.PutUserResponse;
import org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.user.User;
import java.util.Arrays;
import java.util.Collections;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.containsString;
/**
* Integration tests for the {@code migrate} shell command
*/
public class MigrateToolIT extends MigrateToolTestCase {
private static String[] args(String command) {
if (!Strings.hasLength(command)) {
return Strings.EMPTY_ARRAY;
}
return command.split("\\s+");
}
public void testRunMigrateTool() throws Exception {
String integHome = System.getProperty("tests.config.dir");
logger.info("--> HOME: {}", integHome);
Settings settings = Settings.builder()
.put("path.home", createTempDir().toAbsolutePath().toString())
.build();
// Cluster should already be up
String url = "http://" + getHttpURL();
logger.info("--> using URL: {}", url);
MockTerminal t = new MockTerminal();
Client client = getClient();
SecurityClient c = new SecurityClient(client);
// Add an existing user so the tool will skip it
PutUserResponse pur = c.preparePutUser("existing", "s3kirt".toCharArray(), "role1", "user").get();
assertTrue(pur.created());
// Wait for the security index to be green
ClusterHealthResponse actionGet = client.admin().cluster()
.health(Requests.clusterHealthRequest(SecurityTemplateService.SECURITY_INDEX_NAME)
.timeout(TimeValue.timeValueSeconds(30))
.waitForGreenStatus()
.waitForEvents(Priority.LANGUID)
.waitForRelocatingShards(0))
.actionGet();
ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles();
OptionParser parser = muor.getParser();
OptionSet options = parser.parse("-u", "test_admin", "-p", "changeme", "-U", url, "-c", integHome);
muor.execute(t, options, settings.getAsMap());
logger.info("--> output:\n{}", t.getOutput());
// Check that the migrated user can be retrieved
GetUsersResponse resp = c.prepareGetUsers("bob").get();
assertTrue("user 'bob' should exist", resp.hasUsers());
User bob = resp.users()[0];
assertEquals(bob.principal(), "bob");
assertArrayEquals(bob.roles(), new String[]{"actual_role"});
// Make sure the existing user did not change
resp = c.prepareGetUsers("existing").get();
assertTrue("user should exist", resp.hasUsers());
User existing = resp.users()[0];
assertEquals(existing.principal(), "existing");
assertArrayEquals(existing.roles(), new String[]{"role1", "user"});
// Make sure the "actual_role" made it in and is correct
GetRolesResponse roleResp = c.prepareGetRoles().names("actual_role").get();
assertTrue("role should exist", roleResp.hasRoles());
RoleDescriptor rd = roleResp.roles()[0];
assertNotNull(rd);
assertEquals(rd.getName(), "actual_role");
assertArrayEquals(rd.getClusterPrivileges(), new String[]{"monitor"});
assertArrayEquals(rd.getRunAs(), new String[]{"joe"});
RoleDescriptor.IndicesPrivileges[] ips = rd.getIndicesPrivileges();
assertEquals(ips.length, 2);
for (RoleDescriptor.IndicesPrivileges ip : ips) {
if (Arrays.equals(ip.getIndices(), new String[]{"index1", "index2"})) {
assertArrayEquals(ip.getPrivileges(), new String[]{"read", "write", "create_index", "indices:admin/refresh"});
assertArrayEquals(ip.getFields(), new String[]{"foo", "bar"});
assertNotNull(ip.getQuery());
assertThat(ip.getQuery().toUtf8(), containsString("{\"bool\":{\"must_not\":{\"match\":{\"hidden\":true}}}}"));
} else {
assertArrayEquals(ip.getIndices(), new String[]{"*"});
assertArrayEquals(ip.getPrivileges(), new String[]{"read"});
assertArrayEquals(ip.getFields(), null);
assertNull(ip.getQuery());
}
}
// Check that bob can access the things the "actual_role" says he can
String token = basicAuthHeaderValue("bob", new SecuredString("changeme".toCharArray()));
// Create "index1" index and try to search from it as "bob"
client.filterWithHeader(Collections.singletonMap("Authorization", token)).admin().indices().prepareCreate("index1").get();
SearchResponse searchResp = client.filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("index1").get();
}
}

View File

@ -0,0 +1,177 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.XPackPlugin;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Path;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicInteger;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
import static org.hamcrest.Matchers.notNullValue;
/**
* {@link MigrateToolTestCase} is an abstract base class to run integration
* tests against an external Elasticsearch Cluster.
* <p>
* You can define a list of transport addresses from where you can reach your cluster
* by setting "tests.cluster" system property. It defaults to "localhost:9300".
* <p>
* All tests can be run from maven using mvn install as maven will start an external cluster first.
* <p>
* If you want to debug this module from your IDE, then start an external cluster by yourself
* then run JUnit. If you changed the default port, set "tests.cluster=localhost:PORT" when running
* your test.
*/
@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
public abstract class MigrateToolTestCase extends LuceneTestCase {
/**
* Key used to eventually switch to using an external cluster and provide its transport addresses
*/
public static final String TESTS_CLUSTER = "tests.cluster";
/**
* Key used to eventually switch to using an external cluster and provide its transport addresses
*/
public static final String TESTS_HTTP_CLUSTER = "tests.rest.cluster";
/**
* Defaults to localhost:9300
*/
public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300";
protected static final ESLogger logger = ESLoggerFactory.getLogger(MigrateToolTestCase.class.getName());
private static final AtomicInteger counter = new AtomicInteger();
private static Client client;
private static String clusterAddresses;
private static String clusterHttpAddresses;
private static Client startClient(Path tempDir, TransportAddress... transportAddresses) {
logger.info("--> Starting Elasticsearch Java TransportClient {}, {}", transportAddresses, tempDir);
Settings clientSettings = Settings.builder()
.put("cluster.name", "qa_migrate_tests_" + counter.getAndIncrement())
.put("client.transport.ignore_cluster_name", true)
.put("path.home", tempDir)
.put(Security.USER_SETTING.getKey(), "transport_user:changeme")
.put("node.mode", "network") // we require network here!
.build();
TransportClient.Builder transportClientBuilder = TransportClient.builder()
.addPlugin(XPackPlugin.class)
.settings(clientSettings);
TransportClient client = transportClientBuilder.build().addTransportAddresses(transportAddresses);
logger.info("--> Elasticsearch Java TransportClient started");
Exception clientException = null;
try {
ClusterHealthResponse health = client.admin().cluster().prepareHealth().get();
logger.info("--> connected to [{}] cluster which is running [{}] node(s).",
health.getClusterName(), health.getNumberOfNodes());
} catch (Exception e) {
clientException = e;
}
assumeNoException("Sounds like your cluster is not running at " + clusterAddresses, clientException);
return client;
}
private static Client startClient() throws UnknownHostException {
String[] stringAddresses = clusterAddresses.split(",");
TransportAddress[] transportAddresses = new TransportAddress[stringAddresses.length];
int i = 0;
for (String stringAddress : stringAddresses) {
int lastColon = stringAddress.lastIndexOf(":");
if (lastColon == -1) {
throw new IllegalArgumentException("address [" + clusterAddresses + "] not valid");
}
String ip = stringAddress.substring(0, lastColon);
String port = stringAddress.substring(lastColon + 1);
try {
transportAddresses[i++] = new InetSocketTransportAddress(InetAddress.getByName(ip), Integer.valueOf(port));
} catch (NumberFormatException e) {
throw new IllegalArgumentException("port is not valid, expected number but was [" + port + "]");
}
}
return startClient(createTempDir(), transportAddresses);
}
public static Client getClient() {
if (client == null) {
try {
client = startClient();
} catch (UnknownHostException e) {
logger.error("could not start the client", e);
}
assertThat(client, notNullValue());
}
return client;
}
public static String getHttpURL() {
return clusterHttpAddresses;
}
@BeforeClass
public static void initializeSettings() throws UnknownHostException {
String port = System.getProperty("integ.http.port");
clusterAddresses = System.getProperty(TESTS_CLUSTER);
clusterHttpAddresses = System.getProperty(TESTS_HTTP_CLUSTER);
if (clusterAddresses == null || clusterAddresses.isEmpty()) {
throw new UnknownHostException("unable to get a cluster address");
}
}
@AfterClass
public static void stopTransportClient() {
if (client != null) {
client.close();
client = null;
}
}
@Before
public void defineIndexName() {
doClean();
}
@After
public void cleanIndex() {
doClean();
}
private void doClean() {
if (client != null) {
try {
client.admin().indices().prepareDelete("_all").get();
} catch (Exception e) {
// We ignore this cleanup exception
}
}
}
}
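As the class Javadoc above notes, these qa tests can be pointed at an externally started cluster through the `tests.cluster` and `tests.rest.cluster` system properties. A hedged sketch of such an invocation (the Javadoc mentions maven; the ports are illustrative):

```
# assumes a cluster with the expected users is already listening on these ports
mvn install -Dtests.cluster=localhost:9300 -Dtests.rest.cluster=localhost:9200
```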

View File

@ -0,0 +1,102 @@
#!/bin/bash
# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
SCRIPT="$0"
# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
ls=`ls -ld "$SCRIPT"`
# Drop everything prior to ->
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT=`dirname "$SCRIPT"`/"$link"
fi
done
# determine elasticsearch home
ES_HOME=`dirname "$SCRIPT"`/../..
# make ELASTICSEARCH_HOME absolute
ES_HOME=`cd "$ES_HOME"; pwd`
# If an include wasn't specified in the environment, then search for one...
if [ "x$ES_INCLUDE" = "x" ]; then
# Locations (in order) to use when searching for an include file.
for include in /usr/share/elasticsearch/elasticsearch.in.sh \
/usr/local/share/elasticsearch/elasticsearch.in.sh \
/opt/elasticsearch/elasticsearch.in.sh \
~/.elasticsearch.in.sh \
"`dirname "$0"`"/../elasticsearch.in.sh \
"$ES_HOME/bin/elasticsearch.in.sh"; do
if [ -r "$include" ]; then
. "$include"
break
fi
done
# ...otherwise, source the specified include.
elif [ -r "$ES_INCLUDE" ]; then
. "$ES_INCLUDE"
fi
if [ -x "$JAVA_HOME/bin/java" ]; then
JAVA="$JAVA_HOME/bin/java"
else
JAVA=`which java`
fi
if [ ! -x "$JAVA" ]; then
echo "Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME"
exit 1
fi
if [ -z "$ES_CLASSPATH" ]; then
echo "You must set the ES_CLASSPATH var" >&2
exit 1
fi
# Try to read package config files
if [ -f "/etc/sysconfig/elasticsearch" ]; then
CONF_DIR=/etc/elasticsearch
. "/etc/sysconfig/elasticsearch"
elif [ -f "/etc/default/elasticsearch" ]; then
CONF_DIR=/etc/elasticsearch
. "/etc/default/elasticsearch"
fi
export HOSTNAME=`hostname -s`
# include x-pack jars in classpath
ES_CLASSPATH="$ES_CLASSPATH:$ES_HOME/plugins/x-pack/*"
# don't let JAVA_TOOL_OPTIONS slip in (e.g. crazy agents in ubuntu)
# works around https://bugs.launchpad.net/ubuntu/+source/jayatana/+bug/1441487
if [ "x$JAVA_TOOL_OPTIONS" != "x" ]; then
echo "Warning: Ignoring JAVA_TOOL_OPTIONS=$JAVA_TOOL_OPTIONS"
echo "Please pass JVM parameters via ES_JAVA_OPTS instead"
unset JAVA_TOOL_OPTIONS
fi
# CONF_FILE setting was removed
if [ ! -z "$CONF_FILE" ]; then
echo "CONF_FILE setting is no longer supported. elasticsearch.yml must be placed in the config directory and cannot be renamed."
exit 1
fi
declare -a args=("$@")
if [ -e "$CONF_DIR" ]; then
args=("${args[@]}" -Edefault.path.conf="$CONF_DIR")
fi
cd "$ES_HOME" > /dev/null
"$JAVA" $ES_JAVA_OPTS -cp "$ES_CLASSPATH" -Des.path.home="$ES_HOME" org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool "${args[@]}"
status=$?
cd - > /dev/null
exit $status

View File

@ -0,0 +1,9 @@
@echo off
rem Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
rem or more contributor license agreements. Licensed under the Elastic License;
rem you may not use this file except in compliance with the Elastic License.
PUSHD "%~dp0"
CALL "%~dp0.in.bat" org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool %*
POPD

View File

@ -79,6 +79,11 @@ public class PutUserRequestBuilder extends ActionRequestBuilder<PutUserRequest,
return this;
}
public PutUserRequestBuilder passwordHash(char[] passwordHash) {
request.passwordHash(passwordHash);
return this;
}
public PutUserRequestBuilder source(String username, BytesReference source) throws IOException {
username(username);
try (XContentParser parser = XContentHelper.createParser(source)) {
@ -99,6 +104,14 @@ public class PutUserRequestBuilder extends ActionRequestBuilder<PutUserRequest,
throw new ElasticsearchParseException(
"expected field [{}] to be of type string, but found [{}] instead", currentFieldName, token);
}
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, User.Fields.PASSWORD_HASH)) {
if (token == XContentParser.Token.VALUE_STRING) {
char[] passwordChars = parser.text().toCharArray();
passwordHash(passwordChars);
} else {
throw new ElasticsearchParseException(
"expected field [{}] to be of type string, but found [{}] instead", currentFieldName, token);
}
} else if (ParseFieldMatcher.STRICT.match(currentFieldName, User.Fields.ROLES)) {
if (token == XContentParser.Token.VALUE_STRING) {
roles(Strings.commaDelimitedListToStringArray(parser.text()));
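The new `password_hash` field lets the migrate tool submit the already-hashed password taken from the `users` file instead of a plaintext one. A hedged sketch of a request body this parser change accepts (hash value and username are placeholders):

```
curl -u test_admin:changeme -XPOST 'http://localhost:9200/_xpack/security/user/lee' -d '{
  "password_hash": "$2a$10$<bcrypt hash>",
  "roles": ["role1"]
}'
```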

View File

@ -0,0 +1,397 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authc.esnative;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import javax.net.ssl.HttpsURLConnection;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cli.MultiCommand;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.xpack.security.action.role.PutRoleRequest;
import org.elasticsearch.xpack.security.action.user.PutUserRequest;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.xpack.security.authc.file.FileUserPasswdStore;
import org.elasticsearch.xpack.security.authc.file.FileUserRolesStore;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.authz.permission.Permission;
import org.elasticsearch.xpack.security.authz.store.FileRolesStore;
import org.elasticsearch.xpack.security.ssl.ClientSSLService;
import org.elasticsearch.xpack.security.ssl.SSLConfiguration;
import org.elasticsearch.xpack.security.support.NoOpLogger;
import org.elasticsearch.xpack.security.support.Validation;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
* This is the command-line tool used for migrating users and roles from the file-based realm into the new native realm using the import
* API. It reads from the files and does its best to add the users and roles, showing an error if it is unable to import them. Any existing
* users or roles are skipped.
*/
public class ESNativeRealmMigrateTool extends MultiCommand {
public static void main(String[] args) throws Exception {
exit(new ESNativeRealmMigrateTool().main(args, Terminal.DEFAULT));
}
public ESNativeRealmMigrateTool() {
super("Imports file-based users and roles to the native security realm");
subcommands.put("native", new MigrateUserOrRoles());
}
/** Command to migrate users and roles to the native realm */
public static class MigrateUserOrRoles extends SettingCommand {
private final OptionSpec<String> username;
private final OptionSpec<String> password;
private final OptionSpec<String> url;
private final OptionSpec<String> usersToMigrateCsv;
private final OptionSpec<String> rolesToMigrateCsv;
private final OptionSpec<String> esConfigDir;
public MigrateUserOrRoles() {
super("Migrates users or roles from file to native realm");
this.username = parser.acceptsAll(Arrays.asList("u", "username"),
"User used to authenticate with Elasticsearch")
.withRequiredArg();
this.password = parser.acceptsAll(Arrays.asList("p", "password"),
"Password used to authenticate with Elasticsearch")
.withRequiredArg();
this.url = parser.acceptsAll(Arrays.asList("U", "url"),
"URL of Elasticsearch host")
.withRequiredArg();
this.usersToMigrateCsv = parser.acceptsAll(Arrays.asList("n", "users"),
"Users to migrate from file to native realm")
.withRequiredArg();
this.rolesToMigrateCsv = parser.acceptsAll(Arrays.asList("r", "roles"),
"Roles to migrate from file to native realm")
.withRequiredArg();
this.esConfigDir = parser.acceptsAll(Arrays.asList("c", "config"),
"Configuration directory to use instead of default")
.withRequiredArg();
}
// Visible for testing
public OptionParser getParser() {
return this.parser;
}
@Override
protected void printAdditionalHelp(Terminal terminal) {
terminal.println("This tool migrates file based users[1] and roles[2] to the native realm in");
terminal.println("elasticsearch, saving the administrator from needing to manually transition");
terminal.println("them from the file.");
}
// Visible for testing
@Override
public void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
terminal.println("starting migration of users and roles...");
Settings.Builder sb = Settings.builder();
sb.put(settings);
if (this.esConfigDir != null) {
sb.put("path.conf", this.esConfigDir.value(options));
}
Settings shieldSettings = sb.build();
Environment shieldEnv = new Environment(shieldSettings);
importUsers(terminal, shieldSettings, shieldEnv, options);
importRoles(terminal, shieldSettings, shieldEnv, options);
terminal.println("users and roles imported.");
}
private String postURL(Settings settings, Environment env, String method, String urlString,
OptionSet options, @Nullable String bodyString) throws Exception {
URI uri = new URI(urlString);
URL url = uri.toURL();
HttpURLConnection conn;
if ("https".equalsIgnoreCase(uri.getScheme())) {
SSLConfiguration.Global globalConfig = new SSLConfiguration.Global(settings);
final ClientSSLService sslService = new ClientSSLService(settings, globalConfig);
sslService.setEnvironment(env);
final HttpsURLConnection httpsConn = (HttpsURLConnection) url.openConnection();
AccessController.doPrivileged(new PrivilegedAction<Void>() {
@Override
public Void run() {
// Requires permission java.lang.RuntimePermission "setFactory";
httpsConn.setSSLSocketFactory(sslService.sslSocketFactory(settings));
return null;
}
});
conn = httpsConn;
} else {
conn = (HttpURLConnection) url.openConnection();
}
// If using SSL, need a custom service because it's likely a self-signed certificate
conn.setRequestMethod(method);
conn.setReadTimeout(30 * 1000); // 30 second timeout
// Add basic-auth header
conn.setRequestProperty("Authorization",
UsernamePasswordToken.basicAuthHeaderValue(username.value(options),
new SecuredString(password.value(options).toCharArray())));
conn.setDoOutput(true); // we'll be sending a body
conn.connect();
if (bodyString != null) {
try (OutputStream out = conn.getOutputStream()) {
out.write(bodyString.getBytes(Charsets.UTF_8));
} catch (Exception e) {
try {
conn.disconnect();
} catch (Exception e2) {
// Ignore exceptions if we weren't able to close the connection after an error
}
throw e;
}
}
try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), Charsets.UTF_8))) {
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
sb.append(line);
}
return sb.toString();
} catch (IOException e) {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getErrorStream(), Charsets.UTF_8))) {
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
sb.append(line);
}
throw new IOException(sb.toString(), e);
}
} finally {
conn.disconnect();
}
}
public Set<String> getUsersThatExist(Terminal terminal, Settings settings, Environment env, OptionSet options) throws Exception {
Set<String> existingUsers = new HashSet<>();
String allUsersJson = postURL(settings, env, "GET", this.url.value(options) + "/_xpack/security/user/", options, null);
try (XContentParser parser = JsonXContent.jsonXContent.createParser(allUsersJson)) {
XContentParser.Token token = parser.nextToken();
String userName;
if (token == XContentParser.Token.START_OBJECT) {
while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) {
userName = parser.currentName();
existingUsers.add(userName);
parser.nextToken();
parser.skipChildren();
}
} else {
throw new Exception("failed to retrieve users, expecting an object but got: " + token);
}
}
terminal.println("found existing users: " + existingUsers);
return existingUsers;
}
public static String createUserJson(String[] roles, char[] password) throws IOException {
XContentBuilder builder = jsonBuilder();
builder.startObject();
{
builder.field("password_hash", new String(password));
builder.startArray("roles");
for (String role : roles) {
builder.value(role);
}
builder.endArray();
}
builder.endObject();
return builder.string();
}
public void importUsers(Terminal terminal, Settings settings, Environment env, OptionSet options) {
String usersCsv = usersToMigrateCsv.value(options);
String[] usersToMigrate = (usersCsv != null) ? usersCsv.split(",") : Strings.EMPTY_ARRAY;
Settings esusersSettings = Realms.fileRealmSettings(settings);
Path usersFile = FileUserPasswdStore.resolveFile(esusersSettings, env);
Path usersRolesFile = FileUserRolesStore.resolveFile(esusersSettings, env);
terminal.println("importing users from [" + usersFile + "]...");
Map<String, char[]> userToHashedPW = FileUserPasswdStore.parseFile(usersFile, null);
Map<String, String[]> userToRoles = FileUserRolesStore.parseFile(usersRolesFile, null);
Set<String> existingUsers;
try {
existingUsers = getUsersThatExist(terminal, settings, env, options);
} catch (Exception e) {
terminal.println("failed to get users that already exist, skipping user import");
terminal.println(ExceptionsHelper.stackTrace(e));
return;
}
if (usersToMigrate.length == 0) {
usersToMigrate = userToHashedPW.keySet().toArray(new String[userToHashedPW.size()]);
}
for (String user : usersToMigrate) {
if (userToHashedPW.containsKey(user) == false) {
terminal.println("no user [" + user + "] found, skipping");
continue;
} else if (existingUsers.contains(user)) {
terminal.println("user [" + user + "] already exists, skipping");
continue;
}
terminal.println("migrating user [" + user + "]");
String reqBody = "n/a";
try {
reqBody = createUserJson(userToRoles.get(user), userToHashedPW.get(user));
String resp = postURL(settings, env, "POST",
this.url.value(options) + "/_xpack/security/user/" + user, options, reqBody);
terminal.println(resp);
} catch (Exception e) {
terminal.println("failed to migrate user [" + user + "] with body: " + reqBody);
terminal.println(ExceptionsHelper.stackTrace(e));
}
}
}
public Set<String> getRolesThatExist(Terminal terminal, Settings settings, Environment env, OptionSet options) throws Exception {
Set<String> existingRoles = new HashSet<>();
String allRolesJson = postURL(settings, env, "GET", this.url.value(options) + "/_xpack/security/role/", options, null);
try (XContentParser parser = JsonXContent.jsonXContent.createParser(allRolesJson)) {
XContentParser.Token token = parser.nextToken();
String roleName;
if (token == XContentParser.Token.START_OBJECT) {
while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) {
roleName = parser.currentName();
existingRoles.add(roleName);
parser.nextToken();
parser.skipChildren();
}
} else {
throw new Exception("failed to retrieve roles, expecting an object but got: " + token);
}
}
terminal.println("found existing roles: " + existingRoles);
return existingRoles;
}
public static String createRoleJson(RoleDescriptor rd) throws IOException {
XContentBuilder builder = jsonBuilder();
builder.startObject();
{
String[] clusterStrings = rd.getClusterPrivileges();
String[] runAs = rd.getRunAs();
RoleDescriptor.IndicesPrivileges[] indicesPrivileges = rd.getIndicesPrivileges();
if (clusterStrings != null && clusterStrings.length > 0) {
builder.array("cluster", clusterStrings);
}
if (runAs != null && runAs.length > 0) {
builder.array("run_as", runAs);
}
if (indicesPrivileges != null && indicesPrivileges.length > 0) {
builder.startArray("indices");
for (RoleDescriptor.IndicesPrivileges ip : indicesPrivileges) {
builder.startObject();
{
String[] indices = ip.getIndices();
String[] privs = ip.getPrivileges();
String[] fields = ip.getFields();
BytesReference query = ip.getQuery();
if (indices != null && indices.length > 0) {
builder.array("names", indices);
}
if (privs != null && privs.length > 0) {
builder.array("privileges", privs);
}
if (fields != null && fields.length > 0) {
builder.array("fields", fields);
}
if (query != null) {
builder.field("query", query.toUtf8());
}
}
builder.endObject();
}
builder.endArray();
}
}
builder.endObject();
return builder.string();
}
public void importRoles(Terminal terminal, Settings settings, Environment env, OptionSet options) {
String rolesCsv = rolesToMigrateCsv.value(options);
String[] rolesToMigrate = (rolesCsv != null) ? rolesCsv.split(",") : Strings.EMPTY_ARRAY;
Settings esusersSettings = Realms.fileRealmSettings(settings);
Path rolesFile = FileRolesStore.resolveFile(esusersSettings, env).toAbsolutePath();
terminal.println("importing roles from [" + rolesFile + "]...");
Map<String, RoleDescriptor> roles = FileRolesStore.parseRoleDescriptors(rolesFile, null, true, Settings.EMPTY);
Set<String> existingRoles;
try {
existingRoles = getRolesThatExist(terminal, settings, env, options);
} catch (Exception e) {
terminal.println("failed to get roles that already exist, skipping role import");
terminal.println(ExceptionsHelper.stackTrace(e));
return;
}
if (rolesToMigrate.length == 0) {
rolesToMigrate = roles.keySet().toArray(new String[roles.size()]);
}
for (String roleName : rolesToMigrate) {
if (roles.containsKey(roleName) == false) {
terminal.println("no role [" + roleName + "] found, skipping");
continue;
} else if (existingRoles.contains(roleName)) {
terminal.println("role [" + roleName + "] already exists, skipping");
continue;
}
terminal.println("migrating role [" + roleName + "]");
String reqBody = "n/a";
try {
reqBody = createRoleJson(roles.get(roleName));
String resp = postURL(settings, env, "POST",
this.url.value(options) + "/_xpack/security/role/" + roleName, options, reqBody);
terminal.println(resp);
} catch (Exception e) {
terminal.println("failed to migrate role [" + roleName + "] with body: " + reqBody);
terminal.println(ExceptionsHelper.stackTrace(e));
}
}
}
}
}
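Because `postURL` switches to `ClientSSLService` when the URL scheme is https, the tool can also talk to a TLS-protected cluster, provided the usual x-pack SSL settings are visible to it (for example via the config directory passed with `-c`). A hedged sketch of such an invocation; the script path, credentials, and paths are illustrative, not taken from this commit:

```
bin/x-pack/migrate native -U https://localhost:9200 -u es_admin -p password -c /etc/elasticsearch
```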

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security.authz.store;
import com.fasterxml.jackson.dataformat.yaml.snakeyaml.error.YAMLException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
@ -150,7 +151,55 @@ public class FileRolesStore extends AbstractLifecycleComponent<RolesStore> imple
return unmodifiableMap(roles);
}
public static Map<String, RoleDescriptor> parseRoleDescriptors(Path path, ESLogger logger,
boolean resolvePermission, Settings settings) {
if (logger == null) {
logger = NoOpLogger.INSTANCE;
}
Map<String, RoleDescriptor> roles = new HashMap<>();
logger.trace("attempted to read roles file located at [{}]", path.toAbsolutePath());
if (Files.exists(path)) {
try {
List<String> roleSegments = roleSegments(path);
for (String segment : roleSegments) {
RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings);
if (rd != null) {
roles.put(rd.getName(), rd);
}
}
} catch (IOException ioe) {
logger.error("failed to read roles file [{}]. skipping all roles...", ioe, path.toAbsolutePath());
}
}
return unmodifiableMap(roles);
}
@Nullable
private static Role parseRole(String segment, Path path, ESLogger logger, boolean resolvePermissions, Settings settings) {
RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermissions, settings);
if (descriptor != null) {
String roleName = descriptor.getName();
// first check if FLS/DLS is enabled on the role...
for (RoleDescriptor.IndicesPrivileges privilege : descriptor.getIndicesPrivileges()) {
if ((privilege.getQuery() != null || privilege.getFields() != null)
&& Security.flsDlsEnabled(settings) == false) {
logger.error("invalid role definition [{}] in roles file [{}]. document and field level security is not " +
"enabled. set [{}] to [true] in the configuration file. skipping role...", roleName, path
.toAbsolutePath(), XPackPlugin.featureEnabledSetting(Security.DLS_FLS_FEATURE));
return null;
}
}
return Role.builder(descriptor).build();
} else {
return null;
}
}
@Nullable
private static RoleDescriptor parseRoleDescriptor(String segment, Path path, ESLogger logger,
boolean resolvePermissions, Settings settings) {
String roleName = null;
try {
XContentParser parser = YamlXContent.yamlXContent.createParser(segment);
@ -167,25 +216,13 @@ public class FileRolesStore extends AbstractLifecycleComponent<RolesStore> imple
}
if (resolvePermissions == false) {
-return Role.builder(roleName).build();
+return new RoleDescriptor(roleName, null, null, null);
}
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
RoleDescriptor descriptor = RoleDescriptor.parse(roleName, parser);
-// first check if FLS/DLS is enabled on the role...
-for (RoleDescriptor.IndicesPrivileges privilege : descriptor.getIndicesPrivileges()) {
-if ((privilege.getQuery() != null || privilege.getFields() != null)
-&& Security.flsDlsEnabled(settings) == false) {
-logger.error("invalid role definition [{}] in roles file [{}]. document and field level security is not " +
-"enabled. set [{}] to [true] in the configuration file. skipping role...", roleName, path
-.toAbsolutePath(), XPackPlugin.featureEnabledSetting(Security.DLS_FLS_FEATURE));
-return null;
-}
-}
-return Role.builder(descriptor).build();
+return descriptor;
} else {
logger.error("invalid role definition [{}] in roles file [{}]. skipping role...", roleName, path.toAbsolutePath());
return null;

View File

@ -294,6 +294,7 @@ public class User implements ToXContent {
public interface Fields {
ParseField USERNAME = new ParseField("username");
ParseField PASSWORD = new ParseField("password");
ParseField PASSWORD_HASH = new ParseField("password_hash");
ParseField ROLES = new ParseField("roles");
ParseField FULL_NAME = new ParseField("full_name");
ParseField EMAIL = new ParseField("email");

View File

@ -0,0 +1,34 @@
NAME
migrate - Migrates Elasticsearch file-based users and roles to the native realm
SYNOPSIS
migrate native -U <url> [OPTIONS]
DESCRIPTION
This tool migrates file based users[1] and roles[2] to the native realm in
elasticsearch, saving the administrator from needing to manually transition
them from the file.
OPTIONS
-U --url URL to connect to for user/role import
-u --username [Optional] Username for authenticating with Elasticsearch
-p --password [Optional] Password for authenticating with Elasticsearch
-n --users [Optional] Users to migrate from file to native realm,
if not specified all users will be migrated
-r --roles [Optional] Roles to migrate from file to native realm
if not specified all roles will be migrated
-c --config [Optional] Path to Elasticsearch config directory
NOTES
[1] https://www.elastic.co/guide/en/shield/current/setting-up-authentication.html
[2] https://www.elastic.co/guide/en/shield/current/configuring-rbac.html
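A usage example consistent with the SYNOPSIS and options above (host, credentials, and names are placeholders; omitting -n/-r migrates every user/role found in the files):

```
migrate native -U http://localhost:9200 -u es_admin -p password \
    -n lee,susan -r role1,role2 -c /path/to/elasticsearch/config
```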

View File

@ -11,7 +11,10 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.xpack.security.InternalClient;
import org.elasticsearch.xpack.security.Security;
@ -28,10 +31,12 @@ import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.ExternalResource;
import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
@ -360,4 +365,19 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase {
public static SecurityClient securityClient(Client client) {
return randomBoolean() ? new XPackClient(client).security() : new SecurityClient(client);
}
}
protected String getHttpURL() {
return getHttpURL(false);
}
protected String getHttpURL(boolean useSSL) {
final NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get();
final List<NodeInfo> nodes = nodeInfos.getNodes();
assertTrue("there is at least one node", nodes.size() > 0);
NodeInfo ni = randomFrom(nodes);
TransportAddress publishAddress = ni.getHttp().address().publishAddress();
assertEquals(1, publishAddress.uniqueAddressTypeId());
InetSocketAddress address = ((InetSocketTransportAddress) publishAddress).address();
return (useSSL ? "https://" : "http://") + NetworkAddress.format(address.getAddress()) + ":" + address.getPort();
}
}

View File

@ -0,0 +1,150 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authc.esnative;
import com.google.common.base.Charsets;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.NativeRealmIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.xpack.security.SecurityTemplateService;
import org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool;
import org.elasticsearch.xpack.security.client.SecurityClient;
import org.elasticsearch.xpack.security.transport.netty.SecurityNettyHttpServerTransport;
import org.junit.BeforeClass;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
* Integration tests for the {@code ESNativeMigrateTool}
*/
public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
// Randomly use SSL (or not)
private static boolean useSSL;
@BeforeClass
public static void setSSL() {
useSSL = randomBoolean();
}
@Override
public Settings nodeSettings(int nodeOrdinal) {
logger.info("--> use SSL? {}", useSSL);
Settings s = Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(NetworkModule.HTTP_ENABLED.getKey(), true)
.put(SecurityNettyHttpServerTransport.SSL_SETTING.getKey(), useSSL)
.build();
return s;
}
@Override
protected boolean sslTransportEnabled() {
return useSSL;
}
private String homePath() throws Exception {
Environment e = internalCluster().getInstances(Environment.class).iterator().next();
return e.configFile().toAbsolutePath().toString();
}
public void testRetrieveUsers() throws Exception {
String home = homePath();
SecurityClient c = new SecurityClient(client());
logger.error("--> creating users");
int numToAdd = randomIntBetween(1,10);
Set<String> addedUsers = new HashSet<>(numToAdd);
for (int i = 0; i < numToAdd; i++) {
String uname = randomAsciiOfLength(5);
c.preparePutUser(uname, "s3kirt".toCharArray(), "role1", "user").get();
addedUsers.add(uname);
}
logger.error("--> waiting for .security index");
ensureGreen(SecurityTemplateService.SECURITY_INDEX_NAME);
MockTerminal t = new MockTerminal();
String username = nodeClientUsername();
String password = new String(nodeClientPassword().utf8Bytes(), Charsets.UTF_8);
String url = getHttpURL(useSSL);
ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles();
Settings sslSettings =
SecuritySettingsSource.getSSLSettingsForStore("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks",
"testnode");
Settings settings = Settings.builder().put(sslSettings).put("path.home", home).build();
logger.error("--> retrieving users using URL: {}, home: {}", url, home);
OptionParser parser = muor.getParser();
OptionSet options = parser.parse("-u", username, "-p", password, "-U", url, "-c", home);
logger.info("--> options: {}", options.asMap());
Set<String> users = muor.getUsersThatExist(t, settings, new Environment(settings), options);
logger.info("--> output: \n{}", t.getOutput());;
for (String u : addedUsers) {
assertThat("expected list to contain: " + u, users.contains(u), is(true));
}
}
public void testRetrieveRoles() throws Exception {
String home = homePath();
SecurityClient c = new SecurityClient(client());
logger.error("--> creating roles");
int numToAdd = randomIntBetween(1,10);
Set<String> addedRoles = new HashSet<>(numToAdd);
for (int i = 0; i < numToAdd; i++) {
String rname = randomAsciiOfLength(5);
c.preparePutRole(rname)
.cluster("all", "none")
.runAs("root", "nobody")
.addIndices(new String[]{"index"}, new String[]{"read"},
new String[]{"body", "title"}, new BytesArray("{\"query\": {\"match_all\": {}}}"))
.get();
addedRoles.add(rname);
}
logger.error("--> waiting for .security index");
ensureGreen(SecurityTemplateService.SECURITY_INDEX_NAME);
MockTerminal t = new MockTerminal();
String username = nodeClientUsername();
String password = new String(nodeClientPassword().utf8Bytes(), Charsets.UTF_8);
String url = getHttpURL(useSSL);
ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles();
Settings sslSettings =
SecuritySettingsSource.getSSLSettingsForStore("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks",
"testclient");
Settings settings = Settings.builder().put(sslSettings).put("path.home", home).build();
logger.error("--> retrieving roles using URL: {}, home: {}", url, home);
OptionParser parser = muor.getParser();
OptionSet options = parser.parse("-u", username, "-p", password, "-U", url, "-c", home);
Set<String> roles = muor.getRolesThatExist(t, settings, new Environment(settings), options);
logger.info("--> output: \n{}", t.getOutput());;
for (String r : addedRoles) {
assertThat("expected list to contain: " + r, roles.contains(r), is(true));
}
}
}

View File

@ -0,0 +1,50 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers.tool;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.CommandTestCase;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.security.authc.esnative.ESNativeRealmMigrateTool;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import org.junit.Test;
import static org.hamcrest.Matchers.equalTo;
/**
* Unit tests for the {@code ESNativeRealmMigrateTool}
*/
public class ESNativeRealmMigrateToolTests extends CommandTestCase {
@Override
protected Command newCommand() {
return new ESNativeRealmMigrateTool();
}
public void testUserJson() throws Exception {
assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createUserJson(Strings.EMPTY_ARRAY, "hash".toCharArray()),
equalTo("{\"password_hash\":\"hash\",\"roles\":[]}"));
assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createUserJson(new String[]{"role1", "role2"}, "hash".toCharArray()),
equalTo("{\"password_hash\":\"hash\",\"roles\":[\"role1\",\"role2\"]}"));
}
public void testRoleJson() throws Exception {
RoleDescriptor.IndicesPrivileges ip = RoleDescriptor.IndicesPrivileges.builder()
.indices(new String[]{"i1", "i2", "i3"})
.privileges(new String[]{"all"})
.fields(new String[]{"body"})
.build();
RoleDescriptor.IndicesPrivileges[] ips = new RoleDescriptor.IndicesPrivileges[1];
ips[0] = ip;
String[] cluster = Strings.EMPTY_ARRAY;
String[] runAs = Strings.EMPTY_ARRAY;
RoleDescriptor rd = new RoleDescriptor("rolename", cluster, ips, runAs);
assertThat(ESNativeRealmMigrateTool.MigrateUserOrRoles.createRoleJson(rd),
equalTo("{\"indices\":[{\"names\":[\"i1\",\"i2\",\"i3\"],\"privileges\":[\"all\"],\"fields\":[\"body\"]}]}"));
}
}