Initial commit of the infrastructure codebase for security

There are four modules:
 - authc: realm-based authentication module
 - authz: role-based privileges & permissions authorization module
 - n2n: node-to-node authentication module (incl. IP filtering auth)
 - audit: audit trail module (only includes log file audit trails for now)

Original commit: elastic/x-pack-elasticsearch@b1ec9e2923
Author: uboness
Date: 2014-07-17 15:53:50 +02:00
Parent: 5ede63f180
Commit: f727e29066

61 changed files with 4141 additions and 4 deletions

pom.xml
@@ -24,6 +24,7 @@
</releases>
<snapshots>
<enabled>true</enabled>
<updatePolicy>always</updatePolicy>
</snapshots>
</repository>
</repositories>
@@ -31,6 +32,8 @@
<properties>
<lucene.version>4.9.0</lucene.version>
<elasticsearch.version>1.4.0-SNAPSHOT</elasticsearch.version>
<netty.version>3.9.1.Final</netty.version>
<tests.jvms>auto</tests.jvms>
<tests.shuffle>true</tests.shuffle>
<tests.output>onerror</tests.output>
@@ -77,12 +80,32 @@
</dependency>
<!-- real dependencies -->
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>${elasticsearch.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.2</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<version>${netty.version}</version>
</dependency>
<!-- needed for forbidden APIs, embarrassing, need to fix -->
<dependency>
<groupId>com.google.guava</groupId>
@@ -278,6 +301,7 @@
<excludes>
<exclude>jsr166e/**</exclude>
<!-- start excludes for valid system-out -->
<exclude>org/elasticsearch/shield/support/CmdLineTool*</exclude>
<exclude>org/elasticsearch/common/logging/log4j/ConsoleAppender*</exclude>
<exclude>org/elasticsearch/plugins/PluginManager.class</exclude>
<exclude>org/elasticsearch/bootstrap/Bootstrap.class</exclude>

@@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.shield.authc.AuthenticationService;
import org.elasticsearch.shield.authz.AuthorizationService;
public class SecurityActionFilter {
}
///**
// *
// */
//public class SecurityActionFilter implements ActionFilter {
//
// private final AuthenticationService authenticationService;
// private final AuthorizationService authorizationService;
//
// @Inject
// public SecurityActionFilter(AuthenticationService authenticationService, AuthorizationService authorizationService) {
// this.authenticationService = authenticationService;
// this.authorizationService = authorizationService;
// }
//
// @Override
// public void process(String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) {
// User user = authenticationService.authenticate(action, actionRequest);
// authorizationService.authorize(user, action, actionRequest);
// actionFilterChain.continueProcessing(action, actionRequest, actionListener);
// }
//
// @Override
// public int order() {
// return Integer.MIN_VALUE;
// }
//}

@@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield;
import org.elasticsearch.ElasticsearchException;
/**
*
*/
public class SecurityException extends ElasticsearchException {
public SecurityException(String msg) {
super(msg);
}
public SecurityException(String msg, Throwable cause) {
super(msg, cause);
}
}

@@ -0,0 +1,42 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield;
import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.Modules;
import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.audit.AuditTrailModule;
import org.elasticsearch.shield.authc.AuthenticationModule;
import org.elasticsearch.shield.authz.AuthorizationModule;
import org.elasticsearch.shield.n2n.N2NModule;
/**
*
*/
public class SecurityModule extends AbstractModule implements SpawnModules {
private final Settings settings;
public SecurityModule(Settings settings) {
this.settings = settings;
}
@Override
public Iterable<? extends Module> spawnModules() {
return ImmutableList.of(
Modules.createModule(AuthenticationModule.class, settings),
Modules.createModule(AuthorizationModule.class, settings),
Modules.createModule(AuditTrailModule.class, settings),
Modules.createModule(N2NModule.class, settings));
}
@Override
protected void configure() {
}
}

@@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield;
/**
*
*/
public class SecuritySettingsException extends SecurityException {
public SecuritySettingsException(String msg) {
super(msg);
}
public SecuritySettingsException(String msg, Throwable cause) {
super(msg, cause);
}
}

@@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield;
import org.elasticsearch.shield.authz.SystemRole;
/**
* An authenticated user
*/
public abstract class User {
public static final User SYSTEM = new System();
/**
* @return The principal of this user - effectively serving as the unique identity of the user.
*/
public abstract String principal();
/**
* @return The roles this user is associated with. The roles are identified by their unique names
* and each represents a set of permissions
*/
public abstract String[] roles();
public static class Simple extends User {
private final String username;
private final String[] roles;
public Simple(String username, String... roles) {
this.username = username;
this.roles = roles;
}
@Override
public String principal() {
return username;
}
@Override
public String[] roles() {
return roles;
}
}
private static class System extends User {
private static final String NAME = "__es_system_user";
private static final String[] ROLES = new String[] { SystemRole.NAME };
private System() {
}
@Override
public String principal() {
return NAME;
}
@Override
public String[] roles() {
return ROLES;
}
}
}

@@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.audit;
/**
*
*/
public class AuditException extends org.elasticsearch.shield.SecurityException {
public AuditException(String msg) {
super(msg);
}
public AuditException(String msg, Throwable cause) {
super(msg, cause);
}
}

@@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.audit;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.authc.AuthenticationToken;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public interface AuditTrail {
public static final AuditTrail NOOP = new AuditTrail() {
@Override
public void anonymousAccess(String action, TransportRequest request) {
}
@Override
public void authenticationFailed(AuthenticationToken token, String action, TransportRequest request) {
}
@Override
public void accessGranted(User user, String action, TransportRequest request) {
}
@Override
public void accessDenied(User user, String action, TransportRequest request) {
}
};
void anonymousAccess(String action, TransportRequest request);
void authenticationFailed(AuthenticationToken token, String action, TransportRequest request);
void accessGranted(User user, String action, TransportRequest request);
void accessDenied(User user, String action, TransportRequest request);
}

@@ -0,0 +1,50 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.audit;
import com.google.common.collect.Sets;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.audit.logfile.LoggingAuditTrail;
import java.util.Set;
/**
*
*/
public class AuditTrailModule extends AbstractModule {
private final Settings settings;
public AuditTrailModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
if (!settings.getAsBoolean("shield.audit.enabled", false)) {
bind(AuditTrail.class).toInstance(AuditTrail.NOOP);
return;
}
String[] outputs = settings.getAsArray("shield.audit.outputs", new String[] { "logfile" });
if (outputs.length == 0) {
bind(AuditTrail.class).toInstance(AuditTrail.NOOP);
return;
}
bind(AuditTrail.class).to(AuditTrailService.class);
Multibinder<AuditTrail> binder = Multibinder.newSetBinder(binder(), AuditTrail.class);
Set<String> uniqueOutputs = Sets.newHashSet(outputs);
for (String output : uniqueOutputs) {
switch (output) {
case "logfile":
binder.addBinding().to(LoggingAuditTrail.class);
break;
}
}
}
}
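
For illustration, a minimal sketch of node settings that exercise the bindings above (the wrapper class and main method are hypothetical; only the shield.audit.* keys and their defaults come from configure()):

    import org.elasticsearch.common.settings.ImmutableSettings;
    import org.elasticsearch.common.settings.Settings;

    public class AuditSettingsExample {
        public static void main(String[] args) {
            // keys read by AuditTrailModule.configure(); with auditing enabled and the
            // default "logfile" output, LoggingAuditTrail is bound via the multibinder
            Settings settings = ImmutableSettings.settingsBuilder()
                    .put("shield.audit.enabled", true)
                    .putArray("shield.audit.outputs", "logfile")
                    .build();
            System.out.println(settings.getAsBoolean("shield.audit.enabled", false));
        }
    }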

@@ -0,0 +1,58 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.audit;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.authc.AuthenticationToken;
import org.elasticsearch.transport.TransportRequest;
import java.util.Set;
/**
*
*/
public class AuditTrailService extends AbstractComponent implements AuditTrail {
private final AuditTrail[] auditTrails;
@Inject
public AuditTrailService(Settings settings, Set<AuditTrail> auditTrails) {
super(settings);
this.auditTrails = auditTrails.toArray(new AuditTrail[auditTrails.size()]);
}
@Override
public void anonymousAccess(String action, TransportRequest request) {
for (int i = 0; i < auditTrails.length; i++) {
auditTrails[i].anonymousAccess(action, request);
}
}
@Override
public void authenticationFailed(AuthenticationToken token, String action, TransportRequest request) {
for (int i = 0; i < auditTrails.length; i++) {
auditTrails[i].authenticationFailed(token, action, request);
}
}
@Override
public void accessGranted(User user, String action, TransportRequest request) {
for (int i = 0; i < auditTrails.length; i++) {
auditTrails[i].accessGranted(user, action, request);
}
}
@Override
public void accessDenied(User user, String action, TransportRequest request) {
for (int i = 0; i < auditTrails.length; i++) {
auditTrails[i].accessDenied(user, action, request);
}
}
}

@@ -0,0 +1,62 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.audit.logfile;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.audit.AuditTrail;
import org.elasticsearch.shield.authc.AuthenticationToken;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public class LoggingAuditTrail extends AbstractComponent implements AuditTrail {
@Inject
public LoggingAuditTrail(Settings settings) {
super(settings);
}
@Override
public void anonymousAccess(String action, TransportRequest request) {
if (logger.isDebugEnabled()) {
logger.info("ANONYMOUS_ACCESS\thost=[{}], action=[{}], request=[{}]", request.remoteAddress(), action, request);
} else {
logger.info("ANONYMOUS_ACCESS\thost=[{}], action=[{}]", request.remoteAddress(), action);
}
}
@Override
public void authenticationFailed(AuthenticationToken token, String action, TransportRequest request) {
if (logger.isDebugEnabled()) {
logger.info("AUTHENTICATION_FAILED\thost=[{}], action=[{}], principal=[{}], request=[{}]", request.remoteAddress(), action, token.principal(), request);
} else {
logger.info("AUTHENTICATION_FAILED\thost=[{}], action=[{}], principal=[{}]", request.remoteAddress(), action, token.principal());
}
}
@Override
public void accessGranted(User user, String action, TransportRequest request) {
if (logger.isDebugEnabled()) {
logger.info("ACCESS_GRANTED\thost=[{}], action=[{}], principal=[{}], request=[{}]", request.remoteAddress(), action, user.principal(), request);
} else {
logger.info("ACCESS_GRANTED\thost=[{}], action=[{}], principal=[{}]", request.remoteAddress(), action, user.principal());
}
}
@Override
public void accessDenied(User user, String action, TransportRequest request) {
if (logger.isDebugEnabled()) {
logger.info("ACCESS_DENIED\thost=[{}], action=[{}], principal=[{}], request=[{}]", request.remoteAddress(), action, user.principal(), request);
} else {
logger.info("ACCESS_DENIED\thost=[{}], action=[{}], principal=[{}]", request.remoteAddress(), action, user.principal());
}
}
}

@@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc;
/**
*
*/
public class AuthenticationException extends org.elasticsearch.shield.SecurityException {
public AuthenticationException(String msg) {
super(msg);
}
public AuthenticationException(String msg, Throwable cause) {
super(msg, cause);
}
}

@@ -0,0 +1,42 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc;
import com.google.common.collect.ImmutableList;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.Modules;
import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.authc.esusers.ESUsersModule;
import org.elasticsearch.shield.authc.ldap.LdapModule;
/**
*
*/
public class AuthenticationModule extends AbstractModule implements SpawnModules {
private final Settings settings;
public AuthenticationModule(Settings settings) {
this.settings = settings;
}
@Override
public Iterable<? extends Module> spawnModules() {
ImmutableList.Builder<Module> modules = ImmutableList.builder();
modules.add(Modules.createModule(ESUsersModule.class, settings));
if (LdapModule.enabled(settings)) {
modules.add(Modules.createModule(LdapModule.class, settings));
}
return modules.build();
}
@Override
protected void configure() {
bind(AuthenticationService.class).to(InternalAuthenticationService.class);
}
}

@@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc;
import org.elasticsearch.shield.User;
import org.elasticsearch.transport.TransportRequest;
/**
* Responsible for authenticating the Users behind requests
*/
public interface AuthenticationService {
/**
* Authenticates the user associated with the given request.
*
* An {@link AuthenticationToken authentication token} will be extracted from the request, and
* will be authenticated. On successful authentication, the {@link org.elasticsearch.shield.User user} that is associated
* with the request (i.e. that is associated with the token's {@link AuthenticationToken#principal() principal})
* will be returned.
*
* @param request The executed request
* @return The authenticated User
* @throws AuthenticationException If no user could be authenticated (either due to a missing
* supported authentication token, or simply due to bad credentials)
*/
User authenticate(String action, TransportRequest request) throws AuthenticationException;
}

@@ -0,0 +1,16 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc;
/**
*
*/
public interface AuthenticationToken {
String principal();
Object credentials();
}

@@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.audit.AuditTrail;
import org.elasticsearch.shield.authc.esusers.ESUsersRealm;
import org.elasticsearch.shield.authc.ldap.LdapRealm;
import org.elasticsearch.transport.TransportRequest;
/**
* An authentication service that delegates the authentication process to its configured {@link Realm realms}.
*/
public class InternalAuthenticationService extends AbstractComponent implements AuthenticationService {
private final Realm[] realms;
private final AuditTrail auditTrail;
@Inject
public InternalAuthenticationService(Settings settings, ESUsersRealm esUsersRealm, @Nullable LdapRealm ldapRealm, @Nullable AuditTrail auditTrail) {
super(settings);
this.realms = ldapRealm != null ?
new Realm[] { esUsersRealm, ldapRealm } :
new Realm[] { esUsersRealm };
this.auditTrail = auditTrail;
}
/**
* Authenticates the user associated with the given request by delegating the authentication to
* the configured realms. Each realm will be asked to authenticate the request, the first realm that
* successfully authenticates will "win" and its authenticated user will be returned. If none of the
* configured realms successfully authenticates the request, an {@link AuthenticationException} will
* be thrown.
*
* The order in which the realms are consulted is the order in which they were passed to the
* constructor.
*
* @param request The executed request
* @return The authenticated user
* @throws AuthenticationException If none of the configured realms successfully authenticated the
* request
*/
@Override
public User authenticate(String action, TransportRequest request) throws AuthenticationException {
for (int i = 0; i < realms.length; i++) {
AuthenticationToken token = realms[i].token(request);
if (token != null) {
User user = realms[i].authenticate(token);
if (user != null) {
return user;
} else if (auditTrail != null) {
auditTrail.authenticationFailed(token, action, request);
}
}
}
if (auditTrail != null) {
auditTrail.anonymousAccess(action, request);
}
throw new AuthenticationException("Unable to authenticate user for request");
}
}

@@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc;
import org.elasticsearch.shield.User;
import org.elasticsearch.transport.TransportRequest;
/**
* An authentication mechanism to which the default authentication {@link org.elasticsearch.shield.authc.AuthenticationService service}
* delegates the authentication process. Different realms may be defined, each based on a different
* authentication mechanism and supporting its own specific authentication token type.
*/
public interface Realm<T extends AuthenticationToken> {
/**
* @return The type of this realm
*/
String type();
/**
* Attempts to extract an authentication token from the request. If an appropriate token is found
* {@link #authenticate(AuthenticationToken)} will be called for an authentication attempt. If no
* appropriate token is found, {@code null} is returned.
*
* @param request The request
* @return The authentication token this realm can authenticate, {@code null} if no such
* token is found
*/
T token(TransportRequest request);
/**
* Authenticates the given token. A successful authentication will return the User associated
* with the given token. An unsuccessful authentication returns {@code null}.
*
* @param token The authentication token
* @return The authenticated user or {@code null} if authentication failed.
*/
User authenticate(T token);
}
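
To make the contract concrete, here is a minimal sketch of a custom realm (not part of this commit; the package, class name, role name, and hard-coded credential map are invented for illustration). It assumes, as the esusers realm below does, that UsernamePasswordToken.credentials() returns the password as a char[] and that UsernamePasswordToken.extractToken(request, null) pulls the token off the request:

    package org.elasticsearch.shield.authc.example; // hypothetical package

    import org.elasticsearch.shield.User;
    import org.elasticsearch.shield.authc.Realm;
    import org.elasticsearch.shield.authc.support.UsernamePasswordToken;
    import org.elasticsearch.transport.TransportRequest;

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class InMemoryRealm implements Realm<UsernamePasswordToken> {

        // hypothetical fixed credentials, for illustration only
        private final Map<String, char[]> passwords = new HashMap<>();

        public InMemoryRealm() {
            passwords.put("jdoe", "changeme".toCharArray());
        }

        @Override
        public String type() {
            return "in_memory";
        }

        @Override
        public UsernamePasswordToken token(TransportRequest request) {
            // same header extraction the esusers and ldap realms use
            return UsernamePasswordToken.extractToken(request, null);
        }

        @Override
        public User authenticate(UsernamePasswordToken token) {
            char[] expected = passwords.get(token.principal());
            if (expected == null || !Arrays.equals(expected, token.credentials())) {
                return null; // null tells the authentication service this realm failed
            }
            return new User.Simple(token.principal(), "user"); // "user" is a hypothetical role
        }
    }

Returning null from authenticate() lets InternalAuthenticationService fall through to the next realm and, ultimately, record an authentication failure in the audit trail.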

@@ -0,0 +1,25 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.shield.authc.support.UserPasswdStore;
import org.elasticsearch.shield.authc.support.UserRolesStore;
import static org.elasticsearch.common.inject.name.Names.named;
/**
*
*/
public class ESUsersModule extends AbstractModule {
@Override
protected void configure() {
bind(ESUsersRealm.class).asEagerSingleton();
bind(UserPasswdStore.class).annotatedWith(named("file")).to(FileUserPasswdStore.class).asEagerSingleton();
bind(UserRolesStore.class).annotatedWith(named("file")).to(FileUserRolesStore.class).asEagerSingleton();
}
}

@@ -0,0 +1,61 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.name.Named;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.authc.Realm;
import org.elasticsearch.shield.authc.support.UserPasswdStore;
import org.elasticsearch.shield.authc.support.UserRolesStore;
import org.elasticsearch.shield.authc.support.UsernamePasswordToken;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public class ESUsersRealm extends AbstractComponent implements Realm<UsernamePasswordToken> {
public static final String TYPE = "esusers";
final UserPasswdStore userPasswdStore;
final UserRolesStore userRolesStore;
@Inject
public ESUsersRealm(Settings settings, @Named("file") UserPasswdStore userPasswdStore, @Named("file") UserRolesStore userRolesStore) {
super(settings);
this.userPasswdStore = userPasswdStore;
this.userRolesStore = userRolesStore;
}
@Override
public String type() {
return TYPE;
}
@Override
public UsernamePasswordToken token(TransportRequest request) {
return UsernamePasswordToken.extractToken(request, null);
}
@Override
public User authenticate(UsernamePasswordToken token) {
if (userPasswdStore == null) {
return null;
}
if (!userPasswdStore.verifyPassword(token.principal(), token.credentials())) {
return null;
}
String[] roles = Strings.EMPTY_ARRAY;
if (userRolesStore != null) {
roles = userRolesStore.roles(token.principal());
}
return new User.Simple(token.principal(), roles);
}
}

@@ -0,0 +1,160 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.shield.authc.support.Hasher;
import org.elasticsearch.shield.authc.support.UserPasswdStore;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
*
*/
public class FileUserPasswdStore extends AbstractComponent implements UserPasswdStore {
private final Path file;
private final FileWatcher watcher;
final Hasher hasher = Hasher.HTPASSWD;
private volatile ImmutableMap<String, char[]> esUsers;
private final Listener listener;
@Inject
public FileUserPasswdStore(Settings settings, Environment env, ResourceWatcherService watcherService) {
this(settings, env, watcherService, Listener.NOOP);
}
FileUserPasswdStore(Settings settings, Environment env, ResourceWatcherService watcherService, Listener listener) {
super(settings);
file = resolveFile(componentSettings, env);
esUsers = ImmutableMap.copyOf(parseFile(file, logger));
watcher = new FileWatcher(file.getParent().toFile());
watcher.addListener(new FileListener());
watcherService.add(watcher);
this.listener = listener;
}
@Override
public boolean verifyPassword(String username, char[] password) {
if (esUsers == null) {
return false;
}
char[] hash = esUsers.get(username);
if (hash == null) {
return false;
}
return hasher.verify(password, hash);
}
public static Path resolveFile(Settings settings, Environment env) {
String location = settings.get("file.users");
if (location == null) {
return env.configFile().toPath().resolve(".users");
}
return Paths.get(location);
}
public static Map<String, char[]> parseFile(Path path, @Nullable ESLogger logger) {
if (!Files.exists(path)) {
return ImmutableMap.of();
}
List<String> lines = null;
try {
lines = Files.readAllLines(path, Charsets.UTF_8);
} catch (IOException ioe) {
throw new ElasticsearchException("Could not read users file [" + path.toAbsolutePath() + "]", ioe);
}
ImmutableMap.Builder<String, char[]> users = ImmutableMap.builder();
int lineNr = 0;
for (String line : lines) {
lineNr++;
int i = line.indexOf(":");
if (i <= 0 || i == line.length() - 1) {
if (logger != null) {
logger.error("Invalid entry in users file [" + path.toAbsolutePath() + "], line [" + lineNr + "]. Skipping...");
}
continue;
}
String username = line.substring(0, i).trim();
String hash = line.substring(i + 1).trim();
users.put(username, hash.toCharArray());
}
return users.build();
}
public static void writeFile(Map<String, char[]> esUsers, Path path) {
try (PrintWriter writer = new PrintWriter(Files.newBufferedWriter(path, Charsets.UTF_8, StandardOpenOption.CREATE, StandardOpenOption.WRITE))) {
for (Map.Entry<String, char[]> entry : esUsers.entrySet()) {
writer.printf(Locale.ROOT, "%s:%s%n", entry.getKey(), new String(entry.getValue()));
}
} catch (IOException ioe) {
throw new ElasticsearchException("Could not write users file [" + path.toAbsolutePath() + "], please check file permissions", ioe);
}
}
private class FileListener extends FileChangesListener {
@Override
public void onFileCreated(File file) {
if (file.equals(FileUserPasswdStore.this.file.toFile())) {
esUsers = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
listener.onRefresh();
}
}
@Override
public void onFileDeleted(File file) {
if (file.equals(FileUserPasswdStore.this.file.toFile())) {
esUsers = ImmutableMap.of();
listener.onRefresh();
}
}
@Override
public void onFileChanged(File file) {
if (file.equals(FileUserPasswdStore.this.file.toFile())) {
esUsers = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
listener.onRefresh();
}
}
}
public static interface Listener {
static final Listener NOOP = new Listener() {
@Override
public void onRefresh() {
}
};
void onRefresh();
}
}
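
Going by parseFile and writeFile above, the users file holds one username:hash entry per line, the hash being produced by Hasher.HTPASSWD. A purely hypothetical example, hash values elided:

    admin:<htpasswd hash>
    jdoe:<htpasswd hash>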

@@ -0,0 +1,154 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.shield.authc.support.UserRolesStore;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Pattern;
/**
*
*/
public class FileUserRolesStore extends AbstractComponent implements UserRolesStore {
private static final Pattern ROLES_DELIM = Pattern.compile("\\s*,\\s*");
private final Path file;
private volatile ImmutableMap<String, String[]> userRoles;
private final Listener listener;
@Inject
public FileUserRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService) {
this(settings, env, watcherService, Listener.NOOP);
}
FileUserRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Listener listener) {
super(settings);
file = resolveFile(componentSettings, env);
userRoles = ImmutableMap.copyOf(parseFile(file, logger));
FileWatcher watcher = new FileWatcher(file.getParent().toFile());
watcher.addListener(new FileListener());
watcherService.add(watcher);
this.listener = listener;
}
public String[] roles(String username) {
return userRoles != null ? userRoles.get(username) : null;
}
public static Path resolveFile(Settings settings, Environment env) {
String location = settings.get("file.users_roles");
if (location == null) {
return env.configFile().toPath().resolve(".users_roles");
}
return Paths.get(location);
}
public static Map<String, String[]> parseFile(Path path, @Nullable ESLogger logger) {
if (!Files.exists(path)) {
return ImmutableMap.of();
}
List<String> lines = null;
try {
lines = Files.readAllLines(path, Charsets.UTF_8);
} catch (IOException ioe) {
throw new ElasticsearchException("Could not read users file [" + path.toAbsolutePath() + "]", ioe);
}
ImmutableMap.Builder<String, String[]> usersRoles = ImmutableMap.builder();
int lineNr = 0;
for (String line : lines) {
lineNr++;
int i = line.indexOf(":");
if (i <= 0 || i == line.length() - 1) {
if (logger != null) {
logger.error("Invalid entry in users file [" + path.toAbsolutePath() + "], line [" + lineNr + "]. Skipping...");
}
continue;
}
String username = line.substring(0, i).trim();
String rolesStr = line.substring(i + 1).trim();
String[] roles = ROLES_DELIM.split(rolesStr);
usersRoles.put(username, roles);
}
return usersRoles.build();
}
public static void writeFile(Map<String, String[]> userRoles, Path path) {
try (PrintWriter writer = new PrintWriter(Files.newBufferedWriter(path, Charsets.UTF_8, StandardOpenOption.CREATE, StandardOpenOption.WRITE))) {
for (Map.Entry<String, String[]> entry : userRoles.entrySet()) {
writer.printf(Locale.ROOT, "%s:%s%n", entry.getKey(), Strings.arrayToCommaDelimitedString(entry.getValue()));
}
} catch (IOException ioe) {
throw new ElasticsearchException("Could not write users file [" + path.toAbsolutePath() + "], please check file permissions", ioe);
}
}
private class FileListener extends FileChangesListener {
@Override
public void onFileCreated(File file) {
if (file.equals(FileUserRolesStore.this.file.toFile())) {
userRoles = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
listener.onRefresh();
}
}
@Override
public void onFileDeleted(File file) {
if (file.equals(FileUserRolesStore.this.file.toFile())) {
userRoles = ImmutableMap.of();
listener.onRefresh();
}
}
@Override
public void onFileChanged(File file) {
if (file.equals(FileUserRolesStore.this.file.toFile())) {
userRoles = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
listener.onRefresh();
}
}
}
public static interface Listener {
static final Listener NOOP = new Listener() {
@Override
public void onRefresh() {
}
};
void onRefresh();
}
}
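
The users_roles file follows the same line format, mapping each username to a comma-separated list of role names (split on ROLES_DELIM). A hypothetical example:

    admin:admin
    jdoe:marketing,sales

With this content, roles("jdoe") returns {"marketing", "sales"}.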

@@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers.tool;
import org.apache.commons.cli.CommandLine;
import org.elasticsearch.shield.authc.esusers.FileUserPasswdStore;
import org.elasticsearch.shield.authc.esusers.FileUserRolesStore;
import org.elasticsearch.shield.authc.support.Hasher;
import org.elasticsearch.shield.support.CmdLineTool;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
*
*/
public class UserAdd extends CmdLineTool {
public static void main(String[] args) throws Exception {
new UserAdd().execute(args);
}
public UserAdd() {
super("useradd",
option("p", "password", "The user password").hasArg(true).required(false),
option("r", "roles", "Comma-separated list of the roles of the user").hasArg(true).required(true),
option("h", "help", "Prints usage help").hasArg(false).required(false)
);
}
public void run(CommandLine cli) throws Exception {
if (cli.getArgs().length == 0) {
terminal.println("username is missing");
printUsage();
exit(ExitStatus.USAGE);
}
String username = cli.getArgs()[0];
char[] password;
String passwordStr = cli.getOptionValue("password");
if (passwordStr != null) {
password = passwordStr.toCharArray();
} else {
password = terminal.readPassword("Enter new password: ");
char[] retyped = terminal.readPassword("Retype new password: ");
if (!Arrays.equals(password, retyped)) {
terminal.print("Password mismatch");
exit(ExitStatus.USAGE);
}
}
String[] roles = null;
String rolesCsv = cli.getOptionValue("roles");
if (rolesCsv != null) {
roles = rolesCsv.split(",");
}
addUser(username, password, roles);
}
private void addUser(String username, char[] passwd, String[] roles) {
Path file = FileUserPasswdStore.resolveFile(settings, env);
// parseFile returns an immutable (possibly empty) map, so copy it into a mutable one
Map<String, char[]> users = new HashMap<>(FileUserPasswdStore.parseFile(file, null));
if (users.containsKey(username)) {
terminal.println("User [{}] already exists", username);
exit(ExitStatus.CODE_ERROR);
}
Hasher hasher = Hasher.HTPASSWD;
users.put(username, hasher.hash(passwd));
FileUserPasswdStore.writeFile(users, file);
file = FileUserRolesStore.resolveFile(settings, env);
// parseFile returns an immutable (possibly empty) map, so copy it into a mutable one
Map<String, String[]> userRoles = new HashMap<>(FileUserRolesStore.parseFile(file, null));
userRoles.put(username, roles);
FileUserRolesStore.writeFile(userRoles, file);
}
}
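
A hypothetical invocation (how the tool is launched is not part of this commit; only the positional username argument and the -p/-r options come from the constructor and run() above):

    useradd jdoe -p changeme -r admin,marketing

This hashes the password with Hasher.HTPASSWD into the users file and writes jdoe's roles into the users_roles file.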

@@ -0,0 +1,25 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.ldap;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
/**
*
*/
public class LdapModule extends AbstractModule {
public static boolean enabled(Settings settings) {
Settings ldapSettings = settings.getComponentSettings(LdapModule.class);
return ldapSettings != null;
}
@Override
protected void configure() {
}
}

@@ -0,0 +1,42 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.ldap;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.authc.Realm;
import org.elasticsearch.shield.authc.support.UsernamePasswordToken;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public class LdapRealm extends AbstractComponent implements Realm<UsernamePasswordToken> {
private static final String TYPE = "ldap";
@Inject
public LdapRealm(Settings settings) {
super(settings);
}
@Override
public String type() {
return TYPE;
}
@Override
public UsernamePasswordToken token(TransportRequest request) {
return UsernamePasswordToken.extractToken(request, null);
}
@Override
public User authenticate(UsernamePasswordToken token) {
return null;
}
}

@@ -0,0 +1,754 @@
package org.elasticsearch.shield.authc.support;
// Copyright (c) 2006 Damien Miller <djm@mindrot.org>
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
/**
* BCrypt implements OpenBSD-style Blowfish password hashing using
* the scheme described in "A Future-Adaptable Password Scheme" by
* Niels Provos and David Mazieres.
* <p>
* This password hashing system tries to thwart off-line password
* cracking using a computationally-intensive hashing algorithm,
* based on Bruce Schneier's Blowfish cipher. The work factor of
* the algorithm is parameterised, so it can be increased as
* computers get faster.
* <p>
* Usage is really simple. To hash a password for the first time,
* call the hashpw method with a random salt, like this:
* <p>
* <code>
* String pw_hash = BCrypt.hashpw(plain_password, BCrypt.gensalt()); <br />
* </code>
* <p>
* To check whether a plaintext password matches one that has been
* hashed previously, use the checkpw method:
* <p>
* <code>
* if (BCrypt.checkpw(candidate_password, stored_hash))<br />
* &nbsp;&nbsp;&nbsp;&nbsp;System.out.println("It matches");<br />
* else<br />
* &nbsp;&nbsp;&nbsp;&nbsp;System.out.println("It does not match");<br />
* </code>
* <p>
* The gensalt() method takes an optional parameter (log_rounds)
* that determines the computational complexity of the hashing:
* <p>
* <code>
* String strong_salt = BCrypt.gensalt(10)<br />
* String stronger_salt = BCrypt.gensalt(12)<br />
* </code>
* <p>
* The amount of work increases exponentially (2**log_rounds), so
* each increment is twice as much work. The default log_rounds is
* 10, and the valid range is 4 to 31.
*
* @author Damien Miller
* @version 0.2
*/
public class BCrypt {
// BCrypt parameters
private static final int GENSALT_DEFAULT_LOG2_ROUNDS = 10;
private static final int BCRYPT_SALT_LEN = 16;
// Blowfish parameters
private static final int BLOWFISH_NUM_ROUNDS = 16;
// Initial contents of key schedule
private static final int P_orig[] = {
0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
0x9216d5d9, 0x8979fb1b
};
private static final int S_orig[] = {
0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7,
0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99,
0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16,
0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e,
0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee,
0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013,
0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef,
0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e,
0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60,
0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440,
0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce,
0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a,
0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e,
0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677,
0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193,
0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032,
0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88,
0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239,
0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e,
0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0,
0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3,
0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98,
0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88,
0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe,
0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6,
0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d,
0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b,
0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7,
0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba,
0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463,
0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f,
0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09,
0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3,
0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb,
0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279,
0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8,
0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab,
0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82,
0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db,
0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573,
0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0,
0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b,
0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790,
0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8,
0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4,
0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0,
0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7,
0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c,
0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad,
0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1,
0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299,
0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9,
0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477,
0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf,
0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49,
0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af,
0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa,
0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5,
0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41,
0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915,
0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400,
0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915,
0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664,
0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a,
0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623,
0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266,
0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1,
0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e,
0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6,
0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1,
0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e,
0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1,
0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737,
0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8,
0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff,
0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd,
0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701,
0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7,
0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41,
0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331,
0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf,
0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af,
0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e,
0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87,
0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c,
0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2,
0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16,
0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd,
0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b,
0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509,
0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e,
0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3,
0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f,
0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a,
0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4,
0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960,
0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66,
0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28,
0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802,
0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84,
0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510,
0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf,
0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14,
0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e,
0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50,
0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7,
0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8,
0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281,
0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99,
0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696,
0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128,
0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73,
0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0,
0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0,
0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105,
0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250,
0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3,
0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285,
0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00,
0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061,
0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb,
0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e,
0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735,
0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc,
0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9,
0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340,
0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20,
0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7,
0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934,
0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068,
0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af,
0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840,
0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45,
0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504,
0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a,
0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb,
0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee,
0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6,
0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42,
0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b,
0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2,
0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb,
0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527,
0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b,
0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33,
0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c,
0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3,
0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc,
0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17,
0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564,
0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b,
0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115,
0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922,
0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728,
0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0,
0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e,
0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37,
0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d,
0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804,
0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b,
0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3,
0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb,
0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d,
0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c,
0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350,
0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9,
0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a,
0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe,
0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d,
0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc,
0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f,
0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61,
0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2,
0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9,
0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2,
0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c,
0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e,
0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633,
0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10,
0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169,
0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52,
0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027,
0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5,
0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62,
0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634,
0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76,
0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24,
0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc,
0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4,
0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c,
0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837,
0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0,
0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b,
0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe,
0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b,
0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4,
0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8,
0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6,
0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304,
0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22,
0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4,
0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6,
0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9,
0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59,
0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593,
0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51,
0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28,
0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c,
0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b,
0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28,
0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c,
0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd,
0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a,
0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319,
0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb,
0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f,
0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991,
0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32,
0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680,
0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166,
0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae,
0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb,
0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5,
0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47,
0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370,
0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d,
0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84,
0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048,
0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8,
0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd,
0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9,
0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7,
0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38,
0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f,
0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c,
0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525,
0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1,
0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442,
0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964,
0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e,
0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8,
0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d,
0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f,
0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299,
0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02,
0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc,
0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614,
0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a,
0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6,
0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b,
0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0,
0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060,
0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e,
0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9,
0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f,
0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6
};
// bcrypt IV: "OrpheanBeholderScryDoubt"
static private final int bf_crypt_ciphertext[] = {
0x4f727068, 0x65616e42, 0x65686f6c,
0x64657253, 0x63727944, 0x6f756274
};
// Table for Base64 encoding
static private final char base64_code[] = {
'.', '/', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J',
'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V',
'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5',
'6', '7', '8', '9'
};
// Table for Base64 decoding
static private final byte index_64[] = {
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, 0, 1, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63, -1, -1,
-1, -1, -1, -1, -1, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
-1, -1, -1, -1, -1, -1, 28, 29, 30,
31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
51, 52, 53, -1, -1, -1, -1, -1
};
// Expanded Blowfish key
private int P[];
private int S[];
/**
* Encode a byte array using bcrypt's slightly-modified base64
* encoding scheme. Note that this is *not* compatible with
* the standard MIME-base64 encoding.
*
* @param d the byte array to encode
* @param len the number of bytes to encode
* @return base64-encoded string
* @exception IllegalArgumentException if the length is invalid
*/
private static String encode_base64(byte d[], int len)
throws IllegalArgumentException {
int off = 0;
StringBuffer rs = new StringBuffer();
int c1, c2;
if (len <= 0 || len > d.length)
throw new IllegalArgumentException ("Invalid len");
while (off < len) {
c1 = d[off++] & 0xff;
rs.append(base64_code[(c1 >> 2) & 0x3f]);
c1 = (c1 & 0x03) << 4;
if (off >= len) {
rs.append(base64_code[c1 & 0x3f]);
break;
}
c2 = d[off++] & 0xff;
c1 |= (c2 >> 4) & 0x0f;
rs.append(base64_code[c1 & 0x3f]);
c1 = (c2 & 0x0f) << 2;
if (off >= len) {
rs.append(base64_code[c1 & 0x3f]);
break;
}
c2 = d[off++] & 0xff;
c1 |= (c2 >> 6) & 0x03;
rs.append(base64_code[c1 & 0x3f]);
rs.append(base64_code[c2 & 0x3f]);
}
return rs.toString();
}
/**
* Look up the 6 bits base64-encoded by the specified character,
* range-checking against the conversion table
* @param x the base64-encoded value
* @return the decoded value of x
*/
private static byte char64(char x) {
if ((int)x < 0 || (int)x >= index_64.length)
return -1;
return index_64[(int)x];
}
/**
* Decode a string encoded using bcrypt's base64 scheme to a
* byte array. Note that this is *not* compatible with
* the standard MIME-base64 encoding.
* @param s the string to decode
* @param maxolen the maximum number of bytes to decode
* @return an array containing the decoded bytes
* @throws IllegalArgumentException if maxolen is invalid
*/
private static byte[] decode_base64(String s, int maxolen)
throws IllegalArgumentException {
StringBuffer rs = new StringBuffer();
int off = 0, slen = s.length(), olen = 0;
byte ret[];
byte c1, c2, c3, c4, o;
if (maxolen <= 0)
throw new IllegalArgumentException ("Invalid maxolen");
while (off < slen - 1 && olen < maxolen) {
c1 = char64(s.charAt(off++));
c2 = char64(s.charAt(off++));
if (c1 == -1 || c2 == -1)
break;
o = (byte)(c1 << 2);
o |= (c2 & 0x30) >> 4;
rs.append((char)o);
if (++olen >= maxolen || off >= slen)
break;
c3 = char64(s.charAt(off++));
if (c3 == -1)
break;
o = (byte)((c2 & 0x0f) << 4);
o |= (c3 & 0x3c) >> 2;
rs.append((char)o);
if (++olen >= maxolen || off >= slen)
break;
c4 = char64(s.charAt(off++));
o = (byte)((c3 & 0x03) << 6);
o |= c4;
rs.append((char)o);
++olen;
}
ret = new byte[olen];
for (off = 0; off < olen; off++)
ret[off] = (byte)rs.charAt(off);
return ret;
}
/**
* Blowfish encipher a single 64-bit block encoded as
* two 32-bit halves
* @param lr an array containing the two 32-bit half blocks
* @param off the position in the array of the blocks
*/
private final void encipher(int lr[], int off) {
int i, n, l = lr[off], r = lr[off + 1];
l ^= P[0];
for (i = 0; i <= BLOWFISH_NUM_ROUNDS - 2;) {
// Feistel substitution on left word
n = S[(l >> 24) & 0xff];
n += S[0x100 | ((l >> 16) & 0xff)];
n ^= S[0x200 | ((l >> 8) & 0xff)];
n += S[0x300 | (l & 0xff)];
r ^= n ^ P[++i];
// Feistel substitution on right word
n = S[(r >> 24) & 0xff];
n += S[0x100 | ((r >> 16) & 0xff)];
n ^= S[0x200 | ((r >> 8) & 0xff)];
n += S[0x300 | (r & 0xff)];
l ^= n ^ P[++i];
}
lr[off] = r ^ P[BLOWFISH_NUM_ROUNDS + 1];
lr[off + 1] = l;
}
/**
* Cyclically extract a word of key material
* @param data the string to extract the data from
* @param offp a "pointer" (as a one-entry array) to the
* current offset into data
* @return the next word of material from data
*/
private static int streamtoword(byte data[], int offp[]) {
int i;
int word = 0;
int off = offp[0];
for (i = 0; i < 4; i++) {
word = (word << 8) | (data[off] & 0xff);
off = (off + 1) % data.length;
}
offp[0] = off;
return word;
}
/**
* Initialise the Blowfish key schedule
*/
private void init_key() {
P = (int[])P_orig.clone();
S = (int[])S_orig.clone();
}
/**
* Key the Blowfish cipher
* @param key an array containing the key
*/
private void key(byte key[]) {
int i;
int koffp[] = { 0 };
int lr[] = { 0, 0 };
int plen = P.length, slen = S.length;
for (i = 0; i < plen; i++)
P[i] = P[i] ^ streamtoword(key, koffp);
for (i = 0; i < plen; i += 2) {
encipher(lr, 0);
P[i] = lr[0];
P[i + 1] = lr[1];
}
for (i = 0; i < slen; i += 2) {
encipher(lr, 0);
S[i] = lr[0];
S[i + 1] = lr[1];
}
}
/**
* Perform the "enhanced key schedule" step described by
* Provos and Mazieres in "A Future-Adaptable Password Scheme"
* http://www.openbsd.org/papers/bcrypt-paper.ps
* @param data salt information
* @param key password information
*/
private void ekskey(byte data[], byte key[]) {
int i;
int koffp[] = { 0 }, doffp[] = { 0 };
int lr[] = { 0, 0 };
int plen = P.length, slen = S.length;
for (i = 0; i < plen; i++)
P[i] = P[i] ^ streamtoword(key, koffp);
for (i = 0; i < plen; i += 2) {
lr[0] ^= streamtoword(data, doffp);
lr[1] ^= streamtoword(data, doffp);
encipher(lr, 0);
P[i] = lr[0];
P[i + 1] = lr[1];
}
for (i = 0; i < slen; i += 2) {
lr[0] ^= streamtoword(data, doffp);
lr[1] ^= streamtoword(data, doffp);
encipher(lr, 0);
S[i] = lr[0];
S[i + 1] = lr[1];
}
}
/**
* Perform the central password hashing step in the
* bcrypt scheme
* @param password the password to hash
* @param salt the binary salt to hash with the password
* @param log_rounds the binary logarithm of the number
* of rounds of hashing to apply
* @return an array containing the binary hashed password
*/
private byte[] crypt_raw(byte password[], byte salt[], int log_rounds) {
int rounds, i, j;
int cdata[] = (int[])bf_crypt_ciphertext.clone();
int clen = cdata.length;
byte ret[];
if (log_rounds < 4 || log_rounds > 31)
throw new IllegalArgumentException ("Bad number of rounds");
rounds = 1 << log_rounds;
if (salt.length != BCRYPT_SALT_LEN)
throw new IllegalArgumentException ("Bad salt length");
init_key();
ekskey(salt, password);
for (i = 0; i < rounds; i++) {
key(password);
key(salt);
}
for (i = 0; i < 64; i++) {
for (j = 0; j < (clen >> 1); j++)
encipher(cdata, j << 1);
}
ret = new byte[clen * 4];
for (i = 0, j = 0; i < clen; i++) {
ret[j++] = (byte)((cdata[i] >> 24) & 0xff);
ret[j++] = (byte)((cdata[i] >> 16) & 0xff);
ret[j++] = (byte)((cdata[i] >> 8) & 0xff);
ret[j++] = (byte)(cdata[i] & 0xff);
}
return ret;
}
/**
* Hash a password using the OpenBSD bcrypt scheme
* @param password the password to hash
* @param salt the salt to hash with (perhaps generated
* using BCrypt.gensalt)
* @return the hashed password
*/
public static String hashpw(String password, String salt) {
BCrypt B;
String real_salt;
byte passwordb[], saltb[], hashed[];
char minor = (char)0;
int rounds, off = 0;
StringBuffer rs = new StringBuffer();
if (salt.charAt(0) != '$' || salt.charAt(1) != '2')
throw new IllegalArgumentException ("Invalid salt version");
if (salt.charAt(2) == '$')
off = 3;
else {
minor = salt.charAt(2);
if (minor != 'a' || salt.charAt(3) != '$')
throw new IllegalArgumentException ("Invalid salt revision");
off = 4;
}
// Extract number of rounds
if (salt.charAt(off + 2) > '$')
throw new IllegalArgumentException ("Missing salt rounds");
rounds = Integer.parseInt(salt.substring(off, off + 2));
real_salt = salt.substring(off + 3, off + 25);
try {
passwordb = (password + (minor >= 'a' ? "\000" : "")).getBytes("UTF-8");
} catch (UnsupportedEncodingException uee) {
throw new AssertionError("UTF-8 is not supported");
}
saltb = decode_base64(real_salt, BCRYPT_SALT_LEN);
B = new BCrypt();
hashed = B.crypt_raw(passwordb, saltb, rounds);
rs.append("$2");
if (minor >= 'a')
rs.append(minor);
rs.append("$");
if (rounds < 10)
rs.append("0");
rs.append(Integer.toString(rounds));
rs.append("$");
rs.append(encode_base64(saltb, saltb.length));
rs.append(encode_base64(hashed,
bf_crypt_ciphertext.length * 4 - 1));
return rs.toString();
}
/**
* Generate a salt for use with the BCrypt.hashpw() method
* @param log_rounds the log2 of the number of rounds of
* hashing to apply - the work factor therefore increases as
* 2**log_rounds.
* @param random an instance of SecureRandom to use
* @return an encoded salt value
*/
public static String gensalt(int log_rounds, SecureRandom random) {
StringBuffer rs = new StringBuffer();
byte rnd[] = new byte[BCRYPT_SALT_LEN];
random.nextBytes(rnd);
rs.append("$2a$");
if (log_rounds < 10)
rs.append("0");
rs.append(Integer.toString(log_rounds));
rs.append("$");
rs.append(encode_base64(rnd, rnd.length));
return rs.toString();
}
/**
* Generate a salt for use with the BCrypt.hashpw() method
* @param log_rounds the log2 of the number of rounds of
* hashing to apply - the work factor therefore increases as
* 2**log_rounds.
* @return an encoded salt value
*/
public static String gensalt(int log_rounds) {
return gensalt(log_rounds, new SecureRandom());
}
/**
* Generate a salt for use with the BCrypt.hashpw() method,
* selecting a reasonable default for the number of hashing
* rounds to apply
* @return an encoded salt value
*/
public static String gensalt() {
return gensalt(GENSALT_DEFAULT_LOG2_ROUNDS);
}
/**
* Check that a plaintext password matches a previously hashed
* one
* @param plaintext the plaintext password to verify
* @param hashed the previously-hashed password
* @return true if the passwords match, false otherwise
*/
public static boolean checkpw(String plaintext, String hashed) {
return (hashed.compareTo(hashpw(plaintext, hashed)) == 0);
}
}
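For reference, a minimal usage sketch of the public BCrypt API above; the password literal and work factor are illustrative only.

// generate a random salt with the default work factor, hash, then verify
String hash = BCrypt.hashpw("changeme", BCrypt.gensalt());      // e.g. "$2a$10$..."
boolean matches = BCrypt.checkpw("changeme", hash);             // true
boolean rejected = BCrypt.checkpw("wrong-password", hash);      // false

// a higher log_rounds value doubles the work per increment (2**log_rounds rounds)
String slowerHash = BCrypt.hashpw("changeme", BCrypt.gensalt(12));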

View File

@ -0,0 +1,119 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.shield.authc.AuthenticationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
* A base class for username/password stores that cache users and their password hashes in memory. The cache
* has an expiration time (defaults to 1hr, but it's configurable and can also be disabled by setting the cache
* ttl to 0).
*/
public abstract class CachingUserPasswdStore extends AbstractComponent implements UserPasswdStore {
private static final TimeValue DEFAULT_TTL = TimeValue.timeValueHours(1);
private final LoadingCache<String, PasswordHash> cache;
protected CachingUserPasswdStore(Settings settings) {
super(settings);
TimeValue ttl = componentSettings.getAsTime("cache.ttl", DEFAULT_TTL);
if (ttl.millis() > 0) {
cache = CacheBuilder.newBuilder()
.expireAfterWrite(ttl.getMillis(), TimeUnit.MILLISECONDS)
.build(new CacheLoader<String, PasswordHash>() {
@Override
public PasswordHash load(String username) throws Exception {
PasswordHash hash = passwordHash(username);
if (hash == null) {
throw new AuthenticationException("Authentication failed");
}
return hash;
}
});
} else {
cache = null;
}
}
protected final void expire(String username) {
if (cache != null) {
cache.invalidate(username);
}
}
protected final void expireAll() {
if (cache != null) {
cache.invalidateAll();
}
}
@Override
public final boolean verifyPassword(final String username, final char[] password) {
if (cache == null) {
return doVerifyPassword(username, password);
}
try {
PasswordHash hash = cache.get(username);
return hash.verify(password);
} catch (ExecutionException ee) {
return false;
}
}
/**
* Verifies the given credentials: first the given username is looked up, and only if it exists is the
* given password verified against the stored hash. This method is used when caching is disabled.
*/
protected abstract boolean doVerifyPassword(String username, char[] password);
protected abstract PasswordHash passwordHash(String username);
public static abstract class Writable extends CachingUserPasswdStore implements UserPasswdStore.Writable {
protected Writable(Settings settings) {
super(settings);
}
@Override
public final void store(String username, char[] key) {
doStore(username, key);
expire(username);
}
@Override
public void remove(String username) {
doRemove(username);
expire(username);
}
protected abstract void doStore(String username, char[] password);
protected abstract void doRemove(String username);
}
/**
* Represents a hash of a password.
*/
protected static interface PasswordHash {
boolean verify(char[] password);
}
}
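A hypothetical concrete subclass, sketched only to show how the caching hooks are meant to be implemented; the InMemoryUserPasswdStore name and its backing map are not part of this commit.

import org.elasticsearch.common.settings.Settings;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class InMemoryUserPasswdStore extends CachingUserPasswdStore {

    private final Map<String, char[]> hashes = new ConcurrentHashMap<>();

    public InMemoryUserPasswdStore(Settings settings) {
        super(settings);
    }

    @Override
    protected boolean doVerifyPassword(String username, char[] password) {
        // only called when the cache is disabled (cache.ttl set to 0)
        PasswordHash hash = passwordHash(username);
        return hash != null && hash.verify(password);
    }

    @Override
    protected PasswordHash passwordHash(String username) {
        final char[] hash = hashes.get(username);
        if (hash == null) {
            return null; // translated into an AuthenticationException by the cache loader
        }
        return new PasswordHash() {
            @Override
            public boolean verify(char[] password) {
                return Hasher.HTPASSWD.verify(password, hash);
            }
        };
    }
}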

View File

@ -0,0 +1,113 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.shield.authc.AuthenticationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
* A base class for user roles stores that cache the roles per username in memory. The cache
* has an expiration time (defaults to 1hr, but it's configurable and can also be disabled by setting the cache
* ttl to 0).
*/
public abstract class CachingUserRolesStore extends AbstractComponent implements UserRolesStore {
private static final TimeValue DEFAULT_TTL = TimeValue.timeValueHours(1);
private final LoadingCache<String, String[]> cache;
protected CachingUserRolesStore(Settings settings) {
super(settings);
TimeValue ttl = componentSettings.getAsTime("cache.ttl", DEFAULT_TTL);
if (ttl.millis() > 0) {
cache = CacheBuilder.newBuilder()
.expireAfterWrite(ttl.getMillis(), TimeUnit.MILLISECONDS)
.build(new CacheLoader<String, String[]>() {
@Override
public String[] load(String username) throws Exception {
return doLoadRoles(username);
}
});
} else {
cache = null;
}
}
protected final void expire(String username) {
if (cache != null) {
cache.invalidate(username);
}
}
protected final void expireAll() {
if (cache != null) {
cache.invalidateAll();
}
}
@Override
public String[] roles(final String username) {
if (cache == null) {
return doLoadRoles(username);
}
try {
return cache.get(username);
} catch (ExecutionException ee) {
throw new AuthenticationException("Could not load user roles", ee);
}
}
protected abstract String[] doLoadRoles(String username);
public static abstract class Writable extends CachingUserRolesStore implements UserRolesStore.Writable {
protected Writable(Settings settings) {
super(settings);
}
@Override
public void setRoles(String username, String... roles) {
doSetRoles(username, roles);
expire(username);
}
@Override
public void addRoles(String username, String... roles) {
doAddRoles(username, roles);
expire(username);
}
@Override
public void removeRoles(String username, String... roles) {
doRemoveRoles(username, roles);
expire(username);
}
@Override
public void removeUser(String username) {
doRemoveUser(username);
expire(username);
}
public abstract void doSetRoles(String username, String... roles);
public abstract void doAddRoles(String username, String... roles);
public abstract void doRemoveRoles(String username, String... roles);
public abstract void doRemoveUser(String username);
}
}
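The roles store follows the same pattern; a hypothetical subclass only needs to provide doLoadRoles (the class name and role names below are made up).

public class StaticUserRolesStore extends CachingUserRolesStore {

    public StaticUserRolesStore(Settings settings) {
        super(settings);
    }

    @Override
    protected String[] doLoadRoles(String username) {
        // loaded once per user, then served from the cache until the ttl expires
        return "admin".equals(username) ? new String[] { "admin" } : new String[] { "user" };
    }
}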

View File

@ -0,0 +1,103 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
import com.google.common.base.Charsets;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.Crypt;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.codec.digest.Md5Crypt;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.os.OsUtils;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.util.Arrays;
import java.util.Locale;
/**
*
*/
public enum Hasher {
/**
* A hasher that is compatible with apache htpasswd. Hashes by default using bcrypt type $2a$
* but can verify any of the hashes supported by htpasswd.
*/
HTPASSWD() {
@Override
public char[] hash(char[] text) {
String salt = org.elasticsearch.shield.authc.support.BCrypt.gensalt();
return BCrypt.hashpw(new String(text), salt).toCharArray();
}
@Override
public boolean verify(char[] text, char[] hash) {
String hashStr = new String(hash);
if (hashStr.startsWith(BCRYPT_PREFIX_Y)) {
hashStr = BCRYPT_PREFIX + hashStr.substring(BCRYPT_PREFIX_Y.length());
}
if (hashStr.startsWith(BCRYPT_PREFIX)) {
return BCrypt.checkpw(new String(text), hashStr);
}
if (hashStr.startsWith(PLAIN_PREFIX)) {
hashStr = hashStr.substring(PLAIN_PREFIX.length());
return hashStr.compareTo(new String(text)) == 0;
}
byte[] textBytes = toBytes(text);
if (hashStr.startsWith(APR1_PREFIX)) {
return hashStr.compareTo(Md5Crypt.apr1Crypt(textBytes, hashStr)) == 0;
}
if (hashStr.startsWith(SHA1_PREFIX)) {
String passwd64 = Base64.encodeBase64String(DigestUtils.sha1(textBytes));
return hashStr.substring(SHA1_PREFIX.length()).compareTo(passwd64) == 0;
}
return CRYPT_SUPPORTED ?
hashStr.compareTo(Crypt.crypt(textBytes, hashStr)) == 0: // crypt algo
hashStr.compareTo(new String(text)) == 0; // plain text
}
};
private static final String APR1_PREFIX = "$apr1$";
private static final String BCRYPT_PREFIX = "$2a$";
private static final String BCRYPT_PREFIX_Y = "$2y$";
private static final String SHA1_PREFIX = "{SHA}";
private static final String PLAIN_PREFIX = "{plain}";
private static final boolean CRYPT_SUPPORTED = !OsUtils.WINDOWS;
public static Hasher resolve(String name, Hasher defaultHasher) {
if (name == null) {
return defaultHasher;
}
switch (name.toLowerCase(Locale.ROOT)) {
case "htpasswd" : return HTPASSWD;
default:
return defaultHasher;
}
}
public static Hasher resolve(String name) {
Hasher hasher = resolve(name, null);
if (hasher == null) {
throw new ElasticsearchIllegalArgumentException("Unknown hash function [" + name + "]");
}
return hasher;
}
public abstract char[] hash(char[] data);
public abstract boolean verify(char[] data, char[] hash);
private static byte[] toBytes(char[] chars) {
CharBuffer charBuffer = CharBuffer.wrap(chars);
ByteBuffer byteBuffer = Charsets.UTF_8.encode(charBuffer);
byte[] bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit());
Arrays.fill(byteBuffer.array(), (byte) 0); // clear sensitive data
return bytes;
}
}
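A small sketch of how the HTPASSWD hasher is expected to be used; the passwords and hash values are illustrative only.

// hashing always produces a bcrypt ($2a$) hash
char[] hash = Hasher.HTPASSWD.hash("changeme".toCharArray());
boolean ok = Hasher.HTPASSWD.verify("changeme".toCharArray(), hash);                                  // true

// but verify() also understands the other htpasswd formats, e.g. a plain text entry
boolean plainOk = Hasher.HTPASSWD.verify("changeme".toCharArray(), "{plain}changeme".toCharArray());  // true

// named lookup, e.g. from a settings value
Hasher hasher = Hasher.resolve("htpasswd");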

View File

@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
/**
*
*/
public interface UserPasswdStore {
boolean verifyPassword(String username, char[] password);
public static interface Writable extends UserPasswdStore {
void store(String username, char[] password);
void remove(String username);
}
}

View File

@ -0,0 +1,25 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
/**
*
*/
public interface UserRolesStore {
String[] roles(String username);
public static interface Writable extends UserRolesStore {
void setRoles(String username, String... roles);
void addRoles(String username, String... roles);
void removeRoles(String username, String... roles);
void removeUser(String username);
}
}

View File

@ -0,0 +1,76 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
import com.google.common.base.Charsets;
import org.apache.commons.codec.binary.Base64;
import org.elasticsearch.shield.authc.AuthenticationException;
import org.elasticsearch.shield.authc.AuthenticationToken;
import org.elasticsearch.transport.TransportRequest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
*/
public class UsernamePasswordToken implements AuthenticationToken {
private static final String TOKEN_HEADER = "X-ES-UsernamePasswordToken";
static final String BASIC_AUTH_HEADER = "Authorization";
private static final Pattern BASIC_AUTH_PATTERN = Pattern.compile("Basic\\s(.+)");
private final String username;
private final char[] password;
public UsernamePasswordToken(String username, char[] password) {
this.username = username;
this.password = password;
}
@Override
public String principal() {
return username;
}
@Override
public char[] credentials() {
return password;
}
public static UsernamePasswordToken extractToken(TransportRequest request, UsernamePasswordToken defaultToken) {
UsernamePasswordToken token = (UsernamePasswordToken) request.getHeaders().get(TOKEN_HEADER);
if (token != null) {
return token;
}
String authStr = (String) request.getHeaders().get(BASIC_AUTH_HEADER);
if (authStr == null) {
if (defaultToken == null) {
return null;
}
request.putHeader(TOKEN_HEADER, defaultToken);
return defaultToken;
}
Matcher matcher = BASIC_AUTH_PATTERN.matcher(authStr.trim());
if (!matcher.matches()) {
throw new AuthenticationException("Invalid basic authentication header value");
}
String userpasswd = new String(Base64.decodeBase64(matcher.group(1)), Charsets.UTF_8);
int i = userpasswd.indexOf(':');
token = new UsernamePasswordToken(userpasswd.substring(0, i), userpasswd.substring(i+1).toCharArray());
request.putHeader(TOKEN_HEADER, token);
return token;
}
public static void putTokenHeader(TransportRequest request, UsernamePasswordToken token) {
String basicToken = token.username + ":" + new String(token.password);
basicToken = new String(Base64.encodeBase64(basicToken.getBytes(Charsets.UTF_8)), Charsets.UTF_8);
request.putHeader("Authorization", "Basic " + basicToken);
}
}
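A sketch of the intended round trip between the sending and the receiving side of a transport request; the credentials are illustrative and the request construction is simplified.

TransportRequest request = new TransportRequest() {};

// sender: attaches "Authorization: Basic base64(username:password)"
UsernamePasswordToken.putTokenHeader(request, new UsernamePasswordToken("admin", "changeme".toCharArray()));

// receiver: parses the basic auth header (and caches the parsed token on the request)
UsernamePasswordToken token = UsernamePasswordToken.extractToken(request, null);
String username = token.principal();     // "admin"
char[] password = token.credentials();   // "changeme"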

View File

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
/**
*
*/
public class AuthorizationException extends org.elasticsearch.shield.SecurityException {
public AuthorizationException(String msg) {
super(msg);
}
public AuthorizationException(String msg, Throwable cause) {
super(msg, cause);
}
}

View File

@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
import org.elasticsearch.common.inject.AbstractModule;
/**
*
*/
public class AuthorizationModule extends AbstractModule {
@Override
protected void configure() {
bind(RolesStore.class).to(FileRolesStore.class);
bind(AuthorizationService.class).to(InternalAuthorizationService.class);
}
}

View File

@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
import org.elasticsearch.shield.User;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public interface AuthorizationService {
void authorize(User user, String action, TransportRequest request) throws AuthorizationException;
}

View File

@ -0,0 +1,201 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
*
*/
public class FileRolesStore extends AbstractComponent implements RolesStore {
private static final Pattern COMMA_DELIM = Pattern.compile("\\s*,\\s*");
private final Path file;
private final FileWatcher watcher;
private volatile ImmutableMap<String, Permission> roles;
@Inject
public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService) {
super(settings);
file = resolveFile(componentSettings, env);
roles = ImmutableMap.copyOf(parseFile(file, logger));
watcher = new FileWatcher(file.getParent().toFile());
watcher.addListener(new FileListener());
watcherService.add(watcher);
}
@Override
public Permission permission(String... roles) {
Permission.Compound.Builder builder = Permission.compound();
for (int i = 0; i < roles.length; i++) {
Permission permissions = this.roles.get(roles[i]);
if (permissions != null) {
builder.add(permissions);
}
}
return builder.build();
}
public static Path resolveFile(Settings settings, Environment env) {
String location = settings.get("file.roles");
if (location == null) {
return env.configFile().toPath().resolve(".roles.yml");
}
return Paths.get(location);
}
public static Map<String, Permission> parseFile(Path path, @Nullable ESLogger logger) {
if (!Files.exists(path)) {
return ImmutableMap.of();
}
ImmutableMap.Builder<String, Permission> roles = ImmutableMap.builder();
try {
byte[] content = Streams.copyToByteArray(path.toFile());
XContentParser parser = XContentFactory.xContent(content).createParser(content);
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
String roleName = currentFieldName;
Permission.Compound.Builder builder = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if ("cluster".equals(currentFieldName)) {
String[] privs;
if (token == XContentParser.Token.VALUE_STRING) {
privs = COMMA_DELIM.split(parser.text().trim());
} else if (token == XContentParser.Token.START_ARRAY) {
List<String> list = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
if (list == null) {
list = new ArrayList<>();
}
list.add(parser.text());
}
}
privs = list != null ? list.toArray(new String[list.size()]) : Strings.EMPTY_ARRAY;
} else {
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
"]. [cluster] field value can either be a string or a list of strings, but [" + token + "] was found instead");
}
if (builder == null) {
builder = Permission.compound();
}
Privilege.Cluster cluster = Privilege.Cluster.resolve(privs);
builder.add(Permission.cluster(cluster));
} else if ("indices".equals(currentFieldName)) {
if (token != XContentParser.Token.START_ARRAY) {
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
"]. [indices] field value must be an array of indices-privileges mappings, but [" + token + "] was found instead");
}
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
Permission.Index index = parseIndicesPermission(parser.text());
if (index != null) {
if (builder == null) {
builder = Permission.compound();
}
builder.add(index);
}
} else {
throw new ElasticsearchException("Invalid roles file format [" + path.toAbsolutePath() +
"]. [indices] field value must be an array of indices-privileges mappings defined as a string" +
" in the form <comma-separated list of index name patterns>::<comma-separated list of privileges> , but [" + token + "] was found instead");
}
}
}
}
assert roleName != null;
if (builder != null) {
roles.put(roleName, builder.build());
}
}
}
return roles.build();
} catch (IOException ioe) {
throw new ElasticsearchException("Failed to read roles file [" + path.toAbsolutePath() + "]", ioe);
}
}
private static Permission.Index parseIndicesPermission(String spec) {
int i = spec.indexOf("::");
if (i == 0) {
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]. Missing indices name patterns list");
}
if (i < 0) {
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]");
}
if (i == spec.length() - 2) {
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]. Missing privileges list");
}
if (spec.indexOf("::", i+2) >= 0) {
throw new ElasticsearchException("Malformed index privileges entry [" + spec + "]. There should only be a single \"::\" separator");
}
String[] indices = COMMA_DELIM.split(spec.substring(0, i));
String[] privs = COMMA_DELIM.split(spec.substring(i+2));
Privilege.Index index = Privilege.Index.resolve(privs);
return Permission.index(index, indices);
}
private class FileListener extends FileChangesListener {
@Override
public void onFileCreated(File file) {
if (file.equals(FileRolesStore.this.file.toFile())) {
roles = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
}
}
@Override
public void onFileDeleted(File file) {
if (file.equals(FileRolesStore.this.file.toFile())) {
roles = ImmutableMap.of();
}
}
@Override
public void onFileChanged(File file) {
if (file.equals(FileRolesStore.this.file.toFile())) {
roles = ImmutableMap.copyOf(parseFile(file.toPath(), logger));
}
}
}
}
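For reference, a hedged sketch of how the store is meant to be exercised: each indices entry in the roles file takes the form &lt;comma-separated index name patterns&gt;::&lt;comma-separated privilege names&gt;, which parseIndicesPermission splits on the double colon. The path and role name below are placeholders.

// e.g. a roles file entry of "logs-.*,metrics-.*::READ,WRITE" grants READ and WRITE
// on every index whose name matches one of the two patterns
Map<String, Permission> parsed = FileRolesStore.parseFile(Paths.get("config/.roles.yml"), null);
Permission admin = parsed.get("admin");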

View File

@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
/**
*
*/
public interface IndicesRelatedRequest {
String[] relatedIndices();
}

View File

@ -0,0 +1,48 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.audit.AuditTrail;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public class InternalAuthorizationService extends AbstractComponent implements AuthorizationService {
private final ClusterService clusterService;
private final RolesStore rolesStore;
private final @Nullable AuditTrail auditTrail;
@Inject
public InternalAuthorizationService(Settings settings, RolesStore rolesStore, ClusterService clusterService, @Nullable AuditTrail auditTrail) {
super(settings);
this.rolesStore = rolesStore;
this.clusterService = clusterService;
this.auditTrail = auditTrail;
}
@Override
public void authorize(User user, String action, TransportRequest request) throws AuthorizationException {
Permission permission = rolesStore.permission(user.roles());
MetaData metaData = clusterService.state().metaData();
if (permission.check(action, request, metaData)) {
if (auditTrail != null) {
auditTrail.accessGranted(user, action, request);
}
return;
}
if (auditTrail != null) {
auditTrail.accessDenied(user, action, request);
}
throw new AuthorizationException("Action [" + action + "] is unauthorized");
}
}

View File

@ -0,0 +1,128 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
import com.google.common.base.Predicate;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.shield.support.AutomatonPredicate;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public abstract class Permission {
public abstract boolean check(String action, TransportRequest request, MetaData metaData);
public static Cluster cluster(Privilege.Cluster clusterPrivilege) {
return new Cluster(clusterPrivilege.predicate());
}
public static Index index(Privilege.Index indexPrivilege, String... indexNamePatterns) {
assert indexNamePatterns.length != 0 : "Index permissions must at least be defined on a single index";
Automaton indices = new RegExp(indexNamePatterns[0]).toAutomaton();
for (int i = 1; i < indexNamePatterns.length; i++) {
indices = indices.union(new RegExp(indexNamePatterns[i]).toAutomaton());
}
return new Index(new AutomatonPredicate(indices), indexPrivilege.predicate());
}
public static Compound.Builder compound() {
return new Compound.Builder();
}
public static class Index extends Permission {
private final Predicate<String> indicesMatcher;
private final Predicate<String> actionMatcher;
private Index(Predicate<String> indicesMatcher, Predicate<String> actionMatcher) {
this.indicesMatcher = indicesMatcher;
this.actionMatcher = actionMatcher;
}
@Override
public boolean check(String action, TransportRequest request, MetaData metaData) {
if (!actionMatcher.apply(action)) {
return false;
}
assert request instanceof IndicesRelatedRequest :
"the only requests passing the action matcher should be IndicesRelatedRequests";
// if for some reason the request is not an indices-related request, reject just to be safe
if (!(request instanceof IndicesRelatedRequest)) {
return false;
}
IndicesRelatedRequest req = (IndicesRelatedRequest) request;
for (String index : req.relatedIndices()) {
if (!indicesMatcher.apply(index)) {
return false;
}
}
return true;
}
}
public static class Cluster extends Permission {
private final Predicate<String> actionMatcher;
private Cluster(Predicate<String> actionMatcher) {
this.actionMatcher = actionMatcher;
}
@Override
public boolean check(String action, TransportRequest request, MetaData metaData) {
return actionMatcher.apply(action);
}
}
public static class Compound extends Permission {
private final Permission[] permissions;
private Compound(Permission... permissions) {
this.permissions = permissions;
}
@Override
public boolean check(String action, TransportRequest request, MetaData metaData) {
for (int i = 0; i < permissions.length; i++) {
if (permissions[i].check(action, request, metaData)) {
return true;
}
}
return false;
}
public static class Builder {
private Permission[] permissions = null;
private Builder() {}
public void add(Permission... permissions) {
if (this.permissions == null) {
this.permissions = permissions;
return;
}
Permission[] extended = new Permission[this.permissions.length + permissions.length];
System.arraycopy(this.permissions, 0, extended, 0, this.permissions.length);
System.arraycopy(permissions, 0, extended, this.permissions.length, permissions.length);
this.permissions = extended;
}
public Compound build() {
return new Compound(permissions);
}
}
}
}
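A sketch of how the permission types compose; the index pattern is illustrative, and the request/metaData arguments are elided since they come from the calling service.

// read-only access to indices matching "logs-.*", plus cluster monitoring
Permission.Compound.Builder builder = Permission.compound();
builder.add(Permission.index(Privilege.Index.READ, "logs-.*"));
builder.add(Permission.cluster(Privilege.Cluster.MONITOR));
Permission permission = builder.build();

// the authorization service then calls, for example:
//   permission.check("indices:data/read/search", request, metaData)  -> true for matching indices
//   permission.check("cluster:monitor/health",   request, metaData)  -> true
//   permission.check("indices:data/write/index", request, metaData)  -> false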

View File

@ -0,0 +1,192 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
import com.google.common.base.Predicate;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.BasicAutomata;
import org.apache.lucene.util.automaton.MinimizationOperations;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.shield.support.AutomatonPredicate;
/**
*
*/
public interface Privilege {
Privilege SYSTEM = new AutomatonPrivilege("internal:.*");
Predicate<String> predicate();
Privilege plus(Privilege other);
Privilege minus(Privilege other);
boolean implies(Privilege other);
public static enum Index implements Privilege {
NONE(AutomatonPrivilege.NONE),
ALL("indices:.*"),
MANAGE("indices:monitor/.*", "indices:admin/.*"),
MONITOR("indices:monitor/.*"),
FULL_DATA_ACCESS("indices:data/.*"),
CRUD("indices:data/write/.*", "indices:data/read/.*"),
READ("indices:data/read/.*"),
CREATE("indices:data/write/create"), //todo unsupported yet
INDEX("indices:data/write/index.*", "indices:data/write/update"),
DELETE("indices:data/write/delete.*"),
WRITE("indices:data/write/.*"),
BENCHMARK("indices:data/benchmark");
private AutomatonPrivilege privilege;
private Index(String... patterns) {
this(new AutomatonPrivilege(patterns));
}
private Index(AutomatonPrivilege privilege) {
this.privilege = privilege;
}
@Override
public Predicate<String> predicate() {
return privilege.predicate();
}
@Override
public Privilege plus(Privilege other) {
return privilege.plus(other);
}
@Override
public Privilege minus(Privilege other) {
return privilege.minus(other);
}
@Override
public boolean implies(Privilege other) {
return privilege.implies(other);
}
public static Index resolve(String... names) {
Index result = null;
for (int i = 0; i < names.length; i++) {
if (result == null) {
result = Index.valueOf(names[i]);
} else {
// note: plus() returns a new Privilege which is discarded here, so only the first resolved name currently takes effect
result.plus(Index.valueOf(names[i]));
}
}
return result;
}
}
public static enum Cluster implements Privilege {
NONE(AutomatonPrivilege.NONE),
ALL("cluster:.*"),
MANAGE("cluster:.*"),
MONITOR("cluster:monitor/.*");
private AutomatonPrivilege privilege;
private Cluster(String... patterns) {
this(new AutomatonPrivilege(patterns));
}
private Cluster(AutomatonPrivilege privilege) {
this.privilege = privilege;
}
@Override
public Predicate<String> predicate() {
return privilege.predicate();
}
@Override
public Privilege plus(Privilege other) {
return privilege.plus(other);
}
@Override
public Privilege minus(Privilege other) {
return privilege.minus(other);
}
@Override
public boolean implies(Privilege other) {
return privilege.implies(other);
}
public static Cluster resolve(String... names) {
Cluster result = null;
for (int i = 0; i < names.length; i++) {
if (result == null) {
result = Cluster.valueOf(names[i]);
} else {
// note: plus() returns a new Privilege which is discarded here, so only the first resolved name currently takes effect
result.plus(Cluster.valueOf(names[i]));
}
}
return result;
}
}
static class AutomatonPrivilege implements Privilege {
private static final AutomatonPrivilege NONE = new AutomatonPrivilege(BasicAutomata.makeEmpty());
private final Automaton automaton;
private AutomatonPrivilege(String... patterns) {
this.automaton = compileAutomaton(patterns);
}
private AutomatonPrivilege(Automaton automaton) {
this.automaton = automaton;
}
@Override
public Predicate<String> predicate() {
return new AutomatonPredicate(automaton);
}
private static Automaton compileAutomaton(String... patterns) {
Automaton a = null;
for (int i = 0; i < patterns.length; i++) {
if (a == null) {
a = new RegExp(patterns[i], RegExp.ALL).toAutomaton();
} else {
a = a.union(new RegExp(patterns[i], RegExp.ALL).toAutomaton());
}
}
MinimizationOperations.minimize(a);
return a;
}
@Override
public Privilege plus(Privilege other) {
return new AutomatonPrivilege(automaton.union(((AutomatonPrivilege) other).automaton));
}
@Override
public Privilege minus(Privilege other) {
if (!implies(other)) {
return this;
}
return new AutomatonPrivilege(automaton.minus(((AutomatonPrivilege) other).automaton));
}
@Override
public boolean implies(Privilege other) {
return ((AutomatonPrivilege) other).automaton.subsetOf(automaton);
}
}
}
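A small sketch of how the compiled automatons translate to action-name matching; the action strings follow the indices:/cluster: patterns used above.

Predicate<String> read = Privilege.Index.READ.predicate();
boolean canSearch = read.apply("indices:data/read/search");   // true
boolean canWrite = read.apply("indices:data/write/index");    // false

// names coming from the roles file are resolved by enum name
Privilege.Index index = Privilege.Index.resolve("READ");
Privilege.Cluster cluster = Privilege.Cluster.resolve("MONITOR");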

View File

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
/**
*
*/
public interface RolesStore {
Permission permission(String... roles);
public static interface Writable extends RolesStore {
void set(String role, Privilege.Index[] privileges, String[] indices);
void grant(String role, Privilege.Index[] privileges, String[] indices);
void grant(String role, Privilege.Cluster[] privileges);
void revoke(String role, Privilege.Index[] privileges, String[] indices);
void revoke(String role, Privilege.Cluster[] privileges);
}
}

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authz;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.transport.TransportRequest;
/**
*
*/
public class SystemRole extends Permission {
public static final String NAME = "__es_system_role";
@Override
public boolean check(String action, TransportRequest request, MetaData metaData) {
return true;
}
}

View File

@ -0,0 +1,177 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.n2n;
import com.google.common.base.Charsets;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.jboss.netty.handler.ipfilter.IpFilterRule;
import org.jboss.netty.handler.ipfilter.IpSubnetFilterRule;
import org.jboss.netty.handler.ipfilter.PatternRule;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.Principal;
import java.util.ArrayList;
import java.util.List;
/**
* A file based IP filtering node-to-node authenticator. IP filtering rules can be configured in
* a monitored file (auto loads when changed). Each line in the file represents a filtering rule.
* A rule is composed of an inclusion/exclusion sign and a matching rule:
*
*
* <ul>
* <li><code>-PATTERN</code>: means any remote address that matches PATTERN will be denied (auth will fail)</li>
* <li><code>+PATTERN</code>: means any remote address that matches PATTERN will be allowed (auth will succeed)</li>
* </ul>
*
* The following patterns are supported:
*
* <ul>
* <li><code>i:IP</code> - where IP is a specific node IP (regexp supported)</li>
* <li><code>c:MASK</code> - where MASK is a CIDR mask to match nodes IPs</li>
* <li><code>n:HOST</code> - where HOST is the hostname of the node (regexp supported)</li>
* </ul>
*
* Examples:
*
* <ul>
* <li><code>-i:192.168.100.2</code>: deny access from the node with the specified IP</li>
* <li><code>+c:2001:db8::/48</code>: allow access from nodes with IPs that match the specified mask</li>
* <li><code>-n:es-staging-.*</code>: deny access from any node in the staging env (matched on the hostname regexp)</li>
* </ul>
*
*/
public class IPFilteringN2NAuthentricator extends AbstractComponent implements N2NAuthenticator {
private static final String DEFAULT_FILE = ".ip_filtering";
private static final IpFilterRule[] NO_RULES = new IpFilterRule[0];
private final Path file;
private final FileWatcher watcher;
private volatile IpFilterRule[] rules;
@Inject
public IPFilteringN2NAuthentricator(Settings settings, Environment env, ResourceWatcherService watcherService) {
super(settings);
file = resolveFile(componentSettings, env);
rules = parseFile(file, logger);
watcher = new FileWatcher(file.getParent().toFile());
watcher.addListener(new FileListener());
watcherService.add(watcher);
}
public static Path resolveFile(Settings settings, Environment env) {
String location = settings.get("file" + DEFAULT_FILE);
if (location == null) {
return env.configFile().toPath().resolve(DEFAULT_FILE);
}
return Paths.get(location);
}
public static IpFilterRule[] parseFile(Path path, @Nullable ESLogger logger) {
if (!Files.exists(path)) {
return NO_RULES;
}
List<String> lines = null;
try {
lines = Files.readAllLines(path, Charsets.UTF_8);
} catch (IOException ioe) {
throw new ElasticsearchException("Failed to read hosts file [" + path.toAbsolutePath() + "]", ioe);
}
List<IpFilterRule> rules = new ArrayList<>(lines.size());
for (String line : lines) {
IpFilterRule rule = parseRule(path, line, logger);
if (rule != null) { // parseRule returns null for empty lines and invalid rules
rules.add(rule);
}
}
if (rules.size() == 0) {
return NO_RULES;
}
return rules.toArray(new IpFilterRule[rules.size()]);
}
private static IpFilterRule parseRule(Path path, String rule, @Nullable ESLogger logger) {
if (rule == null || rule.length() == 0) {
return null;
}
if (!(rule.startsWith("+") || rule.startsWith("-"))) {
return null;
}
boolean allow = rule.startsWith("+");
if (rule.charAt(1) == 'n' || rule.charAt(1) == 'i') {
return new PatternRule(allow, rule.substring(1));
}
if (rule.charAt(1) == 'c') {
try {
return new IpSubnetFilterRule(allow, rule.substring(3));
} catch (UnknownHostException e) {
if (logger != null && logger.isErrorEnabled()) {
logger.error("Skipping invalid ip filtering rule [" + rule + "] in hosts_allow file [" + path.toAbsolutePath() + "]", e);
}
return null;
}
}
if (logger != null && logger.isErrorEnabled()) {
logger.error("Skipping invalid ip filtering rule [" + rule + "] in hosts_allow file [" + path.toAbsolutePath() + "]. ':' can only appear once");
}
return null;
}
@Override
public boolean authenticate(@Nullable Principal peerPrincipal, InetAddress peerAddress, int peerPort) {
for (int i = 0; i < rules.length; i++) {
if (rules[i].contains(peerAddress)) {
return true;
}
}
return false;
}
private class FileListener extends FileChangesListener {
@Override
public void onFileCreated(File file) {
if (file.equals(IPFilteringN2NAuthentricator.this.file.toFile())) {
rules = parseFile(file.toPath(), logger);
}
}
@Override
public void onFileDeleted(File file) {
if (file.equals(IPFilteringN2NAuthentricator.this.file.toFile())) {
rules = NO_RULES;
}
}
@Override
public void onFileChanged(File file) {
if (file.equals(IPFilteringN2NAuthentricator.this.file.toFile())) {
rules = parseFile(file.toPath(), logger);
}
}
}
}

View File

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.n2n;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
/**
*
*/
public class N2NAuthModule extends AbstractModule {
private final Settings settings;
public N2NAuthModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
}
}

View File

@ -0,0 +1,46 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.n2n;
import org.elasticsearch.common.Nullable;
import java.net.InetAddress;
import java.security.Principal;
/**
*
*/
public interface N2NAuthenticator {
N2NAuthenticator NO_AUTH = new N2NAuthenticator() {
@Override
public boolean authenticate(@Nullable Principal peerPrincipal, InetAddress peerAddress, int peerPort) {
return true;
}
};
boolean authenticate(@Nullable Principal peerPrincipal, InetAddress peerAddress, int peerPort);
class Compound implements N2NAuthenticator {
private N2NAuthenticator[] authenticators;
public Compound(N2NAuthenticator... authenticators) {
this.authenticators = authenticators;
}
@Override
public boolean authenticate(@Nullable Principal peerPrincipal, InetAddress peerAddress, int peerPort) {
for (int i = 0; i < authenticators.length; i++) {
if (!authenticators[i].authenticate(peerPrincipal, peerAddress, peerPort)) {
return false;
}
}
return true;
}
}
}
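A sketch of composing authenticators; the compound denies as soon as any delegate denies. The ipFilter, peerAddress and peerPort variables are placeholders for whatever the transport layer provides.

N2NAuthenticator authenticator = new N2NAuthenticator.Compound(ipFilter, N2NAuthenticator.NO_AUTH);
boolean allowed = authenticator.authenticate(null, peerAddress, peerPort);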

View File

@ -3,18 +3,17 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.plugin;
package org.elasticsearch.shield.n2n;
import org.elasticsearch.common.inject.AbstractModule;
/**
*
*/
public class SecurityModule extends AbstractModule {
public class N2NModule extends AbstractModule {
@Override
protected void configure() {
bind(IPFilteringN2NAuthentricator.class).asEagerSingleton();
}
}

View File

@ -8,6 +8,7 @@ package org.elasticsearch.shield.plugin;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.shield.SecurityModule;
import java.util.Collection;

View File

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.support;
import com.google.common.base.Predicate;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
/**
*
*/
public class AutomatonPredicate implements Predicate<String> {
private final CharacterRunAutomaton automaton;
public AutomatonPredicate(Automaton automaton) {
this.automaton = new CharacterRunAutomaton(automaton);
}
public AutomatonPredicate(CharacterRunAutomaton automaton) {
this.automaton = automaton;
}
@Override
public boolean apply(String input) {
return automaton.run(input);
}
}

View File

@ -0,0 +1,207 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.support;
import org.apache.commons.cli.*;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import java.io.*;
import java.util.Locale;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
/**
*
*/
public abstract class CmdLineTool {
protected enum ExitStatus {
OK(0),
USAGE(64),
IO_ERROR(74),
CODE_ERROR(70);
private final int status;
private ExitStatus(int status) {
this.status = status;
}
}
protected static final Terminal terminal = ConsoleTerminal.supported() ? new ConsoleTerminal() : new SystemTerminal();
protected static final Environment env;
protected static final Settings settings;
static {
Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(EMPTY_SETTINGS, true);
settings = tuple.v1();
env = tuple.v2();
}
private final Options options;
private final String cmd;
private final HelpFormatter helpFormatter = new HelpFormatter();
protected CmdLineTool(String cmd, OptionBuilder... options) {
this.cmd = cmd;
this.options = new Options();
for (int i = 0; i < options.length; i++) {
this.options.addOption(options[i].option);
}
}
protected final void execute(String[] args) {
CommandLineParser parser = new GnuParser();
try {
CommandLine cli = parser.parse(options, args);
run(cli);
} catch (ParseException pe) {
printUsage();
} catch (Exception e) {
terminal.println("Error: %s", e.getMessage());
printUsage();
}
}
protected void printUsage() {
helpFormatter.printUsage(terminal.printWriter(), HelpFormatter.DEFAULT_WIDTH, cmd, options);
}
protected void exit(ExitStatus status) {
System.exit(status.status);
}
protected abstract void run(CommandLine cli) throws Exception;
protected static OptionBuilder option(String shortName, String longName, String description) {
return new OptionBuilder(shortName, longName, description);
}
protected static class OptionBuilder {
private final Option option;
private OptionBuilder(String shortName, String longName, String description) {
option = new Option(shortName, description);
option.setLongOpt(longName);
}
public OptionBuilder required(boolean required) {
option.setRequired(required);
return this;
}
public OptionBuilder hasArg(boolean hasArg) {
if (hasArg) {
option.setArgs(1);
}
return this;
}
}
protected static abstract class Terminal {
public abstract void print(String msg, Object... args);
public void println(String msg, Object... args) {
print(msg + System.lineSeparator(), args);
}
public abstract void print(Throwable t);
public void newLine() {
println("");
}
public abstract String readString(String msg, Object... args);
public abstract char[] readPassword(String msg, Object... args);
public abstract PrintWriter printWriter();
}
private static class ConsoleTerminal extends Terminal {
final Console console = System.console();
static boolean supported() {
return System.console() != null;
}
@Override
public void print(String msg, Object... args) {
console.printf(msg, args);
console.flush();
}
@Override
public void print(Throwable t) {
t.printStackTrace(console.writer());
console.flush();
}
@Override
public String readString(String msg, Object... args) {
return console.readLine(msg, args);
}
@Override
public char[] readPassword(String msg, Object... args) {
return console.readPassword(msg, args);
}
@Override
public PrintWriter printWriter() {
return console.writer();
}
}
private static class SystemTerminal extends Terminal {
private final PrintWriter printWriter = new PrintWriter(System.out);
@Override
public void print(String msg, Object... args) {
System.out.print(String.format(Locale.ROOT, msg, args));
}
@Override
public void print(Throwable t) {
t.printStackTrace(System.err);
}
@Override
public String readString(String msg, Object... args) {
print(msg, args);
BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
try {
return reader.readLine();
} catch (IOException ioe) {
System.err.println("Could not read input");
ioe.printStackTrace();
System.exit(1);
}
return null;
}
@Override
public char[] readPassword(String msg, Object... args) {
return readString(msg, args).toCharArray();
}
@Override
public PrintWriter printWriter() {
return printWriter;
}
}
}
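A hypothetical tool built on this base class, shown only to illustrate the option builder and the execute/run flow; the tool name and option are made up.

import org.apache.commons.cli.CommandLine;

public class GreetTool extends CmdLineTool {

    public GreetTool() {
        super("greet", option("u", "username", "the user to greet").hasArg(true).required(true));
    }

    @Override
    protected void run(CommandLine cli) throws Exception {
        String username = cli.getOptionValue("username");
        terminal.println("hello %s", username);
        exit(ExitStatus.OK);
    }

    public static void main(String[] args) {
        new GreetTool().execute(args);
    }
}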

View File

@ -0,0 +1,13 @@
admin:
cluster: ALL
indices:
- *::ALL
power_user:
cluster: MONITOR
indices:
- *::MONITOR,DATA_ACCESS
user:
indices:
- *::READ,INDEX,MANAGE

View File

@ -0,0 +1,3 @@
admin:{plain}changeme
poweruser:{plain}changeme
user:{plain}changeme

View File

@ -0,0 +1,3 @@
admin:admin
poweruser:poweruser
user:user

View File

@ -0,0 +1,12 @@
logger:
shield.audit.logfile: INFO, audit_file
appender:
audit_file:
type: dailyRollingFile
file: ${path.logs}/${cluster.name}.log
datePattern: "'.'yyyy-MM-dd"
layout:
type: pattern
conversionPattern: "[%d{ISO8601}] %m%n"

View File

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Guice;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.Before;
import org.junit.Test;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class ESUsersModuleTests extends ElasticsearchTestCase {
private Path users;
private Path usersRoles;
@Before
public void init() throws Exception {
users = Paths.get(getClass().getResource("users").toURI());
usersRoles = Paths.get(getClass().getResource("users_roles").toURI());
}
@Test
public void test() throws Exception {
Injector injector = Guice.createInjector(new TestModule(users, usersRoles), new ESUsersModule());
ESUsersRealm realm = injector.getInstance(ESUsersRealm.class);
assertThat(realm, notNullValue());
assertThat(realm.userPasswdStore, notNullValue());
assertThat(realm.userPasswdStore, instanceOf(FileUserPasswdStore.class));
assertThat(realm.userRolesStore, notNullValue());
assertThat(realm.userRolesStore, instanceOf(FileUserRolesStore.class));
}
public static class TestModule extends AbstractModule {
final Path users;
final Path usersRoles;
public TestModule(Path users, Path usersRoles) {
this.users = users;
this.usersRoles = usersRoles;
}
@Override
protected void configure() {
Settings settings = ImmutableSettings.builder()
.put("shield.authc.esusers.file.users", users.toAbsolutePath())
.put("shield.authc.esusers.file.users_roles", usersRoles.toAbsolutePath())
.build();
Environment env = new Environment(settings);
bind(Settings.class).toInstance(settings);
bind(Environment.class).toInstance(env);
bind(ThreadPool.class).toInstance(new ThreadPool("test"));
bind(ResourceWatcherService.class).asEagerSingleton();
}
}
}
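The test only asserts what ends up injected, so the module internals are not shown here. One plausible shape of ESUsersModule implied by those assertions (an assumption, not the commit's actual code) would simply bind the file-backed stores and the realm:

    // Hypothetical sketch: wire the file-backed stores so ESUsersRealm receives them.
    public class ESUsersModuleSketch extends AbstractModule {
        @Override
        protected void configure() {
            bind(UserPasswdStore.class).to(FileUserPasswdStore.class).asEagerSingleton();
            bind(UserRolesStore.class).to(FileUserRolesStore.class).asEagerSingleton();
            bind(ESUsersRealm.class).asEagerSingleton();
        }
    }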

View File

@ -0,0 +1,100 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.shield.User;
import org.elasticsearch.shield.authc.support.UserPasswdStore;
import org.elasticsearch.shield.authc.support.UserRolesStore;
import org.elasticsearch.shield.authc.support.UsernamePasswordToken;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.transport.TransportRequest;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.arrayContaining;
/**
*
*/
public class ESUsersRealmTests extends ElasticsearchTestCase {
@Test
public void testAuthenticate() throws Exception {
Settings settings = ImmutableSettings.builder().build();
MockUserPasswdStore userPasswdStore = new MockUserPasswdStore("user1", "test123");
MockUserRolesStore userRolesStore = new MockUserRolesStore("user1", "role1", "role2");
ESUsersRealm realm = new ESUsersRealm(settings, userPasswdStore, userRolesStore);
User user = realm.authenticate(new UsernamePasswordToken("user1", "test123".toCharArray()));
assertTrue(userPasswdStore.called);
assertTrue(userRolesStore.called);
assertThat(user, notNullValue());
assertThat(user.principal(), equalTo("user1"));
assertThat(user.roles(), notNullValue());
assertThat(user.roles().length, equalTo(2));
assertThat(user.roles(), arrayContaining("role1", "role2"));
}
@Test
public void testToken() throws Exception {
Settings settings = ImmutableSettings.builder().build();
MockUserPasswdStore userPasswdStore = new MockUserPasswdStore("user1", "test123");
MockUserRolesStore userRolesStore = new MockUserRolesStore("user1", "role1", "role2");
ESUsersRealm realm = new ESUsersRealm(settings, userPasswdStore, userRolesStore);
TransportRequest request = new TransportRequest() {};
UsernamePasswordToken.putTokenHeader(request, new UsernamePasswordToken("user1", "test123".toCharArray()));
UsernamePasswordToken token = realm.token(request);
assertThat(token, notNullValue());
assertThat(token.principal(), equalTo("user1"));
assertThat(token.credentials(), notNullValue());
assertThat(new String(token.credentials()), equalTo("test123"));
}
private static class MockUserPasswdStore implements UserPasswdStore {
final String username;
final String password;
boolean called = false;
private MockUserPasswdStore(String username, String password) {
this.username = username;
this.password = password;
}
@Override
public boolean verifyPassword(String username, char[] password) {
called = true;
assertThat(username, equalTo(this.username));
assertThat(new String(password), equalTo(this.password));
return true;
}
}
private static class MockUserRolesStore implements UserRolesStore {
final String username;
final String[] roles;
boolean called = false;
private MockUserRolesStore(String username, String... roles) {
this.username = username;
this.roles = roles;
}
@Override
public String[] roles(String username) {
called = true;
assertThat(username, equalTo(this.username));
return roles;
}
}
}

View File

@ -0,0 +1,104 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import com.google.common.base.Charsets;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.shield.authc.support.Hasher;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.Test;
import java.io.BufferedWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class FileUserPasswdStoreTests extends ElasticsearchTestCase {
@Test
public void testParseFile() throws Exception {
Path path = Paths.get(getClass().getResource("users").toURI());
Map<String, char[]> users = FileUserPasswdStore.parseFile(path, null);
assertThat(users, notNullValue());
assertThat(users.size(), is(6));
assertThat(users.get("bcrypt"), notNullValue());
assertThat(new String(users.get("bcrypt")), equalTo("$2y$05$zxnP0vdREMxnEpkLCDI2OuSaSk/QEKA2.A42iOpI6U2u.RLLOWm1e"));
assertThat(users.get("bcrypt10"), notNullValue());
assertThat(new String(users.get("bcrypt10")), equalTo("$2y$10$FMhmFjwU5.qxQ/BsEciS9OqcJVkFMgXMo4uH5CelOR1j4N9zIv67e"));
assertThat(users.get("md5"), notNullValue());
assertThat(new String(users.get("md5")), equalTo("$apr1$R3DdqiAZ$aljIkaIVPSarmDMlJUBBP."));
assertThat(users.get("crypt"), notNullValue());
assertThat(new String(users.get("crypt")), equalTo("hsP1PYSLsEEvs"));
assertThat(users.get("plain"), notNullValue());
assertThat(new String(users.get("plain")), equalTo("{plain}test123"));
assertThat(users.get("sha"), notNullValue());
assertThat(new String(users.get("sha")), equalTo("{SHA}cojt0Pw//L6ToM8G41aOKFIWh7w="));
}
@Test
public void testAutoReload() throws Exception {
ThreadPool threadPool = null;
ResourceWatcherService watcherService = null;
try {
Path users = Paths.get(getClass().getResource("users").toURI());
Path tmp = Files.createTempFile(null, null);
Files.copy(users, Files.newOutputStream(tmp));
Settings settings = ImmutableSettings.builder()
.put("watcher.interval", "2s")
.put("shield.authc.esusers.file.users", tmp.toAbsolutePath())
.build();
Environment env = new Environment(settings);
threadPool = new ThreadPool("test");
watcherService = new ResourceWatcherService(settings, threadPool);
final CountDownLatch latch = new CountDownLatch(1);
FileUserPasswdStore store = new FileUserPasswdStore(settings, env, watcherService, new FileUserPasswdStore.Listener() {
@Override
public void onRefresh() {
latch.countDown();
}
});
assertTrue(store.verifyPassword("bcrypt", "test123".toCharArray()));
watcherService.start();
try (BufferedWriter writer = Files.newBufferedWriter(tmp, Charsets.UTF_8, StandardOpenOption.APPEND)) {
writer.newLine();
writer.append("foobar:" + new String(Hasher.HTPASSWD.hash("barfoo".toCharArray())));
}
if (!latch.await(5, TimeUnit.SECONDS)) {
fail("Waited too long for the updated file to be picked up");
}
assertTrue(store.verifyPassword("foobar", "barfoo".toCharArray()));
} finally {
if (watcherService != null) {
watcherService.stop();
}
if (threadPool != null) {
threadPool.shutdownNow();
}
}
}
}
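Taken together with the users fixture further below, the parse test pins down the on-disk format: one username and hash per line, separated by the first ':' with surrounding whitespace ignored. A minimal line parser written under that assumption (hypothetical, not the FileUserPasswdStore implementation):

    // parses lines such as "bcrypt: $2y$05$..." or "sha:{SHA}cojt0Pw..." into user -> hash
    static Map<String, char[]> parseUserLines(List<String> lines) {
        Map<String, char[]> users = new HashMap<>();
        for (String line : lines) {
            int i = line.indexOf(':');
            if (i <= 0) {
                continue; // skip empty or malformed lines
            }
            users.put(line.substring(0, i).trim(), line.substring(i + 1).trim().toCharArray());
        }
        return users;
    }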

View File

@ -0,0 +1,107 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.esusers;
import com.google.common.base.Charsets;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.Test;
import java.io.BufferedWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class FileUserRolesStoreTests extends ElasticsearchTestCase {
@Test
public void testParseFile() throws Exception {
Path path = Paths.get(getClass().getResource("users_roles").toURI());
Map<String, String[]> usersRoles = FileUserRolesStore.parseFile(path, null);
assertThat(usersRoles, notNullValue());
assertThat(usersRoles.size(), is(3));
assertThat(usersRoles.get("user1"), notNullValue());
assertThat(usersRoles.get("user1").length, is(3));
assertThat(usersRoles.get("user1"), arrayContaining("role1", "role2", "role3"));
assertThat(usersRoles.get("user2"), notNullValue());
assertThat(usersRoles.get("user2").length, is(2));
assertThat(usersRoles.get("user2"), arrayContaining("role2", "role3"));
assertThat(usersRoles.get("user3"), notNullValue());
assertThat(usersRoles.get("user3").length, is(1));
assertThat(usersRoles.get("user3"), arrayContaining("role3"));
}
@Test
public void testAutoReload() throws Exception {
ThreadPool threadPool = null;
ResourceWatcherService watcherService = null;
try {
Path users = Paths.get(getClass().getResource("users_roles").toURI());
Path tmp = Files.createTempFile(null, null);
Files.copy(users, Files.newOutputStream(tmp));
Settings settings = ImmutableSettings.builder()
.put("watcher.interval", "2s")
.put("shield.authc.esusers.file.users_roles", tmp.toAbsolutePath())
.build();
Environment env = new Environment(settings);
threadPool = new ThreadPool("test");
watcherService = new ResourceWatcherService(settings, threadPool);
final CountDownLatch latch = new CountDownLatch(1);
FileUserRolesStore store = new FileUserRolesStore(settings, env, watcherService, new FileUserRolesStore.Listener() {
@Override
public void onRefresh() {
latch.countDown();
}
});
String[] roles = store.roles("user1");
assertThat(roles, notNullValue());
assertThat(roles.length, is(3));
assertThat(roles, arrayContaining("role1", "role2", "role3"));
assertThat(store.roles("user4"), nullValue());
watcherService.start();
try (BufferedWriter writer = Files.newBufferedWriter(tmp, Charsets.UTF_8, StandardOpenOption.APPEND)) {
writer.newLine();
writer.append("user4:role4,role5");
}
if (!latch.await(5, TimeUnit.SECONDS)) {
fail("Waited too long for the updated file to be picked up");
}
roles = store.roles("user4");
assertThat(roles, notNullValue());
assertThat(roles.length, is(2));
assertThat(roles, arrayContaining("role4", "role5"));
} finally {
if (watcherService != null) {
watcherService.stop();
}
if (threadPool != null) {
threadPool.shutdownNow();
}
}
}
}
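The users_roles fixture at the end of this commit deliberately mixes "user1:role1,role2,role3" with the padded "user2 : role2 , role3", and the assertions above expect both to parse identically, so the format is whitespace-tolerant. A minimal sketch of such a line parser (hypothetical, not the FileUserRolesStore implementation):

    // parses "user2 : role2 , role3" into user2 -> [role2, role3]
    static Map<String, String[]> parseRoleLines(List<String> lines) {
        Map<String, String[]> usersRoles = new HashMap<>();
        for (String line : lines) {
            int i = line.indexOf(':');
            if (i <= 0) {
                continue; // skip empty or malformed lines
            }
            String[] roles = line.substring(i + 1).trim().split("\\s*,\\s*");
            usersRoles.put(line.substring(0, i).trim(), roles);
        }
        return usersRoles;
    }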

View File

@ -0,0 +1,35 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
/**
*
*/
public class HasherTests {
@Test
public void testHtpasswdToolGenerated() throws Exception {
Hasher hasher = Hasher.HTPASSWD;
char[] passwd = "test123".toCharArray();
assertTrue(hasher.verify(passwd, "$2a$05$zxnP0vdREMxnEpkLCDI2OuSaSk/QEKA2.A42iOpI6U2u.RLLOWm1e".toCharArray()));
assertTrue(hasher.verify(passwd, "$2a$10$FMhmFjwU5.qxQ/BsEciS9OqcJVkFMgXMo4uH5CelOR1j4N9zIv67e".toCharArray()));
assertTrue(hasher.verify(passwd, "$apr1$R3DdqiAZ$aljIkaIVPSarmDMlJUBBP.".toCharArray()));
assertTrue(hasher.verify(passwd, "hsP1PYSLsEEvs".toCharArray()));
assertTrue(hasher.verify(passwd, "{plain}test123".toCharArray()));
assertTrue(hasher.verify(passwd, "{SHA}cojt0Pw//L6ToM8G41aOKFIWh7w=".toCharArray()));
}
@Test
public void testHtpasswdSelfGenerated() throws Exception {
Hasher hasher = Hasher.HTPASSWD;
char[] passwd = "test123".toCharArray();
assertTrue(hasher.verify(passwd, hasher.hash(passwd)));
}
}
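The fixtures cover bcrypt, apr1-md5, crypt, plain and SHA entries. For the two simplest schemes the encoding can be reproduced directly: a "{plain}" entry stores the literal password, and a "{SHA}" entry stores Base64(SHA-1(password)); "{SHA}cojt0Pw//L6ToM8G41aOKFIWh7w=" is exactly that for "test123". A rough verification sketch under that assumption (not the Hasher.HTPASSWD code from this commit):

    // verifies only the "{plain}" and "{SHA}" schemes; bcrypt/md5/crypt are out of scope here
    static boolean verifySimple(char[] password, String stored) throws NoSuchAlgorithmException {
        String passwd = new String(password);
        if (stored.startsWith("{plain}")) {
            return stored.substring("{plain}".length()).equals(passwd);
        }
        if (stored.startsWith("{SHA}")) {
            byte[] digest = MessageDigest.getInstance("SHA-1").digest(passwd.getBytes(Charsets.UTF_8));
            return stored.substring("{SHA}".length()).equals(Base64.encodeBase64String(digest)); // commons-codec
        }
        return false;
    }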

View File

@ -0,0 +1,48 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.shield.authc.support;
import com.google.common.base.Charsets;
import org.apache.commons.codec.binary.Base64;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.transport.TransportRequest;
import org.junit.Test;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class UsernamePasswordTokenTests extends ElasticsearchTestCase {
@Test
public void testPutToken() throws Exception {
TransportRequest request = new TransportRequest() {};
UsernamePasswordToken.putTokenHeader(request, new UsernamePasswordToken("user1", "test123".toCharArray()));
String header = request.getHeader(UsernamePasswordToken.BASIC_AUTH_HEADER);
assertThat(header, notNullValue());
assertTrue(header.startsWith("Basic "));
String token = header.substring("Basic ".length());
token = new String(Base64.decodeBase64(token), Charsets.UTF_8);
int i = token.indexOf(":");
assertTrue(i > 0);
String username = token.substring(0, i);
String password = token.substring(i + 1);
assertThat(username, equalTo("user1"));
assertThat(password, equalTo("test123"));
}
@Test
public void testExtractToken() throws Exception {
TransportRequest request = new TransportRequest() {};
String header = "Basic " + new String(Base64.encodeBase64("user1:test123".getBytes(Charsets.UTF_8)), Charsets.UTF_8);
request.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, header);
UsernamePasswordToken token = UsernamePasswordToken.extractToken(request, null);
assertThat(token, notNullValue());
assertThat(token.principal(), equalTo("user1"));
assertThat(new String(token.credentials()), equalTo("test123"));
}
}

View File

@ -0,0 +1,4 @@
+c:255.255.0.0/16
-i:192.168.100.2
-c:2001:db8::/48
-n:hostname

View File

@ -0,0 +1,6 @@
bcrypt: $2y$05$zxnP0vdREMxnEpkLCDI2OuSaSk/QEKA2.A42iOpI6U2u.RLLOWm1e
md5: $apr1$R3DdqiAZ$aljIkaIVPSarmDMlJUBBP.
crypt: hsP1PYSLsEEvs
plain: {plain}test123
sha:{SHA}cojt0Pw//L6ToM8G41aOKFIWh7w=
bcrypt10: $2y$10$FMhmFjwU5.qxQ/BsEciS9OqcJVkFMgXMo4uH5CelOR1j4N9zIv67e

View File

@ -0,0 +1,3 @@
user1:role1,role2,role3
user2 : role2 , role3
user3:role3