Use InputStreamStreamInput's validation to limit the size of tokens (elastic/x-pack-elasticsearch#2537)
Relates to elastic/elasticsearch#26692 and elastic/x-pack-elasticsearch#2493.
Original commit: elastic/x-pack-elasticsearch@8e23868743
This commit is contained in:
parent 371953488b
commit 47214426e9
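The core of the change: instead of handing the Base64-decoding stream to InputStreamStreamInput unbounded, the token's encoded length is now passed as an explicit size limit, so deserialization cannot be tricked by a huge length prefix into allocating far more memory than the token could possibly contain. Below is a minimal sketch of the pattern, assuming the two-argument InputStreamStreamInput constructor shown in the first hunk; the class and helper names here are illustrative, not part of TokenService:

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    import org.elasticsearch.common.io.stream.InputStreamStreamInput;
    import org.elasticsearch.common.io.stream.StreamInput;

    final class BoundedTokenReadSketch {

        // Illustrative helper: wrap an untrusted Base64 token in a StreamInput
        // whose reads are capped at the encoded length of the token itself.
        static StreamInput wrapToken(String token) {
            byte[] bytes = token.getBytes(StandardCharsets.UTF_8);
            // The second argument bounds how much data the stream will hand out;
            // a length-prefixed read that claims more than this can fail fast
            // instead of allocating an oversized array (the validation the
            // commit title refers to).
            return new InputStreamStreamInput(
                    Base64.getDecoder().wrap(new ByteArrayInputStream(bytes)), bytes.length);
        }

        static byte[] readPayload(String token) throws IOException {
            try (StreamInput in = wrapToken(token)) {
                return in.readByteArray();
            }
        }
    }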
@@ -221,8 +221,8 @@ public final class TokenService extends AbstractComponent {
     void decodeToken(String token, ActionListener<UserToken> listener) throws IOException {
         // We intentionally do not use try-with resources since we need to keep the stream open if we need to compute a key!
-        StreamInput in = new InputStreamStreamInput(
-                Base64.getDecoder().wrap(new ByteArrayInputStream(token.getBytes(StandardCharsets.UTF_8))));
+        byte[] bytes = token.getBytes(StandardCharsets.UTF_8);
+        StreamInput in = new InputStreamStreamInput(Base64.getDecoder().wrap(new ByteArrayInputStream(bytes)), bytes.length);
         if (in.available() < MINIMUM_BASE64_BYTES) {
             logger.debug("invalid token");
             listener.onResponse(null);
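A note on the chosen bound: bytes.length is the length of the encoded token, and a Base64 decoder never produces more output than it consumes, so the encoded length is a safe upper bound on everything the wrapped stream can legitimately yield.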
@@ -17,6 +17,7 @@ import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchSecurityException;
@@ -840,6 +841,11 @@ public class AuthenticationServiceTests extends ESTestCase {
                 assertEquals(expected, result);
                 latch.countDown();
             }, this::logAndFail));
+        } catch (IllegalArgumentException ex) {
+            assertThat(ex.getMessage(), containsString("array length must be <= to " + ArrayUtil.MAX_ARRAY_LENGTH + " but was: "));
+        } catch (NegativeArraySizeException ex) {
+            assertThat(ex.getMessage(), containsString("array size must be positive but was: "));
+        }
         }
 
         // we need to use a latch here because the key computation goes async on another thread!
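The two new catch blocks accept the failures that can now surface while an invalid token's length prefix is being validated: a declared array length larger than ArrayUtil.MAX_ARRAY_LENGTH is rejected with an IllegalArgumentException, and a negative one with a NegativeArraySizeException. The shape of that validation is roughly the following; this is an illustrative sketch whose class and method names are made up, not the actual StreamInput code:

    import java.io.IOException;

    import org.apache.lucene.util.ArrayUtil;
    import org.elasticsearch.common.io.stream.StreamInput;

    final class ArraySizeCheckSketch {

        // Illustrative only: check a length prefix read from untrusted input
        // before any array is allocated (messages mirror the test's assertions).
        static byte[] readCheckedByteArray(StreamInput in) throws IOException {
            final int len = in.readVInt();
            if (len > ArrayUtil.MAX_ARRAY_LENGTH) {
                throw new IllegalArgumentException(
                        "array length must be <= to " + ArrayUtil.MAX_ARRAY_LENGTH + " but was: " + len);
            }
            if (len < 0) {
                throw new NegativeArraySizeException("array size must be positive but was: " + len);
            }
            final byte[] data = new byte[len];
            in.readBytes(data, 0, len);
            return data;
        }
    }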