Merged branch 'jetty-9.4.x' into 'jetty-10.0.x'.
commit 098f5029c8

@@ -233,7 +233,7 @@ public class ServletPathSpec extends PathSpec
         {
             this.group = PathSpecGroup.EXACT;
             this.prefix = servletPathSpec;
-            if (servletPathSpec.endsWith("*") )
+            if (servletPathSpec.endsWith("*"))
             {
                 LOG.warn("Suspicious URL pattern: '{}'; see sections 12.1 and 12.2 of the Servlet specification",
                     servletPathSpec);
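
Note (not part of the commit): a hedged illustration of the warning added above. If the surrounding ServletPathSpec constructor is read correctly, a spec that falls into the EXACT group yet still ends in '*' (for example "/foo*", which is neither a "/prefix/*" nor a "*.suffix" mapping under sections 12.1 and 12.2 of the Servlet specification) now logs the warning and keeps matching only the literal path:

    // "/foo*" is an assumed example, not taken from the commit.
    ServletPathSpec spec = new ServletPathSpec("/foo*");
    // logs: Suspicious URL pattern: '/foo*'; see sections 12.1 and 12.2 of the Servlet specification
    // the spec stays in PathSpecGroup.EXACT, so only the literal "/foo*" path matches
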
@@ -461,6 +461,8 @@ public class HpackContext
             if (value != null && value.length() > 0)
             {
                 int huffmanLen = Huffman.octetsNeeded(value);
+                if (huffmanLen < 0)
+                    throw new IllegalStateException("bad value");
                 int lenLen = NBitInteger.octectsNeeded(7, huffmanLen);
                 _huffmanValue = new byte[1 + lenLen + huffmanLen];
                 ByteBuffer buffer = ByteBuffer.wrap(_huffmanValue);
@@ -177,7 +177,7 @@ public class HpackDecoder
                     else
                         name = toASCIIString(buffer, length);
                     check:
-                    for (int i = name.length(); i-- > 0;)
+                    for (int i = name.length(); i-- > 0; )
                     {
                         char c = name.charAt(i);
                         if (c > 0xff)
@@ -19,9 +19,10 @@
 package org.eclipse.jetty.http2.hpack;

 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.EnumSet;
+import java.util.HashSet;
 import java.util.Set;
-import java.util.stream.Collectors;

 import org.eclipse.jetty.http.HttpField;
 import org.eclipse.jetty.http.HttpFields;
@@ -34,24 +35,22 @@ import org.eclipse.jetty.http.MetaData;
 import org.eclipse.jetty.http.PreEncodedHttpField;
 import org.eclipse.jetty.http2.hpack.HpackContext.Entry;
 import org.eclipse.jetty.http2.hpack.HpackContext.StaticEntry;
-import org.eclipse.jetty.util.ArrayTrie;
 import org.eclipse.jetty.util.StringUtil;
-import org.eclipse.jetty.util.Trie;
 import org.eclipse.jetty.util.TypeUtil;
 import org.eclipse.jetty.util.log.Log;
 import org.eclipse.jetty.util.log.Logger;

 public class HpackEncoder
 {
-    public static final Logger LOG = Log.getLogger(HpackEncoder.class);
-    private static final HttpField[] __status = new HttpField[599];
-    static final EnumSet<HttpHeader> __DO_NOT_HUFFMAN =
+    private static final Logger LOG = Log.getLogger(HpackEncoder.class);
+    private static final HttpField[] STATUSES = new HttpField[599];
+    static final EnumSet<HttpHeader> DO_NOT_HUFFMAN =
         EnumSet.of(
             HttpHeader.AUTHORIZATION,
             HttpHeader.CONTENT_MD5,
             HttpHeader.PROXY_AUTHENTICATE,
             HttpHeader.PROXY_AUTHORIZATION);
-    static final EnumSet<HttpHeader> __DO_NOT_INDEX =
+    static final EnumSet<HttpHeader> DO_NOT_INDEX =
         EnumSet.of(
             // HttpHeader.C_PATH, // TODO more data needed
             // HttpHeader.DATE, // TODO more data needed
@@ -71,23 +70,21 @@ public class HpackEncoder
             HttpHeader.LAST_MODIFIED,
             HttpHeader.SET_COOKIE,
             HttpHeader.SET_COOKIE2);
-    static final EnumSet<HttpHeader> __NEVER_INDEX =
+    static final EnumSet<HttpHeader> NEVER_INDEX =
         EnumSet.of(
             HttpHeader.AUTHORIZATION,
             HttpHeader.SET_COOKIE,
             HttpHeader.SET_COOKIE2);
-    private static final PreEncodedHttpField CONNECTION_TE = new PreEncodedHttpField(HttpHeader.CONNECTION, "te");
+    private static final EnumSet<HttpHeader> IGNORED_HEADERS = EnumSet.of(HttpHeader.CONNECTION, HttpHeader.KEEP_ALIVE,
+        HttpHeader.PROXY_CONNECTION, HttpHeader.TRANSFER_ENCODING, HttpHeader.UPGRADE);
     private static final PreEncodedHttpField TE_TRAILERS = new PreEncodedHttpField(HttpHeader.TE, "trailers");
-    private static final Trie<Boolean> specialHopHeaders = new ArrayTrie<>(6);

     static
     {
         for (HttpStatus.Code code : HttpStatus.Code.values())
         {
-            __status[code.getCode()] = new PreEncodedHttpField(HttpHeader.C_STATUS, Integer.toString(code.getCode()));
+            STATUSES[code.getCode()] = new PreEncodedHttpField(HttpHeader.C_STATUS, Integer.toString(code.getCode()));
         }
-        specialHopHeaders.put("close", true);
-        specialHopHeaders.put("te", true);
     }

     private final HpackContext _context;
@@ -182,33 +179,37 @@ public class HpackEncoder
         {
             MetaData.Response response = (MetaData.Response)metadata;
             int code = response.getStatus();
-            HttpField status = code < __status.length ? __status[code] : null;
+            HttpField status = code < STATUSES.length ? STATUSES[code] : null;
             if (status == null)
                 status = new HttpField.IntValueHttpField(HttpHeader.C_STATUS, code);
             encode(buffer, status);
         }

-        // Add all non-connection fields.
+        // Remove fields as specified in RFC 7540, 8.1.2.2.
         HttpFields fields = metadata.getFields();
         if (fields != null)
         {
-            Set<String> hopHeaders = fields.getCSV(HttpHeader.CONNECTION, false).stream()
-                .filter(v -> specialHopHeaders.get(v) == Boolean.TRUE)
-                .map(StringUtil::asciiToLowerCase)
-                .collect(Collectors.toSet());
+            // For example: Connection: Close, TE, Upgrade, Custom.
+            Set<String> hopHeaders = null;
+            for (String value : fields.getCSV(HttpHeader.CONNECTION, false))
+            {
+                if (hopHeaders == null)
+                    hopHeaders = new HashSet<>();
+                hopHeaders.add(StringUtil.asciiToLowerCase(value));
+            }
             for (HttpField field : fields)
             {
-                if (field.getHeader() == HttpHeader.CONNECTION)
+                HttpHeader header = field.getHeader();
+                if (header != null && IGNORED_HEADERS.contains(header))
                     continue;
-                if (!hopHeaders.isEmpty() && hopHeaders.contains(StringUtil.asciiToLowerCase(field.getName())))
-                    continue;
-                if (field.getHeader() == HttpHeader.TE)
+                if (header == HttpHeader.TE)
                 {
-                    if (!field.contains("trailers"))
-                        continue;
-                    encode(buffer, CONNECTION_TE);
-                    encode(buffer, TE_TRAILERS);
+                    if (field.contains("trailers"))
+                        encode(buffer, TE_TRAILERS);
+                    continue;
                 }
+                if (hopHeaders != null && hopHeaders.contains(StringUtil.asciiToLowerCase(field.getName())))
+                    continue;
                 encode(buffer, field);
             }
         }
@@ -318,12 +319,12 @@ public class HpackEncoder
                 if (_debug)
                     encoding = indexed ? "PreEncodedIdx" : "PreEncoded";
             }
-            else if (__DO_NOT_INDEX.contains(header))
+            else if (DO_NOT_INDEX.contains(header))
             {
                 // Non indexed field
                 indexed = false;
-                boolean neverIndex = __NEVER_INDEX.contains(header);
-                boolean huffman = !__DO_NOT_HUFFMAN.contains(header);
+                boolean neverIndex = NEVER_INDEX.contains(header);
+                boolean huffman = !DO_NOT_HUFFMAN.contains(header);
                 encodeName(buffer, neverIndex ? (byte)0x10 : (byte)0x00, 4, header.asString(), name);
                 encodeValue(buffer, huffman, field.getValue());
@@ -346,7 +347,7 @@ public class HpackEncoder
             {
                 // indexed
                 indexed = true;
-                boolean huffman = !__DO_NOT_HUFFMAN.contains(header);
+                boolean huffman = !DO_NOT_HUFFMAN.contains(header);
                 encodeName(buffer, (byte)0x40, 6, header.asString(), name);
                 encodeValue(buffer, huffman, field.getValue());
                 if (_debug)
@@ -400,19 +401,38 @@ public class HpackEncoder
         {
             // huffman literal value
             buffer.put((byte)0x80);
-            NBitInteger.encode(buffer, 7, Huffman.octetsNeeded(value));
-            Huffman.encode(buffer, value);
+
+            int needed = Huffman.octetsNeeded(value);
+            if (needed >= 0)
+            {
+                NBitInteger.encode(buffer, 7, needed);
+                Huffman.encode(buffer, value);
+            }
+            else
+            {
+                // Not iso_8859_1
+                byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
+                NBitInteger.encode(buffer, 7, Huffman.octetsNeeded(bytes));
+                Huffman.encode(buffer, bytes);
+            }
         }
         else
         {
             // add literal assuming iso_8859_1
-            buffer.put((byte)0x00);
+            buffer.put((byte)0x00).mark();
             NBitInteger.encode(buffer, 7, value.length());
             for (int i = 0; i < value.length(); i++)
             {
                 char c = value.charAt(i);
                 if (c < ' ' || c > 127)
-                    throw new IllegalArgumentException();
+                {
+                    // Not iso_8859_1, so re-encode as UTF-8
+                    buffer.reset();
+                    byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
+                    NBitInteger.encode(buffer, 7, bytes.length);
+                    buffer.put(bytes, 0, bytes.length);
+                    return;
+                }
                 buffer.put((byte)c);
             }
         }
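
Note (not part of the commit): a minimal sketch of the fallback taken above, assuming a ByteBuffer named buffer positioned for writing. Huffman.octetsNeeded(String) now returns -1 rather than throwing when the value holds a character it cannot code directly (below 0x20 or above 0x7F), and the encoder then Huffman-codes the value's UTF-8 bytes instead:

    String value = "[\uD842\uDF9F]";            // not encodable as a plain ISO-8859-1 literal
    int needed = Huffman.octetsNeeded(value);   // -1 signals "fall back to bytes"
    if (needed < 0)
    {
        byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
        NBitInteger.encode(buffer, 7, Huffman.octetsNeeded(bytes));
        Huffman.encode(buffer, bytes);          // code the UTF-8 bytes
    }
    else
    {
        NBitInteger.encode(buffer, 7, needed);
        Huffman.encode(buffer, value);          // the common ASCII-only path
    }
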
@@ -46,7 +46,7 @@ public class HpackFieldPreEncoder implements HttpFieldPreEncoder
     @Override
     public byte[] getEncodedField(HttpHeader header, String name, String value)
     {
-        boolean notIndexed = HpackEncoder.__DO_NOT_INDEX.contains(header);
+        boolean notIndexed = HpackEncoder.DO_NOT_INDEX.contains(header);

         ByteBuffer buffer = BufferUtil.allocate(name.length() + value.length() + 10);
         BufferUtil.clearToFill(buffer);
@@ -56,8 +56,8 @@ public class HpackFieldPreEncoder implements HttpFieldPreEncoder
         if (notIndexed)
         {
             // Non indexed field
-            boolean neverIndex = HpackEncoder.__NEVER_INDEX.contains(header);
-            huffman = !HpackEncoder.__DO_NOT_HUFFMAN.contains(header);
+            boolean neverIndex = HpackEncoder.NEVER_INDEX.contains(header);
+            huffman = !HpackEncoder.DO_NOT_HUFFMAN.contains(header);
             buffer.put(neverIndex ? (byte)0x10 : (byte)0x00);
             bits = 4;
         }
@@ -72,7 +72,7 @@ public class HpackFieldPreEncoder implements HttpFieldPreEncoder
         {
             // indexed
             buffer.put((byte)0x40);
-            huffman = !HpackEncoder.__DO_NOT_HUFFMAN.contains(header);
+            huffman = !HpackEncoder.DO_NOT_HUFFMAN.contains(header);
             bits = 6;
         }

@@ -20,6 +20,8 @@ package org.eclipse.jetty.http2.hpack;

 import java.nio.ByteBuffer;

+import org.eclipse.jetty.util.Utf8StringBuilder;
+
 public class Huffman
 {
@@ -358,7 +360,7 @@ public class Huffman

     public static String decode(ByteBuffer buffer, int length) throws HpackException.CompressionException
     {
-        StringBuilder out = new StringBuilder(length * 2);
+        Utf8StringBuilder utf8 = new Utf8StringBuilder(length * 2);
         int node = 0;
         int current = 0;
         int bits = 0;
@@ -378,7 +380,7 @@ public class Huffman
                     throw new HpackException.CompressionException("EOS in content");

                 // terminal node
-                out.append(rowsym[node]);
+                utf8.append((byte)(0xFF & rowsym[node]));
                 bits -= rowbits[node];
                 node = 0;
             }
@@ -411,7 +413,7 @@ public class Huffman
                 break;
             }

-            out.append(rowsym[node]);
+            utf8.append((byte)(0xFF & rowsym[node]));
             bits -= rowbits[node];
             node = 0;
         }
@@ -419,7 +421,27 @@ public class Huffman
         if (node != 0)
             throw new HpackException.CompressionException("Bad termination");

-        return out.toString();
+        return utf8.toString();
     }

+    public static int octetsNeeded(String s)
+    {
+        return octetsNeeded(CODES, s);
+    }
+
+    public static int octetsNeeded(byte[] b)
+    {
+        return octetsNeeded(CODES, b);
+    }
+
+    public static void encode(ByteBuffer buffer, String s)
+    {
+        encode(CODES, buffer, s);
+    }
+
+    public static void encode(ByteBuffer buffer, byte[] b)
+    {
+        encode(CODES, buffer, b);
+    }
+
     public static int octetsNeededLC(String s)
@@ -432,11 +454,6 @@ public class Huffman
         encode(LCCODES, buffer, s);
     }

-    public static int octetsNeeded(String s)
-    {
-        return octetsNeeded(CODES, s);
-    }
-
     private static int octetsNeeded(final int[][] table, String s)
     {
         int needed = 0;
@@ -445,18 +462,30 @@ public class Huffman
         {
             char c = s.charAt(i);
             if (c >= 128 || c < ' ')
-                throw new IllegalArgumentException();
+                return -1;
             needed += table[c][1];
         }

         return (needed + 7) / 8;
     }

-    public static void encode(ByteBuffer buffer, String s)
+    private static int octetsNeeded(final int[][] table, byte[] b)
     {
-        encode(CODES, buffer, s);
+        int needed = 0;
+        int len = b.length;
+        for (int i = 0; i < len; i++)
+        {
+            int c = 0xFF & b[i];
+            needed += table[c][1];
+        }
+        return (needed + 7) / 8;
     }

     /**
      * @param table The table to encode by
      * @param buffer The buffer to encode to
      * @param s The string to encode
      */
     private static void encode(final int[][] table, ByteBuffer buffer, String s)
     {
         long current = 0;
@@ -488,4 +517,35 @@ public class Huffman
             buffer.put((byte)(current));
         }
     }
+
+    private static void encode(final int[][] table, ByteBuffer buffer, byte[] b)
+    {
+        long current = 0;
+        int n = 0;
+
+        int len = b.length;
+        for (int i = 0; i < len; i++)
+        {
+            int c = 0xFF & b[i];
+            int code = table[c][0];
+            int bits = table[c][1];
+
+            current <<= bits;
+            current |= code;
+            n += bits;
+
+            while (n >= 8)
+            {
+                n -= 8;
+                buffer.put((byte)(current >> n));
+            }
+        }
+
+        if (n > 0)
+        {
+            current <<= (8 - n);
+            current |= (0xFF >>> n);
+            buffer.put((byte)(current));
+        }
+    }
 }
@@ -67,7 +67,7 @@ public class HpackTest
         BufferUtil.flipToFlush(buffer, 0);
         Response decoded0 = (Response)decoder.decode(buffer);
         original0.getFields().put(new HttpField(HttpHeader.CONTENT_ENCODING, ""));
-        assertMetadataSame(original0, decoded0);
+        assertMetaDataResponseSame(original0, decoded0);

         // Same again?
         BufferUtil.clearToFill(buffer);
@@ -75,7 +75,7 @@ public class HpackTest
         BufferUtil.flipToFlush(buffer, 0);
         Response decoded0b = (Response)decoder.decode(buffer);

-        assertMetadataSame(original0, decoded0b);
+        assertMetaDataResponseSame(original0, decoded0b);

         HttpFields fields1 = new HttpFields();
         fields1.add(HttpHeader.CONTENT_TYPE, "text/plain");
@@ -93,7 +93,7 @@ public class HpackTest
         BufferUtil.flipToFlush(buffer, 0);
         Response decoded1 = (Response)decoder.decode(buffer);

-        assertMetadataSame(original1, decoded1);
+        assertMetaDataResponseSame(original1, decoded1);
         assertEquals("custom-key", decoded1.getFields().getField("Custom-Key").getName());
     }

@@ -106,19 +106,19 @@ public class HpackTest

         HttpFields fields0 = new HttpFields();
         fields0.add("1234567890", "1234567890123456789012345678901234567890");
-        fields0.add("Cookie", "abcdeffhijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQR");
+        fields0.add("Cookie", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQR");
         MetaData original0 = new MetaData(HttpVersion.HTTP_2, fields0);

         BufferUtil.clearToFill(buffer);
         encoder.encode(buffer, original0);
         BufferUtil.flipToFlush(buffer, 0);
-        MetaData decoded0 = (MetaData)decoder.decode(buffer);
+        MetaData decoded0 = decoder.decode(buffer);

-        assertMetadataSame(original0, decoded0);
+        assertMetaDataSame(original0, decoded0);

         HttpFields fields1 = new HttpFields();
         fields1.add("1234567890", "1234567890123456789012345678901234567890");
-        fields1.add("Cookie", "abcdeffhijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQR");
+        fields1.add("Cookie", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQR");
         fields1.add("x", "y");
         MetaData original1 = new MetaData(HttpVersion.HTTP_2, fields1);

@@ -136,6 +136,26 @@ public class HpackTest
         }
     }

+    @Test
+    public void encodeDecodeNonAscii() throws Exception
+    {
+        HpackEncoder encoder = new HpackEncoder();
+        HpackDecoder decoder = new HpackDecoder(4096, 8192);
+        ByteBuffer buffer = BufferUtil.allocate(16 * 1024);
+
+        HttpFields fields0 = new HttpFields();
+        fields0.add("Cookie", "[\uD842\uDF9F]");
+        fields0.add("custom-key", "[\uD842\uDF9F]");
+        Response original0 = new MetaData.Response(HttpVersion.HTTP_2, 200, fields0);
+
+        BufferUtil.clearToFill(buffer);
+        encoder.encode(buffer, original0);
+        BufferUtil.flipToFlush(buffer, 0);
+        Response decoded0 = (Response)decoder.decode(buffer);
+
+        assertMetadataSame(original0, decoded0);
+    }
+
     @Test
     public void evictReferencedFieldTest() throws Exception
     {
@@ -143,57 +163,111 @@ public class HpackTest
         HpackDecoder decoder = new HpackDecoder(200, 1024);
         ByteBuffer buffer = BufferUtil.allocateDirect(16 * 1024);

+        String longEnoughToBeEvicted = "012345678901234567890123456789012345678901234567890";
+
         HttpFields fields0 = new HttpFields();
-        fields0.add("123456789012345678901234567890123456788901234567890", "value");
-        fields0.add("foo", "abcdeffhijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQR");
+        fields0.add(longEnoughToBeEvicted, "value");
+        fields0.add("foo", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ");
         MetaData original0 = new MetaData(HttpVersion.HTTP_2, fields0);

         BufferUtil.clearToFill(buffer);
         encoder.encode(buffer, original0);
         BufferUtil.flipToFlush(buffer, 0);
-        MetaData decoded0 = (MetaData)decoder.decode(buffer);
+        MetaData decoded0 = decoder.decode(buffer);

         assertEquals(2, encoder.getHpackContext().size());
         assertEquals(2, decoder.getHpackContext().size());
-        assertEquals("123456789012345678901234567890123456788901234567890", encoder.getHpackContext().get(HpackContext.STATIC_TABLE.length + 1).getHttpField().getName());
-        assertEquals("foo", encoder.getHpackContext().get(HpackContext.STATIC_TABLE.length + 0).getHttpField().getName());
+        assertEquals(longEnoughToBeEvicted, encoder.getHpackContext().get(HpackContext.STATIC_TABLE.length + 1).getHttpField().getName());
+        assertEquals("foo", encoder.getHpackContext().get(HpackContext.STATIC_TABLE.length).getHttpField().getName());

-        assertMetadataSame(original0, decoded0);
+        assertMetaDataSame(original0, decoded0);

         HttpFields fields1 = new HttpFields();
-        fields1.add("123456789012345678901234567890123456788901234567890", "other_value");
+        fields1.add(longEnoughToBeEvicted, "other_value");
         fields1.add("x", "y");
         MetaData original1 = new MetaData(HttpVersion.HTTP_2, fields1);

         BufferUtil.clearToFill(buffer);
         encoder.encode(buffer, original1);
         BufferUtil.flipToFlush(buffer, 0);
-        MetaData decoded1 = (MetaData)decoder.decode(buffer);
-        assertMetadataSame(original1, decoded1);
+        MetaData decoded1 = decoder.decode(buffer);
+        assertMetaDataSame(original1, decoded1);

         assertEquals(2, encoder.getHpackContext().size());
         assertEquals(2, decoder.getHpackContext().size());
-        assertEquals("x", encoder.getHpackContext().get(HpackContext.STATIC_TABLE.length + 0).getHttpField().getName());
+        assertEquals("x", encoder.getHpackContext().get(HpackContext.STATIC_TABLE.length).getHttpField().getName());
         assertEquals("foo", encoder.getHpackContext().get(HpackContext.STATIC_TABLE.length + 1).getHttpField().getName());
     }

-    private void assertMetadataSame(MetaData.Response expected, MetaData.Response actual)
+    @Test
+    public void testHopHeadersAreRemoved() throws Exception
+    {
+        HpackEncoder encoder = new HpackEncoder();
+        HpackDecoder decoder = new HpackDecoder(4096, 16384);
+
+        HttpFields input = new HttpFields();
+        input.put(HttpHeader.ACCEPT, "*");
+        input.put(HttpHeader.CONNECTION, "TE, Upgrade, Custom");
+        input.put("Custom", "Pizza");
+        input.put(HttpHeader.KEEP_ALIVE, "true");
+        input.put(HttpHeader.PROXY_CONNECTION, "foo");
+        input.put(HttpHeader.TE, "1234567890abcdef");
+        input.put(HttpHeader.TRANSFER_ENCODING, "chunked");
+        input.put(HttpHeader.UPGRADE, "gold");
+
+        ByteBuffer buffer = BufferUtil.allocate(2048);
+        BufferUtil.clearToFill(buffer);
+        encoder.encode(buffer, new MetaData(HttpVersion.HTTP_2, input));
+        BufferUtil.flipToFlush(buffer, 0);
+        MetaData metaData = decoder.decode(buffer);
+        HttpFields output = metaData.getFields();
+
+        assertEquals(1, output.size());
+        assertEquals("*", output.get(HttpHeader.ACCEPT));
+    }
+
+    @Test
+    public void testTETrailers() throws Exception
+    {
+        HpackEncoder encoder = new HpackEncoder();
+        HpackDecoder decoder = new HpackDecoder(4096, 16384);
+
+        HttpFields input = new HttpFields();
+        input.put(HttpHeader.CONNECTION, "TE");
+        String teValue = "trailers";
+        input.put(HttpHeader.TE, teValue);
+        String trailerValue = "Custom";
+        input.put(HttpHeader.TRAILER, trailerValue);
+
+        ByteBuffer buffer = BufferUtil.allocate(2048);
+        BufferUtil.clearToFill(buffer);
+        encoder.encode(buffer, new MetaData(HttpVersion.HTTP_2, input));
+        BufferUtil.flipToFlush(buffer, 0);
+        MetaData metaData = decoder.decode(buffer);
+        HttpFields output = metaData.getFields();
+
+        assertEquals(2, output.size());
+        assertEquals(teValue, output.get(HttpHeader.TE));
+        assertEquals(trailerValue, output.get(HttpHeader.TRAILER));
+    }
+
+    private void assertMetaDataResponseSame(MetaData.Response expected, MetaData.Response actual)
     {
         assertThat("Response.status", actual.getStatus(), is(expected.getStatus()));
         assertThat("Response.reason", actual.getReason(), is(expected.getReason()));
-        assertMetadataSame((MetaData)expected, (MetaData)actual);
+        assertMetaDataSame(expected, actual);
     }

-    private void assertMetadataSame(MetaData expected, MetaData actual)
+    private void assertMetaDataSame(MetaData expected, MetaData actual)
     {
         assertThat("Metadata.contentLength", actual.getContentLength(), is(expected.getContentLength()));
         assertThat("Metadata.version" + ".version", actual.getHttpVersion(), is(expected.getHttpVersion()));
-        assertHttpFieldsSame("Metadata.fields", expected.getFields(), actual.getFields());
+        assertHttpFieldsSame(expected.getFields(), actual.getFields());
     }

-    private void assertHttpFieldsSame(String msg, HttpFields expected, HttpFields actual)
+    private void assertHttpFieldsSame(HttpFields expected, HttpFields actual)
     {
-        assertThat(msg + ".size", actual.size(), is(expected.size()));
+        assertThat("metaData.fields.size", actual.size(), is(expected.size()));

         for (HttpField actualField : actual)
         {
@@ -203,7 +277,7 @@ public class HpackTest
                 // during testing.
                 continue;
             }
-            assertThat(msg + ".contains(" + actualField + ")", expected.contains(actualField), is(true));
+            assertThat("metaData.fields.contains(" + actualField + ")", expected.contains(actualField), is(true));
         }
     }
 }
@@ -25,11 +25,13 @@ import java.util.stream.Stream;

 import org.eclipse.jetty.util.BufferUtil;
 import org.eclipse.jetty.util.TypeUtil;
+import org.hamcrest.Matchers;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
 import org.junit.jupiter.params.provider.ValueSource;

+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -77,8 +79,7 @@ public class HuffmanTest
     {
         String s = "bad '" + bad + "'";

-        assertThrows(IllegalArgumentException.class,
-            () -> Huffman.octetsNeeded(s));
+        assertThat(Huffman.octetsNeeded(s), Matchers.is(-1));

         assertThrows(BufferOverflowException.class,
             () -> Huffman.encode(BufferUtil.allocate(32), s));
@@ -81,24 +81,20 @@ public abstract class AbstractSessionDataStore extends ContainerLifeCycle implem
     public SessionData load(String id) throws Exception
     {
         if (!isStarted())
-            throw new IllegalStateException ("Not started");
+            throw new IllegalStateException("Not started");

         final AtomicReference<SessionData> reference = new AtomicReference<SessionData>();
         final AtomicReference<Exception> exception = new AtomicReference<Exception>();

-        Runnable r = new Runnable()
+        Runnable r = () ->
         {
-            @Override
-            public void run()
+            try
             {
-                try
-                {
-                    reference.set(doLoad(id));
-                }
-                catch (Exception e)
-                {
-                    exception.set(e);
-                }
+                reference.set(doLoad(id));
+            }
+            catch (Exception e)
+            {
+                exception.set(e);
             }
         };
@@ -167,7 +163,7 @@ public abstract class AbstractSessionDataStore extends ContainerLifeCycle implem
     public Set<String> getExpired(Set<String> candidates)
    {
         if (!isStarted())
-            throw new IllegalStateException ("Not started");
+            throw new IllegalStateException("Not started");

         try
         {