Merge pull request #10002 from eclipse/fix/12.0.x/merge-11.0.x

Merge `jetty-11.0.x` into `jetty-12.0.x`
Joakim Erdfelt 2023-06-30 07:25:25 -05:00 committed by GitHub
commit dd44b30c3e
4 changed files with 179 additions and 84 deletions


@@ -417,7 +417,6 @@ class AsyncContentProducer implements ContentProducer
{
try
{
int remainingBeforeInterception = _rawContent.remaining();
HttpInput.Content content = _interceptor.readFrom(_rawContent);
if (content != null && content.isSpecial() && !_rawContent.isSpecial())
{
@@ -434,24 +433,6 @@
if (LOG.isDebugEnabled())
LOG.debug("interceptor generated special content {}", this);
}
else if (content != _rawContent && !_rawContent.isSpecial() && !_rawContent.isEmpty() && _rawContent.remaining() == remainingBeforeInterception)
{
IOException failure = new IOException("Interceptor " + _interceptor + " did not consume any of the " + _rawContent.remaining() + " remaining byte(s) of content");
if (content != null)
content.failed(failure);
failCurrentContent(failure);
// Set the _error flag to mark the content as definitive, i.e.:
// do not try to produce new raw content to get a fresher error
// when the special content was caused by the interceptor not
// consuming the raw content.
_error = true;
Response response = _httpChannel.getResponse();
if (response.isCommitted())
_httpChannel.abort(failure);
if (LOG.isDebugEnabled())
LOG.debug("interceptor did not consume content {}", this);
content = _transformedContent;
}
if (LOG.isDebugEnabled())
LOG.debug("intercepted raw content {}", this);


@@ -291,44 +291,6 @@ public class AsyncContentProducerTest
assertThat(contentFailedCount.get(), is(1));
}
@Test
public void testAsyncContentProducerInterceptorDoesNotConsume()
{
AtomicInteger contentFailedCount = new AtomicInteger();
AtomicInteger interceptorContentFailedCount = new AtomicInteger();
ContentProducer contentProducer = new AsyncContentProducer(new ContentListHttpChannel(List.of(new HttpInput.Content(ByteBuffer.allocate(1))
{
@Override
public void failed(Throwable x)
{
contentFailedCount.incrementAndGet();
}
}), new HttpInput.EofContent()));
try (AutoLock ignored = contentProducer.lock())
{
contentProducer.setInterceptor(content -> new HttpInput.Content(ByteBuffer.allocate(1))
{
@Override
public void failed(Throwable x)
{
interceptorContentFailedCount.incrementAndGet();
}
});
assertThat(contentProducer.isReady(), is(true));
HttpInput.Content content1 = contentProducer.nextContent();
assertThat(content1.isSpecial(), is(true));
assertThat(content1.getError().getMessage(), endsWith("did not consume any of the 1 remaining byte(s) of content"));
HttpInput.Content content2 = contentProducer.nextContent();
assertThat(content2.isSpecial(), is(true));
assertThat(content2.getError().getMessage(), endsWith("did not consume any of the 1 remaining byte(s) of content"));
}
assertThat(contentFailedCount.get(), is(1));
assertThat(interceptorContentFailedCount.get(), is(1));
}
@Test
public void testAsyncContentProducerInterceptorDoesNotConsumeEmptyContent()
{


@@ -271,33 +271,6 @@ public class BlockingContentProducerTest
assertThat(contentFailedCount.get(), is(1));
}
@Test
public void testBlockingContentProducerInterceptorDoesNotConsume()
{
AtomicInteger contentFailedCount = new AtomicInteger();
ContentProducer contentProducer = new BlockingContentProducer(new AsyncContentProducer(new StaticContentHttpChannel(new HttpInput.Content(ByteBuffer.allocate(1))
{
@Override
public void failed(Throwable x)
{
contentFailedCount.incrementAndGet();
}
})));
try (AutoLock ignored = contentProducer.lock())
{
contentProducer.setInterceptor(content -> null);
HttpInput.Content content1 = contentProducer.nextContent();
assertThat(content1.isSpecial(), is(true));
assertThat(content1.getError().getMessage(), endsWith("did not consume any of the 1 remaining byte(s) of content"));
HttpInput.Content content2 = contentProducer.nextContent();
assertThat(content2.isSpecial(), is(true));
assertThat(content2.getError().getMessage(), endsWith("did not consume any of the 1 remaining byte(s) of content"));
}
assertThat(contentFailedCount.get(), is(1));
}
@Test
public void testBlockingContentProducerInterceptorDoesNotConsumeEmptyContent()
{


@@ -0,0 +1,179 @@
//
// ========================================================================
// Copyright (c) 1995 Mort Bay Consulting Pty Ltd and others.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// https://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
// which is available at https://www.apache.org/licenses/LICENSE-2.0.
//
// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
// ========================================================================
//
package org.eclipse.jetty.ee9.servlet;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Stream;
import java.util.zip.GZIPOutputStream;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.eclipse.jetty.client.BytesRequestContent;
import org.eclipse.jetty.client.ContentResponse;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.Request;
import org.eclipse.jetty.ee9.servlet.ServletContextHandler;
import org.eclipse.jetty.http.HttpHeader;
import org.eclipse.jetty.http.HttpMethod;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.util.component.LifeCycle;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
/**
* Tests of GzipHandler behavior with gzip compressed Request content.
*/
public class GzipHandlerInputTest
{
private Server server;
private HttpClient client;
@BeforeEach
public void init() throws Exception
{
server = new Server();
ServerConnector connector = new ServerConnector(server);
server.addConnector(connector);
GzipHandler gzipHandler = new GzipHandler();
gzipHandler.setInflateBufferSize(8192); // enable request inflation
ServletContextHandler servletContextHandler = new ServletContextHandler();
servletContextHandler.setContextPath("/");
servletContextHandler.addServlet(ReadAllInputServlet.class, "/inflate");
gzipHandler.setHandler(servletContextHandler);
server.setHandler(gzipHandler);
server.start();
client = new HttpClient();
client.start();
}
@AfterEach
public void stop()
{
LifeCycle.stop(server);
LifeCycle.stop(client);
}
public static Stream<Arguments> transferScenarios()
{
int[] sizes = {
0, 1, 8191, 8192, 8193, 8194, 8195, 8226, 8227, 8260, 8261, 8262, 8263, 8264,
8192, 8193, 8194, 8195, 8226, 8227, 8228, 8259, 8260, 8261, 8262, 8263, 8515,
8516, 8517, 8518, 8773, 8774, 8775, 9216
};
List<Arguments> scenarios = new ArrayList<>();
// Scenario 1: use Content-Length on request
for (int size : sizes)
{
scenarios.add(Arguments.of(size, true));
}
// Scenario 2: use Transfer-Encoding: chunked on request
for (int size : sizes)
{
scenarios.add(Arguments.of(size, false));
}
return scenarios.stream();
}
@ParameterizedTest
@MethodSource("transferScenarios")
public void testReadGzippedInput(int testLength, boolean sendContentLength) throws Exception
{
byte[] rawBuf = new byte[testLength];
Arrays.fill(rawBuf, (byte)'x');
byte[] gzipBuf;
try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
GZIPOutputStream gzipOut = new GZIPOutputStream(baos))
{
gzipOut.write(rawBuf, 0, rawBuf.length);
gzipOut.flush();
gzipOut.finish();
gzipBuf = baos.toByteArray();
}
URI destURI = server.getURI().resolve("/inflate");
BytesRequestContent bytesRequestContent = new BytesRequestContent(gzipBuf, new byte[0])
{
@Override
public long getLength()
{
if (sendContentLength)
return super.getLength();
return -1; // we want chunked transfer-encoding
}
};
Request request = client.newRequest(destURI)
.method(HttpMethod.POST)
.headers((headers) -> headers.put(HttpHeader.CONTENT_ENCODING, "gzip"))
.body(bytesRequestContent);
ContentResponse response = request.send();
assertThat(response.getStatus(), is(200));
String responseBody = response.getContentAsString();
if (sendContentLength)
assertThat(responseBody, containsString(String.format("[X-Content-Length]: %d", gzipBuf.length)));
else
assertThat(responseBody, containsString("[Transfer-Encoding]: chunked"));
assertThat(responseBody, containsString("[X-Content-Encoding]: gzip"));
assertThat(responseBody, containsString(String.format("Read %d bytes", rawBuf.length)));
}
public static class ReadAllInputServlet extends HttpServlet
{
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException
{
InputStream input = req.getInputStream();
byte[] buf = input.readAllBytes();
resp.setCharacterEncoding("utf-8");
resp.setContentType("text/plain");
PrintWriter out = resp.getWriter();
// dump header names & values
List<String> headerNames = Collections.list(req.getHeaderNames());
Collections.sort(headerNames);
for (String headerName : headerNames)
{
List<String> headerValues = Collections.list(req.getHeaders(headerName));
out.printf("header [%s]: %s%n", headerName, String.join(", ", headerValues));
}
// dump number of bytes read
out.printf("Read %d bytes%n", buf.length);
}
}
}
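The assertions in testReadGzippedInput above rely on how GzipHandler presents an inflated request to the application: the original Content-Encoding and Content-Length values are surfaced to the servlet as X-Content-Encoding and X-Content-Length, which ReadAllInputServlet then echoes in its header dump. A minimal sketch of how a servlet behind such a GzipHandler could observe those rewritten headers follows; it is illustrative only and not part of this change.

// Illustrative only: a servlet behind a GzipHandler with request inflation
// enabled (setInflateBufferSize > 0) can observe the rewritten headers.
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException
{
    boolean inflated = "gzip".equals(req.getHeader("X-Content-Encoding"));
    String compressedLength = req.getHeader("X-Content-Length"); // compressed size, when Content-Length was sent
    resp.getWriter().printf("inflated=%b compressedLength=%s%n", inflated, compressedLength);
}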