Merge pull request #8621 from eclipse/jetty-12.0.x-CachedContentFactory

Fix Caching ContentFactories in Jetty-12
Lachlan authored 2022-10-04 18:57:29 +11:00 · committed by GitHub
commit 05a4b96632
14 changed files with 81 additions and 1059 deletions

View File

@@ -163,6 +163,32 @@ public class CachingContentFactory implements HttpContent.ContentFactory
}
}
/**
* Tests whether the given HttpContent is cacheable, and whether there is enough room to fit it in the cache.
*
* @param httpContent the HttpContent to test.
* @return whether the HttpContent is cacheable.
*/
protected boolean isCacheable(HttpContent httpContent)
{
if (httpContent == null)
return false;
if (httpContent.getResource().isDirectory())
return false;
if (_maxCachedFiles <= 0)
return false;
// Will it fit in the cache?
long len = httpContent.getContentLengthValue();
if (len <= 0)
return false;
if (isUseFileMappedBuffer())
return true;
return ((len <= _maxCachedFileSize) && (len + getCachedSize() <= _maxCacheSize));
}
@Override
public HttpContent getContent(String path) throws IOException
{
@@ -178,14 +204,14 @@ public class CachingContentFactory implements HttpContent.ContentFactory
}
HttpContent httpContent = _authority.getContent(path);
// Do not cache directories or files that are too big
if (httpContent != null && !httpContent.getResource().isDirectory() && httpContent.getContentLengthValue() <= _maxCachedFileSize)
if (isCacheable(httpContent))
{
httpContent = cachingHttpContent = new CachingHttpContent(path, httpContent);
_cache.put(path, cachingHttpContent);
_cachedSize.addAndGet(cachingHttpContent.calculateSize());
shrinkCache();
}
return httpContent;
}
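Note on the hunk above: getContent(path) now delegates the cache-admission decision to the overridable isCacheable(HttpContent) hook, which rejects null content, directories, zero-length content and anything that would exceed the configured limits. A minimal tuning sketch follows, assuming a handler exposing getContentFactory() as in ResourceHandlerTest later in this diff; setMaxCacheSize appears in that test, while setMaxCachedFileSize and setMaxCachedFiles are assumed from the corresponding fields.
// Sketch only, not part of this diff; resourceHandler is assumed.
CachingContentFactory contentFactory = (CachingContentFactory)resourceHandler.getContentFactory();
contentFactory.setMaxCacheSize(256 * 1024 * 1024);    // total cached bytes
contentFactory.setMaxCachedFileSize(8 * 1024 * 1024); // largest single entry (setter assumed)
contentFactory.setMaxCachedFiles(2048);               // entry count limit (setter assumed)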

View File

@@ -139,12 +139,12 @@ public class PrecompressedHttpContent implements HttpContent
@Override
public ByteBuffer getBuffer()
{
return _content.getBuffer();
return _precompressedContent.getBuffer();
}
@Override
public void release()
{
_content.release();
_precompressedContent.release();
}
}

View File

@@ -1,621 +0,0 @@
//
// ========================================================================
// Copyright (c) 1995-2022 Mort Bay Consulting Pty Ltd and others.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// https://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
// which is available at https://www.apache.org/licenses/LICENSE-2.0.
//
// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
// ========================================================================
//
package org.eclipse.jetty.server;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.time.Instant;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.eclipse.jetty.http.CompressedContentFormat;
import org.eclipse.jetty.http.DateGenerator;
import org.eclipse.jetty.http.EtagUtils;
import org.eclipse.jetty.http.HttpContent;
import org.eclipse.jetty.http.HttpField;
import org.eclipse.jetty.http.HttpHeader;
import org.eclipse.jetty.http.MimeTypes;
import org.eclipse.jetty.http.MimeTypes.Type;
import org.eclipse.jetty.http.PreEncodedHttpField;
import org.eclipse.jetty.http.PrecompressedHttpContent;
import org.eclipse.jetty.http.ResourceHttpContent;
import org.eclipse.jetty.util.BufferUtil;
import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.util.resource.ResourceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
//TODO rework
public class CachedContentFactory implements HttpContent.ContentFactory
{
private static final Logger LOG = LoggerFactory.getLogger(CachedContentFactory.class);
private static final Map<CompressedContentFormat, CachedPrecompressedHttpContent> NO_PRECOMPRESSED = Collections.unmodifiableMap(Collections.emptyMap());
private final ConcurrentMap<String, CachedHttpContent> _cache;
private final AtomicInteger _cachedSize;
private final AtomicInteger _cachedFiles;
private final ResourceFactory _factory;
private final CachedContentFactory _parent;
private final MimeTypes _mimeTypes;
private final boolean _etags;
private final CompressedContentFormat[] _precompressedFormats;
private final boolean _useFileMappedBuffer;
private int _maxCachedFileSize = 128 * 1024 * 1024;
private int _maxCachedFiles = 2048;
private int _maxCacheSize = 256 * 1024 * 1024;
/**
* Constructor.
*
* @param parent the parent resource cache
* @param factory the resource factory
* @param mimeTypes Mimetype to use for meta data
* @param useFileMappedBuffer true to use file memory mapped buffers
* @param etags true to support etags
* @param precompressedFormats array of precompression formats to support
*/
public CachedContentFactory(CachedContentFactory parent, ResourceFactory factory, MimeTypes mimeTypes, boolean useFileMappedBuffer, boolean etags, CompressedContentFormat[] precompressedFormats)
{
_factory = factory;
_cache = new ConcurrentHashMap<>();
_cachedSize = new AtomicInteger();
_cachedFiles = new AtomicInteger();
_mimeTypes = mimeTypes;
_parent = parent;
_useFileMappedBuffer = useFileMappedBuffer;
_etags = etags;
_precompressedFormats = precompressedFormats;
}
public int getCachedSize()
{
return _cachedSize.get();
}
public int getCachedFiles()
{
return _cachedFiles.get();
}
public int getMaxCachedFileSize()
{
return _maxCachedFileSize;
}
public void setMaxCachedFileSize(int maxCachedFileSize)
{
_maxCachedFileSize = maxCachedFileSize;
shrinkCache();
}
public int getMaxCacheSize()
{
return _maxCacheSize;
}
public void setMaxCacheSize(int maxCacheSize)
{
_maxCacheSize = maxCacheSize;
shrinkCache();
}
/**
* @return the max number of cached files.
*/
public int getMaxCachedFiles()
{
return _maxCachedFiles;
}
/**
* @param maxCachedFiles the max number of cached files.
*/
public void setMaxCachedFiles(int maxCachedFiles)
{
_maxCachedFiles = maxCachedFiles;
shrinkCache();
}
public boolean isUseFileMappedBuffer()
{
return _useFileMappedBuffer;
}
public void flushCache()
{
while (_cache.size() > 0)
{
for (String path : _cache.keySet())
{
CachedHttpContent content = _cache.remove(path);
if (content != null)
content.invalidate();
}
}
}
/**
* <p>Returns an entry from the cache, or creates a new one.</p>
*
* @param pathInContext The key into the cache
* @return The entry matching {@code pathInContext}, or a new entry
* if no matching entry was found. If the content exists but is not cacheable,
* then a {@link ResourceHttpContent} instance is returned. If
* the resource does not exist, then null is returned.
* @throws IOException if the resource cannot be retrieved
*/
@Override
public HttpContent getContent(String pathInContext) throws IOException
{
// Is the content in this cache?
CachedHttpContent content = _cache.get(pathInContext);
if (content != null && (content).isValid())
return content;
// try loading the content from our factory.
Resource resource = _factory.newResource(pathInContext);
HttpContent loaded = load(pathInContext, resource);
if (loaded != null)
return loaded;
// Is the content in the parent cache?
if (_parent != null)
{
HttpContent httpContent = _parent.getContent(pathInContext);
if (httpContent != null)
return httpContent;
}
return null;
}
/**
* @param resource the resource to test
* @return whether the resource is cacheable. The default implementation tests the cache sizes.
*/
protected boolean isCacheable(Resource resource)
{
if (_maxCachedFiles <= 0)
return false;
long len = resource.length();
// Will it fit in the cache?
return (len > 0 && (_useFileMappedBuffer || (len < _maxCachedFileSize && len < _maxCacheSize)));
}
private HttpContent load(String pathInContext, Resource resource) throws IOException
{
if (resource == null || !resource.exists())
return null;
if (resource.isDirectory())
return new ResourceHttpContent(resource, _mimeTypes.getMimeByExtension(resource.toString()));
// Will it fit in the cache?
if (isCacheable(resource))
{
CachedHttpContent content;
// Look for precompressed resources
if (_precompressedFormats.length > 0)
{
Map<CompressedContentFormat, CachedHttpContent> precompresssedContents = new HashMap<>(_precompressedFormats.length);
for (CompressedContentFormat format : _precompressedFormats)
{
String compressedPathInContext = pathInContext + format.getExtension();
CachedHttpContent compressedContent = _cache.get(compressedPathInContext);
if (compressedContent == null || compressedContent.isValid())
{
compressedContent = null;
Resource compressedResource = _factory.newResource(compressedPathInContext);
if (compressedResource.exists() && compressedResource.lastModified().isAfter(resource.lastModified()) &&
compressedResource.length() < resource.length())
{
compressedContent = new CachedHttpContent(compressedPathInContext, compressedResource, null);
CachedHttpContent added = _cache.putIfAbsent(compressedPathInContext, compressedContent);
if (added != null)
{
compressedContent.invalidate();
compressedContent = added;
}
}
}
if (compressedContent != null)
precompresssedContents.put(format, compressedContent);
}
content = new CachedHttpContent(pathInContext, resource, precompresssedContents);
}
else
content = new CachedHttpContent(pathInContext, resource, null);
// Add it to the cache.
CachedHttpContent added = _cache.putIfAbsent(pathInContext, content);
if (added != null)
{
content.invalidate();
content = added;
}
return content;
}
// Look for non Cacheable precompressed resource or content
String mt = _mimeTypes.getMimeByExtension(pathInContext);
if (_precompressedFormats.length > 0)
{
// Is the precompressed content cached?
Map<CompressedContentFormat, HttpContent> compressedContents = new HashMap<>();
for (CompressedContentFormat format : _precompressedFormats)
{
String compressedPathInContext = pathInContext + format.getExtension();
CachedHttpContent compressedContent = _cache.get(compressedPathInContext);
if (compressedContent != null && compressedContent.isValid() && Files.getLastModifiedTime(compressedContent.getResource().getPath()).toInstant().isAfter(resource.lastModified()))
compressedContents.put(format, compressedContent);
// Is there a precompressed resource?
Resource compressedResource = _factory.newResource(compressedPathInContext);
if (compressedResource.exists() && compressedResource.lastModified().isAfter(resource.lastModified()) &&
compressedResource.length() < resource.length())
compressedContents.put(format,
new ResourceHttpContent(compressedResource, _mimeTypes.getMimeByExtension(compressedPathInContext)));
}
if (!compressedContents.isEmpty())
return new ResourceHttpContent(resource, mt, compressedContents);
}
return new ResourceHttpContent(resource, mt);
}
private void shrinkCache()
{
// While we need to shrink
while (_cache.size() > 0 && (_cachedFiles.get() > _maxCachedFiles || _cachedSize.get() > _maxCacheSize))
{
// Scan the entire cache and generate an ordered list by last accessed time.
SortedSet<CachedHttpContent> sorted = new TreeSet<>(
Comparator.comparing((CachedHttpContent c) -> c._lastAccessed)
.thenComparingLong(c -> c._contentLengthValue)
.thenComparing(c -> c._key));
sorted.addAll(_cache.values());
// Invalidate least recently used first
for (CachedHttpContent content : sorted)
{
if (_cachedFiles.get() <= _maxCachedFiles && _cachedSize.get() <= _maxCacheSize)
break;
if (content == _cache.remove(content.getKey()))
content.invalidate();
}
}
}
protected ByteBuffer getIndirectBuffer(Resource resource)
{
try
{
return BufferUtil.toBuffer(resource, false);
}
catch (IOException | IllegalArgumentException e)
{
if (LOG.isDebugEnabled())
LOG.debug("Unable to get Indirect Buffer for {}", resource, e);
}
return null;
}
protected ByteBuffer getMappedBuffer(Resource resource)
{
// Only use file mapped buffers for cached resources, otherwise too much virtual memory commitment for
// a non shared resource. Also ignore max buffer size
try
{
if (_useFileMappedBuffer && resource.getPath() != null && resource.length() <= Integer.MAX_VALUE)
return BufferUtil.toMappedBuffer(resource);
}
catch (IOException | IllegalArgumentException e)
{
if (LOG.isDebugEnabled())
LOG.debug("Unable to get Mapped Buffer for {}", resource, e);
}
return null;
}
protected ByteBuffer getDirectBuffer(Resource resource)
{
try
{
return BufferUtil.toBuffer(resource, true);
}
catch (IOException | IllegalArgumentException e)
{
if (LOG.isDebugEnabled())
LOG.debug("Unable to get Direct Buffer for {}", resource, e);
}
return null;
}
@Override
public String toString()
{
return "ResourceCache[" + _parent + "," + _factory + "]@" + hashCode();
}
/**
* MetaData associated with a context Resource.
*/
public class CachedHttpContent implements HttpContent
{
private final String _key;
private final Resource _resource;
private final long _contentLengthValue;
private final HttpField _contentType;
private final String _characterEncoding;
private final MimeTypes.Type _mimeType;
private final HttpField _contentLength;
private final HttpField _lastModified;
private final Instant _lastModifiedValue;
private final HttpField _etag;
private final Map<CompressedContentFormat, CachedPrecompressedHttpContent> _precompressed;
private final AtomicReference<ByteBuffer> _indirectBuffer = new AtomicReference<>();
private final AtomicReference<ByteBuffer> _directBuffer = new AtomicReference<>();
private final AtomicReference<ByteBuffer> _mappedBuffer = new AtomicReference<>();
private volatile Instant _lastAccessed;
CachedHttpContent(String pathInContext, Resource resource, Map<CompressedContentFormat, CachedHttpContent> precompressedResources)
{
_key = pathInContext;
_resource = resource;
String contentType = _mimeTypes.getMimeByExtension(_resource.toString());
_contentType = contentType == null ? null : new PreEncodedHttpField(HttpHeader.CONTENT_TYPE, contentType);
_characterEncoding = _contentType == null ? null : MimeTypes.getCharsetFromContentType(contentType);
_mimeType = _contentType == null ? null : MimeTypes.CACHE.get(MimeTypes.getContentTypeWithoutCharset(contentType));
boolean exists = resource.exists();
_lastModifiedValue = exists ? resource.lastModified() : null;
_lastModified = _lastModifiedValue == null ? null
: new PreEncodedHttpField(HttpHeader.LAST_MODIFIED, DateGenerator.formatDate(_lastModifiedValue));
_contentLengthValue = exists ? resource.length() : 0;
_contentLength = new PreEncodedHttpField(HttpHeader.CONTENT_LENGTH, Long.toString(_contentLengthValue));
if (_cachedFiles.incrementAndGet() > _maxCachedFiles)
shrinkCache();
_lastAccessed = Instant.now();
_etag = CachedContentFactory.this._etags ? new PreEncodedHttpField(HttpHeader.ETAG, EtagUtils.computeWeakEtag(resource.getPath())) : null;
if (precompressedResources != null)
{
_precompressed = new HashMap<>(precompressedResources.size());
for (Map.Entry<CompressedContentFormat, CachedHttpContent> entry : precompressedResources.entrySet())
{
_precompressed.put(entry.getKey(), new CachedPrecompressedHttpContent(this, entry.getValue(), entry.getKey()));
}
}
else
{
_precompressed = NO_PRECOMPRESSED;
}
}
public String getKey()
{
return _key;
}
public boolean isCached()
{
return _key != null;
}
@Override
public Resource getResource()
{
return _resource;
}
@Override
public HttpField getETag()
{
return _etag;
}
@Override
public String getETagValue()
{
return _etag.getValue();
}
boolean isValid()
{
if (_lastModifiedValue == _resource.lastModified() && _contentLengthValue == _resource.length())
{
_lastAccessed = Instant.now();
return true;
}
if (this == _cache.remove(_key))
invalidate();
return false;
}
protected void invalidate()
{
ByteBuffer indirect = _indirectBuffer.getAndSet(null);
if (indirect != null)
_cachedSize.addAndGet(-BufferUtil.length(indirect));
ByteBuffer direct = _directBuffer.getAndSet(null);
if (direct != null)
_cachedSize.addAndGet(-BufferUtil.length(direct));
_mappedBuffer.getAndSet(null);
_cachedFiles.decrementAndGet();
}
@Override
public HttpField getLastModified()
{
return _lastModified;
}
@Override
public String getLastModifiedValue()
{
return _lastModified == null ? null : _lastModified.getValue();
}
@Override
public HttpField getContentType()
{
return _contentType;
}
@Override
public String getContentTypeValue()
{
return _contentType == null ? null : _contentType.getValue();
}
@Override
public HttpField getContentEncoding()
{
return null;
}
@Override
public String getContentEncodingValue()
{
return null;
}
@Override
public String getCharacterEncoding()
{
return _characterEncoding;
}
@Override
public Type getMimeType()
{
return _mimeType;
}
@Override
public HttpField getContentLength()
{
return _contentLength;
}
@Override
public long getContentLengthValue()
{
return _contentLengthValue;
}
@Override
public String toString()
{
return String.format("CachedContent@%x{r=%s,e=%b,lm=%s,ct=%s,c=%d}", hashCode(), _resource, _resource.exists(), _lastModified, _contentType, _precompressed.size());
}
@Override
public Map<CompressedContentFormat, ? extends HttpContent> getPrecompressedContents()
{
if (_precompressed.size() == 0)
return null;
Map<CompressedContentFormat, CachedPrecompressedHttpContent> ret = _precompressed;
for (Map.Entry<CompressedContentFormat, CachedPrecompressedHttpContent> entry : _precompressed.entrySet())
{
if (!entry.getValue().isValid())
{
if (ret == _precompressed)
ret = new HashMap<>(_precompressed);
ret.remove(entry.getKey());
}
}
return ret;
}
@Override
public ByteBuffer getBuffer()
{
return _indirectBuffer.get();
}
@Override
public void release()
{
invalidate();
}
}
public class CachedPrecompressedHttpContent extends PrecompressedHttpContent
{
private final CachedHttpContent _content;
private final CachedHttpContent _precompressedContent;
private final HttpField _etag;
CachedPrecompressedHttpContent(CachedHttpContent content, CachedHttpContent precompressedContent, CompressedContentFormat format)
{
super(content, precompressedContent, format);
_content = content;
_precompressedContent = precompressedContent;
// _etag = (CachedContentFactory.this._etags) ? new PreEncodedHttpField(HttpHeader.ETAG, _content.getResource().getWeakETag(format.getEtagSuffix())) : null;
_etag = null;
}
public boolean isValid()
{
return _precompressedContent.isValid() && _content.isValid(); // && _content.getResource().lastModified() <= _precompressedContent.getResource().lastModified();
}
@Override
public HttpField getETag()
{
if (_etag != null)
return _etag;
return super.getETag();
}
@Override
public String getETagValue()
{
if (_etag != null)
return _etag.getValue();
return super.getETagValue();
}
@Override
public String toString()
{
return "Cached" + super.toString();
}
}
}

View File

@@ -33,7 +33,6 @@ import org.eclipse.jetty.util.resource.ResourceFactory;
* this factory are not intended to be cached, so memory limits for individual
* HttpOutput streams are enforced.
*/
//TODO remove
public class ResourceContentFactory implements ContentFactory
{
private final ResourceFactory _factory;

View File

@@ -272,7 +272,6 @@ public class ResourceHandler extends Handler.Wrapper
public void setPrecompressedFormats(List<CompressedContentFormat> precompressedFormats)
{
_resourceService.setPrecompressedFormats(precompressedFormats);
setupContentFactory();
}
public void setEncodingCacheSize(int encodingCacheSize)
@@ -288,7 +287,6 @@ public class ResourceHandler extends Handler.Wrapper
public void setMimeTypes(MimeTypes mimeTypes)
{
_mimeTypes = mimeTypes;
setupContentFactory();
}
/**

View File

@@ -1,392 +0,0 @@
//
// ========================================================================
// Copyright (c) 1995-2022 Mort Bay Consulting Pty Ltd and others.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// https://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
// which is available at https://www.apache.org/licenses/LICENSE-2.0.
//
// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
// ========================================================================
//
package org.eclipse.jetty.server;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.List;
import java.util.stream.Stream;
import org.eclipse.jetty.http.CompressedContentFormat;
import org.eclipse.jetty.http.HttpContent;
import org.eclipse.jetty.http.MimeTypes;
import org.eclipse.jetty.http.ResourceHttpContent;
import org.eclipse.jetty.toolchain.test.FS;
import org.eclipse.jetty.toolchain.test.jupiter.WorkDir;
import org.eclipse.jetty.toolchain.test.jupiter.WorkDirExtension;
import org.eclipse.jetty.util.BufferUtil;
import org.eclipse.jetty.util.resource.FileSystemPool;
import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.util.resource.ResourceCollection;
import org.eclipse.jetty.util.resource.ResourceFactory;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ExtendWith(WorkDirExtension.class)
@Disabled // TODO
public class ResourceCacheTest
{
public WorkDir workDir;
@BeforeEach
public void beforeEach()
{
assertThat(FileSystemPool.INSTANCE.mounts(), empty());
}
@AfterEach
public void afterEach()
{
assertThat(FileSystemPool.INSTANCE.mounts(), empty());
}
public Path createUtilTestResources(Path basePath) throws IOException
{
// root
makeFile(basePath.resolve("resource.txt"), "this is test data");
// - one/
Path one = basePath.resolve("one");
FS.ensureDirExists(one);
makeFile(one.resolve("1.txt"), "1 - one");
// - one/dir/
Path oneDir = one.resolve("dir");
FS.ensureDirExists(oneDir);
makeFile(oneDir.resolve("1.txt"), "1 - one");
// - two/
Path two = basePath.resolve("two");
FS.ensureDirExists(two);
makeFile(two.resolve("1.txt"), "1 - two");
makeFile(two.resolve("2.txt"), "2 - two");
// - two/dir/
Path twoDir = two.resolve("dir");
FS.ensureDirExists(twoDir);
makeFile(twoDir.resolve("2.txt"), "2 - two");
// - three/
Path three = basePath.resolve("three");
FS.ensureDirExists(three);
makeFile(three.resolve("2.txt"), "2 - three");
makeFile(three.resolve("3.txt"), "3 - three");
// - three/dir/
Path threeDir = three.resolve("dir");
FS.ensureDirExists(threeDir);
makeFile(threeDir.resolve("3.txt"), "3 - three");
// - four/
Path four = basePath.resolve("four");
FS.ensureDirExists(four);
makeFile(four.resolve("four"), "4 - four (no extension)");
makeFile(four.resolve("four.txt"), "4 - four");
return basePath;
}
private void makeFile(Path file, String contents) throws IOException
{
try (BufferedWriter writer = Files.newBufferedWriter(file, UTF_8, StandardOpenOption.CREATE_NEW))
{
writer.write(contents);
writer.flush();
}
}
@Test
public void testMultipleSources1() throws Exception
{
Path basePath = createUtilTestResources(workDir.getEmptyPathDir());
List<Resource> resourceList = Stream.of("one", "two", "three")
.map(basePath::resolve)
.map(ResourceFactory.root()::newResource)
.toList();
ResourceCollection rc = Resource.combine(resourceList);
List<Resource> r = rc.getResources();
MimeTypes mime = new MimeTypes();
CachedContentFactory rc3 = new CachedContentFactory(null, ResourceFactory.of(r.get(2)), mime, false, false, CompressedContentFormat.NONE);
CachedContentFactory rc2 = new CachedContentFactory(rc3, ResourceFactory.of(r.get(1)), mime, false, false, CompressedContentFormat.NONE);
CachedContentFactory rc1 = new CachedContentFactory(rc2, ResourceFactory.of(r.get(0)), mime, false, false, CompressedContentFormat.NONE);
assertEquals(getContent(rc1, "1.txt"), "1 - one");
assertEquals(getContent(rc1, "2.txt"), "2 - two");
assertEquals(getContent(rc1, "3.txt"), "3 - three");
assertEquals(getContent(rc2, "1.txt"), "1 - two");
assertEquals(getContent(rc2, "2.txt"), "2 - two");
assertEquals(getContent(rc2, "3.txt"), "3 - three");
assertNull(getContent(rc3, "1.txt"));
assertEquals(getContent(rc3, "2.txt"), "2 - three");
assertEquals(getContent(rc3, "3.txt"), "3 - three");
}
@Test
public void testUncacheable() throws Exception
{
Path basePath = createUtilTestResources(workDir.getEmptyPathDir());
List<Resource> resourceList = Stream.of("one", "two", "three")
.map(basePath::resolve)
.map(ResourceFactory.root()::newResource)
.toList();
ResourceCollection rc = Resource.combine(resourceList);
List<Resource> r = rc.getResources();
MimeTypes mime = new MimeTypes();
CachedContentFactory rc3 = new CachedContentFactory(null, ResourceFactory.of(r.get(2)), mime, false, false, CompressedContentFormat.NONE);
CachedContentFactory rc2 = new CachedContentFactory(rc3, ResourceFactory.of(r.get(1)), mime, false, false, CompressedContentFormat.NONE)
{
@Override
public boolean isCacheable(Resource resource)
{
return super.isCacheable(resource) && !resource.getFileName().equals("2.txt");
}
};
CachedContentFactory rc1 = new CachedContentFactory(rc2, ResourceFactory.of(r.get(0)), mime, false, false, CompressedContentFormat.NONE);
assertEquals(getContent(rc1, "1.txt"), "1 - one");
assertEquals(getContent(rc1, "2.txt"), "2 - two");
assertEquals(getContent(rc1, "3.txt"), "3 - three");
assertEquals(getContent(rc2, "1.txt"), "1 - two");
assertEquals(getContent(rc2, "2.txt"), "2 - two");
assertEquals(getContent(rc2, "3.txt"), "3 - three");
assertNull(getContent(rc3, "1.txt"));
assertEquals(getContent(rc3, "2.txt"), "2 - three");
assertEquals(getContent(rc3, "3.txt"), "3 - three");
}
@Test
public void testResourceCache() throws Exception
{
final Resource directory;
File[] files = new File[10];
String[] names = new String[files.length];
CachedContentFactory cache;
Path basePath = workDir.getEmptyPathDir();
for (int i = 0; i < files.length; i++)
{
Path tmpFile = basePath.resolve("R-" + i + ".txt");
try (BufferedWriter writer = Files.newBufferedWriter(tmpFile, UTF_8, StandardOpenOption.CREATE_NEW))
{
for (int j = 0; j < (i * 10 - 1); j++)
{
writer.write(' ');
}
writer.write('\n');
}
files[i] = tmpFile.toFile();
names[i] = tmpFile.getFileName().toString();
}
directory = ResourceFactory.root().newResource(files[0].getParentFile().getAbsolutePath());
cache = new CachedContentFactory(null, ResourceFactory.of(directory), new MimeTypes(), false, false, CompressedContentFormat.NONE);
cache.setMaxCacheSize(95);
cache.setMaxCachedFileSize(85);
cache.setMaxCachedFiles(4);
assertNull(cache.getContent("does not exist"));
assertTrue(cache.getContent(names[9]) instanceof ResourceHttpContent);
assertNotNull(cache.getContent(names[9]).getBuffer());
HttpContent content;
content = cache.getContent(names[8]);
assertThat(content, is(not(nullValue())));
assertEquals(80, content.getContentLengthValue());
assertEquals(0, cache.getCachedSize());
if (org.junit.jupiter.api.condition.OS.LINUX.isCurrentOs())
{
// Initially not using memory mapped files
content.getBuffer();
assertEquals(80, cache.getCachedSize());
// with both types of buffer loaded, this is too large for cache
content.getBuffer();
assertEquals(0, cache.getCachedSize());
assertEquals(0, cache.getCachedFiles());
cache = new CachedContentFactory(null, ResourceFactory.of(directory), new MimeTypes(), true, false, CompressedContentFormat.NONE);
cache.setMaxCacheSize(95);
cache.setMaxCachedFileSize(85);
cache.setMaxCachedFiles(4);
content = cache.getContent(names[8]);
content.getBuffer();
assertEquals(cache.isUseFileMappedBuffer() ? 0 : 80, cache.getCachedSize());
// with both types of buffer loaded, this is not too large for cache because
// mapped buffers don't count, so we can continue
}
content.getBuffer();
assertEquals(80, cache.getCachedSize());
assertEquals(1, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[1]);
assertEquals(80, cache.getCachedSize());
content.getBuffer();
assertEquals(90, cache.getCachedSize());
assertEquals(2, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[2]);
content.getBuffer();
assertEquals(30, cache.getCachedSize());
assertEquals(2, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[3]);
content.getBuffer();
assertEquals(60, cache.getCachedSize());
assertEquals(3, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[4]);
content.getBuffer();
assertEquals(90, cache.getCachedSize());
assertEquals(3, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[5]);
content.getBuffer();
assertEquals(90, cache.getCachedSize());
assertEquals(2, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[6]);
content.getBuffer();
assertEquals(60, cache.getCachedSize());
assertEquals(1, cache.getCachedFiles());
Thread.sleep(200);
try (OutputStream out = new FileOutputStream(files[6]))
{
out.write(' ');
}
content = cache.getContent(names[7]);
content.getBuffer();
assertEquals(70, cache.getCachedSize());
assertEquals(1, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[6]);
content.getBuffer();
assertEquals(71, cache.getCachedSize());
assertEquals(2, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[0]);
content.getBuffer();
assertEquals(72, cache.getCachedSize());
assertEquals(3, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[1]);
content.getBuffer();
assertEquals(82, cache.getCachedSize());
assertEquals(4, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[2]);
content.getBuffer();
assertEquals(32, cache.getCachedSize());
assertEquals(4, cache.getCachedFiles());
Thread.sleep(200);
content = cache.getContent(names[3]);
content.getBuffer();
assertEquals(61, cache.getCachedSize());
assertEquals(4, cache.getCachedFiles());
Thread.sleep(200);
cache.flushCache();
assertEquals(0, cache.getCachedSize());
assertEquals(0, cache.getCachedFiles());
cache.flushCache();
}
@Test
public void testNoextension() throws Exception
{
Path basePath = createUtilTestResources(workDir.getEmptyPathDir());
Resource resource = ResourceFactory.root().newResource(basePath.resolve("four"));
MimeTypes mime = new MimeTypes();
CachedContentFactory cache = new CachedContentFactory(null, ResourceFactory.of(resource), mime, false, false, CompressedContentFormat.NONE);
assertEquals(getContent(cache, "four.txt"), "4 - four");
assertEquals(getContent(cache, "four"), "4 - four (no extension)");
}
static String getContent(CachedContentFactory rc, String path) throws Exception
{
HttpContent content = rc.getContent(path);
if (content == null)
return null;
return BufferUtil.toString(content.getBuffer());
}
}

View File

@@ -1212,7 +1212,6 @@ public class ResourceHandlerTest
public void testCachingMaxCacheSizeRespected() throws Exception
{
copySimpleTestResource(docRoot);
// TODO explicitly turn on caching
long expectedSize = Files.size(docRoot.resolve("simple.txt"));
CachingContentFactory contentFactory = (CachingContentFactory)_rootResourceHandler.getContentFactory();
contentFactory.setMaxCacheSize((int)expectedSize);
@@ -1363,6 +1362,7 @@ public class ResourceHandlerTest
public void testCachingPrecompressedFilesCached() throws Exception
{
setupBigFiles(docRoot);
long expectedSize = Files.size(docRoot.resolve("big.txt")) +
Files.size(docRoot.resolve("big.txt.gz"));

View File

@@ -84,7 +84,7 @@ public abstract class Resource implements Iterable<Resource>
{
if (resource == null)
return "null exists=false directory=false lm=-1";
return "%s exists=%b directory=%b lm=%d"
return "%s exists=%b directory=%b lm=%s"
.formatted(resource.toString(), resource.exists(), resource.isDirectory(), resource.lastModified());
}

View File

@@ -93,6 +93,11 @@ public class DefaultServlet extends HttpServlet
private boolean _isPathInfoOnly = false;
public ResourceService getResourceService()
{
return _resourceService;
}
@Override
public void init() throws ServletException
{
@@ -118,8 +123,7 @@ public class DefaultServlet extends HttpServlet
// TODO: should this come from context?
MimeTypes mimeTypes = new MimeTypes();
// TODO: this is configured further down below - see _resourceService.setPrecompressedFormats
List<CompressedContentFormat> precompressedFormats = List.of();
List<CompressedContentFormat> precompressedFormats = parsePrecompressedFormats(getInitParameter("precompressed"), getInitBoolean("gzip"), _resourceService.getPrecompressedFormats());
_useFileMappedBuffer = getInitBoolean("useFileMappedBuffer", _useFileMappedBuffer);
ResourceContentFactory resourceContentFactory = new ResourceContentFactory(ResourceFactory.of(_baseResource), mimeTypes, precompressedFormats);
@@ -157,7 +161,7 @@ public class DefaultServlet extends HttpServlet
_resourceService.setAcceptRanges(getInitBoolean("acceptRanges", _resourceService.isAcceptRanges()));
_resourceService.setDirAllowed(getInitBoolean("dirAllowed", _resourceService.isDirAllowed()));
_resourceService.setRedirectWelcome(getInitBoolean("redirectWelcome", _resourceService.isRedirectWelcome()));
_resourceService.setPrecompressedFormats(parsePrecompressedFormats(getInitParameter("precompressed"), getInitBoolean("gzip"), _resourceService.getPrecompressedFormats()));
_resourceService.setPrecompressedFormats(precompressedFormats);
_resourceService.setEtags(getInitBoolean("etags", _resourceService.isEtags()));
_isPathInfoOnly = getInitBoolean("pathInfoOnly", _isPathInfoOnly);
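Note on the hunk above: the precompressed format list is now parsed from the servlet init parameters ("precompressed", "gzip") before the ResourceContentFactory is built, instead of being hard-coded to an empty list. A configuration sketch, assuming a ServletContextHandler named context as in DefaultServletTest and a comma-separated encoding=extension value syntax (the parsePrecompressedFormats rules are not shown in this diff):
// Sketch only, not part of this diff.
ServletHolder holder = context.addServlet(DefaultServlet.class, "/*");
holder.setInitParameter("resourceBase", docRoot.toString());  // docRoot assumed
holder.setInitParameter("precompressed", "br=.br,gzip=.gz");  // value syntax assumed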

View File

@@ -311,11 +311,14 @@ public class ServletContextResponse extends ContextResponse
case CACHE_CONTROL:
case LAST_MODIFIED:
case EXPIRES:
case ETAG:
case DATE:
case VARY:
i.remove();
continue;
case ETAG:
if (getStatus() != HttpStatus.NOT_MODIFIED_304)
i.remove();
continue;
default:
}
}
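Note on the hunk above: the ETag header is no longer stripped when the response status is 304 Not Modified, so conditional requests see the entity tag echoed back. Roughly what the If-None-Match tests later in this diff observe, using the HttpTester pattern; the connector, context path and etag value here are assumed for illustration.
// Sketch only, not part of this diff.
String rawResponse = connector.getResponse("""
    GET /context/file.txt HTTP/1.1\r
    Host: local\r
    Connection: close\r
    If-None-Match: %s\r
    \r
    """.formatted(etag));
HttpTester.Response response = HttpTester.parseResponse(rawResponse);
assertThat(response.getStatus(), is(HttpStatus.NOT_MODIFIED_304));
assertThat(response.get(HttpHeader.ETAG), is(etag)); // kept on 304 after this change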

View File

@@ -2090,7 +2090,6 @@ public class DefaultServletTest
}
@Test
@Disabled
public void testGzip() throws Exception
{
FS.ensureDirExists(docRoot);
@@ -2243,7 +2242,6 @@ public class DefaultServletTest
}
@Test
@Disabled
public void testCachedGzip() throws Exception
{
FS.ensureDirExists(docRoot);
@@ -2371,7 +2369,6 @@ public class DefaultServletTest
}
@Test
@Disabled
public void testBrotli() throws Exception
{
Files.writeString(docRoot.resolve("data0.txt"), "Hello Text 0", UTF_8);
@@ -2511,7 +2508,6 @@ public class DefaultServletTest
}
@Test
@Disabled
public void testCachedBrotli() throws Exception
{
Files.writeString(docRoot.resolve("data0.txt"), "Hello Text 0", UTF_8);
@@ -2636,7 +2632,6 @@ public class DefaultServletTest
}
@Test
@Disabled
public void testDefaultBrotliOverGzip() throws Exception
{
Files.writeString(docRoot.resolve("data0.txt"), "Hello Text 0", UTF_8);
@@ -2685,7 +2680,6 @@ public class DefaultServletTest
}
@Test
@Disabled
public void testCustomCompressionFormats() throws Exception
{
Files.writeString(docRoot.resolve("data0.txt"), "Hello Text 0", UTF_8);
@@ -2736,7 +2730,6 @@ public class DefaultServletTest
}
@Test
@Disabled
public void testProgrammaticCustomCompressionFormats() throws Exception
{
Files.writeString(docRoot.resolve("data0.txt"), "Hello Text 0", UTF_8);
@@ -2744,13 +2737,22 @@ public class DefaultServletTest
Files.writeString(docRoot.resolve("data0.txt.gz"), "fake gzip", UTF_8);
Files.writeString(docRoot.resolve("data0.txt.bz2"), "fake bzip2", UTF_8);
ResourceService resourceService = new ResourceService();
resourceService.setPrecompressedFormats(List.of(
new CompressedContentFormat("bzip2", ".bz2"),
new CompressedContentFormat("gzip", ".gz"),
new CompressedContentFormat("br", ".br")
));
ServletHolder defholder = new ServletHolder(new DefaultServlet()); // TODO: how to integrate resource service / precompressed format
DefaultServlet defaultServlet = new DefaultServlet();
ServletHolder defholder = new ServletHolder(defaultServlet)
{
@Override
public void initialize() throws Exception
{
super.initialize();
ResourceService resourceService = defaultServlet.getResourceService();
resourceService.setPrecompressedFormats(List.of(
new CompressedContentFormat("bzip2", ".bz2"),
new CompressedContentFormat("gzip", ".gz"),
new CompressedContentFormat("br", ".br")
));
}
};
context.addServlet(defholder, "/");
defholder.setInitParameter("resourceBase", docRoot.toString());
@@ -2910,7 +2912,6 @@ public class DefaultServletTest
"Hello World",
"Now is the time for all good men to come to the aid of the party"
})
@Disabled
public void testIfETag(String content) throws Exception
{
Files.writeString(docRoot.resolve("file.txt"), content, UTF_8);
@@ -2953,7 +2954,7 @@ public class DefaultServletTest
Connection:close\r
If-None-Match: wibble,@ETAG@,wobble\r
\r
""".replace("@ETAG", etag));
""".replace("@ETAG@", etag));
response = HttpTester.parseResponse(rawResponse);
assertThat(response.toString(), response.getStatus(), is(HttpStatus.NOT_MODIFIED_304));
@@ -2983,7 +2984,7 @@ public class DefaultServletTest
Connection:close\r
If-Match: @ETAG@\r
\r
""".replace("@ETAG", etag));
""".replace("@ETAG@", etag));
response = HttpTester.parseResponse(rawResponse);
assertThat(response.toString(), response.getStatus(), is(HttpStatus.OK_200));
@@ -2993,7 +2994,7 @@ public class DefaultServletTest
Connection:close\r
If-Match: wibble,@ETAG@,wobble\r
\r
""".replace("@ETAG", etag));
""".replace("@ETAG@", etag));
response = HttpTester.parseResponse(rawResponse);
assertThat(response.toString(), response.getStatus(), is(HttpStatus.OK_200));

View File

@@ -21,6 +21,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
@@ -230,7 +231,7 @@ public class CachedContentFactory implements HttpContent.ContentFactory
{
compressedContent = null;
Resource compressedResource = _factory.newResource(compressedPathInContext);
if (compressedResource.exists() && compressedResource.lastModified().isAfter(resource.lastModified()) &&
if (compressedResource.exists() && ResourceContentFactory.newerThanOrEqual(compressedResource, resource) &&
compressedResource.length() < resource.length())
{
compressedContent = new CachedHttpContent(compressedPathInContext, compressedResource, null);
@@ -271,12 +272,12 @@ public class CachedContentFactory implements HttpContent.ContentFactory
{
String compressedPathInContext = pathInContext + format.getExtension();
CachedHttpContent compressedContent = _cache.get(compressedPathInContext);
if (compressedContent != null && compressedContent.isValid() && compressedContent.getResource().lastModified().isAfter(resource.lastModified()))
if (compressedContent != null && compressedContent.isValid() && ResourceContentFactory.newerThanOrEqual(compressedContent.getResource(), resource))
compressedContents.put(format, compressedContent);
// Is there a precompressed resource?
Resource compressedResource = _factory.newResource(compressedPathInContext);
if (compressedResource.exists() && compressedResource.lastModified().isAfter(resource.lastModified()) &&
if (compressedResource.exists() && ResourceContentFactory.newerThanOrEqual(compressedResource, resource) &&
compressedResource.length() < resource.length())
compressedContents.put(format,
new ResourceHttpContent(compressedResource, _mimeTypes.getMimeByExtension(compressedPathInContext)));
@@ -452,7 +453,7 @@ public class CachedContentFactory implements HttpContent.ContentFactory
boolean isValid()
{
if (_lastModifiedValue == _resource.lastModified() && _contentLengthValue == _resource.length())
if (Objects.equals(_lastModifiedValue, _resource.lastModified()) && _contentLengthValue == _resource.length())
{
_lastAccessed = Instant.now();
return true;
@@ -611,7 +612,8 @@ public class CachedContentFactory implements HttpContent.ContentFactory
public boolean isValid()
{
return _precompressedContent.isValid() && _content.isValid() && _content.getResource().lastModified().isBefore(_precompressedContent.getResource().lastModified());
return _precompressedContent.isValid() && _content.isValid() &&
ResourceContentFactory.newerThanOrEqual(_precompressedContent.getResource(), _content.getResource());
}
@Override

View File

@@ -87,7 +87,7 @@ public class ResourceContentFactory implements ContentFactory
{
String compressedPathInContext = pathInContext + format.getExtension();
Resource compressedResource = _factory.newResource(compressedPathInContext);
if (compressedResource != null && compressedResource.exists() && compressedResource.lastModified().isAfter(resource.lastModified()) &&
if (compressedResource != null && compressedResource.exists() && ResourceContentFactory.newerThanOrEqual(compressedResource, resource) &&
compressedResource.length() < resource.length())
compressedContents.put(format,
new ResourceHttpContent(compressedResource, _mimeTypes.getMimeByExtension(compressedPathInContext)));
@@ -98,6 +98,17 @@ public class ResourceContentFactory implements ContentFactory
return new ResourceHttpContent(resource, mt);
}
/**
* <p>Utility to compare {@link Resource#lastModified()} of two resources.</p>
* @param resource1 the first resource to test.
* @param resource2 the second resource to test.
* @return true if the last modified time of resource1 is newer than or equal to that of resource2.
*/
static boolean newerThanOrEqual(Resource resource1, Resource resource2)
{
return !resource2.lastModified().isAfter(resource1.lastModified());
}
@Override
public String toString()
{
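Note on the helper above: newerThanOrEqual(a, b) is implemented as !b.lastModified().isAfter(a.lastModified()), so equal timestamps now qualify, which is why the strict isAfter() comparisons in the precompressed-content checks were replaced. A tiny illustration with arbitrary Instant values:
// Illustration only, not part of this diff.
Instant source = Instant.parse("2022-10-04T00:00:00Z");
Instant gz = source;                    // sibling .gz written in the same timestamp tick
boolean oldCheck = gz.isAfter(source);  // false: precompressed variant was skipped
boolean newCheck = !source.isAfter(gz); // true:  variant is now eligible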

View File

@@ -358,7 +358,6 @@ public class DefaultServletTest
}
@Test
@Disabled // TODO
public void testListingProperUrlEncoding() throws Exception
{
ServletHolder defholder = context.addServlet(DefaultServlet.class, "/*");
@@ -962,7 +961,6 @@ public class DefaultServletTest
* Ensure that oddball directory names are served with proper escaping
*/
@Test
@Disabled // TODO
public void testWelcomeRedirectDirWithQuestion() throws Exception
{
FS.ensureDirExists(docRoot);
@@ -995,7 +993,6 @@ public class DefaultServletTest
* Ensure that oddball directory names are served with proper escaping
*/
@Test
@Disabled // TODO
public void testWelcomeRedirectDirWithSemicolon() throws Exception
{
FS.ensureDirExists(docRoot);
@@ -1553,7 +1550,6 @@ public class DefaultServletTest
}
@Test
@Disabled // TODO
public void testGzip() throws Exception
{
FS.ensureDirExists(docRoot);
@@ -1648,7 +1644,6 @@ public class DefaultServletTest
}
@Test
@Disabled // TODO
public void testCachedGzip() throws Exception
{
FS.ensureDirExists(docRoot);
@@ -1731,7 +1726,6 @@ public class DefaultServletTest
}
@Test
@Disabled // TODO
public void testBrotli() throws Exception
{
createFile(docRoot.resolve("data0.txt"), "Hello Text 0");
@@ -1819,7 +1813,6 @@ public class DefaultServletTest
}
@Test
@Disabled // TODO
public void testCachedBrotli() throws Exception
{
createFile(docRoot.resolve("data0.txt"), "Hello Text 0");
@@ -1899,7 +1892,6 @@ public class DefaultServletTest
}
@Test
@Disabled // TODO
public void testDefaultBrotliOverGzip() throws Exception
{
createFile(docRoot.resolve("data0.txt"), "Hello Text 0");
@@ -1936,7 +1928,6 @@ public class DefaultServletTest
}
@Test
@Disabled // TODO
public void testCustomCompressionFormats() throws Exception
{
createFile(docRoot.resolve("data0.txt"), "Hello Text 0");