FIX: Improve clearing store cache (#9568)

1. Shorter
2. Simpler
3. Doesn't depend on external binaries
4. Doesn't fail on large numbers of files
5. Hopefully eliminates flaky spec errors
Jarek Radosz 2020-04-28 17:24:04 +02:00 committed by GitHub
parent ec2943c5bc
commit c1c211365a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 5 additions and 15 deletions


@@ -149,22 +149,12 @@ module FileStore
       FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
       FileUtils.cp(file.path, path)
 
-      # Keep latest 500 files
-      processes = Open3.pipeline(
-        ["ls -t #{CACHE_DIR}", err: "/dev/null"],
-        "tail -n +#{CACHE_MAXIMUM_SIZE + 1}",
-        "awk '$0=\"#{CACHE_DIR}\"$0'",
-        "xargs rm -f"
-      )
+      # Remove all but CACHE_MAXIMUM_SIZE most recent files
+      files = Dir.glob("#{CACHE_DIR}*")
+        .sort_by { |f| File.mtime(f) }
+        .slice(0...-CACHE_MAXIMUM_SIZE)
 
-      ls = processes.shift
-      # Exit status `1` in `ls` occurs when e.g. "listing a directory
-      # in which entries are actively being removed or renamed".
-      # It's safe to ignore it here.
-      if ![0, 1].include?(ls.exitstatus) || !processes.all?(&:success?)
-        raise "Error clearing old cache"
-      end
+      FileUtils.rm(files, force: true)
     end
 
     private
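
For reference, the new clearing logic in isolation: a minimal sketch, assuming CACHE_DIR ends with a trailing slash and CACHE_MAXIMUM_SIZE is 500 (per the old "Keep latest 500 files" comment). The constant values and the trim_cache wrapper below are illustrative only, not part of the commit.

require "fileutils"

# Illustrative placeholders; the real constants live in the FileStore code.
CACHE_DIR = "/tmp/store-cache/"
CACHE_MAXIMUM_SIZE = 500

# Delete everything except the CACHE_MAXIMUM_SIZE most recently modified
# cache files, using only Ruby's standard library (no ls/tail/awk/xargs).
def trim_cache
  files = Dir.glob("#{CACHE_DIR}*")
    .sort_by { |f| File.mtime(f) }     # oldest first
    .slice(0...-CACHE_MAXIMUM_SIZE)    # select all but the newest CACHE_MAXIMUM_SIZE

  # slice(0...-N) returns an empty array when the directory holds fewer
  # than N files, so rm is simply a no-op in that case.
  FileUtils.rm(files, force: true)
end

trim_cache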