# frozen_string_literal: true

require 'fileutils'
require 'logger'
require 'sprockets/encoding_utils'
require 'sprockets/path_utils'
require 'zlib'

module Sprockets
class Cache
# Public: A file system cache store that automatically cleans up old keys.
#
# Assign the instance to the Environment#cache.
#
# environment.cache = Sprockets::Cache::FileStore.new("/tmp")
#
# See Also
#
# ActiveSupport::Cache::FileStore
#
class FileStore
      # Internal: Default byte size limit for the store.
DEFAULT_MAX_SIZE = 25 * 1024 * 1024
EXCLUDED_DIRS = ['.', '..'].freeze
      GITKEEP_FILES = ['.gitkeep', '.keep'].freeze

      # Internal: Default standard error fatal logger.
#
# Returns a Logger.
def self.default_logger
logger = Logger.new($stderr)
logger.level = Logger::FATAL
logger
      end

      # Public: Initialize the cache store.
#
      # root     - A String path to a directory to persist cached values to.
      # max_size - An Integer of the maximum size the store will hold (in bytes).
      #            (default: 25MB).
      # logger   - The logger to which some info will be printed.
      #            (default logger level is FATAL and won't output anything).
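      #
      # Examples
      #
      #   (a usage sketch; the directory and byte count are illustrative,
      #   any writable path and Integer work)
      #
      #     Sprockets::Cache::FileStore.new("/tmp/sprockets",
      #                                     100 * 1024 * 1024)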
def initialize(root, max_size = DEFAULT_MAX_SIZE, logger = self.class.default_logger)
@root = root
@max_size = max_size
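        # GC watermark: collect down to 75% of max_size so one write over
        # the limit doesn't immediately re-trigger garbage collection.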
@gc_size = max_size * 0.75
@logger = logger
      end

      # Public: Retrieve value from cache.
#
# This API should not be used directly, but via the Cache wrapper API.
#
# key - String cache key.
#
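      # Examples
      #
      #   (sketch; "asset-key" stands in for a real cache key)
      #
      #     store.get("asset-key") # => cached Object, or nil on a miss
      #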
      # Returns Object or nil if the value is not set.
def get(key)
path = File.join(@root, "#{key}.cache")
value = safe_open(path) do |f|
begin
EncodingUtils.unmarshaled_deflated(f.read, Zlib::MAX_WBITS)
rescue Exception => e
@logger.error do
"#{self.class}[#{path}] could not be unmarshaled: " +
"#{e.class}: #{e.message}"
end
nil
end
end
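        # Bump the file's mtime on a hit so recently read keys sort last in
        # find_caches and are the last to be garbage collected.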
if value
FileUtils.touch(path)
value
end
      end

      # Public: Set a key and value in the cache.
#
# This API should not be used directly, but via the Cache wrapper API.
#
# key - String cache key.
# value - Object value.
#
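      # Examples
      #
      #   (a round-trip sketch; assumes the Sprockets::Cache wrapper and a
      #   writable cache directory)
      #
      #     store = Sprockets::Cache::FileStore.new("/tmp/sprockets")
      #     cache = Sprockets::Cache.new(store)
      #     cache.fetch("key") { "value" }  # miss: block runs, set is called
      #     cache.fetch("key") { raise }    # hit: get succeeds, block skipped
      #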
# Returns Object value.
def set(key, value)
path = File.join(@root, "#{key}.cache")
# Ensure directory exists
FileUtils.mkdir_p File.dirname(path)
# Check if cache exists before writing
exists = File.exist?(path)
# Serialize value
marshaled = Marshal.dump(value)
# Compress if larger than 4KB
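        # (window bits must match the Zlib::MAX_WBITS that #get passes to
        # EncodingUtils.unmarshaled_deflated when inflating)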
if marshaled.bytesize > 4 * 1024
deflater = Zlib::Deflate.new(
Zlib::BEST_COMPRESSION,
Zlib::MAX_WBITS,
Zlib::MAX_MEM_LEVEL,
Zlib::DEFAULT_STRATEGY
)
deflater << marshaled
raw = deflater.finish
else
raw = marshaled
end
# Write data
PathUtils.atomic_write(path) do |f|
f.write(raw)
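          # Only grow the size estimate for brand-new files; overwrites keep
          # the old figure until the next gc! recomputes it.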
@size = size + f.size unless exists
end
# GC if necessary
gc! if size > @max_size
value
      end

      # Public: Pretty inspect
#
# Returns String.
def inspect
"#<#{self.class} size=#{size}/#{@max_size}>"
      end

      # Public: Clear the cache
#
# adapted from ActiveSupport::Cache::FileStore#clear
#
# Deletes all items from the cache. In this case it deletes all the entries in the specified
# file store directory except for .keep or .gitkeep. Be careful which directory is specified
# as @root because everything in that directory will be deleted.
#
# Returns true
def clear(options=nil)
if File.exist?(@root)
root_dirs = Dir.entries(@root).reject { |f| (EXCLUDED_DIRS + GITKEEP_FILES).include?(f) }
FileUtils.rm_r(root_dirs.collect{ |f| File.join(@root, f) })
end
true
      end

      private

      # Internal: Get all cache files along with stats.
#
# Returns an Array of [String filename, File::Stat] pairs sorted by
# mtime.
def find_caches
Dir.glob(File.join(@root, '**/*.cache')).reduce([]) { |stats, filename|
stat = safe_stat(filename)
          # stat may be nil if the file was removed between the Dir.glob
          # call and the stat
stats << [filename, stat] if stat
stats
}.sort_by { |_, stat| stat.mtime.to_i }
end
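
      # Internal: Current size of the cache in bytes, computed lazily on
      # first use and then adjusted incrementally by set and gc!.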
def size
@size ||= compute_size(find_caches)
end
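
      # Internal: Sum the on-disk byte sizes of [filename, stat] pairs.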
def compute_size(caches)
caches.inject(0) { |sum, (_, stat)| sum + stat.size }
end
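
      # Internal: Stat a file, returning nil if it was removed after being
      # globbed.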
def safe_stat(fn)
File.stat(fn)
rescue Errno::ENOENT
nil
end
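
      # Internal: Open a file for reading if it exists.
      #
      # The trailing rescue covers the race where the file is removed
      # between the File.exist? check and File.open.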
def safe_open(path, &block)
if File.exist?(path)
File.open(path, 'rb', &block)
end
rescue Errno::ENOENT
end
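
      # Internal: Delete the oldest cache files until the store shrinks
      # below the GC watermark, then log how many files were collected.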
def gc!
start_time = Time.now
caches = find_caches
size = compute_size(caches)
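        # find_caches sorts by mtime, so this walks oldest-first and marks
        # files for deletion until the projected size drops below @gc_size.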
delete_caches, keep_caches = caches.partition { |filename, stat|
deleted = size > @gc_size
size -= stat.size
deleted
}
return if delete_caches.empty?
FileUtils.remove(delete_caches.map(&:first), force: true)
@size = compute_size(keep_caches)
@logger.warn do
secs = Time.now.to_f - start_time.to_f
"#{self.class}[#{@root}] garbage collected " +
"#{delete_caches.size} files (#{(secs * 1000).to_i}ms)"
end
end
end
end
end