require 'sprockets/asset'
require 'sprockets/digest_utils'
require 'sprockets/engines'
require 'sprockets/errors'
require 'sprockets/file_reader'
require 'sprockets/mime'
require 'sprockets/path_utils'
require 'sprockets/processing'
require 'sprockets/processor_utils'
require 'sprockets/resolve'
require 'sprockets/transformers'
require 'sprockets/uri_utils'
require 'sprockets/unloaded_asset'
module Sprockets
# The loader phase takes an asset URI location and returns a constructed Asset
# object.
module Loader
include DigestUtils, PathUtils, ProcessorUtils, URIUtils
include Engines, Mime, Processing, Resolve, Transformers
# Public: Load Asset by Asset URI.
#
# uri - A String containing the complete URI to a file, including scheme
#       and full path, such as:
#       "file:///Path/app/assets/js/app.js?type=application/javascript"
#
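# Examples
#
#   # Hypothetical asset URI; in practice this comes from `resolve`.
#   load("file:///Path/app/assets/js/app.js?type=application/javascript")
#   # => #<Sprockets::Asset ...>
#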
# Returns Asset.
def load(uri)
unloaded = UnloadedAsset.new(uri, self)
if unloaded.params.key?(:id)
unless asset = asset_from_cache(unloaded.asset_key)
id = unloaded.params.delete(:id)
uri_without_id = build_asset_uri(unloaded.filename, unloaded.params)
asset = load_from_unloaded(UnloadedAsset.new(uri_without_id, self))
if asset[:id] != id
@logger.warn "Sprockets load error: Tried to find #{uri}, but latest was id #{asset[:id]}"
end
end
else
asset = fetch_asset_from_dependency_cache(unloaded) do |paths|
# When an asset was previously generated, its "dependencies" are stored in the cache.
# The presence of `paths` indicates dependencies were stored.
# We can check whether the dependencies have changed by "resolving" them and
# generating a digest key from the resolved entries. If this digest key has not
# changed, the asset will be pulled from cache.
#
# If `paths` is present but the cache returns nothing, then `fetch_asset_from_dependency_cache`
# will confusingly be called again with `paths` set to nil, and the asset will be
# loaded from disk.
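# Illustrative flow (names as used below): if resolving `paths` yields the same
# entries as when the asset was cached, `DigestUtils.digest` produces the same
# digest, `unloaded.digest_key(digest)` hits the cache, and the stored asset URI
# is rehydrated via `asset_from_cache`.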
if paths
digest = DigestUtils.digest(resolve_dependencies(paths))
if uri_from_cache = cache.get(unloaded.digest_key(digest), true)
asset_from_cache(UnloadedAsset.new(uri_from_cache, self).asset_key)
end
else
load_from_unloaded(unloaded)
end
end
end
Asset.new(self, asset)
end
private
# Internal: Load asset hash from cache
#
# key - A String containing lookup information for an asset
#
# This method converts all "compressed" paths to absolute paths.
# Returns a hash of values representing an asset
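#
# For example (hypothetical paths, abbreviated hash):
#
#   asset_from_cache(unloaded.asset_key)
#   # => { uri:      "file:///Path/app/assets/js/app.js?type=application/javascript&id=abc...",
#   #      filename: "/Path/app/assets/js/app.js",
#   #      metadata: { ... }, ... }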
def asset_from_cache(key)
asset = cache.get(key, true)
if asset
asset[:uri] = expand_from_root(asset[:uri])
asset[:load_path] = expand_from_root(asset[:load_path])
asset[:filename] = expand_from_root(asset[:filename])
asset[:metadata][:included].map! { |uri| expand_from_root(uri) } if asset[:metadata][:included]
asset[:metadata][:links].map! { |uri| expand_from_root(uri) } if asset[:metadata][:links]
asset[:metadata][:stubbed].map! { |uri| expand_from_root(uri) } if asset[:metadata][:stubbed]
asset[:metadata][:required].map! { |uri| expand_from_root(uri) } if asset[:metadata][:required]
asset[:metadata][:dependencies].map! { |uri| uri.start_with?("file-digest://") ? expand_from_root(uri) : uri } if asset[:metadata][:dependencies]
asset[:metadata].each_key do |k|
next unless k =~ /_dependencies\z/
asset[:metadata][k].map! { |uri| expand_from_root(uri) }
end
end
asset
end
# Internal: Loads an asset and saves it to cache
#
# unloaded - An UnloadedAsset
#
# This method is only called when the given unloaded asset could not be
# successfully pulled from cache.
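#
# Returns a hash describing the loaded asset, roughly of the form (values are
# illustrative only):
#
#   { uri:          "file:///Path/app/assets/js/app.js?type=application/javascript&id=abc...",
#     filename:     "/Path/app/assets/js/app.js",
#     logical_path: "app.js",
#     content_type: "application/javascript",
#     source:       "...",
#     metadata:     { dependencies: [...], digest: "...", length: 123 },
#     id:           "abc..." }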
def load_from_unloaded(unloaded)
unless file?(unloaded.filename)
raise FileNotFound, "could not find file: #{unloaded.filename}"
end
load_path, logical_path = paths_split(config[:paths], unloaded.filename)
unless load_path
raise FileOutsidePaths, "#{unloaded.filename} is no longer under a load path: #{self.paths.join(', ')}"
end
logical_path, file_type, engine_extnames, _ = parse_path_extnames(logical_path)
name = logical_path
if pipeline = unloaded.params[:pipeline]
logical_path += ".#{pipeline}"
end
if type = unloaded.params[:type]
logical_path += config[:mime_types][type][:extensions].first
end
if type != file_type && !config[:transformers][file_type][type]
raise ConversionError, "could not convert #{file_type.inspect} to #{type.inspect}"
end
processors = processors_for(type, file_type, engine_extnames, pipeline)
processors_dep_uri = build_processors_uri(type, file_type, engine_extnames, pipeline)
dependencies = config[:dependencies] + [processors_dep_uri]
# Read into memory and process if there's a processor pipeline
if processors.any?
result = call_processors(processors, {
environment: self,
cache: self.cache,
uri: unloaded.uri,
filename: unloaded.filename,
load_path: load_path,
name: name,
content_type: type,
metadata: { dependencies: dependencies }
})
validate_processor_result!(result)
source = result.delete(:data)
metadata = result
metadata[:charset] = source.encoding.name.downcase unless metadata.key?(:charset)
metadata[:digest] = digest(source)
metadata[:length] = source.bytesize
else
dependencies << build_file_digest_uri(unloaded.filename)
metadata = {
digest: file_digest(unloaded.filename),
length: self.stat(unloaded.filename).size,
dependencies: dependencies
}
end
asset = {
uri: unloaded.uri,
load_path: load_path,
filename: unloaded.filename,
name: name,
logical_path: logical_path,
content_type: type,
source: source,
metadata: metadata,
dependencies_digest: DigestUtils.digest(resolve_dependencies(metadata[:dependencies]))
}
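# The asset `id` is a digest of the fully built asset hash above; folding it
# back into the URI lets this exact version be looked up from cache later.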
asset[:id] = pack_hexdigest(digest(asset))
asset[:uri] = build_asset_uri(unloaded.filename, unloaded.params.merge(id: asset[:id]))
# Deprecated: Avoid tracking Asset mtime
asset[:mtime] = metadata[:dependencies].map { |u|
if u.start_with?("file-digest:")
s = self.stat(parse_file_digest_uri(u))
s ? s.mtime.to_i : nil
else
nil
end
}.compact.max
asset[:mtime] ||= self.stat(unloaded.filename).mtime.to_i
store_asset(asset, unloaded)
asset
end
# Internal: Save a given asset to the cache
#
# asset - A hash containing values of loaded asset
# unloaded - The UnloadedAsset used to lookup the `asset`
#
# This method converts all absolute paths to "compressed" paths
# which are relative if they're in the root.
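#
# For example (hypothetical root "/Full/path"), "/Full/path/app/assets/js/app.js"
# would be stored as something like "app/assets/js/app.js" via `compress_from_root`
# and turned back into an absolute path on the way out by `expand_from_root`.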
def store_asset(asset, unloaded)
# Save the asset in the cache under the new URI
cached_asset = asset.dup
cached_asset[:uri] = compress_from_root(asset[:uri])
cached_asset[:filename] = compress_from_root(asset[:filename])
cached_asset[:load_path] = compress_from_root(asset[:load_path])
if cached_asset[:metadata]
# Deep dup to avoid modifying `asset`
cached_asset[:metadata] = cached_asset[:metadata].dup
if cached_asset[:metadata][:included] && !cached_asset[:metadata][:included].empty?
cached_asset[:metadata][:included] = cached_asset[:metadata][:included].dup
cached_asset[:metadata][:included].map! { |uri| compress_from_root(uri) }
end
if cached_asset[:metadata][:links] && !cached_asset[:metadata][:links].empty?
cached_asset[:metadata][:links] = cached_asset[:metadata][:links].dup
cached_asset[:metadata][:links].map! { |uri| compress_from_root(uri) }
end
if cached_asset[:metadata][:stubbed] && !cached_asset[:metadata][:stubbed].empty?
cached_asset[:metadata][:stubbed] = cached_asset[:metadata][:stubbed].dup
cached_asset[:metadata][:stubbed].map! { |uri| compress_from_root(uri) }
end
if cached_asset[:metadata][:required] && !cached_asset[:metadata][:required].empty?
cached_asset[:metadata][:required] = cached_asset[:metadata][:required].dup
cached_asset[:metadata][:required].map! { |uri| compress_from_root(uri) }
end
if cached_asset[:metadata][:dependencies] && !cached_asset[:metadata][:dependencies].empty?
cached_asset[:metadata][:dependencies] = cached_asset[:metadata][:dependencies].dup
cached_asset[:metadata][:dependencies].map! do |uri|
uri.start_with?("file-digest://".freeze) ? compress_from_root(uri) : uri
end
end
# Compress all `*_dependencies` keys in metadata, e.g. `sass_dependencies`
cached_asset[:metadata].each do |key, value|
next unless key =~ /_dependencies\z/
cached_asset[:metadata][key] = value.dup
cached_asset[:metadata][key].map! {|uri| compress_from_root(uri) }
end
end
# The unloaded asset and stored_asset now have different URIs
stored_asset = UnloadedAsset.new(asset[:uri], self)
cache.set(stored_asset.asset_key, cached_asset, true)
# Save the new relative path for the digest key of the unloaded asset
cache.set(unloaded.digest_key(asset[:dependencies_digest]), stored_asset.compressed_path, true)
end
# Internal: Resolve set of dependency URIs.
#
# uris - An Array of "dependencies" for example:
#
#   ["environment-version", "environment-paths", "processors:type=text/css&file_type=text/css",
#    "file-digest:///Full/path/app/assets/stylesheets/application.css",
#    "processors:type=text/css&file_type=text/css&pipeline=self",
#    "file-digest:///Full/path/app/assets/stylesheets"]
#
# The resolved values are the things that the given URIs depend on. For example,
# the environment version: if you're running a different version of Sprockets,
# the dependencies should be different. This is used only for generating cache
# keys; for instance, "environment-version" may resolve to "environment-1.0-3.2.0"
# for version "3.2.0" of Sprockets.
#
# Any paths that are returned are converted to relative paths.
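#
# For example (illustrative values only):
#
#   resolve_dependencies(["environment-version"])
#   # => ["environment-1.0-3.2.0"]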
#
# Returns array of resolved dependencies
def resolve_dependencies(uris)
uris.map { |uri| resolve_dependency(uri) }
end
# Internal: Retrieves an asset based on its digest
#
# unloaded - An UnloadedAsset
# limit - A Fixnum which sets the maximum number of versions of "histories"
# stored in the cache
#
# This method attempts to retrieve the last `limit` number of "histories" of an
# asset from the cache. A "history" is an array of unresolved "dependencies" that
# the asset needs in order to compile. A dependency can refer either to another
# asset (e.g. index.js may rely on jquery.js, so jquery.js is a dependency), or
# to other factors that may affect compilation, such as the VERSION of Sprockets
# (i.e. the environment) and what "processors" are used.
#
# For example, a history array may look something like this:
#
#   [["environment-version", "environment-paths", "processors:type=text/css&file_type=text/css",
#     "file-digest:///Full/path/app/assets/stylesheets/application.css",
#     "processors:type=text/css&file_type=text/css&pipeline=self",
#     "file-digest:///Full/path/app/assets/stylesheets"]]
#
# Here the first entry is the set of dependencies for the last generated version
# of that asset. Multiple versions are stored since Sprockets keeps the last
# `limit` number of generated assets present in the system.
#
# If a "history" of dependencies is present in the cache, each version of "history" will be
# yielded to the passed block which is responsible for loading the asset. If found, the existing
# history will be saved with the dependency that found a valid asset moved to the front.
#
# If no history is present, or if none of the histories resolve to a valid asset,
# then the block is yielded to with no arguments and is expected to return a valid
# asset. When this happens, the dependencies of the returned asset are added to
# the "history", and older entries are removed if the "history" grows beyond `limit`.
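#
# Example (hypothetical block; mirrors the usage in `load` above):
#
#   fetch_asset_from_dependency_cache(unloaded) do |paths|
#     if paths
#       # try the cache using a digest of the resolved paths
#     else
#       load_from_unloaded(unloaded)
#     end
#   end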
def fetch_asset_from_dependency_cache(unloaded, limit = 3)
key = unloaded.dependency_history_key
history = cache.get(key) || []
history.each_with_index do |deps, index|
expanded_deps = deps.map do |path|
path.start_with?("file-digest://") ? expand_from_root(path) : path
end
if asset = yield(expanded_deps)
cache.set(key, history.rotate!(index)) if index > 0
return asset
end
end
asset = yield
deps = asset[:metadata][:dependencies].dup.map! do |uri|
uri.start_with?("file-digest://") ? compress_from_root(uri) : uri
end
cache.set(key, history.unshift(deps).take(limit))
asset
end
end
end