DEV: Log duration of ember-cli asset build (#13980)
parent 683712fae7
commit 8e45fdfbb1
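This change adds timing instrumentation to the ember-cli asset build: a `current_timestamp` helper based on `Process::CLOCK_MONOTONIC` and a `log_task_duration` wrapper that reports how long a block took. It then wraps `yarn install`, `ember build -prod`, the asset copy and manifest update, and the per-file JS compression in that wrapper, replacing the ad-hoc `startAll`/`start` stopwatch code in the compression loop.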
@@ -190,6 +190,17 @@ def concurrent?
   end
 end
 
+def current_timestamp
+  Process.clock_gettime(Process::CLOCK_MONOTONIC)
+end
+
+def log_task_duration(task_description, &task)
+  task_start = current_timestamp
+  task.call
+  STDERR.puts "Done '#{task_description}' : #{(current_timestamp - task_start).round(2)} secs"
+  STDERR.puts
+end
+
 def geolite_dbs
   @geolite_dbs ||= %w{
     GeoLite2-City
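The new helpers are plain core Ruby, so they can be exercised in isolation; `CLOCK_MONOTONIC` is the right clock for durations because, unlike `Time.now`, it is unaffected by system clock adjustments. A minimal sketch, with `sleep` standing in for a real build step:

    def current_timestamp
      Process.clock_gettime(Process::CLOCK_MONOTONIC)
    end

    def log_task_duration(task_description, &task)
      task_start = current_timestamp
      task.call
      STDERR.puts "Done '#{task_description}' : #{(current_timestamp - task_start).round(2)} secs"
      STDERR.puts
    end

    # Prints something like: Done 'pretend build step' : 0.25 secs
    log_task_duration('pretend build step') { sleep 0.25 }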
@@ -229,58 +240,66 @@ def copy_ember_cli_assets
   assets = {}
   files = {}
 
-  unless system("yarn --cwd #{ember_dir} install")
-    STDERR.puts "Error running yarn install"
-    exit 1
-  end
+  log_task_duration('yarn install') {
+    unless system("yarn --cwd #{ember_dir} install")
+      STDERR.puts "Error running yarn install"
+      exit 1
+    end
+  }
 
-  unless system("yarn --cwd #{ember_dir} run ember build -prod")
-    STDERR.puts "Error running ember build"
-    exit 1
-  end
+  log_task_duration('ember build -prod') {
+    unless system("yarn --cwd #{ember_dir} run ember build -prod")
+      STDERR.puts "Error running ember build"
+      exit 1
+    end
+  }
 
   # Copy assets and generate manifest data
-  Dir["#{ember_cli_assets}**/*"].each do |f|
-    if f !~ /test/ && File.file?(f)
-      rel_file = f.sub(ember_cli_assets, "")
-      digest = f.scan(/\-([a-f0-9]+)\./)[0][0]
-
-      dest = "public/assets"
-      dest_sub = dest
-      if rel_file =~ /^([a-z\-\_]+)\//
-        dest_sub = "#{dest}/#{Regexp.last_match[1]}"
-      end
-
-      FileUtils.mkdir_p(dest_sub) unless Dir.exists?(dest_sub)
-      log_file = File.basename(rel_file).sub("-#{digest}", "")
-
-      # It's simpler to serve the file as `application.js`
-      if log_file == "discourse.js"
-        log_file = "application.js"
-        rel_file.sub!(/^discourse/, "application")
-      end
-
-      res = FileUtils.cp(f, "#{dest}/#{rel_file}")
-
-      assets[log_file] = rel_file
-      files[rel_file] = {
-        "logical_path" => log_file,
-        "mtime" => File.mtime(f).iso8601(9),
-        "size" => File.size(f),
-        "digest" => digest,
-        "integrity" => "sha384-#{Base64.encode64(Digest::SHA384.digest(File.read(f))).chomp}"
-      }
-    end
-  end
+  log_task_duration('Copy assets and generate manifest data') {
+    Dir["#{ember_cli_assets}**/*"].each do |f|
+      if f !~ /test/ && File.file?(f)
+        rel_file = f.sub(ember_cli_assets, "")
+        digest = f.scan(/\-([a-f0-9]+)\./)[0][0]
+
+        dest = "public/assets"
+        dest_sub = dest
+        if rel_file =~ /^([a-z\-\_]+)\//
+          dest_sub = "#{dest}/#{Regexp.last_match[1]}"
+        end
+
+        FileUtils.mkdir_p(dest_sub) unless Dir.exists?(dest_sub)
+        log_file = File.basename(rel_file).sub("-#{digest}", "")
+
+        # It's simpler to serve the file as `application.js`
+        if log_file == "discourse.js"
+          log_file = "application.js"
+          rel_file.sub!(/^discourse/, "application")
+        end
+
+        res = FileUtils.cp(f, "#{dest}/#{rel_file}")
+
+        assets[log_file] = rel_file
+        files[rel_file] = {
+          "logical_path" => log_file,
+          "mtime" => File.mtime(f).iso8601(9),
+          "size" => File.size(f),
+          "digest" => digest,
+          "integrity" => "sha384-#{Base64.encode64(Digest::SHA384.digest(File.read(f))).chomp}"
+        }
+      end
+    end
+  }
 
   # Update manifest file
-  manifest_result = Dir["public/assets/.sprockets-manifest-*.json"]
-  if manifest_result && manifest_result.size == 1
-    json = JSON.parse(File.read(manifest_result[0]))
-    json['files'].merge!(files)
-    json['assets'].merge!(assets)
-    File.write(manifest_result[0], json.to_json)
-  end
+  log_task_duration('Update manifest file') {
+    manifest_result = Dir["public/assets/.sprockets-manifest-*.json"]
+    if manifest_result && manifest_result.size == 1
+      json = JSON.parse(File.read(manifest_result[0]))
+      json['files'].merge!(files)
+      json['assets'].merge!(assets)
+      File.write(manifest_result[0], json.to_json)
+    end
+  }
 end
 
 task 'test_ember_cli_copy' do
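One subtlety of the wrapping above: when `yarn install` or `ember build` fails, `exit 1` raises `SystemExit` inside the block, so `task.call` never returns and no duration line is printed for the failed step. That is fine for a build script, but a hypothetical variant (not part of this commit) could log unconditionally via `ensure`:

    # Hypothetical variant: report the duration even when the block
    # raises or calls Kernel#exit, since `ensure` runs in both cases.
    def log_task_duration(task_description, &task)
      task_start = current_timestamp
      task.call
    ensure
      STDERR.puts "Done '#{task_description}' : #{(current_timestamp - task_start).round(2)} secs"
      STDERR.puts
    end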
@@ -332,7 +351,6 @@ task 'assets:precompile' => 'assets:precompile:before' do
 
   if $bypass_sprockets_uglify
     puts "Compressing Javascript and Generating Source Maps"
-    startAll = Process.clock_gettime(Process::CLOCK_MONOTONIC)
     manifest = Sprockets::Manifest.new(assets_path)
 
     locales = Set.new(["en"])
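The `startAll` stopwatch deleted here fed the hand-rolled "Done compressing all JS files" message; the next hunk removes that message too and times the whole compression pass with `log_task_duration` instead.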
@@ -341,43 +359,40 @@ task 'assets:precompile' => 'assets:precompile:before' do
       locales.add(SiteSetting.default_locale)
     end
 
-    concurrent? do |proc|
-      manifest.files
-        .select { |k, v| k =~ /\.js$/ }
-        .each do |file, info|
-
-          path = "#{assets_path}/#{file}"
-          _file = (d = File.dirname(file)) == "." ? "_#{file}" : "#{d}/_#{File.basename(file)}"
-          _path = "#{assets_path}/#{_file}"
-          max_compress = max_compress?(info["logical_path"], locales)
-          if File.exists?(_path)
-            STDERR.puts "Skipping: #{file} already compressed"
-          elsif file.include? "discourse/tests"
-            STDERR.puts "Skipping: #{file}"
-          else
-            proc.call do
-              start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
-              STDERR.puts "#{start} Compressing: #{file}"
-
-              if max_compress
-                FileUtils.mv(path, _path)
-                compress(_file, file)
-              end
-
-              info["size"] = File.size(path)
-              info["mtime"] = File.mtime(path).iso8601
-              gzip(path)
-              brotli(path, max_compress)
-
-              STDERR.puts "Done compressing #{file} : #{(Process.clock_gettime(Process::CLOCK_MONOTONIC) - start).round(2)} secs"
-              STDERR.puts
-            end
-          end
-        end
-    end
-
-    STDERR.puts "Done compressing all JS files : #{(Process.clock_gettime(Process::CLOCK_MONOTONIC) - startAll).round(2)} secs"
-    STDERR.puts
+    log_task_duration('Done compressing all JS files') {
+      concurrent? do |proc|
+        manifest.files
+          .select { |k, v| k =~ /\.js$/ }
+          .each do |file, info|
+
+            path = "#{assets_path}/#{file}"
+            _file = (d = File.dirname(file)) == "." ? "_#{file}" : "#{d}/_#{File.basename(file)}"
+            _path = "#{assets_path}/#{_file}"
+            max_compress = max_compress?(info["logical_path"], locales)
+            if File.exists?(_path)
+              STDERR.puts "Skipping: #{file} already compressed"
+            elsif file.include? "discourse/tests"
+              STDERR.puts "Skipping: #{file}"
+            else
+              proc.call do
+                log_task_duration(file) {
+                  STDERR.puts "Compressing: #{file}"
+
+                  if max_compress
+                    FileUtils.mv(path, _path)
+                    compress(_file, file)
+                  end
+
+                  info["size"] = File.size(path)
+                  info["mtime"] = File.mtime(path).iso8601
+                  gzip(path)
+                  brotli(path, max_compress)
+                }
+              end
+            end
+          end
+      end
+    }
 
     # protected
    manifest.send :save
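The timers now nest: `log_task_duration(file)` measures each file inside `proc.call`, while the outer wrapper measures the whole `concurrent?` pass. One quirk: since the helper already prefixes its message with `Done '…'`, the label chosen here prints as `Done 'Done compressing all JS files' : … secs`. A sketch of the nesting, using two hypothetical file names:

    # Inner timers print one line per file; the outer timer prints the
    # wall-clock time of the whole batch after the block returns.
    log_task_duration('Done compressing all JS files') {
      %w[application.js admin.js].each do |file|
        log_task_duration(file) {
          # gzip/brotli compression would happen here
        }
      end
    }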