2019-01-31 23:40:48 -05:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
|
|
|
require "aws-sdk-s3"
|
|
|
|
require "csv"
|
|
|
|
|
|
|
|
class S3Inventory
|
2019-05-01 19:05:35 -04:00
|
|
|
# type           - "original" or "optimized"; also the S3 key prefix filtered on
# model          - Upload or OptimizedImage (set in #initialize)
# inventory_date - timestamp the loaded inventory is considered current as of
attr_reader :type, :model, :inventory_date

# Column offsets within each inventory CSV row (key and etag).
# NOTE(review): column 0 is presumably the bucket name — confirm against the
# configured S3 inventory schema.
CSV_KEY_INDEX ||= 1
CSV_ETAG_INDEX ||= 2

# Key prefix and schema version under which inventory files live in the bucket.
# `||=` avoids "already initialized constant" warnings when the file reloads.
INVENTORY_PREFIX ||= "inventory"
INVENTORY_VERSION ||= "1"

# S3 inventory generation trails the bucket's real state; dates derived from a
# symlink file's mtime are shifted back by this lag (see #files).
INVENTORY_LAG ||= 2.days
|
2019-01-31 23:40:48 -05:00
|
|
|
|
2020-07-28 20:49:45 -04:00
|
|
|
# Sets up an inventory reader/reconciler.
#
# s3_helper                 - S3Helper used for listing and downloading
# type                      - :upload or :optimized; picks the model + prefix
# preloaded_inventory_file  - optional local CSV, skips fetching from S3
# preloaded_inventory_date  - date the preloaded inventory was generated
def initialize(s3_helper, type, preloaded_inventory_file: nil, preloaded_inventory_date: nil)
  @s3_helper = s3_helper

  # When both preloaded values are supplied the caller already fetched the
  # inventory, so reuse it instead of downloading from S3 again.
  if preloaded_inventory_file && preloaded_inventory_date
    @preloaded_inventory_file = preloaded_inventory_file
    @inventory_date = preloaded_inventory_date
  end

  case type
  when :upload
    @type = "original"
    @model = Upload
  when :optimized
    @type = "optimized"
    @model = OptimizedImage
  end
end
|
|
|
|
|
2019-02-19 11:24:35 -05:00
|
|
|
# Reconciles the database against the latest S3 inventory listing:
#   1. streams (url, etag) pairs from the inventory CSVs into a temp table,
#   2. backfills NULL etags on the model table from that temp table,
#   3. for Uploads, marks rows verified / invalid_etag by etag presence,
#   4. logs every DB row whose etag never appears in the inventory.
# Bails out early (after logging an error) when no inventory files are
# available and no preloaded inventory was supplied.
def backfill_etags_and_list_missing
  if !@preloaded_inventory_file && files.blank?
    error("Failed to list inventory from S3")
    return
  end

  # Prevent two processes from reconciling the same upload type concurrently.
  DistributedMutex.synchronize("s3_inventory_list_missing_#{type}", validity: 30.minutes) do
    begin
      download_and_decompress_files if !@preloaded_inventory_file

      multisite_prefix = Discourse.store.upload_path
      ActiveRecord::Base.transaction do
        begin
          # Raw PG connection + COPY for bulk-load speed; the temp table is
          # dropped again in the ensure below.
          connection.exec(
            "CREATE TEMP TABLE #{table_name}(url text UNIQUE, etag text, PRIMARY KEY(etag, url))",
          )
          connection.copy_data("COPY #{table_name} FROM STDIN CSV") do
            for_each_inventory_row do |row|
              key = row[CSV_KEY_INDEX]

              # Skip keys belonging to other sites or the other upload type.
              next if Rails.configuration.multisite && key.exclude?(multisite_prefix)
              next if key.exclude?("#{type}/")

              url = File.join(Discourse.store.absolute_base_url, key)
              connection.put_copy_data("#{url},#{row[CSV_ETAG_INDEX]}\n")
            end
          end

          # backfilling etags
          connection.async_exec(
            "UPDATE #{model.table_name}
            SET etag = #{table_name}.etag
            FROM #{table_name}
            WHERE #{model.table_name}.etag IS NULL AND
              #{model.table_name}.url = #{table_name}.url",
          )

          # Only rows older than the inventory can be judged missing — newer
          # rows may simply not have been inventoried yet.
          uploads = model.where("updated_at < ?", inventory_date)
          uploads = uploads.by_users if model == Upload

          # "Missing" = no inventory row shares this record's etag.
          missing_uploads =
            uploads.joins(
              "LEFT JOIN #{table_name} ON #{table_name}.etag = #{model.table_name}.etag",
            ).where("#{table_name}.etag IS NULL")

          # Rows whose URL does appear in the inventory but under a different
          # etag — missing only because the stored etag is stale.
          exists_with_different_etag =
            missing_uploads
              .joins(
                "LEFT JOIN #{table_name} inventory2 ON inventory2.url = #{model.table_name}.url",
              )
              .where("inventory2.etag IS NOT NULL")
              .pluck(:id)

          # marking as verified/not verified
          if model == Upload
            sql_params = {
              inventory_date: inventory_date,
              invalid_etag: Upload.verification_statuses[:invalid_etag],
              verified: Upload.verification_statuses[:verified],
              seeded_id_threshold: model::SEEDED_ID_THRESHOLD,
            }

            # Seeded uploads (id <= threshold) are never (re)classified.
            DB.exec(<<~SQL, sql_params)
              UPDATE #{model.table_name}
              SET verification_status = :verified
              WHERE etag IS NOT NULL
              AND verification_status <> :verified
              AND updated_at < :inventory_date
              AND id > :seeded_id_threshold
              AND EXISTS
              (
                SELECT 1
                FROM #{table_name}
                WHERE #{table_name}.etag = #{model.table_name}.etag
              )
            SQL

            DB.exec(<<~SQL, sql_params)
              UPDATE #{model.table_name}
              SET verification_status = :invalid_etag
              WHERE verification_status <> :invalid_etag
              AND updated_at < :inventory_date
              AND id > :seeded_id_threshold
              AND NOT EXISTS
              (
                SELECT 1
                FROM #{table_name}
                WHERE #{table_name}.etag = #{model.table_name}.etag
              )
            SQL
          end

          if (missing_count = missing_uploads.count) > 0
            missing_uploads
              .select(:id, :url)
              .find_each do |upload|
                if exists_with_different_etag.include?(upload.id)
                  log "#{upload.url} has different etag"
                else
                  log upload.url
                end
              end

            log "#{missing_count} of #{uploads.count} #{model.name.underscore.pluralize} are missing"
            if exists_with_different_etag.present?
              log "#{exists_with_different_etag.count} of these are caused by differing etags"
              log "Null the etag column and re-run for automatic backfill"
            end
          end

          # Record the missing count in site stats.
          Discourse.stats.set("missing_s3_#{model.table_name}", missing_count)
        ensure
          connection.exec("DROP TABLE #{table_name}") unless connection.nil?
        end
      end
    ensure
      cleanup!
    end
  end
end
|
|
|
|
|
2020-07-28 20:49:45 -04:00
|
|
|
# Iterates the inventory, yielding each parsed CSV row. Reads either the
# preloaded local file or every downloaded chunk (whose decompressed name is
# the ".gz" filename with the suffix stripped).
def for_each_inventory_row
  if @preloaded_inventory_file
    CSV.foreach(@preloaded_inventory_file) { |row| yield(row) }
    return
  end

  files.each do |file|
    decompressed = file[:filename][0...-3]
    CSV.foreach(decompressed) { |row| yield(row) }
  end
end
|
|
|
|
|
2019-08-13 01:59:31 -04:00
|
|
|
# Fetches a single gzipped inventory chunk from S3 into the tmp directory.
# Skips the download when the file is already present locally.
#
# file - Hash with :key (S3 object key) and :filename (local target path)
def download_inventory_file_to_tmp_directory(file)
  return if File.exist?(file[:filename])

  log "Downloading inventory file '#{file[:key]}' to tmp directory..."
  # Fixed message: was "Failed to inventory file ..." (missing "download").
  failure_message = "Failed to download inventory file '#{file[:key]}' to tmp directory."

  @s3_helper.download_file(file[:key], file[:filename], failure_message)
end
|
|
|
|
|
2019-08-13 01:59:31 -04:00
|
|
|
# Decompresses one downloaded inventory chunk in place (gzip drops the ".gz"
# suffix), running inside the tmp directory.
def decompress_inventory_file(file)
  log "Decompressing inventory file '#{file[:filename]}', this may take a while..."

  command = ["gzip", "--decompress", file[:filename]]
  Discourse::Utils.execute_command(
    *command,
    failure_message: "Failed to decompress inventory file '#{file[:filename]}'.",
    chdir: tmp_directory,
  )
end
|
|
|
|
|
|
|
|
# Grants s3.amazonaws.com permission to write inventory objects under the
# inventory path of this bucket (required for S3 inventory delivery).
def update_bucket_policy
  policy = {
    Version: "2012-10-17",
    Statement: [
      {
        Sid: "InventoryAndAnalyticsPolicy",
        Effect: "Allow",
        Principal: {
          Service: "s3.amazonaws.com",
        },
        Action: ["s3:PutObject"],
        Resource: ["#{inventory_path_arn}/*"],
        Condition: {
          ArnLike: {
            "aws:SourceArn": bucket_arn,
          },
          StringEquals: {
            "s3:x-amz-acl": "bucket-owner-full-control",
          },
        },
      },
    ],
  }

  @s3_helper.s3_client.put_bucket_policy(bucket: bucket_name, policy: policy.to_json)
end
|
|
|
|
|
|
|
|
# Creates or updates this bucket's S3 inventory configuration so AWS starts
# producing the daily CSV listing consumed by this class.
def update_bucket_inventory_configuration
  client = @s3_helper.s3_client
  client.put_bucket_inventory_configuration(
    bucket: bucket_name,
    id: inventory_id,
    inventory_configuration: inventory_configuration,
    use_accelerate_endpoint: false,
  )
end
|
|
|
|
|
2020-07-28 20:49:45 -04:00
|
|
|
# Splits one multisite inventory into per-database Tempfiles, keyed by db
# name (the db is parsed from the "uploads/<db>/" segment of each key).
# Returns the hash of rewound Tempfiles; tmp files are cleaned up afterwards.
def prepare_for_all_sites
  db_names = RailsMultisite::ConnectionManagement.all_dbs
  db_files =
    db_names.each_with_object({}) { |db, acc| acc[db] = Tempfile.new("#{db}-inventory.csv") }

  download_and_decompress_files

  for_each_inventory_row do |row|
    db_for_row = row[CSV_KEY_INDEX].match(%r{uploads/([^/]+)/})&.[](1)
    tempfile = db_for_row && db_files[db_for_row]
    tempfile.write(row.to_csv) if tempfile
  end

  db_files.each_value(&:rewind)
  db_files
ensure
  cleanup!
end
|
|
|
|
|
2019-01-31 23:40:48 -05:00
|
|
|
# -- internal helpers below -------------------------------------------------
private
|
|
|
|
|
2019-08-13 01:59:31 -04:00
|
|
|
# Removes both the downloaded ".gz" chunks and their decompressed CSVs.
# A preloaded inventory file is owned by the caller, so it is left alone.
def cleanup!
  return if @preloaded_inventory_file

  files.each do |file|
    [file[:filename], file[:filename][0...-3]].each do |path|
      File.delete(path) if File.exist?(path)
    end
  end
end
|
|
|
|
|
2019-05-03 15:46:20 -04:00
|
|
|
# Memoized raw PG connection (needed for COPY / async_exec, which the
# ActiveRecord adapter does not expose directly).
def connection
  @connection = ActiveRecord::Base.connection.raw_connection if @connection.nil?
  @connection
end
|
|
|
|
|
|
|
|
# Name of the per-type temp table, e.g. "original_inventory".
def table_name
  [type, "inventory"].join("_")
end
|
|
|
|
|
2019-02-13 17:11:52 -05:00
|
|
|
# Lazily resolves the newest inventory symlink file and returns an array of
# { key:, filename: } hashes — one per gzipped inventory chunk it lists.
# Returns nil when a preloaded inventory is in use and [] when S3 has no
# symlink files yet. Sets @inventory_date as a side effect.
def files
  return if @preloaded_inventory_file

  @files ||=
    begin
      # Newest symlink file wins (sorted by last_modified descending).
      symlink_file = unsorted_files.sort_by { |file| -file.last_modified.to_i }.first
      # NOTE: this early return leaves @files unset, so the S3 lookup is
      # retried on the next call rather than caching the empty result.
      return [] if symlink_file.blank?

      # The inventory trails the bucket's real state; date it conservatively.
      @inventory_date = symlink_file.last_modified - INVENTORY_LAG
      log "Downloading symlink file to tmp directory..."
      failure_message = "Failed to download symlink file to tmp directory."
      filename = File.join(tmp_directory, File.basename(symlink_file.key))

      @s3_helper.download_file(symlink_file.key, filename, failure_message)
      # Each line of the symlink file is an "s3://bucket/key" URI pointing at
      # one gzipped inventory CSV chunk.
      File
        .readlines(filename)
        .map do |key|
          key = key.sub("s3://#{bucket_name}/", "").sub("\n", "")
          { key: key, filename: File.join(tmp_directory, File.basename(key)) }
        end
    end
end
|
|
|
|
|
2020-07-28 20:49:45 -04:00
|
|
|
# Downloads and unpacks every inventory chunk, skipping any whose
# decompressed CSV (the ".gz" name minus its suffix) already exists.
def download_and_decompress_files
  files.each do |file|
    decompressed = file[:filename][0...-3]
    next if File.exist?(decompressed)

    download_inventory_file_to_tmp_directory(file)
    decompress_inventory_file(file)
  end
end
|
|
|
|
|
2019-02-13 17:11:52 -05:00
|
|
|
# Memoized per-site scratch directory (tmp/inventory/<db>), created on
# first use.
def tmp_directory
  @tmp_directory ||=
    File
      .join(Rails.root, "tmp", INVENTORY_PREFIX, RailsMultisite::ConnectionManagement.current_db)
      .tap { |dir| FileUtils.mkdir_p(dir) }
end
|
|
|
|
|
2019-01-31 23:40:48 -05:00
|
|
|
# Builds the daily CSV inventory configuration hash passed to
# put_bucket_inventory_configuration (current object versions + ETag field).
def inventory_configuration
  # Filter on the upload-type prefix unless the bucket nests everything
  # under a configured folder path.
  filter_prefix = type
  filter_prefix = bucket_folder_path if bucket_folder_path.present?

  destination = {
    s3_bucket_destination: {
      bucket: bucket_arn,
      prefix: inventory_path,
      format: "CSV",
    },
  }

  {
    destination: destination,
    filter: {
      prefix: filter_prefix,
    },
    is_enabled: SiteSetting.enable_s3_inventory,
    id: inventory_id,
    included_object_versions: "Current",
    optional_fields: ["ETag"],
    schedule: {
      frequency: "Daily",
    },
  }
end
|
|
|
|
|
|
|
|
# Bucket name, delegated to the injected S3Helper.
def bucket_name
  @s3_helper.s3_bucket_name
end
|
|
|
|
|
|
|
|
# Optional folder path inside the bucket, delegated to the injected
# S3Helper; callers treat a blank value as "uploads live at the bucket root".
def bucket_folder_path
  @s3_helper.s3_bucket_folder_path
end
|
|
|
|
|
|
|
|
# Lists all symlink.txt objects under the inventory "hive" path (in no
# particular order). Returns [] and logs when the S3 listing fails.
def unsorted_files
  hive_path = File.join(inventory_path, bucket_name, inventory_id, "hive")
  @s3_helper.list(hive_path).select { |obj| obj.key.match?(/symlink\.txt\z/i) }
rescue Aws::Errors::ServiceError => e
  log("Failed to list inventory from S3", e)
  []
end
|
|
|
|
|
2019-05-01 19:05:35 -04:00
|
|
|
# Memoized inventory configuration id; prefixed with the bucket folder path
# when one is configured so ids stay unique per tenant.
def inventory_id
  @inventory_id ||=
    begin
      # TODO: rename multisite path to "uploads"
      base = Rails.configuration.multisite ? "original" : type
      bucket_folder_path.present? ? "#{bucket_folder_path}-#{base}" : base
    end
end
|
|
|
|
|
2019-02-06 08:46:08 -05:00
|
|
|
# ARN of the inventory destination prefix (bucket ARN + inventory path);
# used in the bucket policy's Resource entry.
def inventory_path_arn
  File.join(bucket_arn, inventory_path)
end
|
|
|
|
|
2019-01-31 23:40:48 -05:00
|
|
|
# Key prefix where inventory output lives, e.g. "inventory/1", nested under
# the bucket folder path when one is configured.
def inventory_path
  base = File.join(INVENTORY_PREFIX, INVENTORY_VERSION)
  bucket_folder_path.present? ? File.join(bucket_folder_path, base) : base
end
|
|
|
|
|
2019-02-06 08:46:08 -05:00
|
|
|
# ARN form of the bucket name.
def bucket_arn
  format("arn:aws:s3:::%s", bucket_name)
end
|
|
|
|
|
|
|
|
# Prints +message+ to stdout; when an exception is supplied, also records it
# (message plus backtrace) in the Rails error log.
def log(message, ex = nil)
  puts(message)
  return if ex.nil?

  Rails.logger.error("#{ex}\n" + (ex.backtrace || []).join("\n"))
end
|
|
|
|
|
|
|
|
# Logs +message+ to stdout and, by wrapping it in a StandardError, routes it
# through #log's exception path into Rails.logger as well.
def error(message)
  log(message, StandardError.new(message))
end
|
|
|
|
end
|