2019-05-02 18:17:27 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-09-15 09:45:36 -04:00
|
|
|
require_dependency "db_helper"
|
|
|
|
|
2014-10-10 14:04:07 -04:00
|
|
|
module BackupRestore
|
2014-02-12 23:32:58 -05:00
|
|
|
|
2015-06-11 02:42:01 -04:00
|
|
|
# Raised when restores are disabled (see Restorer#ensure_restore_is_enabled).
class RestoreDisabledError < RuntimeError; end
|
2014-02-12 23:32:58 -05:00
|
|
|
# Raised when no backup filename was provided to the Restorer.
class FilenameMissingError < RuntimeError; end
|
|
|
|
|
2014-10-10 14:04:07 -04:00
|
|
|
class Restorer
|
2014-08-04 11:55:09 -04:00
|
|
|
# True only when the restore ran to completion (set at the end of #run).
attr_reader :success
|
|
|
|
|
2018-03-15 18:09:06 -04:00
|
|
|
# Whether the given pg_dump version produces a "portable" dump, i.e. one
# that can be restored into a schema other than the one it was dumped from.
#
# @param version [String] a PostgreSQL version string, e.g. "9.6.8"
# @return [Boolean]
def self.pg_produces_portable_dump?(version)
  # anything pg 11 or above will produce a non-portable dump
  return false if version.to_i >= 11

  # below 11, the behaviour was changed in multiple different minor
  # versions depending on major release line - we list those versions below
  parsed_version = Gem::Version.new(version)

  # Portable only when the version matches none of the unportable series
  # (pessimistic "~>" match: same minor series, at or above the patch level).
  %w{
    10.3
    9.6.8
    9.5.12
    9.4.17
    9.3.22
  }.none? do |unportable_version|
    Gem::Dependency.new("", "~> #{unportable_version}").match?("", parsed_version)
  end
end
|
|
|
|
|
2017-07-27 21:20:09 -04:00
|
|
|
# @param user_id [Integer] id of the user who initiated the restore
# @param opts [Hash] supported keys: :client_id, :filename,
#   :publish_to_message_bus (default false), :disable_emails (default true)
# @raise [Discourse::InvalidParameters] if the user does not exist
# @raise [BackupRestore::RestoreDisabledError, BackupRestore::OperationRunningError,
#   BackupRestore::FilenameMissingError] when preconditions fail
def initialize(user_id, opts = {})
  @user_id = user_id
  @client_id = opts[:client_id]
  @filename = opts[:filename]
  @publish_to_message_bus = opts[:publish_to_message_bus] || false
  # Outgoing emails are disabled after restore unless explicitly opted out.
  @disable_emails = opts.fetch(:disable_emails, true)

  ensure_restore_is_enabled
  ensure_no_operation_is_running
  ensure_we_have_a_user
  ensure_we_have_a_filename

  initialize_state
end
|
|
|
|
|
|
|
|
# Entry point: performs the whole restore. Does not raise — failures are
# logged and rolled back; the outcome is exposed via #success.
def run
  log "[STARTED]"
  log "'#{@user_info[:username]}' has started the restore!"

  mark_restore_as_running

  # This thread calls exit when a shutdown is requested, which surfaces
  # here as SystemExit (rescued below).
  listen_for_shutdown_signal

  ensure_directory_exists(@tmp_directory)

  copy_archive_to_tmp_directory
  unzip_archive

  extract_metadata
  validate_metadata

  extract_dump
  create_missing_discourse_functions

  if !can_restore_into_different_schema?
    # Non-portable dumps hard-code the schema, so we must restore in-place:
    # go readonly first, back up the current tables, then replay the dump.
    log "Cannot restore into different schema, restoring in-place"
    enable_readonly_mode
    pause_sidekiq
    wait_for_sidekiq
    BackupRestore.move_tables_between_schemas("public", "backup")
    @db_was_changed = true
    restore_dump
  else
    # Portable dumps restore into a side schema first to minimize downtime,
    # then the schemas are swapped.
    log "Restoring into 'backup' schema"
    restore_dump
    enable_readonly_mode
    pause_sidekiq
    wait_for_sidekiq
    switch_schema!
  end

  migrate_database
  reconnect_database
  reload_site_settings
  clear_emoji_cache
  disable_readonly_mode
  clear_theme_cache

  extract_uploads

  after_restore_hook
rescue SystemExit
  log "Restore process was cancelled!"
  rollback
rescue => ex
  log "EXCEPTION: " + ex.message
  log ex.backtrace.join("\n")
  rollback
else
  @success = true
ensure
  clean_up
  notify_user
  log "Finished!"

  @success ? log("[SUCCESS]") : log("[FAILED]")
end
|
|
|
|
|
|
|
|
protected
|
|
|
|
|
2014-10-10 14:04:07 -04:00
|
|
|
# Restores are always allowed in development; elsewhere the site setting
# must be enabled.
def ensure_restore_is_enabled
  return if Rails.env.development? || SiteSetting.allow_restore?
  raise BackupRestore::RestoreDisabledError
end
|
|
|
|
|
|
|
|
# Only one backup or restore may run at a time.
def ensure_no_operation_is_running
  return unless BackupRestore.is_operation_running?
  raise BackupRestore::OperationRunningError
end
|
|
|
|
|
|
|
|
# Verifies the initiating user exists and snapshots identifying info so we
# can find them again after the database has been replaced.
def ensure_we_have_a_user
  user = User.find_by(id: @user_id)
  raise Discourse::InvalidParameters.new(:user_id) if user.nil?

  # keep some user data around to check them against the newly restored database
  @user_info = { id: user.id, username: user.username, email: user.email }
end
|
|
|
|
|
|
|
|
# A backup filename is mandatory.
def ensure_we_have_a_filename
  return unless @filename.nil?
  raise BackupRestore::FilenameMissingError
end
|
|
|
|
|
|
|
|
# Initializes all per-run state (paths, versions, flags) used by the restore.
def initialize_state
  @success = false
  @store = BackupRestore::BackupStore.create
  @db_was_changed = false
  @current_db = RailsMultisite::ConnectionManagement.current_db
  @current_version = BackupRestore.current_version
  @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
  @tmp_directory = File.join(Rails.root, "tmp", "restores", @current_db, @timestamp)
  @archive_filename = File.join(@tmp_directory, @filename)
  # Strip the trailing ".gz" to get the decompressed tar filename.
  @tar_filename = @archive_filename[0...-3]
  @meta_filename = File.join(@tmp_directory, BackupRestore::METADATA_FILE)
  # FIX: escape the dots — the previous /.sql.gz$/ let "." match any
  # character, so unrelated filenames (e.g. ending in "Xsql-gz") could be
  # misclassified as plain SQL dumps instead of tar archives.
  @is_archive = !(@filename =~ /\.sql\.gz$/)
  @logs = []
  # Remember pre-existing readonly state so we don't toggle it off later
  # if it was enabled before the restore started.
  @readonly_mode_was_enabled = Discourse.readonly_mode?
  @created_functions_for_table_columns = []
end
|
|
|
|
|
|
|
|
# Spawns a watchdog thread that polls for a shutdown request while the
# operation is running and calls Kernel#exit, which raises SystemExit in
# #run so the restore can roll back cleanly.
def listen_for_shutdown_signal
  Thread.new do
    while BackupRestore.is_operation_running?
      exit if BackupRestore.should_shutdown?
      sleep 0.1
    end
  end
end
|
|
|
|
|
2014-10-10 14:04:07 -04:00
|
|
|
# Flags the operation as running (shared lock between backups and restores).
def mark_restore_as_running
  log "Marking restore as running..."
  BackupRestore.mark_as_running!
end
|
|
|
|
|
|
|
|
# Enables readonly mode, unless it was already enabled before the restore
# started (in which case we must leave it untouched).
def enable_readonly_mode
  return if @readonly_mode_was_enabled
  log "Enabling readonly mode..."
  Discourse.enable_readonly_mode
end
|
|
|
|
|
|
|
|
# Stops sidekiq from picking up new jobs during the schema swap.
def pause_sidekiq
  log "Pausing sidekiq..."
  Sidekiq.pause!
end
|
|
|
|
|
|
|
|
# Polls until sidekiq has no jobs running for this site, giving up after
# six 5-second polls.
def wait_for_sidekiq
  log "Waiting for sidekiq to finish running jobs..."

  attempts = 0
  while sidekiq_has_running_jobs?
    attempts += 1
    log "Waiting for sidekiq to finish running jobs... ##{attempts}"
    sleep 5
    raise "Sidekiq did not finish running all the jobs in the allowed time!" if attempts >= 6
  end
end
|
|
|
|
|
2014-03-14 10:49:35 -04:00
|
|
|
# True when any sidekiq worker is processing a job for this site (or a
# job that spans all sites).
def sidekiq_has_running_jobs?
  Sidekiq::Workers.new.any? do |_, _, worker|
    payload = worker.try(:payload)
    payload.try(:all_sites) || payload.try(:current_site_id) == @current_db
  end
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
# Fetches the backup archive into the tmp directory. Remote stores (e.g.
# S3) download the file; local stores copy it.
def copy_archive_to_tmp_directory
  remote = @store.remote?

  log(remote ? "Downloading archive to tmp directory..." : "Copying archive to tmp directory...")

  failure_message =
    if remote
      "Failed to download archive to tmp directory."
    else
      "Failed to copy archive to tmp directory."
    end

  @store.download_file(@filename, @archive_filename, failure_message)
end
|
|
|
|
|
|
|
|
# Gunzips the tar archive in place. Plain .sql.gz dumps are skipped here;
# they are decompressed while being streamed into psql (see
# restore_dump_command).
def unzip_archive
  return unless @is_archive

  log "Unzipping archive, this may take a while..."

  FileUtils.cd(@tmp_directory) do
    Discourse::Utils.execute_command('gzip', '--decompress', @archive_filename, failure_message: "Failed to unzip archive.")
  end
end
|
|
|
|
|
|
|
|
# Loads backup metadata into @metadata: either from the metadata file
# inside the tar archive, or — for older backups without one — by parsing
# the migration version out of the filename.
def extract_metadata
  @metadata =
    if system('tar', '--list', '--file', @tar_filename, BackupRestore::METADATA_FILE)
      log "Extracting metadata file..."
      FileUtils.cd(@tmp_directory) do
        Discourse::Utils.execute_command(
          'tar', '--extract', '--file', @tar_filename, BackupRestore::METADATA_FILE,
          failure_message: "Failed to extract metadata file."
        )
      end

      data = Oj.load_file(@meta_filename)
      raise "Failed to load metadata file." if !data
      data
    else
      log "No metadata file to extract."
      # Legacy backups embed a 14-digit migration version in the filename.
      if @filename =~ /-#{BackupRestore::VERSION_PREFIX}(\d{14})/
        { "version" => Regexp.last_match[1].to_i }
      else
        raise "Migration version is missing from the filename."
      end
    end
end
|
|
|
|
|
|
|
|
# Ensures metadata was loaded and that the backup's schema version is not
# newer than the currently running site's.
def validate_metadata
  log "Validating metadata..."
  log " Current version: #{@current_version}"

  raise "Metadata has not been extracted correctly." if !@metadata

  log " Restored version: #{@metadata["version"]}"

  error = "You're trying to restore a more recent version of the schema. You should migrate first!"
  raise error if @metadata["version"] > @current_version
end
|
|
|
|
|
|
|
|
# Determines where the SQL dump lives (@dump_filename), handling both the
# legacy and current in-archive filenames, and extracts it from tar
# archives into the tmp directory.
def extract_dump
  @dump_filename =
    if @is_archive
      # For backwards compatibility
      if system('tar', '--list', '--file', @tar_filename, BackupRestore::OLD_DUMP_FILE)
        File.join(@tmp_directory, BackupRestore::OLD_DUMP_FILE)
      else
        File.join(@tmp_directory, BackupRestore::DUMP_FILE)
      end
    else
      # Plain .sql.gz backups: the downloaded file itself is the dump.
      File.join(@tmp_directory, @filename)
    end

  return unless @is_archive

  log "Extracting dump file..."

  FileUtils.cd(@tmp_directory) do
    Discourse::Utils.execute_command(
      'tar', '--extract', '--file', @tar_filename, File.basename(@dump_filename),
      failure_message: "Failed to extract dump file."
    )
  end
end
|
|
|
|
|
2018-03-09 00:18:47 -05:00
|
|
|
# Greps the dump header for the "-- Dumped by pg_dump version" line and
# returns the version string (e.g. "10.3").
def get_dumped_by_version
  output = Discourse::Utils.execute_command(
    # zgrep reads gzip'ed dumps without decompressing them on disk
    File.extname(@dump_filename) == '.gz' ? 'zgrep' : 'grep',
    '-m1', @dump_filename, '-e', "-- Dumped by pg_dump version",
    failure_message: "Failed to check version of pg_dump used to generate the dump file"
  )

  # NOTE(review): raises NoMethodError if the grepped line unexpectedly
  # doesn't match; the failure_message above covers the missing-line case.
  output.match(/version (\d+(\.\d+)+)/)[1]
end
|
|
|
|
|
2018-03-09 01:28:50 -05:00
|
|
|
# A dump can be redirected into the temporary "restore" schema only when
# the pg_dump that produced it emits portable (relocatable) SQL.
def can_restore_into_different_schema?
  self.class.pg_produces_portable_dump?(get_dumped_by_version)
end
|
|
|
|
|
2016-07-31 23:56:06 -04:00
|
|
|
# Shell pipeline that replays the dump: gzip'ed dumps are decompressed and
# passed through sed (which redirects them into the "restore" schema)
# before reaching psql; plain SQL dumps are fed to psql directly.
def restore_dump_command
  return "#{psql_command} 2>&1 < #{@dump_filename}" unless File.extname(@dump_filename) == '.gz'

  "gzip -d < #{@dump_filename} | #{sed_command} | #{psql_command} 2>&1"
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
# Streams the dump into psql, mirroring psql's output into the restore log
# from a background thread, and raises if any output line contains "ERROR:".
def restore_dump
  log "Restoring dump file... (can be quite long)"

  logs = Queue.new
  psql_running = true
  has_error = false

  # Logger thread: drains psql's output so the pipe never blocks, and
  # watches for error markers.
  Thread.new do
    RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
    while psql_running
      message = logs.pop.strip
      has_error ||= (message =~ /ERROR:/)
      log(message) unless message.blank?
    end
  end

  IO.popen(restore_dump_command) do |pipe|
    begin
      while line = pipe.readline
        logs << line
      end
    rescue EOFError
      # finished reading...
    ensure
      psql_running = false
      # Sentinel so the logger thread wakes from logs.pop and exits.
      logs << ""
    end
  end

  # psql does not return a valid exit code when an error happens
  raise "psql failed" if has_error
end
|
|
|
|
|
2014-03-12 06:45:55 -04:00
|
|
|
# Builds the psql invocation used to replay the dump, including optional
# password/host/port/username arguments from the database configuration.
def psql_command
  db_conf = BackupRestore.database_configuration

  password_argument = "PGPASSWORD='#{db_conf.password}'" if db_conf.password.present?
  host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
  port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
  username_argument = "--username=#{db_conf.username}" if db_conf.username.present?

  # nil arguments are harmless here: join(" ") simply skips them.
  [ password_argument, # pass the password to psql (if any)
    "psql", # the psql command
    "--dbname='#{db_conf.database}'", # connect to database *dbname*
    "--single-transaction", # all or nothing (also runs COPY commands faster)
    host_argument, # the hostname to connect to (if any)
    port_argument, # the port to connect to (if any)
    username_argument # the username to connect as (if any)
  ].join(" ")
end
|
|
|
|
|
2016-07-21 22:45:39 -04:00
|
|
|
# Builds the sed filter that rewrites the dump's first search_path
# statement so the restore lands in the "restore" schema.
def sed_command
  # in order to limit the downtime when restoring as much as possible
  # we force the restoration to happen in the "restore" schema

  # during the restoration, this makes sure we
  # - drop the "restore" schema if it exists
  # - create the "restore" schema
  # - prepend the "restore" schema into the search_path
  regexp = "SET search_path = public, pg_catalog;"

  replacement = [ "DROP SCHEMA IF EXISTS restore CASCADE;",
                  "CREATE SCHEMA restore;",
                  "SET search_path = restore, public, pg_catalog;",
                ].join(" ")

  # we only want to replace the VERY first occurrence of the search_path command
  expression = "1,/^#{regexp}$/s/#{regexp}/#{replacement}/"

  "sed -e '#{expression}'"
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
# Atomically swaps the freshly-restored "restore" schema into "public",
# moving the old "public" tables into "backup" so rollback stays possible.
def switch_schema!
  log "Switching schemas... try reloading the site in 5 minutes, if successful, then reboot and restore is complete."

  sql = [
    "BEGIN;",
    BackupRestore.move_tables_between_schemas_sql("public", "backup"),
    BackupRestore.move_tables_between_schemas_sql("restore", "public"),
    "COMMIT;"
  ].join("\n")

  @db_was_changed = true

  DB.exec(sql)
end
|
|
|
|
|
|
|
|
# Runs pending Rails migrations against the restored database. If
# post-deployment migrations are normally skipped, they are re-enabled so
# the restored schema is brought fully up to date.
def migrate_database
  log "Migrating the database..."

  if Discourse.skip_post_deployment_migrations?
    ENV["SKIP_POST_DEPLOYMENT_MIGRATIONS"] = "0"
    Rails.application.config.paths['db/migrate'] << Rails.root.join(
      Discourse::DB_POST_MIGRATE_PATH
    ).to_s
  end

  Discourse::Application.load_tasks
  ENV["VERSION"] = @current_version.to_s
  # The dump may have left a narrowed search_path behind; reset it first.
  DB.exec("SET search_path = public, pg_catalog;")
  Rake::Task["db:migrate"].invoke
end
|
|
|
|
|
|
|
|
# Re-establishes the DB connection for the current multisite database
# after the schema swap / migrations.
def reconnect_database
  log "Reconnecting to the database..."
  RailsMultisite::ConnectionManagement::establish_connection(db: @current_db)
end
|
|
|
|
|
|
|
|
# Refreshes site settings from the restored database and — unless the
# caller opted out — disables outgoing emails for non-staff users so a
# restored copy of a live site can't accidentally mail its users.
def reload_site_settings
  log "Reloading site settings..."
  SiteSetting.refresh!

  if @disable_emails && SiteSetting.disable_emails == 'no'
    log "Disabling outgoing emails for non-staff users..."
    user = User.find_by_email(@user_info[:email]) || Discourse.system_user
    SiteSetting.set_and_log(:disable_emails, 'non-staff', user)
  end
end
|
|
|
|
|
2015-03-17 12:29:18 -04:00
|
|
|
# Invalidates the emoji cache so custom emojis from the backup show up.
def clear_emoji_cache
  log "Clearing emoji cache..."
  Emoji.clear_cache
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
# If the archive contains an uploads directory, extracts it, rsyncs it
# into public/uploads/<current_db>, remaps stored URLs, optionally
# migrates everything to S3, and regenerates optimized images if the
# backup didn't include them.
def extract_uploads
  if system('tar', '--exclude=*/*', '--list', '--file', @tar_filename, 'uploads')
    log "Extracting uploads..."

    FileUtils.cd(@tmp_directory) do
      Discourse::Utils.execute_command(
        'tar', '--extract', '--keep-newer-files', '--file', @tar_filename, 'uploads/',
        failure_message: "Failed to extract uploads."
      )
    end

    public_uploads_path = File.join(Rails.root, "public")

    FileUtils.cd(public_uploads_path) do
      FileUtils.mkdir_p("uploads")

      tmp_uploads_path = Dir.glob(File.join(@tmp_directory, "uploads", "*")).first
      # Older backups have no "db_name" metadata; fall back to the
      # extracted folder's name.
      previous_db_name = BackupMetadata.value_for("db_name") || File.basename(tmp_uploads_path)
      current_db_name = RailsMultisite::ConnectionManagement.current_db
      optimized_images_exist = File.exist?(File.join(tmp_uploads_path, 'optimized'))

      Discourse::Utils.execute_command(
        'rsync', '-avp', '--safe-links', "#{tmp_uploads_path}/", "uploads/#{current_db_name}/",
        failure_message: "Failed to restore uploads."
      )

      remap_uploads(previous_db_name, current_db_name)

      if SiteSetting.Upload.enable_s3_uploads
        migrate_to_s3
        remove_local_uploads(File.join(public_uploads_path, "uploads/#{current_db_name}"))
      end

      generate_optimized_images unless optimized_images_exist
    end
  end
end
|
|
|
|
|
2019-07-09 07:57:31 -04:00
|
|
|
# Rewrites URLs stored in the restored database when the current site uses
# a different base URL, S3 bucket, S3 CDN, CDN, or database name than the
# site the backup was taken from. Errors are logged, not raised.
def remap_uploads(previous_db_name, current_db_name)
  log "Remapping uploads..."

  was_multisite = BackupMetadata.value_for("multisite") == "t"
  uploads_folder = was_multisite ? "/" : "/uploads/#{current_db_name}/"

  if (old_base_url = BackupMetadata.value_for("base_url")) && old_base_url != Discourse.base_url
    remap(old_base_url, Discourse.base_url)
  end

  current_s3_base_url = SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_base_url : nil
  # FIX: compare the old *S3* base URL against the current S3 base URL —
  # the previous code mistakenly reused old_base_url here, so S3 URLs
  # were remapped (or skipped) based on the wrong comparison.
  if (old_s3_base_url = BackupMetadata.value_for("s3_base_url")) && old_s3_base_url != current_s3_base_url
    remap("#{old_s3_base_url}/", uploads_folder)
  end

  current_s3_cdn_url = SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_cdn_url : nil
  if (old_s3_cdn_url = BackupMetadata.value_for("s3_cdn_url")) && old_s3_cdn_url != current_s3_cdn_url
    base_url = SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_cdn_url : Discourse.base_url
    remap("#{old_s3_cdn_url}/", UrlHelper.schemaless("#{base_url}#{uploads_folder}"))

    # Also remap bare hostnames (e.g. protocol-relative references).
    old_host = URI.parse(old_s3_cdn_url).host
    new_host = URI.parse(base_url).host
    remap(old_host, new_host)
  end

  if (old_cdn_url = BackupMetadata.value_for("cdn_url")) && old_cdn_url != Discourse.asset_host
    base_url = Discourse.asset_host || Discourse.base_url
    remap("#{old_cdn_url}/", UrlHelper.schemaless("#{base_url}/"))

    old_host = URI.parse(old_cdn_url).host
    new_host = URI.parse(base_url).host
    remap(old_host, new_host)
  end

  # Upload paths embed the database name; fix them when restoring into a
  # differently-named site.
  if previous_db_name != current_db_name
    remap("uploads/#{previous_db_name}", "uploads/#{current_db_name}")
  end
rescue => ex
  log "Something went wrong while remapping uploads.", ex
end
|
|
|
|
|
2019-08-12 11:14:51 -04:00
|
|
|
# Replaces every occurrence of +from+ with +to+ across remappable tables
# (the backup_metadata table is excluded so later remaps still see the
# original values).
def remap(from, to)
  # Use log (not puts) so remap progress reaches the message bus and the
  # saved logs, consistent with every other step of the restore.
  log "Remapping '#{from}' to '#{to}'"
  DbHelper.remap(from, to, verbose: true, excluded_tables: ["backup_metadata"])
end
|
|
|
|
|
2019-06-04 09:47:21 -04:00
|
|
|
# Pushes the restored local uploads to S3 via the uploads:migrate_to_s3
# rake task, then re-enables deferred jobs.
def migrate_to_s3
  log "Migrating uploads to S3..."
  ENV["SKIP_FAILED"] = "1"
  ENV["MIGRATE_TO_MULTISITE"] = "1" if Rails.configuration.multisite
  Rake::Task["uploads:migrate_to_s3"].invoke
  Jobs.run_later!
end
|
|
|
|
|
|
|
|
# Deletes the local uploads directory (after a successful S3 migration).
# Errors are logged, not raised.
def remove_local_uploads(directory)
  log "Removing local uploads directory..."
  # Dir[directory] is non-empty only when the path actually exists.
  FileUtils.rm_rf(directory) unless Dir[directory].empty?
rescue => ex
  log "Something went wrong while removing the following uploads directory: #{directory}", ex
end
|
|
|
|
|
2019-02-13 05:10:33 -05:00
|
|
|
# Rebuilds optimized images: regenerates site icons immediately, then
# marks posts with uploads as uncooked and queues avatar-thumbnail jobs so
# sidekiq rebakes everything in the background.
def generate_optimized_images
  log 'Optimizing site icons...'
  DB.exec("TRUNCATE TABLE optimized_images")
  SiteIconManager.ensure_optimized!

  log 'Posts will be rebaked by a background job in sidekiq. You will see missing images until that has completed.'
  log 'You can expedite the process by manually running "rake posts:rebake_uncooked_posts"'

  DB.exec(<<~SQL)
    UPDATE posts
    SET baked_version = NULL
    WHERE id IN (SELECT post_id FROM post_uploads)
  SQL

  User.where("uploaded_avatar_id IS NOT NULL").find_each do |user|
    Jobs.enqueue(:create_avatar_thumbnails, upload_id: user.uploaded_avatar_id)
  end
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
# Restores the pre-restore "backup" schema into "public" if the database
# was modified and a rollback is possible.
def rollback
  log "Trying to rollback..."

  unless @db_was_changed && BackupRestore.can_rollback?
    log "There was no need to rollback"
    return
  end

  log "Rolling back..."
  BackupRestore.move_tables_between_schemas("backup", "public")
end
|
|
|
|
|
2014-03-24 14:34:16 -04:00
|
|
|
# Sends the initiating user a PM with the outcome and the collected logs.
# If the user no longer exists in the restored database, only logs the
# fact. Errors are logged, never raised (runs from #run's ensure block).
def notify_user
  if user = User.find_by_email(@user_info[:email])
    log "Notifying '#{user.username}' of the end of the restore..."
    status = @success ? :restore_succeeded : :restore_failed

    SystemMessage.create_from_system_user(user, status,
      logs: Discourse::Utils.pretty_logs(@logs)
    )
  else
    # FIX: grammar in the log message ("does not exists" -> "does not exist")
    log "Could not send notification to '#{@user_info[:username]}' (#{@user_info[:email]}), because the user does not exist..."
  end
rescue => ex
  log "Something went wrong while notifying user.", ex
end
|
|
|
|
|
2019-08-08 10:06:27 -04:00
|
|
|
# Old dumps may reference readonly trigger functions (created when tables/
# columns were dropped) that no longer exist. Scan the post-migrate
# migrations for DROPPED_TABLES / DROPPED_COLUMNS and recreate any missing
# functions; they are tracked so clean_up can drop them again.
def create_missing_discourse_functions
  log "Creating missing functions in the discourse_functions schema"

  all_readonly_table_columns = []

  Dir[Rails.root.join(Discourse::DB_POST_MIGRATE_PATH, "*.rb")].each do |path|
    require path
    # "20190101000000_drop_foo.rb" -> "DropFoo"
    class_name = File.basename(path, ".rb").sub(/^\d+_/, "").camelize
    migration_class = class_name.constantize

    if migration_class.const_defined?(:DROPPED_TABLES)
      migration_class::DROPPED_TABLES.each do |table_name|
        all_readonly_table_columns << [table_name]
      end
    end

    if migration_class.const_defined?(:DROPPED_COLUMNS)
      migration_class::DROPPED_COLUMNS.each do |table_name, column_names|
        column_names.each do |column_name|
          all_readonly_table_columns << [table_name, column_name]
        end
      end
    end
  end

  existing_function_names = Migration::BaseDropper.existing_discourse_function_names.map { |name| "#{name}()" }

  all_readonly_table_columns.each do |table_name, column_name|
    function_name = Migration::BaseDropper.readonly_function_name(table_name, column_name, with_schema: false)

    if !existing_function_names.include?(function_name)
      Migration::BaseDropper.create_readonly_function(table_name, column_name)
      # Track what we created so drop_created_discourse_functions can undo it.
      @created_functions_for_table_columns << [table_name, column_name]
    end
  end
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
# Best-effort cleanup run from #run's ensure block; each helper rescues
# its own errors so every step gets a chance to run.
def clean_up
  log "Cleaning stuff up..."
  drop_created_discourse_functions
  remove_tmp_directory
  unpause_sidekiq
  disable_readonly_mode if Discourse.readonly_mode?
  mark_restore_as_not_running
end
|
|
|
|
|
|
|
|
# Deletes this run's tmp directory. Errors are logged, not raised.
def remove_tmp_directory
  log "Removing tmp '#{@tmp_directory}' directory..."
  # Dir[...] is non-empty only when the directory actually exists.
  FileUtils.rm_rf(@tmp_directory) unless Dir[@tmp_directory].empty?
rescue => ex
  log "Something went wrong while removing the following tmp directory: #{@tmp_directory}", ex
end
|
|
|
|
|
|
|
|
# Resumes sidekiq job processing. Errors are logged, not raised.
def unpause_sidekiq
  log "Unpausing sidekiq..."
  Sidekiq.unpause!
rescue => ex
  log "Something went wrong while unpausing Sidekiq.", ex
end
|
|
|
|
|
2018-11-19 21:37:58 -05:00
|
|
|
# Forces theme recompilation and expires theme/stylesheet caches so the
# restored site serves assets built from the restored data.
def clear_theme_cache
  log "Clear theme cache"
  ThemeField.force_recompilation!
  Theme.expire_site_cache!
  Stylesheet::Manager.cache.clear
end
|
|
|
|
|
2019-08-08 10:06:27 -04:00
|
|
|
# Drops the readonly functions that create_missing_discourse_functions
# created for this run. Errors are logged, not raised.
def drop_created_discourse_functions
  log "Dropping function from the discourse_functions schema"
  @created_functions_for_table_columns.each do |table_name, column_name|
    Migration::BaseDropper.drop_readonly_function(table_name, column_name)
  end
rescue => ex
  log "Something went wrong while dropping functions from the discourse_functions schema", ex
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
# Disables readonly mode, unless it was already enabled before the restore
# started (we must not turn off a pre-existing readonly state).
def disable_readonly_mode
  return if @readonly_mode_was_enabled
  log "Disabling readonly mode..."
  Discourse.disable_readonly_mode
rescue => ex
  log "Something went wrong while disabling readonly mode.", ex
end
|
|
|
|
|
2014-10-10 14:04:07 -04:00
|
|
|
# Releases the shared backup/restore "running" flag. Errors are logged.
def mark_restore_as_not_running
  log "Marking restore as finished..."
  BackupRestore.mark_as_not_running!
rescue => ex
  log "Something went wrong while marking restore as finished.", ex
end
|
|
|
|
|
|
|
|
# Creates +directory+ (and any missing parents) if it doesn't exist.
def ensure_directory_exists(directory)
  log "Making sure #{directory} exists..."
  FileUtils.mkdir_p(directory)
end
|
|
|
|
|
2019-02-18 05:48:03 -05:00
|
|
|
# Fires the :restore_complete event so plugins can react to the restore.
def after_restore_hook
  log "Executing the after_restore_hook..."
  DiscourseEvent.trigger(:restore_complete)
end
|
|
|
|
|
2018-09-19 14:35:43 -04:00
|
|
|
# Logs +message+ to stdout, the message bus and the in-memory log (which
# ends up in the notification PM). When +ex+ is given, its message and
# backtrace also go to the Rails error logger.
def log(message, ex = nil)
  timestamp = Time.now.strftime("%Y-%m-%d %H:%M:%S")
  puts(message)
  publish_log(message, timestamp)
  save_log(message, timestamp)
  Rails.logger.error("#{ex}\n" + ex.backtrace.join("\n")) if ex
end
|
|
|
|
|
2015-02-09 10:53:28 -05:00
|
|
|
# Publishes a log line to the restore-logs channel for the initiating
# user/client, when message-bus publishing was requested.
def publish_log(message, timestamp)
  return unless @publish_to_message_bus

  payload = { timestamp: timestamp, operation: "restore", message: message }
  MessageBus.publish(BackupRestore::LOGS_CHANNEL, payload, user_ids: [@user_id], client_ids: [@client_id])
end
|
|
|
|
|
2015-02-09 10:53:28 -05:00
|
|
|
# Appends a timestamped line to the in-memory log shown in the
# completion notification.
def save_log(message, timestamp)
  @logs << "[#{timestamp}] #{message}"
end
|
|
|
|
|
2014-02-12 23:32:58 -05:00
|
|
|
end
|
|
|
|
end
|