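# Represents a single backup archive on disk and, when S3 backups are
# enabled, its mirrored copy in the configured bucket.
#
# Illustrative usage (the filename here is made up):
#
#   Backup.all                    # newest-first list of archives
#   backup = Backup["foo.tar.gz"] # look one up by filename, or nil
#   backup.remove if backup       # delete it locally and from S3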
class Backup
  include UrlHelper
  include ActiveModel::SerializerSupport

  attr_reader :filename
  attr_accessor :size, :path, :link

  def initialize(filename)
    @filename = filename
  end
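  # All *.tar.gz archives in the backup directory, most recent first
  # (assumes the timestamped filenames sort in chronological order).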
  def self.all
    backups = Dir.glob(File.join(Backup.base_directory, "*.tar.gz"))
    backups.sort.reverse.map { |backup| Backup.create_from_filename(File.basename(backup)) }
  end
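  # Finds a backup by filename, or returns nil when no such file exists.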
  def self.[](filename)
    path = File.join(Backup.base_directory, filename)
    if File.exists?(path)
      Backup.create_from_filename(filename)
    else
      nil
    end
  end
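  # Deletes the archive from disk, then runs the removal hook so any S3
  # copy is cleaned up as well.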
  def remove
    File.delete(@path) if File.exists?(path)
    after_remove_hook
  end
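  # Mirrors a freshly created archive to S3 when S3 backups are enabled.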
  def after_create_hook
    upload_to_s3 if SiteSetting.enable_s3_backups?
  end
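  # Removes the matching S3 object when S3 backups are enabled.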
  def after_remove_hook
    remove_from_s3 if SiteSetting.enable_s3_backups?
  end
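  # Uploads the archive to the configured bucket as a private object.
  # Note that the whole file is read into memory before being sent.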
  def upload_to_s3
    return unless fog_directory
    fog_directory.files.create(key: @filename, public: false, body: File.read(@path))
  end
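  # Deletes the archive's object from the S3 backup bucket.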
  def remove_from_s3
    return unless fog
    fog.delete_object(SiteSetting.s3_backup_bucket, @filename)
  end
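  # Backups are stored under public/backups in a per-database
  # subdirectory, keeping multisite installs separated.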
  def self.base_directory
    File.join(Rails.root, "public", "backups", RailsMultisite::ConnectionManagement.current_db)
  end
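  # Temporary location for one piece of a chunked backup upload.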
  def self.chunk_path(identifier, filename, chunk_number)
    File.join(Backup.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
  end
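  # Builds a Backup for an existing archive, filling in its path,
  # download link and size.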
  def self.create_from_filename(filename)
    Backup.new(filename).tap do |b|
      b.path = File.join(Backup.base_directory, b.filename)
      b.link = b.schemaless "#{Discourse.base_url}/admin/backups/#{b.filename}"
      b.size = File.size(b.path)
    end
  end
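  # Enforces the retention limit: keeps the newest maximum_backups
  # archives and removes the rest.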
  def self.remove_old
    all_backups = Backup.all
    return unless all_backups.size > SiteSetting.maximum_backups
    all_backups[SiteSetting.maximum_backups..-1].each { |b| b.remove }
  end

  private
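  # Lazily builds and memoizes a Fog AWS storage client from the S3 site
  # settings; returns nil unless key, secret and bucket are all present.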
  def fog
    return @fog if @fog
    return unless SiteSetting.s3_access_key_id.present? &&
                  SiteSetting.s3_secret_access_key.present? &&
                  SiteSetting.s3_backup_bucket.present?
    require 'fog'
    @fog = Fog::Storage.new(provider: 'AWS',
                            aws_access_key_id: SiteSetting.s3_access_key_id,
                            aws_secret_access_key: SiteSetting.s3_secret_access_key)
  end
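  # Memoized handle on the S3 backup bucket; nil when S3 isn't configured.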
  def fog_directory
    return @fog_directory if @fog_directory
    return unless fog
    @fog_directory ||= fog.directories.get(SiteSetting.s3_backup_bucket)
  end

end