# frozen_string_literal: true

require 'csv'

module Jobs
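
  # Exports the requested entity (user list, staff actions, screened
  # emails/IPs/URLs, or a report) as CSV, compresses the result, uploads the
  # archive, and notifies the requesting user with a system message.
  #
  # A minimal enqueue sketch (argument names taken from `execute` below; real
  # call sites may also pass extra `args`):
  #
  #   Jobs.enqueue(:export_csv_file, entity: "user_list", user_id: user.id)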
  class ExportCsvFile < ::Jobs::Base
    sidekiq_options retry: false

    attr_accessor :extra
    attr_accessor :current_user
    attr_accessor :entity
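
    # Maps each exportable entity to the CSV column headers written for it.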
    HEADER_ATTRS_FOR ||= HashWithIndifferentAccess.new(
      user_list: ['id', 'name', 'username', 'email', 'title', 'created_at', 'last_seen_at', 'last_posted_at', 'last_emailed_at', 'trust_level', 'approved', 'suspended_at', 'suspended_till', 'silenced_till', 'active', 'admin', 'moderator', 'ip_address', 'staged', 'secondary_emails'],
      user_stats: ['topics_entered', 'posts_read_count', 'time_read', 'topic_count', 'post_count', 'likes_given', 'likes_received'],
      user_profile: ['location', 'website', 'views'],
      user_sso: ['external_id', 'external_email', 'external_username', 'external_name', 'external_avatar_url'],
      staff_action: ['staff_user', 'action', 'subject', 'created_at', 'details', 'context'],
      screened_email: ['email', 'action', 'match_count', 'last_match_at', 'created_at', 'ip_address'],
      screened_ip: ['ip_address', 'action', 'match_count', 'last_match_at', 'created_at'],
      screened_url: ['domain', 'action', 'match_count', 'last_match_at', 'created_at'],
      report: ['date', 'value']
    )
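
    # Writes one CSV per requested entity into a scratch directory, compresses
    # the directory into a zip, uploads the archive, and notifies the
    # requesting user from the ensure block whether or not the export succeeded.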
    def execute(args)
      @entity = args[:entity]
      @extra = HashWithIndifferentAccess.new(args[:args]) if args[:args]
      @current_user = User.find_by(id: args[:user_id])

      entities = [{ name: @entity }]

      entities.each do |entity|
        entity[:method] = :"#{entity[:name]}_export"
        raise Discourse::InvalidParameters.new(:entity) unless respond_to?(entity[:method])

        @timestamp ||= Time.now.strftime("%y%m%d-%H%M%S")
        entity[:filename] =
          if entity[:name] == "report" && @extra[:name].present?
            "#{@extra[:name].dasherize}-#{@timestamp}"
          else
            "#{entity[:name].dasherize}-#{@timestamp}"
          end
      end

      export_title =
        if @entity == "report" && @extra[:name].present?
          I18n.t("reports.#{@extra[:name]}.title")
        else
          @entity.gsub('_', ' ').titleize
        end

      filename = entities[0][:filename] # use first entity as a name for this export
      user_export = UserExport.create(file_name: filename, user_id: @current_user.id)

      filename = "#{filename}-#{user_export.id}"
      dirname = "#{UserExport.base_directory}/#{filename}"

      # ensure directory exists
      FileUtils.mkdir_p(dirname) unless Dir.exist?(dirname)

      # Generate a compressed CSV file
      begin
        entities.each do |entity|
          CSV.open("#{dirname}/#{entity[:filename]}.csv", "w") do |csv|
            csv << get_header(entity[:name]) if entity[:name] != "report"
            public_send(entity[:method]).each { |d| csv << d }
          end
        end

        zip_filename = Compression::Zip.new.compress(UserExport.base_directory, filename)
      ensure
        FileUtils.rm_rf(dirname)
      end

      # create upload
      upload = nil

      if File.exist?(zip_filename)
        File.open(zip_filename) do |file|
          upload = UploadCreator.new(
            file,
            File.basename(zip_filename),
            type: 'csv_export',
            for_export: 'true'
          ).create_for(@current_user.id)

          if upload.persisted?
            user_export.update_columns(upload_id: upload.id)
          else
            Rails.logger.warn("Failed to upload the file #{zip_filename}")
          end
        end

        File.delete(zip_filename)
      end
    ensure
      post = notify_user(upload, export_title)

      if user_export.present? && post.present?
        topic = post.topic
        user_export.update_columns(topic_id: topic.id)
        topic.update_status('closed', true, Discourse.system_user)
      end
    end
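
    # Yields one row per user; with no block it returns an Enumerator, which
    # lets `execute` stream rows straight into the CSV writer.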
    def user_list_export
      return enum_for(:user_list_export) unless block_given?

      user_field_ids = UserField.pluck(:id)

      condition = {}
      if @extra && @extra[:trust_level] && (trust_level = TrustLevel.levels[@extra[:trust_level].to_sym])
        condition = { trust_level: trust_level }
      end

      includes = [:user_profile, :user_stat, :groups, :user_emails]
      includes << :single_sign_on_record if SiteSetting.enable_discourse_connect

      User.where(condition).includes(*includes).find_each do |user|
        user_info_array = get_base_user_array(user)
        if SiteSetting.enable_discourse_connect
          user_info_array = add_single_sign_on(user, user_info_array)
        end
        user_info_array = add_custom_fields(user, user_info_array, user_field_ids)
        user_info_array = add_group_names(user, user_info_array)
        yield user_info_array
      end
    end
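
    # Admins receive every staff action; other staff only see entries that are
    # not marked admin-only.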
    def staff_action_export
      return enum_for(:staff_action_export) unless block_given?

      staff_action_data =
        if @current_user.admin?
          UserHistory.only_staff_actions.order('id DESC')
        else
          UserHistory.where(admin_only: false).only_staff_actions.order('id DESC')
        end

      staff_action_data.each do |staff_action|
        yield get_staff_action_fields(staff_action)
      end
    end

    def screened_email_export
      return enum_for(:screened_email_export) unless block_given?

      ScreenedEmail.order('last_match_at DESC').each do |screened_email|
        yield get_screened_email_fields(screened_email)
      end
    end

    def screened_ip_export
      return enum_for(:screened_ip_export) unless block_given?

      ScreenedIpAddress.order('id DESC').each do |screened_ip|
        yield get_screened_ip_fields(screened_ip)
      end
    end

    def screened_url_export
      return enum_for(:screened_url_export) unless block_given?

      ScreenedUrl.select("domain, sum(match_count) as match_count, max(last_match_at) as last_match_at, min(created_at) as created_at")
        .group(:domain)
        .order('last_match_at DESC')
        .each do |screened_url|
          yield get_screened_url_fields(screened_url)
        end
    end
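
    # Builds the rows for the report named in @extra. Invalid date strings are
    # coerced to nil, and stacked-chart reports are flattened into one row per
    # x value with one column per series.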
    def report_export
      return enum_for(:report_export) unless block_given?

      # if dates are invalid, treat them as `nil`
      if @extra[:start_date].is_a?(String)
        @extra[:start_date] = @extra[:start_date].to_date.beginning_of_day rescue nil
      end
      if @extra[:end_date].is_a?(String)
        @extra[:end_date] = @extra[:end_date].to_date.end_of_day rescue nil
      end

      @extra[:filters] = {}
      @extra[:filters][:category] = @extra[:category].to_i if @extra[:category].present?
      @extra[:filters][:group] = @extra[:group].to_i if @extra[:group].present?
      @extra[:filters][:include_subcategories] = !!ActiveRecord::Type::Boolean.new.cast(@extra[:include_subcategories]) if @extra[:include_subcategories].present?

      report = Report.find(@extra[:name], @extra)

      header = []
      titles = {}

      report.labels.each do |label|
        if label[:type] == :user
          titles[label[:properties][:username]] = label[:title]
          header << label[:properties][:username]
        elsif label[:type] == :topic
          titles[label[:properties][:id]] = label[:title]
          header << label[:properties][:id]
        else
          titles[label[:property]] = label[:title]
          header << label[:property]
        end
      end

      if report.modes == [:stacked_chart]
        header = [:x]
        data = {}

        report.data.each do |series|
          header << series[:label]
          series[:data].each do |datapoint|
            data[datapoint[:x]] ||= { x: datapoint[:x] }
            data[datapoint[:x]][series[:label]] = datapoint[:y]
          end
        end

        data = data.values
      else
        data = report.data
      end

      yield header.map { |k| titles[k] || k }
      data.each { |row| yield row.values_at(*header).map(&:to_s) }
    end
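
    # Returns the header row for an entity. The user list additionally gets
    # SSO columns (when DiscourseConnect is enabled), one column per custom
    # user field, and a trailing group_names column.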
    def get_header(entity)
      if entity == 'user_list'
        header_array = HEADER_ATTRS_FOR['user_list'] + HEADER_ATTRS_FOR['user_stats'] + HEADER_ATTRS_FOR['user_profile']
        header_array.concat(HEADER_ATTRS_FOR['user_sso']) if SiteSetting.enable_discourse_connect

        user_custom_fields = UserField.all
        if user_custom_fields.present?
          user_custom_fields.each do |custom_field|
            header_array.push("#{custom_field.name} (custom user field)")
          end
        end

        header_array.push("group_names")
      else
        header_array = HEADER_ATTRS_FOR[entity]
      end

      header_array
    end

    private
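
    # Wraps a value containing a comma in literal double quotes before it is
    # handed to the CSV writer.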
    def escape_comma(string)
      string&.include?(",") ? %Q|"#{string}"| : string
    end
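
    # Builds the fixed columns of a user row, in the same order as the
    # user_list, user_stats, and user_profile headers.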
    def get_base_user_array(user)
      # preloading scopes is hard, so split primary and secondary emails by hand
      secondary_emails = []
      primary_email = nil

      user.user_emails.each do |user_email|
        if user_email.primary?
          primary_email = user_email.email
        else
          secondary_emails << user_email.email
        end
      end

      [
        user.id,
        escape_comma(user.name),
        user.username,
        primary_email,
        escape_comma(user.title),
        user.created_at,
        user.last_seen_at,
        user.last_posted_at,
        user.last_emailed_at,
        user.trust_level,
        user.approved,
        user.suspended_at,
        user.suspended_till,
        user.silenced_till,
        user.active,
        user.admin,
        user.moderator,
        user.ip_address,
        user.staged,
        secondary_emails.join(";"),
        user.user_stat.topics_entered,
        user.user_stat.posts_read_count,
        user.user_stat.time_read,
        user.user_stat.topic_count,
        user.user_stat.post_count,
        user.user_stat.likes_given,
        user.user_stat.likes_received,
        escape_comma(user.user_profile.location),
        user.user_profile.website,
        user.user_profile.views,
      ]
    end

    def add_single_sign_on(user, user_info_array)
      if user.single_sign_on_record
        user_info_array.push(
          user.single_sign_on_record.external_id,
          user.single_sign_on_record.external_email,
          user.single_sign_on_record.external_username,
          escape_comma(user.single_sign_on_record.external_name),
          user.single_sign_on_record.external_avatar_url
        )
      else
        user_info_array.push(nil, nil, nil, nil, nil)
      end
      user_info_array
    end

    def add_custom_fields(user, user_info_array, user_field_ids)
      if user_field_ids.present?
        user.user_fields.each do |custom_field|
          user_info_array << escape_comma(custom_field[1])
        end
      end
      user_info_array
    end

    def add_group_names(user, user_info_array)
      group_names = user.groups.map { |g| g.name }.join(";")
      if group_names.present?
        user_info_array << escape_comma(group_names)
      else
        user_info_array << nil
      end
      user_info_array
    end
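
    # Resolves the acting user, the action name, and a human-readable subject
    # for a single staff-action row.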
    def get_staff_action_fields(staff_action)
      staff_action_array = []

      HEADER_ATTRS_FOR['staff_action'].each do |attr|
        data =
          if attr == 'action'
            UserHistory.actions.key(staff_action.attributes[attr]).to_s
          elsif attr == 'staff_user'
            user = User.find_by(id: staff_action.attributes['acting_user_id'])
            user&.username
          elsif attr == 'subject'
            user = User.find_by(id: staff_action.attributes['target_user_id'])
            user.nil? ? staff_action.attributes[attr] : "#{user.username} #{staff_action.attributes[attr]}"
          else
            staff_action.attributes[attr]
          end

        staff_action_array.push(data)
      end

      staff_action_array
    end

    def get_screened_email_fields(screened_email)
      screened_email_array = []

      HEADER_ATTRS_FOR['screened_email'].each do |attr|
        data =
          if attr == 'action'
            ScreenedEmail.actions.key(screened_email.attributes['action_type']).to_s
          else
            screened_email.attributes[attr]
          end

        screened_email_array.push(data)
      end

      screened_email_array
    end

    def get_screened_ip_fields(screened_ip)
      screened_ip_array = []

      HEADER_ATTRS_FOR['screened_ip'].each do |attr|
        data =
          if attr == 'action'
            ScreenedIpAddress.actions.key(screened_ip.attributes['action_type']).to_s
          else
            screened_ip.attributes[attr]
          end

        screened_ip_array.push(data)
      end

      screened_ip_array
    end

    def get_screened_url_fields(screened_url)
      screened_url_array = []

      HEADER_ATTRS_FOR['screened_url'].each do |attr|
        data =
          if attr == 'action'
            # fall back to "do nothing" when no action name is found, and make
            # sure the resolved name (not the assignment's nil) is returned
            action = ScreenedUrl.actions.key(screened_url.attributes['action_type']).to_s
            action.presence || "do nothing"
          else
            screened_url.attributes[attr]
          end

        screened_url_array.push(data)
      end

      screened_url_array
    end
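
    # Sends the requester a success message with a download link when the
    # upload exists, or a failure message otherwise; returns the created post.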
    def notify_user(upload, export_title)
      post = nil

      if @current_user
        post =
          if upload
            SystemMessage.create_from_system_user(
              @current_user,
              :csv_export_succeeded,
              download_link: UploadMarkdown.new(upload).attachment_markdown,
              export_title: export_title
            )
          else
            SystemMessage.create_from_system_user(@current_user, :csv_export_failed)
          end
      end

      post
    end
  end

end