# frozen_string_literal: true

require "csv"

module Jobs
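  # Builds CSV exports (user lists, staff actions, screened items, reports)
  # in the background, zips and uploads the result, and notifies the
  # requesting user with a download link.
  #
  # A minimal enqueue sketch (illustrative values; Jobs.enqueue is how
  # Discourse schedules Sidekiq jobs):
  #
  #   Jobs.enqueue(:export_csv_file, entity: "user_list", user_id: admin.id)
  #   Jobs.enqueue(:export_csv_file, entity: "report", user_id: admin.id, args: { name: "signups" })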
  class ExportCsvFile < ::Jobs::Base
    sidekiq_options retry: false

    attr_accessor :extra
    attr_accessor :current_user
    attr_accessor :entity
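
    # CSV column names for each exportable entity; the corresponding *_export
    # methods below must yield row values in the same order.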
    HEADER_ATTRS_FOR ||=
      HashWithIndifferentAccess.new(
        user_list: %w[
          id
          name
          username
          email
          title
          created_at
          last_seen_at
          last_posted_at
          last_emailed_at
          trust_level
          approved
          suspended_at
          suspended_till
          silenced_till
          active
          admin
          moderator
          ip_address
          staged
          secondary_emails
        ],
        user_stats: %w[
          topics_entered
          posts_read_count
          time_read
          topic_count
          post_count
          likes_given
          likes_received
        ],
        user_profile: %w[location website views],
        user_sso: %w[
          external_id
          external_email
          external_username
          external_name
          external_avatar_url
        ],
        staff_action: %w[staff_user action subject created_at details context],
        screened_email: %w[email action match_count last_match_at created_at ip_address],
        screened_ip: %w[ip_address action match_count last_match_at created_at],
        screened_url: %w[domain action match_count last_match_at created_at],
        report: %w[date value],
      )
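
    # Entry point for the job. args: :entity (the export type, e.g.
    # "user_list"), :user_id (the requesting user), and an optional :args
    # hash forwarded to the exporters as @extra.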
    def execute(args)
      @entity = args[:entity]
      @extra = HashWithIndifferentAccess.new(args[:args]) if args[:args]
      @current_user = User.find_by(id: args[:user_id])

      entities = [{ name: @entity }]

      entities.each do |entity|
        entity[:method] = :"#{entity[:name]}_export"
        raise Discourse::InvalidParameters.new(:entity) unless respond_to?(entity[:method])

        @timestamp ||= Time.now.strftime("%y%m%d-%H%M%S")
        entity[:filename] = if entity[:name] == "report" && @extra[:name].present?
          "#{@extra[:name].dasherize}-#{@timestamp}"
        else
          "#{entity[:name].dasherize}-#{@timestamp}"
        end
      end

      export_title =
        if @entity == "report" && @extra[:name].present?
          I18n.t("reports.#{@extra[:name]}.title")
        else
          @entity.gsub("_", " ").titleize
        end

      filename = entities[0][:filename] # use first entity as a name for this export
      user_export = UserExport.create(file_name: filename, user_id: @current_user.id)
      filename = "#{filename}-#{user_export.id}"

      zip_filename = write_to_csv_and_zip(filename, entities)

      # create upload
      upload = nil

      if File.exist?(zip_filename)
        File.open(zip_filename) do |file|
          upload =
            UploadCreator.new(
              file,
              File.basename(zip_filename),
              type: "csv_export",
              for_export: "true",
            ).create_for(@current_user.id)

          if upload.persisted?
            user_export.update_columns(upload_id: upload.id)
          else
            Rails.logger.warn("Failed to upload the file #{zip_filename}")
          end
        end

        File.delete(zip_filename)
      end
    ensure
      post = notify_user(upload, export_title)

      if user_export.present? && post.present?
        topic = post.topic
        user_export.update_columns(topic_id: topic.id)
        topic.update_status("closed", true, Discourse.system_user)
      end
    end
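
    # Yields one row per user. Called without a block, each *_export method
    # returns an Enumerator instead, so rows can be streamed to the CSV
    # writer one at a time (see write_to_csv_and_zip).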
    def user_list_export
      return enum_for(:user_list_export) unless block_given?

      user_field_ids = UserField.pluck(:id)

      condition = {}
      if @extra && @extra[:trust_level] &&
           (trust_level = TrustLevel.levels[@extra[:trust_level].to_sym])
        condition = { trust_level: trust_level }
      end

      includes = %i[user_profile user_stat groups user_emails]
      includes << :single_sign_on_record if SiteSetting.enable_discourse_connect

      User
        .where(condition)
        .includes(*includes)
        .find_each do |user|
          user_info_array = get_base_user_array(user)
          if SiteSetting.enable_discourse_connect
            user_info_array = add_single_sign_on(user, user_info_array)
          end
          user_info_array = add_custom_fields(user, user_info_array, user_field_ids)
          user_info_array = add_group_names(user, user_info_array)
          yield user_info_array
        end
    end
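
    # Admins can export every staff action; other staff get the same data
    # minus entries flagged admin_only.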
    def staff_action_export
      return enum_for(:staff_action_export) unless block_given?

      staff_action_data =
        if @current_user.admin?
          UserHistory.only_staff_actions
        else
          UserHistory.where(admin_only: false).only_staff_actions
        end

      staff_action_data.find_each(order: :desc) do |staff_action|
        yield get_staff_action_fields(staff_action)
      end
    end
    def screened_email_export
      return enum_for(:screened_email_export) unless block_given?

      # NOTE: find_each batches by primary key and ignores any explicit order
      # clause, so rows stream in id order here.
      ScreenedEmail
        .order("last_match_at DESC")
        .find_each { |screened_email| yield get_screened_email_fields(screened_email) }
    end

    def screened_ip_export
      return enum_for(:screened_ip_export) unless block_given?

      ScreenedIpAddress
        .order("id DESC")
        .each { |screened_ip| yield get_screened_ip_fields(screened_ip) }
    end

    def screened_url_export
      return enum_for(:screened_url_export) unless block_given?

      ScreenedUrl
        .select(
          "domain, sum(match_count) as match_count, max(last_match_at) as last_match_at, min(created_at) as created_at",
        )
        .group(:domain)
        .order("last_match_at DESC")
        .each { |screened_url| yield get_screened_url_fields(screened_url) }
    end
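
    # Exports the report named by @extra[:name]. Invalid date strings are
    # treated as nil, category/group filters are normalized, and the header
    # row is derived from the report's own labels.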
    def report_export
      return enum_for(:report_export) unless block_given?

      # If dates are invalid, consider them nil
      if @extra[:start_date].is_a?(String)
        @extra[:start_date] = begin
          @extra[:start_date].to_date.beginning_of_day
        rescue StandardError
          nil
        end
      end
      if @extra[:end_date].is_a?(String)
        @extra[:end_date] = begin
          @extra[:end_date].to_date.end_of_day
        rescue StandardError
          nil
        end
      end

      @extra[:filters] = {}
      @extra[:filters][:category] = @extra[:category].to_i if @extra[:category].present?
      @extra[:filters][:group] = @extra[:group].to_i if @extra[:group].present?
      @extra[:filters][:include_subcategories] = !!ActiveRecord::Type::Boolean.new.cast(
        @extra[:include_subcategories],
      ) if @extra[:include_subcategories].present?

      report = Report.find(@extra[:name], @extra)

      header = []
      titles = {}

      report.labels.each do |label|
        if label[:type] == :user
          titles[label[:properties][:username]] = label[:title]
          header << label[:properties][:username]
        elsif label[:type] == :topic
          titles[label[:properties][:id]] = label[:title]
          header << label[:properties][:id]
        else
          titles[label[:property]] = label[:title]
          header << label[:property]
        end
      end

      if report.modes == [:stacked_chart]
        # Stacked charts arrive as one series per label; pivot them into one
        # row per x value, with a column per series.
        header = [:x]
        data = {}

        report.data.each do |series|
          header << series[:label]
          series[:data].each do |datapoint|
            data[datapoint[:x]] ||= { x: datapoint[:x] }
            data[datapoint[:x]][series[:label]] = datapoint[:y]
          end
        end

        data = data.values
      else
        data = report.data
      end

      yield header.map { |k| titles[k] || k }
      data.each { |row| yield row.values_at(*header).map(&:to_s) }
    end
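
    # Builds the CSV header row for an entity. The user list header is
    # assembled from several column sets plus custom user fields; everything
    # else comes straight from HEADER_ATTRS_FOR, e.g.
    # get_header("report") => ["date", "value"].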
    def get_header(entity)
      if entity == "user_list"
        header_array =
          HEADER_ATTRS_FOR["user_list"] + HEADER_ATTRS_FOR["user_stats"] +
            HEADER_ATTRS_FOR["user_profile"]
        header_array.concat(HEADER_ATTRS_FOR["user_sso"]) if SiteSetting.enable_discourse_connect
        user_custom_fields = UserField.all
        if user_custom_fields.present?
          user_custom_fields.each do |custom_field|
            header_array.push("#{custom_field.name} (custom user field)")
          end
        end
        header_array.push("group_names")
      else
        header_array = HEADER_ATTRS_FOR[entity]
      end

      header_array
    end

    private
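
    # Wraps strings containing commas in literal double quotes and returns
    # anything else unchanged, e.g. escape_comma("a,b") => "\"a,b\"" while
    # escape_comma("ab") => "ab".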
    def escape_comma(string)
      string&.include?(",") ? %Q|"#{string}"| : string
    end

    def get_base_user_array(user)
      # preloading scopes is hard, do this by hand
      secondary_emails = []
      primary_email = nil

      user.user_emails.each do |user_email|
        if user_email.primary?
          primary_email = user_email.email
        else
          secondary_emails << user_email.email
        end
      end

      [
        user.id,
        escape_comma(user.name),
        user.username,
        primary_email,
        escape_comma(user.title),
        user.created_at,
        user.last_seen_at,
        user.last_posted_at,
        user.last_emailed_at,
        user.trust_level,
        user.approved,
        user.suspended_at,
        user.suspended_till,
        user.silenced_till,
        user.active,
        user.admin,
        user.moderator,
        user.ip_address,
        user.staged,
        secondary_emails.join(";"),
        user.user_stat.topics_entered,
        user.user_stat.posts_read_count,
        user.user_stat.time_read,
        user.user_stat.topic_count,
        user.user_stat.post_count,
        user.user_stat.likes_given,
        user.user_stat.likes_received,
        escape_comma(user.user_profile.location),
        user.user_profile.website,
        user.user_profile.views,
      ]
    end

    def add_single_sign_on(user, user_info_array)
      if user.single_sign_on_record
        user_info_array.push(
          user.single_sign_on_record.external_id,
          user.single_sign_on_record.external_email,
          user.single_sign_on_record.external_username,
          escape_comma(user.single_sign_on_record.external_name),
          user.single_sign_on_record.external_avatar_url,
        )
      else
        user_info_array.push(nil, nil, nil, nil, nil)
      end
      user_info_array
    end

    def add_custom_fields(user, user_info_array, user_field_ids)
      if user_field_ids.present?
        # user_fields is a { field_id => value } hash; append each value
        user.user_fields.each { |custom_field| user_info_array << escape_comma(custom_field[1]) }
      end
      user_info_array
    end

    def add_group_names(user, user_info_array)
      group_names = user.groups.map { |g| g.name }.join(";")
      if group_names.present?
        user_info_array << escape_comma(group_names)
      else
        user_info_array << nil
      end
      user_info_array
    end

    def get_staff_action_fields(staff_action)
      staff_action_array = []

      HEADER_ATTRS_FOR["staff_action"].each do |attr|
        data =
          if attr == "action"
            UserHistory.actions.key(staff_action.attributes[attr]).to_s
          elsif attr == "staff_user"
            user = User.find_by(id: staff_action.attributes["acting_user_id"])
            user.username if !user.nil?
          elsif attr == "subject"
            user = User.find_by(id: staff_action.attributes["target_user_id"])
            if user.nil?
              staff_action.attributes[attr]
            else
              "#{user.username} #{staff_action.attributes[attr]}"
            end
          else
            staff_action.attributes[attr]
          end

        staff_action_array.push(data)
      end

      staff_action_array
    end

    def get_screened_email_fields(screened_email)
      screened_email_array = []

      HEADER_ATTRS_FOR["screened_email"].each do |attr|
        data =
          if attr == "action"
            ScreenedEmail.actions.key(screened_email.attributes["action_type"]).to_s
          else
            screened_email.attributes[attr]
          end

        screened_email_array.push(data)
      end

      screened_email_array
    end

    def get_screened_ip_fields(screened_ip)
      screened_ip_array = []

      HEADER_ATTRS_FOR["screened_ip"].each do |attr|
        data =
          if attr == "action"
            ScreenedIpAddress.actions.key(screened_ip.attributes["action_type"]).to_s
          else
            screened_ip.attributes[attr]
          end

        screened_ip_array.push(data)
      end

      screened_ip_array
    end

    def get_screened_url_fields(screened_url)
      screened_url_array = []

      HEADER_ATTRS_FOR["screened_url"].each do |attr|
        data =
          if attr == "action"
            action = ScreenedUrl.actions.key(screened_url.attributes["action_type"]).to_s
            # return the action itself, not the value of the guarded
            # assignment (which is nil when the action is present)
            action.blank? ? "do nothing" : action
          else
            screened_url.attributes[attr]
          end

        screened_url_array.push(data)
      end

      screened_url_array
    end

    def notify_user(upload, export_title)
      post = nil

      if @current_user
        post =
          if upload
            SystemMessage.create_from_system_user(
              @current_user,
              :csv_export_succeeded,
              download_link: UploadMarkdown.new(upload).attachment_markdown,
              export_title: export_title,
            )
          else
            SystemMessage.create_from_system_user(@current_user, :csv_export_failed)
          end
      end

      post
    end
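
    # Writes each entity's rows to its own CSV file inside a scratch
    # directory, zips that directory, and always removes the scratch files.
    # Rows are pulled one at a time through the entity's export method, so
    # they are written as they are produced. Returns the path to the archive,
    # as used by execute above. A sketch of the call shape (illustrative
    # values):
    #
    #   write_to_csv_and_zip(
    #     "user-list-240101-120000-42",
    #     [{ name: "user_list", method: :user_list_export, filename: "user-list-240101-120000" }],
    #   )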
    def write_to_csv_and_zip(filename, entities)
      dirname = "#{UserExport.base_directory}/#{filename}"
      FileUtils.mkdir_p(dirname) unless Dir.exist?(dirname)
      begin
        entities.each do |entity|
          CSV.open("#{dirname}/#{entity[:filename]}.csv", "w") do |csv|
            csv << get_header(entity[:name]) if entity[:name] != "report"
            public_send(entity[:method]) { |d| csv << d }
          end
        end

        Compression::Zip.new.compress(UserExport.base_directory, filename)
      ensure
        FileUtils.rm_rf(dirname)
      end
    end
  end
end