DEV: Switch to new ExportUserArchive job
We now use the ExportUserArchive job class created in the previous commit.
parent a8560d741f
commit 7bf199b0c4
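
For orientation: condensed from the controller hunk below, the export endpoint now dispatches user archive requests to the new job and everything else to the old one (a sketch, not a verbatim excerpt of the file):

    # ExportCsvController#export_entity after this commit (condensed)
    if export_params[:entity] == 'user_archive'
      Jobs.enqueue(:export_user_archive, user_id: current_user.id, args: export_params[:args])
    else
      Jobs.enqueue(:export_csv_file, entity: export_params[:entity], user_id: current_user.id, args: export_params[:args])
    end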
@@ -6,7 +6,12 @@ class ExportCsvController < ApplicationController

   def export_entity
     guardian.ensure_can_export_entity!(export_params[:entity])
-    Jobs.enqueue(:export_csv_file, entity: export_params[:entity], user_id: current_user.id, args: export_params[:args])
+
+    if export_params[:entity] == 'user_archive'
+      Jobs.enqueue(:export_user_archive, user_id: current_user.id, args: export_params[:args])
+    else
+      Jobs.enqueue(:export_csv_file, entity: export_params[:entity], user_id: current_user.id, args: export_params[:args])
+    end
     StaffActionLogger.new(current_user).log_entity_export(export_params[:entity])
     render json: success_json
   rescue Discourse::InvalidAccess
@@ -12,8 +12,6 @@ module Jobs
     attr_accessor :entity

     HEADER_ATTRS_FOR ||= HashWithIndifferentAccess.new(
-      user_archive: ['topic_title', 'categories', 'is_pm', 'post', 'like_count', 'reply_count', 'url', 'created_at'],
-      user_archive_profile: ['location', 'website', 'bio', 'views'],
       user_list: ['id', 'name', 'username', 'email', 'title', 'created_at', 'last_seen_at', 'last_posted_at', 'last_emailed_at', 'trust_level', 'approved', 'suspended_at', 'suspended_till', 'silenced_till', 'active', 'admin', 'moderator', 'ip_address', 'staged', 'secondary_emails'],
       user_stats: ['topics_entered', 'posts_read_count', 'time_read', 'topic_count', 'post_count', 'likes_given', 'likes_received'],
       user_profile: ['location', 'website', 'views'],
@@ -31,7 +29,6 @@ module Jobs
       @current_user = User.find_by(id: args[:user_id])

       entities = [{ name: @entity }]
-      entities << { name: "user_archive_profile" } if @entity === "user_archive"

       entities.each do |entity|
         entity[:method] = :"#{entity[:name]}_export"
@@ -39,9 +36,7 @@ module Jobs

         @timestamp ||= Time.now.strftime("%y%m%d-%H%M%S")
         entity[:filename] =
-          if entity[:name] == "user_archive" || entity[:name] === "user_archive_profile"
-            "#{entity[:name].dasherize}-#{@current_user.username}-#{@timestamp}"
-          elsif entity[:name] == "report" && @extra[:name].present?
+          if entity[:name] == "report" && @extra[:name].present?
             "#{@extra[:name].dasherize}-#{@timestamp}"
           else
             "#{entity[:name].dasherize}-#{@timestamp}"
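
With the user archive branch gone, the remaining filename logic covers only named reports and the generic case; for example (timestamp values hypothetical):

    # entity "report" with @extra[:name] == "flags"  ->  "flags-200622-101530"
    # any other entity, e.g. "staff_action"          ->  "staff-action-200622-101530"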
@@ -108,30 +103,6 @@ module Jobs
       end
     end

-    def user_archive_export
-      return enum_for(:user_archive_export) unless block_given?
-
-      Post.includes(topic: :category)
-        .where(user_id: @current_user.id)
-        .select(:topic_id, :post_number, :raw, :like_count, :reply_count, :created_at)
-        .order(:created_at)
-        .with_deleted
-        .each do |user_archive|
-          yield get_user_archive_fields(user_archive)
-        end
-    end
-
-    def user_archive_profile_export
-      return enum_for(:user_archive_profile_export) unless block_given?
-
-      UserProfile
-        .where(user_id: @current_user.id)
-        .select(:location, :website, :bio_raw, :views)
-        .each do |user_profile|
-          yield get_user_archive_profile_fields(user_profile)
-        end
-    end
-
     def user_list_export
       return enum_for(:user_list_export) unless block_given?

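
The two removed methods follow the same exporter convention as the ones that stay behind (user_list_export and friends): called without a block they return an enumerator, otherwise they yield one CSV row per record. A minimal sketch of that convention, with hypothetical model and helper names:

    def some_entity_export
      return enum_for(:some_entity_export) unless block_given?  # allow lazy iteration when no block is given

      SomeModel.where(user_id: @current_user.id).each do |record|
        yield row_for(record)  # each yielded value is one CSV row (an array of fields)
      end
    end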
@@ -341,54 +312,6 @@ module Jobs
       user_info_array
     end

-    def get_user_archive_fields(user_archive)
-      user_archive_array = []
-      topic_data = user_archive.topic
-      user_archive = user_archive.as_json
-      topic_data = Topic.with_deleted.find_by(id: user_archive['topic_id']) if topic_data.nil?
-      return user_archive_array if topic_data.nil?
-
-      all_categories = Category.all.to_h { |category| [category.id, category] }
-
-      categories = "-"
-      if topic_data.category_id && category = all_categories[topic_data.category_id]
-        categories = [category.name]
-        while category.parent_category_id && category = all_categories[category.parent_category_id]
-          categories << category.name
-        end
-        categories = categories.reverse.join("|")
-      end
-
-      is_pm = topic_data.archetype == "private_message" ? I18n.t("csv_export.boolean_yes") : I18n.t("csv_export.boolean_no")
-      url = "#{Discourse.base_url}/t/#{topic_data.slug}/#{topic_data.id}/#{user_archive['post_number']}"
-
-      topic_hash = { "post" => user_archive['raw'], "topic_title" => topic_data.title, "categories" => categories, "is_pm" => is_pm, "url" => url }
-      user_archive.merge!(topic_hash)
-
-      HEADER_ATTRS_FOR['user_archive'].each do |attr|
-        user_archive_array.push(user_archive[attr])
-      end
-
-      user_archive_array
-    end
-
-    def get_user_archive_profile_fields(user_profile)
-      user_archive_profile = []
-
-      HEADER_ATTRS_FOR['user_archive_profile'].each do |attr|
-        data =
-          if attr == 'bio'
-            user_profile.attributes['bio_raw']
-          else
-            user_profile.attributes[attr]
-          end
-
-        user_archive_profile.push(data)
-      end
-
-      user_archive_profile
-    end
-
     def get_staff_action_fields(staff_action)
       staff_action_array = []

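
One detail worth noting from the removed get_user_archive_fields: the categories column walks up the parent chain and joins the names with "|", so a post in a sub-sub-category comes out as, for example (names illustrative):

    # categories column for a three-level hierarchy
    "parent-category|sub-category|sub-sub-category"

The removed '#user_archive_export' spec further down asserts exactly this format.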
@@ -7,6 +7,7 @@ module Jobs
     sidekiq_options retry: false

     attr_accessor :current_user
+    # note: contents provided entirely by user
     attr_accessor :extra

     COMPONENTS ||= %w(
@@ -5,26 +5,30 @@ require 'rails_helper'
 describe Jobs::ExportCsvFile do

   context '#execute' do
-    fab!(:user) { Fabricate(:user, username: "john_doe") }
+    let(:other_user) { Fabricate(:user) }
+    let(:admin) { Fabricate(:admin) }
+    let(:action_log) { StaffActionLogger.new(admin).log_revoke_moderation(other_user) }

     it 'raises an error when the entity is missing' do
-      expect { Jobs::ExportCsvFile.new.execute(user_id: user.id) }.to raise_error(Discourse::InvalidParameters)
+      expect { Jobs::ExportCsvFile.new.execute(user_id: admin.id) }.to raise_error(Discourse::InvalidParameters)
     end

     it 'works' do
+      action_log
+
       begin
         expect do
           Jobs::ExportCsvFile.new.execute(
-            user_id: user.id,
-            entity: "user_archive"
+            user_id: admin.id,
+            entity: "staff_action"
           )
         end.to change { Upload.count }.by(1)

-        system_message = user.topics_allowed.last
+        system_message = admin.topics_allowed.last

         expect(system_message.title).to eq(I18n.t(
           "system_messages.csv_export_succeeded.subject_template",
-          export_title: "User Archive"
+          export_title: "Staff Action"
         ))

         upload = system_message.first_post.uploads.first
@@ -42,44 +46,13 @@ describe Jobs::ExportCsvFile do
           zip_file.each { |entry| files << entry.name }
         end

-        expect(files.size).to eq(2)
+        expect(files.size).to eq(1)
       ensure
-        user.uploads.each(&:destroy!)
+        admin.uploads.each(&:destroy!)
       end
     end
   end

-  context '#user_archive_export' do
-    let(:user) { Fabricate(:user) }
-
-    let(:category) { Fabricate(:category_with_definition) }
-    let(:subcategory) { Fabricate(:category_with_definition, parent_category_id: category.id) }
-    let(:subsubcategory) { Fabricate(:category_with_definition, parent_category_id: subcategory.id) }
-
-    it 'works with sub-sub-categories' do
-      SiteSetting.max_category_nesting = 3
-      topic = Fabricate(:topic, category: subsubcategory)
-      post = Fabricate(:post, topic: topic, user: user)
-
-      exporter = Jobs::ExportCsvFile.new
-      exporter.current_user = User.find_by(id: user.id)
-
-      rows = []
-      exporter.user_archive_export { |row| rows << row }
-
-      expect(rows.length).to eq(1)
-
-      first_row = Jobs::ExportCsvFile::HEADER_ATTRS_FOR['user_archive'].zip(rows[0]).to_h
-
-      expect(first_row["topic_title"]).to eq(topic.title)
-      expect(first_row["categories"]).to eq("#{category.name}|#{subcategory.name}|#{subsubcategory.name}")
-      expect(first_row["is_pm"]).to eq(I18n.t("csv_export.boolean_no"))
-      expect(first_row["post"]).to eq(post.raw)
-      expect(first_row["like_count"]).to eq(0)
-      expect(first_row["reply_count"]).to eq(0)
-    end
-  end
-
   context '.report_export' do

     let(:user) { Fabricate(:admin) }
@@ -8,13 +8,12 @@ describe ExportCsvController do
     before { sign_in(user) }

     describe "#export_entity" do
-      it "enqueues export job" do
+      it "enqueues user archive job" do
         post "/export_csv/export_entity.json", params: { entity: "user_archive" }
         expect(response.status).to eq(200)
-        expect(Jobs::ExportCsvFile.jobs.size).to eq(1)
+        expect(Jobs::ExportUserArchive.jobs.size).to eq(1)

-        job_data = Jobs::ExportCsvFile.jobs.first["args"].first
-        expect(job_data["entity"]).to eq("user_archive")
+        job_data = Jobs::ExportUserArchive.jobs.first["args"].first
         expect(job_data["user_id"]).to eq(user.id)
       end

@@ -22,7 +21,7 @@ describe ExportCsvController do
         UserExport.create(file_name: "user-archive-codinghorror-150116-003249", user_id: user.id)
         post "/export_csv/export_entity.json", params: { entity: "user_archive" }
         expect(response.status).to eq(422)
-        expect(Jobs::ExportCsvFile.jobs.size).to eq(0)
+        expect(Jobs::ExportUserArchive.jobs.size).to eq(0)
       end

       it "returns 404 when normal user tries to export admin entity" do
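
The controller specs above inspect the queue directly via Sidekiq's fake testing mode, where enqueued jobs accumulate on the worker class; a condensed sketch of that inspection pattern (assuming fake mode, which the existing .jobs assertions already imply):

    Jobs.enqueue(:export_user_archive, user_id: user.id, args: {})
    job_data = Jobs::ExportUserArchive.jobs.first["args"].first
    job_data["user_id"]  # => user.id, matching the assertion above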