2019-04-29 20:27:42 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2015-10-11 05:41:23 -04:00
|
|
|
require 'rails_helper'
|
2014-08-09 06:28:57 -04:00
|
|
|
|
|
|
|
describe Jobs::ExportCsvFile do
|
|
|
|
|
2019-06-11 00:14:31 -04:00
|
|
|
# `describe` (not `context`) per RSpec convention for method-level groups.
describe '#execute' do
  fab!(:user) { Fabricate(:user, username: "john_doe") }

  it 'raises an error when the entity is missing' do
    expect { Jobs::ExportCsvFile.new.execute(user_id: user.id) }.to raise_error(Discourse::InvalidParameters)
  end

  it 'works' do
    begin
      # A successful export attaches exactly one new upload (the zip archive).
      expect do
        Jobs::ExportCsvFile.new.execute(
          user_id: user.id,
          entity: "user_archive"
        )
      end.to change { Upload.count }.by(1)

      # The job notifies the user via a system private message.
      system_message = user.topics_allowed.last

      expect(system_message.title).to eq(I18n.t(
        "system_messages.csv_export_succeeded.subject_template",
        export_title: "User Archive"
      ))

      upload = system_message.first_post.uploads.first

      expect(system_message.first_post.raw).to eq(I18n.t(
        "system_messages.csv_export_succeeded.text_body_template",
        download_link: "[#{upload.original_filename}|attachment](#{upload.short_url}) (#{upload.filesize} Bytes)"
      ).chomp)

      expect(system_message.id).to eq(UserExport.last.topic_id)
      expect(system_message.closed).to eq(true)

      # The archive should contain the CSV plus its preferences companion file.
      files = []
      Zip::File.open(Discourse.store.path_for(upload)) do |zip_file|
        zip_file.each { |entry| files << entry.name }
      end

      expect(files.size).to eq(2)
    ensure
      # Clean up the generated upload so it doesn't leak into other examples.
      user.uploads.each(&:destroy!)
    end
  end
end
|
|
|
|
|
2020-01-28 05:52:07 -05:00
|
|
|
# `describe` (not `context`) per RSpec convention for method-level groups.
describe '#user_archive_export' do
  let(:user) { Fabricate(:user) }

  # Three-level category chain to exercise the deepest supported nesting.
  let(:category) { Fabricate(:category_with_definition) }
  let(:subcategory) { Fabricate(:category_with_definition, parent_category_id: category.id) }
  let(:subsubcategory) { Fabricate(:category_with_definition, parent_category_id: subcategory.id) }

  it 'works with sub-sub-categories' do
    SiteSetting.max_category_nesting = 3
    topic = Fabricate(:topic, category: subsubcategory)
    post = Fabricate(:post, topic: topic, user: user)

    exporter = Jobs::ExportCsvFile.new
    exporter.current_user = User.find_by(id: user.id)

    # The export yields one row per post authored by the current user.
    rows = []
    exporter.user_archive_export { |row| rows << row }

    expect(rows.length).to eq(1)

    # Pair each header with its value so columns can be asserted by name.
    first_row = Jobs::ExportCsvFile::HEADER_ATTRS_FOR['user_archive'].zip(rows[0]).to_h

    expect(first_row["topic_title"]).to eq(topic.title)
    expect(first_row["categories"]).to eq("#{category.name}|#{subcategory.name}|#{subsubcategory.name}")
    expect(first_row["is_pm"]).to eq(I18n.t("csv_export.boolean_no"))
    expect(first_row["post"]).to eq(post.raw)
    expect(first_row["like_count"]).to eq(0)
    expect(first_row["reply_count"]).to eq(0)
  end
end
|
|
|
|
|
2019-06-28 02:50:31 -04:00
|
|
|
# `describe` (not `context`) per RSpec convention for method-level groups.
# Exercises #report_export across report shapes: single-column, filtered,
# multi-column, topic-based, and stacked_chart.
describe '.report_export' do
  let(:user) { Fabricate(:admin) }

  # Exporter pre-configured for a fixed 2010 reporting window; each example
  # sets `extra['name']` (and optional filters) before running it.
  let(:exporter) do
    exporter = Jobs::ExportCsvFile.new
    exporter.entity = 'report'
    exporter.extra = HashWithIndifferentAccess.new(start_date: '2010-01-01', end_date: '2011-01-01')
    exporter.current_user = User.find_by(id: user.id)
    exporter
  end

  it "does not throw an error when the dates are invalid" do
    Jobs::ExportCsvFile.new.execute(
      entity: 'report',
      user_id: user.id,
      args: { start_date: 'asdfasdf', end_date: 'not-a-date', name: 'dau_by_mau' }
    )
  end

  it 'works with single-column reports' do
    user.user_visits.create!(visited_at: '2010-01-01', posts_read: 42)
    Fabricate(:user).user_visits.create!(visited_at: '2010-01-03', posts_read: 420)

    exporter.extra['name'] = 'dau_by_mau'
    report = exporter.report_export.to_a

    expect(report.first).to contain_exactly("Day", "Percent")
    expect(report.second).to contain_exactly("2010-01-01", "100.0")
    expect(report.third).to contain_exactly("2010-01-03", "50.0")
  end

  it 'works with filters' do
    user.user_visits.create!(visited_at: '2010-01-01', posts_read: 42)

    group = Fabricate(:group)
    user1 = Fabricate(:user)
    # Membership record the group_id filter matches on (return value unused).
    Fabricate(:group_user, group: group, user: user1)
    user1.user_visits.create!(visited_at: '2010-01-03', posts_read: 420)

    exporter.extra['name'] = 'visits'
    exporter.extra['group_id'] = group.id
    report = exporter.report_export.to_a

    # Only the group member's visit is included: header + one data row.
    expect(report.length).to eq(2)
    expect(report.first).to contain_exactly("Day", "Count")
    expect(report.second).to contain_exactly("2010-01-03", "1")
  end

  it 'works with single-column reports with default label' do
    user.user_visits.create!(visited_at: '2010-01-01')
    Fabricate(:user).user_visits.create!(visited_at: '2010-01-03')

    exporter.extra['name'] = 'visits'
    report = exporter.report_export.to_a

    expect(report.first).to contain_exactly("Day", "Count")
    expect(report.second).to contain_exactly("2010-01-01", "1")
    expect(report.third).to contain_exactly("2010-01-03", "1")
  end

  it 'works with multi-columns reports' do
    DiscourseIpInfo.stubs(:get).with("1.1.1.1").returns(location: "Earth")
    user.user_auth_token_logs.create!(action: "login", client_ip: "1.1.1.1", created_at: '2010-01-01')

    exporter.extra['name'] = 'staff_logins'
    report = exporter.report_export.to_a

    expect(report.first).to contain_exactly("User", "Location", "Login at")
    expect(report.second).to contain_exactly(user.username, "Earth", "2010-01-01 00:00:00 UTC")
  end

  it 'works with topic reports' do
    freeze_time DateTime.parse('2010-01-01 6:00')

    exporter.extra['name'] = 'top_referred_topics'
    post1 = Fabricate(:post)
    # A second topic with no referrals must not appear in the report.
    Fabricate(:post)
    IncomingLink.add(host: "a.com", referer: "http://twitter.com", post_id: post1.id, ip_address: '1.1.1.1')

    report = exporter.report_export.to_a

    expect(report.first).to contain_exactly("Topic", "Clicks")
    expect(report.second).to contain_exactly(post1.topic.id.to_s, "1")
  end

  it 'works with stacked_chart reports' do
    ApplicationRequest.create!(date: '2010-01-01', req_type: 'page_view_logged_in', count: 1)
    ApplicationRequest.create!(date: '2010-01-02', req_type: 'page_view_logged_in', count: 2)
    ApplicationRequest.create!(date: '2010-01-03', req_type: 'page_view_logged_in', count: 3)

    ApplicationRequest.create!(date: '2010-01-01', req_type: 'page_view_anon', count: 4)
    ApplicationRequest.create!(date: '2010-01-02', req_type: 'page_view_anon', count: 5)
    ApplicationRequest.create!(date: '2010-01-03', req_type: 'page_view_anon', count: 6)

    ApplicationRequest.create!(date: '2010-01-01', req_type: 'page_view_crawler', count: 7)
    ApplicationRequest.create!(date: '2010-01-02', req_type: 'page_view_crawler', count: 8)
    ApplicationRequest.create!(date: '2010-01-03', req_type: 'page_view_crawler', count: 9)

    exporter.extra['name'] = 'consolidated_page_views'
    report = exporter.report_export.to_a

    # Stacked charts flatten into one column per series, keyed by day.
    expect(report[0]).to contain_exactly("Day", "Logged in users", "Anonymous users", "Crawlers")
    expect(report[1]).to contain_exactly("2010-01-01", "1", "4", "7")
    expect(report[2]).to contain_exactly("2010-01-02", "2", "5", "8")
    expect(report[3]).to contain_exactly("2010-01-03", "3", "6", "9")
  end
end
|
|
|
|
|
2017-09-13 12:09:11 -04:00
|
|
|
# Column order must match the rows produced by #user_list_export — the
# examples below zip header and row together, so do not reorder.
let(:user_list_header) do
  %w[
    id name username email title created_at last_seen_at last_posted_at
    last_emailed_at trust_level approved suspended_at suspended_till blocked
    active admin moderator ip_address staged secondary_emails topics_entered
    posts_read_count time_read topic_count post_count likes_given
    likes_received location website views external_id external_email
    external_username external_name external_avatar_url
  ]
end
|
|
|
|
|
|
|
|
# Fresh export of all users, memoized per example.
let(:user_list_export) do
  Jobs::ExportCsvFile.new.user_list_export
end
|
2014-11-25 17:43:17 -05:00
|
|
|
|
|
|
|
# Converts a raw CSV row (array of values) into a header-keyed hash so
# examples can assert on columns by name.
def to_hash(row)
  # Enumerable#zip + #to_h is the idiomatic form of Hash[*a.zip(b).flatten].
  user_list_header.zip(row).to_h
end
|
|
|
|
|
2019-04-11 02:55:02 -04:00
|
|
|
it "exports secondary emails" do
  user = Fabricate(:user)
  Fabricate(:secondary_email, user: user, primary: false)
  secondary_emails = user.secondary_emails

  # Locate this user's row by the id column (index 0) and key it by header.
  row = to_hash(user_list_export.find { |u| u[0].to_i == user.id })

  # Secondary emails are serialized as a single ';'-delimited cell.
  expect(row["secondary_emails"].split(";")).to match_array(secondary_emails)
end
|
|
|
|
|
2014-11-25 17:43:17 -05:00
|
|
|
it 'exports sso data' do
  SiteSetting.sso_url = "https://www.example.com/sso"
  SiteSetting.enable_sso = true

  user = Fabricate(:user)
  user.user_profile.update_column(:location, "La,La Land")
  user.create_single_sign_on_record(external_id: "123", last_payload: "xxx", external_email: 'test@test.com')

  # Locate this user's row by the id column (index 0) and key it by header.
  row = to_hash(user_list_export.find { |u| u[0].to_i == user.id })

  # A location containing a comma must come back CSV-quoted.
  expect(row["location"]).to eq('"La,La Land"')
  expect(row["external_id"]).to eq("123")
  expect(row["external_email"]).to eq("test@test.com")
end
|
|
|
|
end
|