FEATURE: Add script to post report results in a topic regularly (#328)

This script is similar to the existing one that schedules
report results to be sent as a PM on a regular basis,
but it takes a topic ID and posts the results there instead.
This way people can also have report results posted to a
public topic regularly, without having to deal with PM
recipients and so on.
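
For context, a minimal sketch of how the new script might be configured from a console, using the field names introduced below; the Automation.create! call and all values are illustrative placeholders, not part of this commit:

# Illustrative configuration of the new script; the creation call and every
# value here are placeholders, mirroring the fields registered below.
automation =
  DiscourseAutomation::Automation.create!(
    name: "Daily query report",
    script: "recurring_data_explorer_result_topic",
    trigger: "recurring",
    last_updated_by_id: Discourse.system_user.id,
  )
automation.upsert_field!("query_id", "choices", { value: 42 })
automation.upsert_field!("topic_id", "text", { value: 1234 })
automation.upsert_field!("query_params", "key-value", { value: [%w[from_days_ago 7]] })
automation.upsert_field!("skip_empty", "boolean", { value: true })
automation.upsert_field!("attach_csv", "boolean", { value: false })
automation.upsert_field!(
  "recurrence",
  "period",
  { value: { interval: 1, frequency: "day" } },
  target: "trigger",
)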
Martin Brennan 2024-10-10 16:09:09 +10:00 committed by GitHub
parent d3064661cb
commit da1c99ec2d
8 changed files with 410 additions and 54 deletions

View File

@@ -118,3 +118,15 @@ en:
label: Skip sending PM if there are no results
attach_csv:
label: Attach the CSV file to the PM
recurring_data_explorer_result_topic:
fields:
topic_id:
label: The topic to post query results in
query_id:
label: Data Explorer Query
query_params:
label: Data Explorer Query parameters
skip_empty:
label: Skip posting if there are no results
attach_csv:
label: Attach the CSV file to the post

View File

@@ -5,4 +5,34 @@ en:
scriptables:
recurring_data_explorer_result_pm:
title: Schedule a PM with Data Explorer results
description: Get scheduled reports sent to your messages each month
description: Get scheduled reports sent to your messages
recurring_data_explorer_result_topic:
title: Schedule a post in a topic with Data Explorer results
description: Get scheduled reports posted to a specific topic
data_explorer:
report_generator:
private_message:
title: "Scheduled report for %{query_name}"
body: |
Hi %{recipient_name}, your Data Explorer report is ready.
Query name:
%{query_name}
Here are the results:
%{table}
<a href='%{base_url}/admin/plugins/explorer?id=%{query_id}'>View query in Data Explorer</a>
Report created at %{created_at} (%{timezone})
post:
body: |
### Scheduled report for %{query_name}
Here are the results:
%{table}
<a href='%{base_url}/admin/plugins/explorer?id=%{query_id}'>View query in Data Explorer</a>
Report created at %{created_at} (%{timezone})
upload_appendix: "Appendix: [%{filename}|attachment](%{short_url})"
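
A hedged illustration of how the post.body template above expands; the query name, query id, and table are placeholder values:

# Illustrative only: renders the post.body template added above with placeholder values.
I18n.t(
  "data_explorer.report_generator.post.body",
  query_name: "my query",
  table: "| data |\n| :----- |\n| testabcd |\n",
  base_url: Discourse.base_url,
  query_id: 42,
  created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
  timezone: Time.zone.name,
)
# => "### Scheduled report for my query\nHere are the results:\n| data |\n..."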

View File

@@ -18,6 +18,21 @@ module ::DiscourseDataExplorer
build_report_pms(query, table, recipients, attach_csv: opts[:attach_csv], result:)
end
def self.generate_post(query_id, query_params, opts = {})
query = DiscourseDataExplorer::Query.find(query_id)
return {} if !query
params = params_to_hash(query_params)
result = DataExplorer.run_query(query, params)
query.update!(last_run_at: Time.now)
return {} if opts[:skip_empty] && result[:pg_result].values.empty?
table = ResultToMarkdown.convert(result[:pg_result])
build_report_post(query, table, attach_csv: opts[:attach_csv], result:)
end
private
def self.params_to_hash(query_params)
@@ -42,37 +57,77 @@ module ::DiscourseDataExplorer
def self.build_report_pms(query, table = "", targets = [], attach_csv: false, result: nil)
pms = []
upload =
if attach_csv
tmp_filename =
"#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
tmp = Tempfile.new(tmp_filename)
tmp.write(ResultFormatConverter.convert(:csv, result))
tmp.rewind
UploadCreator.new(tmp, tmp_filename, type: "csv_export").create_for(
Discourse.system_user.id,
)
end
upload = create_csv_upload(query, result) if attach_csv
targets.each do |target|
name = target[0]
pm_type = "target_#{target[1]}s"
pm = {}
pm["title"] = "Scheduled Report for #{query.name}"
pm["title"] = I18n.t(
"data_explorer.report_generator.private_message.title",
query_name: query.name,
)
pm[pm_type] = Array(name)
pm["raw"] = "Hi #{name}, your data explorer report is ready.\n\n" +
"Query Name:\n#{query.name}\n\nHere are the results:\n#{table}\n\n" +
"<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
"Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})"
pm["raw"] = I18n.t(
"data_explorer.report_generator.private_message.body",
recipient_name: name,
query_name: query.name,
table: table,
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
)
if upload
pm["raw"] << "\n\nAppendix: [#{upload.original_filename}|attachment](#{upload.short_url})"
pm["raw"] << "\n\n" +
I18n.t(
"data_explorer.report_generator.upload_appendix",
filename: upload.original_filename,
short_url: upload.short_url,
)
end
pms << pm
end
pms
end
def self.build_report_post(query, table = "", attach_csv: false, result: nil)
upload = create_csv_upload(query, result) if attach_csv
post = {}
post["raw"] = I18n.t(
"data_explorer.report_generator.post.body",
recipient_name: name,
query_name: query.name,
table: table,
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
)
if upload
post["raw"] << "\n\n" +
I18n.t(
"data_explorer.report_generator.upload_appendix",
filename: upload.original_filename,
short_url: upload.short_url,
)
end
post
end
def self.create_csv_upload(query, result)
tmp_filename =
"#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
tmp = Tempfile.new(tmp_filename)
tmp.write(ResultFormatConverter.convert(:csv, result))
tmp.rewind
UploadCreator.new(tmp, tmp_filename, type: "csv_export").create_for(Discourse.system_user.id)
end
def self.filter_recipients_by_query_access(recipients, query)
users = User.where(username: recipients)
groups = Group.where(name: recipients)
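
A hedged usage sketch of the new generate_post entry point defined above; the query id, params, and topic id are placeholders:

# Illustrative usage of generate_post as defined above; query id, params, and
# topic id are placeholders. An empty hash comes back when the query is missing,
# or when skip_empty is set and the query returned no rows.
post =
  DiscourseDataExplorer::ReportGenerator.generate_post(
    42,
    [%w[from_days_ago 7]],
    { skip_empty: true, attach_csv: false },
  )
if post.present?
  PostCreator.create!(
    Discourse.system_user,
    topic_id: 1234,
    raw: post["raw"],
    skip_validations: true,
  )
end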

View File

@@ -115,6 +115,60 @@ after_initialize do
end
end
end
add_automation_scriptable("recurring_data_explorer_result_topic") do
queries =
DiscourseDataExplorer::Query
.where(hidden: false)
.map { |q| { id: q.id, translated_name: q.name } }
field :topic_id, component: :text, required: true
field :query_id, component: :choices, required: true, extra: { content: queries }
field :query_params, component: :"key-value", accepts_placeholders: true
field :skip_empty, component: :boolean
field :attach_csv, component: :boolean
version 1
triggerables [:recurring]
script do |_, fields, automation|
topic_id = fields.dig("topic_id", "value")
query_id = fields.dig("query_id", "value")
query_params = fields.dig("query_params", "value") || {}
skip_empty = fields.dig("skip_empty", "value") || false
attach_csv = fields.dig("attach_csv", "value") || false
unless SiteSetting.data_explorer_enabled
Rails.logger.warn "#{DiscourseDataExplorer::PLUGIN_NAME} - plugin must be enabled to run automation #{automation.id}"
next
end
topic = Topic.find_by(id: topic_id)
if topic.blank?
Rails.logger.warn "#{DiscourseDataExplorer::PLUGIN_NAME} - couldn't find topic ID (#{topic_id}) for automation #{automation.id}"
next
end
begin
post =
DiscourseDataExplorer::ReportGenerator.generate_post(
query_id,
query_params,
{ skip_empty:, attach_csv: },
)
next if post.empty?
PostCreator.create!(
Discourse.system_user,
topic_id: topic.id,
raw: post["raw"],
skip_validations: true,
)
rescue ActiveRecord::RecordNotSaved => e
Rails.logger.warn "#{DiscourseDataExplorer::PLUGIN_NAME} - couldn't reply to topic ID #{topic_id} for automation #{automation.id}: #{e.message}"
end
end
end
end
end
end
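
For quick verification, the specs below exercise the script either by running the recurring tracker job after advancing time or by calling trigger! directly; a minimal console sketch, where the lookup is illustrative:

# Illustrative sketch mirroring what the specs below do; the lookup is a placeholder.
automation = DiscourseAutomation::Automation.find_by(script: "recurring_data_explorer_result_topic")
automation.trigger!                            # run the script block immediately
Jobs::DiscourseAutomation::Tracker.new.execute # or let the recurring tracker pick it up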

View File

@@ -2,7 +2,7 @@
require "rails_helper"
describe "RecurringDataExplorerResultPm" do
describe "RecurringDataExplorerResultPM" do
fab!(:admin)
fab!(:user)
@@ -16,7 +16,7 @@ describe "RecurringDataExplorerResultPm" do
fab!(:automation) do
Fabricate(:automation, script: "recurring_data_explorer_result_pm", trigger: "recurring")
end
fab!(:query)
fab!(:query) { Fabricate(:query, sql: "SELECT 'testabcd' AS data") }
fab!(:query_group) { Fabricate(:query_group, query: query, group: group) }
fab!(:query_group) { Fabricate(:query_group, query: query, group: another_group) }
@@ -49,7 +49,8 @@ describe "RecurringDataExplorerResultPm" do
freeze_time 1.day.from_now do
expect { Jobs::DiscourseAutomation::Tracker.new.execute }.to change { Topic.count }.by(3)
title = "Scheduled Report for #{query.name}"
title =
I18n.t("data_explorer.report_generator.private_message.title", query_name: query.name)
expect(Topic.last(3).pluck(:title)).to eq([title, title, title])
end
end
@@ -77,15 +78,27 @@ describe "RecurringDataExplorerResultPm" do
end
it "has appropriate content from the report generator" do
freeze_time
automation.update(last_updated_by_id: admin.id)
automation.trigger!
expect(Post.last.raw).to include(
"Hi #{another_group.name}, your data explorer report is ready.\n\nQuery Name:\n#{query.name}",
expect(Post.last.raw).to eq(
I18n.t(
"data_explorer.report_generator.private_message.body",
recipient_name: another_group.name,
query_name: query.name,
table: "| data |\n| :----- |\n| testabcd |\n",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
).chomp,
)
end
it "does not send the PM if skip_empty" do
query.update!(sql: "SELECT NULL LIMIT 0")
automation.upsert_field!("skip_empty", "boolean", { value: true })
automation.update(last_updated_by_id: admin.id)

View File

@@ -0,0 +1,92 @@
# frozen_string_literal: true
require "rails_helper"
describe "RecurringDataExplorerResultTopic" do
fab!(:admin)
fab!(:user)
fab!(:another_user) { Fabricate(:user) }
fab!(:group_user) { Fabricate(:user) }
fab!(:not_allowed_user) { Fabricate(:user) }
fab!(:topic)
fab!(:group) { Fabricate(:group, users: [user, another_user]) }
fab!(:another_group) { Fabricate(:group, users: [group_user]) }
fab!(:automation) do
Fabricate(:automation, script: "recurring_data_explorer_result_topic", trigger: "recurring")
end
fab!(:query) { Fabricate(:query, sql: "SELECT 'testabcd' AS data") }
fab!(:query_group) { Fabricate(:query_group, query: query, group: group) }
fab!(:query_group) { Fabricate(:query_group, query: query, group: another_group) }
before do
SiteSetting.data_explorer_enabled = true
SiteSetting.discourse_automation_enabled = true
automation.upsert_field!("query_id", "choices", { value: query.id })
automation.upsert_field!("topic_id", "text", { value: topic.id })
automation.upsert_field!(
"query_params",
"key-value",
{ value: [%w[from_days_ago 0], %w[duration_days 15]] },
)
automation.upsert_field!(
"recurrence",
"period",
{ value: { interval: 1, frequency: "day" } },
target: "trigger",
)
automation.upsert_field!("start_date", "date_time", { value: 2.minutes.ago }, target: "trigger")
end
context "when using recurring trigger" do
it "sends the post at recurring date_date" do
freeze_time 1.day.from_now do
expect { Jobs::DiscourseAutomation::Tracker.new.execute }.to change {
topic.reload.posts.count
}.by(1)
expect(topic.posts.last.raw).to eq(
I18n.t(
"data_explorer.report_generator.post.body",
query_name: query.name,
table: "| data |\n| :----- |\n| testabcd |\n",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
).chomp,
)
end
end
it "has appropriate content from the report generator" do
freeze_time
automation.update(last_updated_by_id: admin.id)
automation.trigger!
expect(topic.posts.last.raw).to eq(
I18n.t(
"data_explorer.report_generator.post.body",
query_name: query.name,
table: "| data |\n| :----- |\n| testabcd |\n",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
).chomp,
)
end
it "does not create the post if skip_empty" do
query.update!(sql: "SELECT NULL LIMIT 0")
automation.upsert_field!("skip_empty", "boolean", { value: true })
automation.update(last_updated_by_id: admin.id)
expect { automation.trigger! }.to_not change { Post.count }
end
end
end

View File

@@ -3,7 +3,7 @@
Fabricator(:query, from: DiscourseDataExplorer::Query) do
name { sequence(:name) { |i| "cat#{i}" } }
description { sequence(:desc) { |i| "description #{i}" } }
sql { sequence(:sql) { |i| "SELECT * FROM users limit #{i}" } }
sql { sequence(:sql) { |i| "SELECT * FROM users WHERE id > 0 LIMIT #{i}" } }
user
end

View File

@@ -12,7 +12,10 @@ describe DiscourseDataExplorer::ReportGenerator do
let(:query_params) { [%w[from_days_ago 0], %w[duration_days 15]] }
before { SiteSetting.data_explorer_enabled = true }
before do
SiteSetting.data_explorer_enabled = true
SiteSetting.authorized_extensions = "csv"
end
describe ".generate" do
it "returns [] if the recipient is not in query group" do
@@ -37,13 +40,23 @@ describe DiscourseDataExplorer::ReportGenerator do
expect(result).to eq(
[
{
"title" => "Scheduled Report for #{query.name}",
"title" =>
I18n.t(
"data_explorer.report_generator.private_message.title",
query_name: query.name,
),
"target_usernames" => [user.username],
"raw" =>
"Hi #{user.username}, your data explorer report is ready.\n\n" +
"Query Name:\n#{query.name}\n\nHere are the results:\nle table\n\n" +
"<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
"Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})",
I18n.t(
"data_explorer.report_generator.private_message.body",
recipient_name: user.username,
query_name: query.name,
table: "le table",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
),
},
],
)
@@ -57,17 +70,26 @@ describe DiscourseDataExplorer::ReportGenerator do
freeze_time
result = described_class.generate(query.id, query_params, [group.name, "non-existent-group"])
expect(result).to eq(
[
{
"title" => "Scheduled Report for #{query.name}",
"title" =>
I18n.t(
"data_explorer.report_generator.private_message.title",
query_name: query.name,
),
"target_group_names" => [group.name],
"raw" =>
"Hi #{group.name}, your data explorer report is ready.\n\n" +
"Query Name:\n#{query.name}\n\nHere are the results:\nle table\n\n" +
"<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
"Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})",
I18n.t(
"data_explorer.report_generator.private_message.body",
recipient_name: group.name,
query_name: query.name,
table: "le table",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
),
},
],
)
@@ -83,20 +105,30 @@ describe DiscourseDataExplorer::ReportGenerator do
expect(result).to eq(
[
{
"title" => "Scheduled Report for #{query.name}",
"title" =>
I18n.t(
"data_explorer.report_generator.private_message.title",
query_name: query.name,
),
"target_emails" => [email],
"raw" =>
"Hi #{email}, your data explorer report is ready.\n\n" +
"Query Name:\n#{query.name}\n\nHere are the results:\nle table\n\n" +
"<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
"Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})",
I18n.t(
"data_explorer.report_generator.private_message.body",
recipient_name: email,
query_name: query.name,
table: "le table",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
),
},
],
)
end
it "works with duplicate recipients" do
DiscourseDataExplorer::ResultToMarkdown.expects(:convert).returns("table data")
DiscourseDataExplorer::ResultToMarkdown.expects(:convert).returns("le table")
freeze_time
result = described_class.generate(query.id, query_params, [user.username, user.username])
@@ -104,13 +136,23 @@ describe DiscourseDataExplorer::ReportGenerator do
expect(result).to eq(
[
{
"title" => "Scheduled Report for #{query.name}",
"title" =>
I18n.t(
"data_explorer.report_generator.private_message.title",
query_name: query.name,
),
"target_usernames" => [user.username],
"raw" =>
"Hi #{user.username}, your data explorer report is ready.\n\n" +
"Query Name:\n#{query.name}\n\nHere are the results:\ntable data\n\n" +
"<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
"Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})",
I18n.t(
"data_explorer.report_generator.private_message.body",
recipient_name: user.username,
query_name: query.name,
table: "le table",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
),
},
],
)
@@ -118,7 +160,7 @@ describe DiscourseDataExplorer::ReportGenerator do
it "works with multiple recipient types" do
Fabricate(:query_group, query: query, group: group)
DiscourseDataExplorer::ResultToMarkdown.expects(:convert).returns("table data")
DiscourseDataExplorer::ResultToMarkdown.expects(:convert).returns("le table")
result =
described_class.generate(
@@ -144,12 +186,70 @@ describe DiscourseDataExplorer::ReportGenerator do
filename =
"#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
expect(result[0]["raw"]).to include(
"Hi #{user.username}, your data explorer report is ready.\n\n" +
"Query Name:\n#{query.name}\n\nHere are the results:\nle table\n\n" +
"<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
"Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})\n\n" +
"Appendix: [#{filename}|attachment](upload://",
expect(result[0]["raw"]).to eq(
I18n.t(
"data_explorer.report_generator.private_message.body",
recipient_name: user.username,
query_name: query.name,
table: "le table",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
) + "\n\n" +
I18n.t(
"data_explorer.report_generator.upload_appendix",
filename: filename,
short_url: Upload.find_by(original_filename: filename).short_url,
),
)
end
end
describe ".generate_post" do
it "works without attached csv file" do
DiscourseDataExplorer::ResultToMarkdown.expects(:convert).returns("le table")
freeze_time
result = described_class.generate_post(query.id, query_params)
expect(result["raw"]).to eq(
I18n.t(
"data_explorer.report_generator.post.body",
query_name: query.name,
table: "le table",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
),
)
end
it "works with attached csv file" do
DiscourseDataExplorer::ResultToMarkdown.expects(:convert).returns("le table")
freeze_time
result = described_class.generate_post(query.id, query_params, { attach_csv: true })
filename =
"#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
expect(result["raw"]).to eq(
I18n.t(
"data_explorer.report_generator.post.body",
query_name: query.name,
table: "le table",
base_url: Discourse.base_url,
query_id: query.id,
created_at: Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S"),
timezone: Time.zone.name,
) + "\n\n" +
I18n.t(
"data_explorer.report_generator.upload_appendix",
filename: filename,
short_url: Upload.find_by(original_filename: filename).short_url,
),
)
end
end