FEATURE: Allows CSV file result to be attached in automated PMs (#318)
This commit adds an optional setting that allows attaching query results, as a CSV file, to the PMs sent by Data Explorer's automation scripts.

Meta topic: https://meta.discourse.org/t/turn-data-explorer-query-results-into-csv-to-attach-to-discourse-automated-emails/267529

Co-authored-by: Drenmi <drenmi@gmail.com>
parent 68760cd3a5, commit cbae98f6ed
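In short, the automation script gains an attach_csv boolean field; when it is set, the report generator attaches the query result as a CSV upload to each PM. A minimal sketch of the new call path, pieced together from the plugin.rb and ReportGenerator changes below (query_id, query_params, and recipients are illustrative placeholders):

  # attach_csv comes from the new automation field; the other values are illustrative
  attach_csv = true
  pms =
    DiscourseDataExplorer::ReportGenerator.generate(
      query_id,      # id of the Data Explorer query configured in the automation
      query_params,  # "key-value" params field, unchanged by this commit
      recipients,    # usernames/groups/emails allowed to see the query
      { skip_empty: false, attach_csv: attach_csv },
    )
  # When attach_csv is true, each returned PM body now ends with an
  # "Appendix: [<slug>@<host>-<date>.dcqresult.csv|attachment](upload://...)" line.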
@@ -179,49 +179,27 @@ module ::DiscourseDataExplorer
        render json: { success: false, errors: [err_msg] }, status: 422
      else
-       pg_result = result[:pg_result]
-       cols = pg_result.fields
+       content_disposition =
+         "attachment; filename=#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult"

        respond_to do |format|
          format.json do
-           if params[:download]
-             response.headers[
-               "Content-Disposition"
-             ] = "attachment; filename=#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.json"
-           end
-           json = {
-             success: true,
-             errors: [],
-             duration: (result[:duration_secs].to_f * 1000).round(1),
-             result_count: pg_result.values.length || 0,
-             params: query_params,
-             columns: cols,
-             default_limit: SiteSetting.data_explorer_query_result_limit,
-           }
-           json[:explain] = result[:explain] if opts[:explain]
-
-           if !params[:download]
-             relations, colrender = DataExplorer.add_extra_data(pg_result)
-             json[:relations] = relations
-             json[:colrender] = colrender
-           end
-
-           json[:rows] = pg_result.values
-
-           render json: json
+           response.headers["Content-Disposition"] = "#{content_disposition}.json" if params[
+             :download
+           ]
+           render json:
+                    ResultFormatConverter.convert(
+                      :json,
+                      result,
+                      query_params:,
+                      download: params[:download],
+                    )
          end
          format.csv do
-           response.headers[
-             "Content-Disposition"
-           ] = "attachment; filename=#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
-
-           require "csv"
-           text =
-             CSV.generate do |csv|
-               csv << cols
-               pg_result.values.each { |row| csv << row }
-             end
-
-           render plain: text
+           response.headers["Content-Disposition"] = "#{content_disposition}.csv"
+           render plain: ResultFormatConverter.convert(:csv, result)
          end
        end
      end
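For illustration, a rough sketch of the filename this Content-Disposition header produces, using a made-up query slug and host (both hypothetical, not from the diff):

  query_slug = "top-likers"          # hypothetical query.slug
  host_slug  = "forum-example-com"   # hypothetical Slug.for(Discourse.current_hostname, "discourse")
  "attachment; filename=#{query_slug}@#{host_slug}-#{Date.today}.dcqresult"
  # => e.g. "attachment; filename=top-likers@forum-example-com-2024-01-01.dcqresult"
  # The controller then appends ".json" or ".csv" depending on the requested format.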
@@ -116,3 +116,5 @@ en:
            label: Data Explorer Query parameters
          skip_empty:
            label: Skip sending PM if there are no results
+         attach_csv:
+           label: Attach the CSV file to the PM
@@ -9,13 +9,13 @@ module ::DiscourseDataExplorer
      recipients = filter_recipients_by_query_access(recipients, query)
      params = params_to_hash(query_params)

-     result = DataExplorer.run_query(query, params)[:pg_result]
+     result = DataExplorer.run_query(query, params)
      query.update!(last_run_at: Time.now)

-     return [] if opts[:skip_empty] && result.values.empty?
-     table = ResultToMarkdown.convert(result)
+     return [] if opts[:skip_empty] && result[:pg_result].values.empty?
+     table = ResultToMarkdown.convert(result[:pg_result])

-     build_report_pms(query, table, recipients)
+     build_report_pms(query, table, recipients, attach_csv: opts[:attach_csv], result:)
    end

    private
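The generator now keeps the full hash returned by DataExplorer.run_query instead of unwrapping :pg_result immediately, because the CSV converter needs the whole thing. A minimal sketch of the shape this change relies on, listing only the keys actually referenced in this diff:

  # result = DataExplorer.run_query(query, params)
  # result[:pg_result]      # PG result with .fields and .values; feeds the markdown table and CSV
  # result[:duration_secs]  # used by ResultFormatConverter#to_json for the duration field
  # result[:explain]        # only included when the explain option is set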
@@ -40,8 +40,20 @@ module ::DiscourseDataExplorer
      params_hash
    end

-   def self.build_report_pms(query, table = "", targets = [])
+   def self.build_report_pms(query, table = "", targets = [], attach_csv: false, result: nil)
      pms = []
+     upload =
+       if attach_csv
+         tmp_filename =
+           "#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
+         tmp = Tempfile.new(tmp_filename)
+         tmp.write(ResultFormatConverter.convert(:csv, result))
+         tmp.rewind
+         UploadCreator.new(tmp, tmp_filename, type: "csv_export").create_for(
+           Discourse.system_user.id,
+         )
+       end
+
      targets.each do |target|
        name = target[0]
        pm_type = "target_#{target[1]}s"
@@ -53,6 +65,9 @@ module ::DiscourseDataExplorer
            "Query Name:\n#{query.name}\n\nHere are the results:\n#{table}\n\n" +
            "<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
            "Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})"
+       if upload
+         pm["raw"] << "\n\nAppendix: [#{upload.original_filename}|attachment](#{upload.short_url})"
+       end
        pms << pm
      end
      pms
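A quick sketch of what that appendix line looks like in the PM body, with made-up values standing in for the upload created above (the real filename and short URL come from UploadCreator):

  filename  = "top-likers@forum-example-com-2024-01-01.dcqresult.csv"  # hypothetical upload.original_filename
  short_url = "upload://abc123.csv"                                    # hypothetical upload.short_url
  "\n\nAppendix: [#{filename}|attachment](#{short_url})"
  # Discourse renders this markdown as a downloadable attachment link in the sent PM.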
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module ::DiscourseDataExplorer
+  class ResultFormatConverter
+    def self.convert(file_type, result, opts = {})
+      self.new(result, opts).send("to_#{file_type}")
+    end
+
+    def initialize(result, opts)
+      @result = result
+      @opts = opts
+    end
+
+    private
+
+    attr_reader :result
+    attr_reader :opts
+
+    def pg_result
+      @pg_result ||= @result[:pg_result]
+    end
+
+    def cols
+      @cols ||= pg_result.fields
+    end
+
+    def to_csv
+      require "csv"
+      CSV.generate do |csv|
+        csv << cols
+        pg_result.values.each { |row| csv << row }
+      end
+    end
+
+    def to_json
+      json = {
+        success: true,
+        errors: [],
+        duration: (result[:duration_secs].to_f * 1000).round(1),
+        result_count: pg_result.values.length || 0,
+        params: opts[:query_params],
+        columns: cols,
+        default_limit: SiteSetting.data_explorer_query_result_limit,
+      }
+      json[:explain] = result[:explain] if opts[:explain]
+
+      if !opts[:download]
+        relations, colrender = DataExplorer.add_extra_data(pg_result)
+        json[:relations] = relations
+        json[:colrender] = colrender
+      end
+
+      json[:rows] = pg_result.values
+
+      json
+    end
+
+    #TODO: we can move ResultToMarkdown here
+  end
+end
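For context, the two call sites added in this commit use the converter roughly like this (a sketch assembled from the controller, report generator, and spec changes in this diff, not a separate API):

  result = DiscourseDataExplorer::DataExplorer.run_query(query, params)

  # CSV: returns a plain string with a header row followed by one line per result row
  csv_text = DiscourseDataExplorer::ResultFormatConverter.convert(:csv, result)

  # JSON: returns the hash previously built inline in the controller;
  # :query_params and :download are passed through as options
  json_hash =
    DiscourseDataExplorer::ResultFormatConverter.convert(
      :json,
      result,
      query_params: params,
      download: true,
    )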
@@ -79,6 +79,7 @@ after_initialize do

  require_relative "lib/report_generator"
  require_relative "lib/result_to_markdown"
+ require_relative "lib/result_format_converter"
  reloadable_patch do
    if defined?(DiscourseAutomation)
      add_automation_scriptable("recurring_data_explorer_result_pm") do
@@ -90,6 +91,7 @@ after_initialize do
          field :query_id, component: :choices, required: true, extra: { content: queries }
          field :query_params, component: :"key-value", accepts_placeholders: true
          field :skip_empty, component: :boolean
+         field :attach_csv, component: :boolean

          version 1
          triggerables [:recurring]
@@ -99,6 +101,7 @@ after_initialize do
            query_id = fields.dig("query_id", "value")
            query_params = fields.dig("query_params", "value") || {}
            skip_empty = fields.dig("skip_empty", "value") || false
+           attach_csv = fields.dig("attach_csv", "value") || false

            unless SiteSetting.data_explorer_enabled
              Rails.logger.warn "#{DiscourseDataExplorer::PLUGIN_NAME} - plugin must be enabled to run automation #{automation.id}"
@@ -111,7 +114,7 @@ after_initialize do
            end

            DiscourseDataExplorer::ReportGenerator
-             .generate(query_id, query_params, recipients, { skip_empty: })
+             .generate(query_id, query_params, recipients, { skip_empty:, attach_csv: })
              .each do |pm|
                begin
                  utils.send_pm(pm, automation_id: automation.id, prefers_encrypt: false)
@@ -130,5 +130,25 @@ describe DiscourseDataExplorer::ReportGenerator do
      expect(result[1]["target_group_names"]).to eq([group.name])
      expect(result[2]["target_emails"]).to eq(["john@doe.com"])
    end
+
+   it "works with attached csv file" do
+     SiteSetting.personal_message_enabled_groups = group.id
+     DiscourseDataExplorer::ResultToMarkdown.expects(:convert).returns("le table")
+     freeze_time
+
+     result =
+       described_class.generate(query.id, query_params, [user.username], { attach_csv: true })
+
+     filename =
+       "#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
+
+     expect(result[0]["raw"]).to include(
+       "Hi #{user.username}, your data explorer report is ready.\n\n" +
+         "Query Name:\n#{query.name}\n\nHere are the results:\nle table\n\n" +
+         "<a href='#{Discourse.base_url}/admin/plugins/explorer?id=#{query.id}'>View query in Data Explorer</a>\n\n" +
+         "Report created at #{Time.zone.now.strftime("%Y-%m-%d at %H:%M:%S")} (#{Time.zone.name})\n\n" +
+         "Appendix: [#{filename}|attachment](upload://",
+     )
+   end
  end
end
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+describe DiscourseDataExplorer::ResultFormatConverter do
+  fab!(:user)
+  fab!(:post)
+  fab!(:query) { DiscourseDataExplorer::Query.find(-1) }
+
+  let(:query_params) { [{ from_days_ago: 0 }, { duration_days: 15 }] }
+  let(:query_result) { DiscourseDataExplorer::DataExplorer.run_query(query, query_params) }
+
+  before { SiteSetting.data_explorer_enabled = true }
+
+  describe ".convert" do
+    context "for csv files" do
+      it "format results as a csv table with headers and columns" do
+        result = described_class.convert(:csv, query_result)
+
+        table = <<~CSV
+          liker_user_id,liked_user_id,count
+        CSV
+
+        expect(result).to include(table)
+      end
+    end
+
+    context "for json files" do
+      it "format results as a json file" do
+        result = described_class.convert(:json, query_result, { query_params: })
+
+        expect(result[:columns]).to contain_exactly("liker_user_id", "liked_user_id", "count")
+        expect(result[:params]).to eq(query_params)
+      end
+    end
+  end
+end