DEV: Update plugin to match latest guidelines (#229)

This commit updates the plugin to the latest guidelines, as shown in
discourse-plugin-skeleton: it moves much of the code into dedicated files,
uses proper namespaces, and relies on the autoloader as much as possible.
Bianca Nenciu 2023-03-22 23:29:08 +02:00 committed by GitHub
parent 075a508e52
commit 206d937a78
38 changed files with 1976 additions and 1928 deletions
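
The pattern applied throughout the diffs below follows discourse-plugin-skeleton: constants move from the compact DataExplorer:: prefix into an explicit top-level ::DiscourseDataExplorer module, and each file is named so that Zeitwerk (the Rails autoloader) can find the matching constant. A minimal sketch of the rename, using a hypothetical Example class rather than one of the plugin's real files:

# Before: compact namespace, typically required by hand from plugin.rb.
module DataExplorer
  class Example
  end
end

# After: explicit top-level namespace; the file would live at a path such as
# app/models/discourse_data_explorer/example.rb so the autoloader can resolve
# ::DiscourseDataExplorer::Example without an explicit require.
module ::DiscourseDataExplorer
  class Example
  end
end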

@@ -1,235 +0,0 @@
# frozen_string_literal: true
class DataExplorer::QueryController < ::ApplicationController
requires_plugin DataExplorer.plugin_name
before_action :set_group, only: %i[group_reports_index group_reports_show group_reports_run]
before_action :set_query, only: %i[group_reports_show group_reports_run show update]
before_action :ensure_admin
skip_before_action :check_xhr, only: %i[show group_reports_run run]
skip_before_action :ensure_admin,
only: %i[group_reports_index group_reports_show group_reports_run]
def index
queries =
DataExplorer::Query.where(hidden: false).order(:last_run_at, :name).includes(:groups).to_a
database_queries_ids = DataExplorer::Query.pluck(:id)
Queries.default.each do |params|
attributes = params.last
next if database_queries_ids.include?(attributes["id"])
query = DataExplorer::Query.new
query.id = attributes["id"]
query.sql = attributes["sql"]
query.name = attributes["name"]
query.description = attributes["description"]
query.user_id = Discourse::SYSTEM_USER_ID.to_s
queries << query
end
render_serialized queries, DataExplorer::QuerySerializer, root: "queries"
end
def show
check_xhr unless params[:export]
if params[:export]
response.headers["Content-Disposition"] = "attachment; filename=#{@query.slug}.dcquery.json"
response.sending_file = true
end
return raise Discourse::NotFound if !guardian.user_can_access_query?(@query) || @query.hidden
render_serialized @query, DataExplorer::QuerySerializer, root: "query"
end
def groups
render json: Group.all.select(:id, :name), root: false
end
def group_reports_index
return raise Discourse::NotFound unless guardian.user_is_a_member_of_group?(@group)
respond_to do |format|
format.json do
queries = DataExplorer::Query.for_group(@group)
render_serialized(queries, DataExplorer::QuerySerializer, root: "queries")
end
end
end
def group_reports_show
if !guardian.group_and_user_can_access_query?(@group, @query) || @query.hidden
return raise Discourse::NotFound
end
respond_to do |format|
format.json do
query_group = DataExplorer::QueryGroup.find_by(query_id: @query.id, group_id: @group.id)
render json: {
query: serialize_data(@query, DataExplorer::QuerySerializer, root: nil),
query_group:
serialize_data(query_group, DataExplorer::QueryGroupSerializer, root: nil),
}
end
end
end
def group_reports_run
if !guardian.group_and_user_can_access_query?(@group, @query) || @query.hidden
return raise Discourse::NotFound
end
run
end
def create
query =
DataExplorer::Query.create!(
params
.require(:query)
.permit(:name, :description, :sql)
.merge(user_id: current_user.id, last_run_at: Time.now),
)
group_ids = params.require(:query)[:group_ids]
group_ids&.each { |group_id| query.query_groups.find_or_create_by!(group_id: group_id) }
render_serialized query, DataExplorer::QuerySerializer, root: "query"
end
def update
ActiveRecord::Base.transaction do
@query.update!(params.require(:query).permit(:name, :sql, :description).merge(hidden: false))
group_ids = params.require(:query)[:group_ids]
DataExplorer::QueryGroup.where.not(group_id: group_ids).where(query_id: @query.id).delete_all
group_ids&.each { |group_id| @query.query_groups.find_or_create_by!(group_id: group_id) }
end
render_serialized @query, DataExplorer::QuerySerializer, root: "query"
rescue DataExplorer::ValidationError => e
render_json_error e.message
end
def destroy
query = DataExplorer::Query.where(id: params[:id]).first_or_initialize
query.update!(hidden: true)
render json: { success: true, errors: [] }
end
def schema
schema_version = DB.query_single("SELECT max(version) AS tag FROM schema_migrations").first
render json: DataExplorer.schema if stale?(public: true, etag: schema_version, template: false)
end
# Return value:
# success - true/false. if false, inspect the errors value.
# errors - array of strings.
# params - hash. Echo of the query parameters as executed.
# duration - float. Time to execute the query, in milliseconds, to 1 decimal place.
# columns - array of strings. Titles of the returned columns, in order.
# explain - string. (Optional - pass explain=true in the request) Postgres query plan, UNIX newlines.
# rows - array of array of strings. Results of the query. In the same order as 'columns'.
def run
check_xhr unless params[:download]
query = DataExplorer::Query.find(params[:id].to_i)
query.update!(last_run_at: Time.now)
response.sending_file = true if params[:download]
query_params = {}
query_params = MultiJson.load(params[:params]) if params[:params]
opts = { current_user: current_user.username }
opts[:explain] = true if params[:explain] == "true"
opts[:limit] = if params[:format] == "csv"
if params[:limit].present?
limit = params[:limit].to_i
limit = DataExplorer::QUERY_RESULT_MAX_LIMIT if limit > DataExplorer::QUERY_RESULT_MAX_LIMIT
limit
else
DataExplorer::QUERY_RESULT_MAX_LIMIT
end
elsif params[:limit].present?
params[:limit] == "ALL" ? "ALL" : params[:limit].to_i
end
result = DataExplorer.run_query(query, query_params, opts)
if result[:error]
err = result[:error]
# Pretty printing logic
err_class = err.class
err_msg = err.message
if err.is_a? ActiveRecord::StatementInvalid
err_class = err.original_exception.class
err_msg.gsub!("#{err_class}:", "")
else
err_msg = "#{err_class}: #{err_msg}"
end
render json: { success: false, errors: [err_msg] }, status: 422
else
pg_result = result[:pg_result]
cols = pg_result.fields
respond_to do |format|
format.json do
if params[:download]
response.headers[
"Content-Disposition"
] = "attachment; filename=#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.json"
end
json = {
success: true,
errors: [],
duration: (result[:duration_secs].to_f * 1000).round(1),
result_count: pg_result.values.length || 0,
params: query_params,
columns: cols,
default_limit: SiteSetting.data_explorer_query_result_limit,
}
json[:explain] = result[:explain] if opts[:explain]
if !params[:download]
relations, colrender = DataExplorer.add_extra_data(pg_result)
json[:relations] = relations
json[:colrender] = colrender
end
json[:rows] = pg_result.values
render json: json
end
format.csv do
response.headers[
"Content-Disposition"
] = "attachment; filename=#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
require "csv"
text =
CSV.generate do |csv|
csv << cols
pg_result.values.each { |row| csv << row }
end
render plain: text
end
end
end
end
private
def set_group
@group = Group.find_by(name: params["group_name"])
end
def set_query
@query = DataExplorer::Query.find(params[:id])
raise Discourse::NotFound unless @query
end
end

@@ -0,0 +1,239 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class QueryController < ApplicationController
requires_plugin PLUGIN_NAME
before_action :set_group, only: %i[group_reports_index group_reports_show group_reports_run]
before_action :set_query, only: %i[group_reports_show group_reports_run show update]
before_action :ensure_admin
skip_before_action :check_xhr, only: %i[show group_reports_run run]
skip_before_action :ensure_admin,
only: %i[group_reports_index group_reports_show group_reports_run]
def index
queries = Query.where(hidden: false).order(:last_run_at, :name).includes(:groups).to_a
database_queries_ids = Query.pluck(:id)
Queries.default.each do |params|
attributes = params.last
next if database_queries_ids.include?(attributes["id"])
query = Query.new
query.id = attributes["id"]
query.sql = attributes["sql"]
query.name = attributes["name"]
query.description = attributes["description"]
query.user_id = Discourse::SYSTEM_USER_ID.to_s
queries << query
end
render_serialized queries, QuerySerializer, root: "queries"
end
def show
check_xhr unless params[:export]
if params[:export]
response.headers["Content-Disposition"] = "attachment; filename=#{@query.slug}.dcquery.json"
response.sending_file = true
end
return raise Discourse::NotFound if !guardian.user_can_access_query?(@query) || @query.hidden
render_serialized @query, QuerySerializer, root: "query"
end
def groups
render json: Group.all.select(:id, :name), root: false
end
def group_reports_index
return raise Discourse::NotFound unless guardian.user_is_a_member_of_group?(@group)
respond_to do |format|
format.json do
queries = Query.for_group(@group)
render_serialized(queries, QuerySerializer, root: "queries")
end
end
end
def group_reports_show
if !guardian.group_and_user_can_access_query?(@group, @query) || @query.hidden
return raise Discourse::NotFound
end
respond_to do |format|
format.json do
query_group = QueryGroup.find_by(query_id: @query.id, group_id: @group.id)
render json: {
query: serialize_data(@query, QuerySerializer, root: nil),
query_group: serialize_data(query_group, QueryGroupSerializer, root: nil),
}
end
end
end
def group_reports_run
if !guardian.group_and_user_can_access_query?(@group, @query) || @query.hidden
return raise Discourse::NotFound
end
run
end
def create
query =
Query.create!(
params
.require(:query)
.permit(:name, :description, :sql)
.merge(user_id: current_user.id, last_run_at: Time.now),
)
group_ids = params.require(:query)[:group_ids]
group_ids&.each { |group_id| query.query_groups.find_or_create_by!(group_id: group_id) }
render_serialized query, QuerySerializer, root: "query"
end
def update
ActiveRecord::Base.transaction do
@query.update!(
params.require(:query).permit(:name, :sql, :description).merge(hidden: false),
)
group_ids = params.require(:query)[:group_ids]
QueryGroup.where.not(group_id: group_ids).where(query_id: @query.id).delete_all
group_ids&.each { |group_id| @query.query_groups.find_or_create_by!(group_id: group_id) }
end
render_serialized @query, QuerySerializer, root: "query"
rescue ValidationError => e
render_json_error e.message
end
def destroy
query = Query.where(id: params[:id]).first_or_initialize
query.update!(hidden: true)
render json: { success: true, errors: [] }
end
def schema
schema_version = DB.query_single("SELECT max(version) AS tag FROM schema_migrations").first
if stale?(public: true, etag: schema_version, template: false)
render json: DataExplorer.schema
end
end
# Return value:
# success - true/false. if false, inspect the errors value.
# errors - array of strings.
# params - hash. Echo of the query parameters as executed.
# duration - float. Time to execute the query, in milliseconds, to 1 decimal place.
# columns - array of strings. Titles of the returned columns, in order.
# explain - string. (Optional - pass explain=true in the request) Postgres query plan, UNIX newlines.
# rows - array of array of strings. Results of the query. In the same order as 'columns'.
def run
check_xhr unless params[:download]
query = Query.find(params[:id].to_i)
query.update!(last_run_at: Time.now)
response.sending_file = true if params[:download]
query_params = {}
query_params = MultiJson.load(params[:params]) if params[:params]
opts = { current_user: current_user.username }
opts[:explain] = true if params[:explain] == "true"
opts[:limit] = if params[:format] == "csv"
if params[:limit].present?
limit = params[:limit].to_i
limit = QUERY_RESULT_MAX_LIMIT if limit > QUERY_RESULT_MAX_LIMIT
limit
else
QUERY_RESULT_MAX_LIMIT
end
elsif params[:limit].present?
params[:limit] == "ALL" ? "ALL" : params[:limit].to_i
end
result = DataExplorer.run_query(query, query_params, opts)
if result[:error]
err = result[:error]
# Pretty printing logic
err_class = err.class
err_msg = err.message
if err.is_a? ActiveRecord::StatementInvalid
err_class = err.original_exception.class
err_msg.gsub!("#{err_class}:", "")
else
err_msg = "#{err_class}: #{err_msg}"
end
render json: { success: false, errors: [err_msg] }, status: 422
else
pg_result = result[:pg_result]
cols = pg_result.fields
respond_to do |format|
format.json do
if params[:download]
response.headers[
"Content-Disposition"
] = "attachment; filename=#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.json"
end
json = {
success: true,
errors: [],
duration: (result[:duration_secs].to_f * 1000).round(1),
result_count: pg_result.values.length || 0,
params: query_params,
columns: cols,
default_limit: SiteSetting.data_explorer_query_result_limit,
}
json[:explain] = result[:explain] if opts[:explain]
if !params[:download]
relations, colrender = DataExplorer.add_extra_data(pg_result)
json[:relations] = relations
json[:colrender] = colrender
end
json[:rows] = pg_result.values
render json: json
end
format.csv do
response.headers[
"Content-Disposition"
] = "attachment; filename=#{query.slug}@#{Slug.for(Discourse.current_hostname, "discourse")}-#{Date.today}.dcqresult.csv"
require "csv"
text =
CSV.generate do |csv|
csv << cols
pg_result.values.each { |row| csv << row }
end
render plain: text
end
end
end
end
private
def set_group
@group = Group.find_by(name: params["group_name"])
end
def set_query
@query = Query.find(params[:id])
raise Discourse::NotFound unless @query
end
end
end
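
For reference, a successful response from the run action above has roughly the following shape, per the return-value comment; every value here is illustrative:

{
  success: true,
  errors: [],
  duration: 12.3,                   # milliseconds, rounded to 1 decimal place
  result_count: 2,
  params: { "months_ago" => "1" },  # echo of the submitted query parameters
  columns: %w[user_id like_count],
  default_limit: 1000,              # SiteSetting.data_explorer_query_result_limit
  explain: "...",                   # only present when explain=true was requested
  relations: {},                    # extra serialized records; omitted for downloads
  colrender: {},                    # column index => renderer hint; omitted for downloads
  rows: [[1, 42], [2, 17]],
}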

@@ -7,7 +7,7 @@ module Jobs
def execute(args)
return unless SiteSetting.data_explorer_enabled
DataExplorer::Query
DiscourseDataExplorer::Query
.where("id > 0")
.where(hidden: true)
.where(

@@ -1,8 +1,9 @@
# frozen_string_literal: true
module DataExplorer
module ::DiscourseDataExplorer
class Query < ActiveRecord::Base
self.table_name = "data_explorer_queries"
has_many :query_groups
has_many :groups, through: :query_groups
belongs_to :user
@@ -18,7 +19,7 @@ module DataExplorer
}
def params
@params ||= DataExplorer::Parameter.create_from_sql(sql)
@params ||= Parameter.create_from_sql(sql)
end
def cast_params(input_params)

@@ -1,6 +1,6 @@
# frozen_string_literal: true
module DataExplorer
module ::DiscourseDataExplorer
class QueryGroup < ActiveRecord::Base
self.table_name = "data_explorer_query_groups"

@@ -1,26 +0,0 @@
# frozen_string_literal: true
class DataExplorer::QueryGroupSerializer < ActiveModel::Serializer
attributes :id,
:group_id,
:query_id,
:bookmark,
def query_group_bookmark
@query_group_bookmark ||= Bookmark.find_by(user: scope.user, bookmarkable: object)
end
def include_bookmark?
query_group_bookmark.present?
end
def bookmark
{
id: query_group_bookmark.id,
reminder_at: query_group_bookmark.reminder_at,
name: query_group_bookmark.name,
auto_delete_preference: query_group_bookmark.auto_delete_preference,
bookmarkable_id: query_group_bookmark.bookmarkable_id,
bookmarkable_type: query_group_bookmark.bookmarkable_type,
}
end
end

@@ -1,27 +0,0 @@
# frozen_string_literal: true
class DataExplorer::QuerySerializer < ActiveModel::Serializer
attributes :id,
:sql,
:name,
:description,
:param_info,
:created_at,
:username,
:group_ids,
:last_run_at,
:hidden,
:user_id
def param_info
object&.params&.map(&:to_hash)
end
def username
object&.user&.username
end
def group_ids
object.groups.map(&:id)
end
end

@@ -1,14 +0,0 @@
# frozen_string_literal: true
class DataExplorer::SmallPostWithExcerptSerializer < ApplicationSerializer
attributes :id, :topic_id, :post_number, :excerpt, :username, :avatar_template
def excerpt
Post.excerpt(object.cooked, 70)
end
def username
object.user && object.user.username
end
def avatar_template
object.user && object.user.avatar_template
end
end

@@ -0,0 +1,40 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class QueryGroupBookmarkSerializer < UserBookmarkBaseSerializer
def title
fancy_title
end
def fancy_title
data_explorer_query.name
end
def cooked
data_explorer_query.description
end
def bookmarkable_user
@bookmarkable_user ||= data_explorer_query.user
end
def bookmarkable_url
"/g/#{data_explorer_query_group.group.name}/reports/#{data_explorer_query_group.query_id}"
end
def excerpt
return nil unless cooked
@excerpt ||= PrettyText.excerpt(cooked, 300, keep_emoji_images: true)
end
private
def data_explorer_query
data_explorer_query_group.query
end
def data_explorer_query_group
object.bookmarkable
end
end
end

@@ -0,0 +1,26 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class QueryGroupSerializer < ActiveModel::Serializer
attributes :id, :group_id, :query_id, :bookmark
def query_group_bookmark
@query_group_bookmark ||= Bookmark.find_by(user: scope.user, bookmarkable: object)
end
def include_bookmark?
query_group_bookmark.present?
end
def bookmark
{
id: query_group_bookmark.id,
reminder_at: query_group_bookmark.reminder_at,
name: query_group_bookmark.name,
auto_delete_preference: query_group_bookmark.auto_delete_preference,
bookmarkable_id: query_group_bookmark.bookmarkable_id,
bookmarkable_type: query_group_bookmark.bookmarkable_type,
}
end
end
end

@@ -0,0 +1,29 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class QuerySerializer < ActiveModel::Serializer
attributes :id,
:sql,
:name,
:description,
:param_info,
:created_at,
:username,
:group_ids,
:last_run_at,
:hidden,
:user_id
def param_info
object&.params&.map(&:to_hash)
end
def username
object&.user&.username
end
def group_ids
object.groups.map(&:id)
end
end
end

@@ -1,5 +1,5 @@
# frozen_string_literal: true
class DataExplorer::SmallBadgeSerializer < ApplicationSerializer
class DiscourseDataExplorer::SmallBadgeSerializer < ApplicationSerializer
attributes :id, :name, :display_name, :badge_type, :description, :icon
end

@@ -0,0 +1,19 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class SmallPostWithExcerptSerializer < ApplicationSerializer
attributes :id, :topic_id, :post_number, :excerpt, :username, :avatar_template
def excerpt
Post.excerpt(object.cooked, 70)
end
def username
object.user && object.user.username
end
def avatar_template
object.user && object.user.avatar_template
end
end
end

@@ -1,38 +0,0 @@
# frozen_string_literal: true
class UserDataExplorerQueryGroupBookmarkSerializer < UserBookmarkBaseSerializer
def title
fancy_title
end
def fancy_title
data_explorer_query.name
end
def cooked
data_explorer_query.description
end
def bookmarkable_user
@bookmarkable_user ||= data_explorer_query.user
end
def bookmarkable_url
"/g/#{data_explorer_query_group.group.name}/reports/#{data_explorer_query_group.query_id}"
end
def excerpt
return nil unless cooked
@excerpt ||= PrettyText.excerpt(cooked, 300, keep_emoji_images: true)
end
private
def data_explorer_query
data_explorer_query_group.query
end
def data_explorer_query_group
object.bookmarkable
end
end

@@ -82,7 +82,7 @@ export default class GroupReportsShowController extends Controller {
return openBookmarkModal(
this.queryGroupBookmark ||
this.store.createRecord("bookmark", {
bookmarkable_type: "DataExplorer::QueryGroup",
bookmarkable_type: "DiscourseDataExplorer::QueryGroup",
bookmarkable_id: this.queryGroup.id,
user_id: this.currentUser.id,
}),

@@ -94,5 +94,5 @@
api:
scopes:
descriptions:
data_explorer:
discourse_data_explorer:
run_queries: "Run Data Explorer queries. Restrict the API key to a set of queries by specifying queries IDs."

config/routes.rb
@@ -0,0 +1,24 @@
# frozen_string_literal: true
DiscourseDataExplorer::Engine.routes.draw do
root to: "query#index"
get "queries" => "query#index"
scope "/", defaults: { format: :json } do
get "schema" => "query#schema"
get "groups" => "query#groups"
post "queries" => "query#create"
get "queries/:id" => "query#show"
put "queries/:id" => "query#update"
delete "queries/:id" => "query#destroy"
post "queries/:id/run" => "query#run", :constraints => { format: /(json|csv)/ }
end
end
Discourse::Application.routes.append do
get "/g/:group_name/reports" => "discourse_data_explorer/query#group_reports_index"
get "/g/:group_name/reports/:id" => "discourse_data_explorer/query#group_reports_show"
post "/g/:group_name/reports/:id/run" => "discourse_data_explorer/query#group_reports_run"
mount DiscourseDataExplorer::Engine, at: "/admin/plugins/explorer"
end
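
With the engine mounted at /admin/plugins/explorer, the routes drawn above resolve to paths like the following (derived from the draw block, listed here only for orientation):

# GET    /admin/plugins/explorer/queries
# GET    /admin/plugins/explorer/schema
# GET    /admin/plugins/explorer/groups
# POST   /admin/plugins/explorer/queries
# GET    /admin/plugins/explorer/queries/:id
# PUT    /admin/plugins/explorer/queries/:id
# DELETE /admin/plugins/explorer/queries/:id
# POST   /admin/plugins/explorer/queries/:id/run    (format json or csv)
# GET    /g/:group_name/reports
# GET    /g/:group_name/reports/:id
# POST   /g/:group_name/reports/:id/run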

@@ -0,0 +1,31 @@
# frozen_string_literal: true
class RenameDataExplorerNamespace < ActiveRecord::Migration[7.0]
def up
execute <<~SQL
UPDATE api_key_scopes
SET resource = 'discourse_data_explorer'
WHERE resource = 'data_explorer'
SQL
execute <<~SQL
UPDATE bookmarks
SET bookmarkable_type = 'DiscourseDataExplorer::QueryGroup'
WHERE bookmarkable_type = 'DataExplorer::QueryGroup'
SQL
end
def down
execute <<~SQL
UPDATE api_key_scopes
SET resource = 'data_explorer'
WHERE resource = 'discourse_data_explorer'
SQL
execute <<~SQL
UPDATE bookmarks
SET bookmarkable_type = 'DataExplorer::QueryGroup'
WHERE bookmarkable_type = 'DiscourseDataExplorer::QueryGroup'
SQL
end
end

@@ -1,60 +0,0 @@
# frozen_string_literal: true
class DataExplorerQueryGroupBookmarkable < BaseBookmarkable
def self.model
DataExplorer::QueryGroup
end
def self.serializer
UserDataExplorerQueryGroupBookmarkSerializer
end
def self.preload_associations
%i[data_explorer_queries groups]
end
def self.list_query(user, guardian)
group_ids = []
if !user.admin?
group_ids = user.visible_groups.pluck(:id)
return if group_ids.empty?
end
query =
user
.bookmarks_of_type("DataExplorer::QueryGroup")
.joins(
"INNER JOIN data_explorer_query_groups ON data_explorer_query_groups.id = bookmarks.bookmarkable_id",
)
.joins(
"LEFT JOIN data_explorer_queries ON data_explorer_queries.id = data_explorer_query_groups.query_id",
)
query = query.where("data_explorer_query_groups.group_id IN (?)", group_ids) if !user.admin?
query
end
# Searchable only by data_explorer_queries name
def self.search_query(bookmarks, query, ts_query, &bookmarkable_search)
bookmarkable_search.call(bookmarks, "data_explorer_queries.name ILIKE :q")
end
def self.reminder_handler(bookmark)
send_reminder_notification(
bookmark,
data: {
title: bookmark.bookmarkable.query.name,
bookmarkable_url:
"/g/#{bookmark.bookmarkable.group.name}/reports/#{bookmark.bookmarkable.query.id}",
},
)
end
def self.reminder_conditions(bookmark)
bookmark.bookmarkable.present?
end
def self.can_see?(guardian, bookmark)
return false if !bookmark.bookmarkable.group
guardian.user_is_a_member_of_group?(bookmark.bookmarkable.group)
end
end

@@ -0,0 +1,572 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class ValidationError < StandardError
end
module DataExplorer
# Run a data explorer query on the currently connected database.
#
# @param [Query] query the Query object to run
# @param [Hash] params the colon-style query parameters for the query
# @param [Hash] opts hash of options
# explain - include a query plan in the result
# @return [Hash]
# error - any exception that was raised in the execution. Check this
# first before looking at any other fields.
# pg_result - the PG::Result object
# duration_nanos - the query duration, in nanoseconds
# explain - the query
def self.run_query(query, req_params = {}, opts = {})
# Safety checks
# see test 'doesn't allow you to modify the database #2'
if query.sql =~ /;/
err = ValidationError.new(I18n.t("js.errors.explorer.no_semicolons"))
return { error: err, duration_nanos: 0 }
end
query_args = {}
begin
query_args = query.cast_params req_params
rescue ValidationError => e
return { error: e, duration_nanos: 0 }
end
time_start, time_end, explain, err, result = nil
begin
ActiveRecord::Base.connection.transaction do
# Setting transaction to read only prevents shoot-in-foot actions like SELECT FOR UPDATE
# see test 'doesn't allow you to modify the database #1'
DB.exec "SET TRANSACTION READ ONLY"
# Set a statement timeout so we can't tie up the server
DB.exec "SET LOCAL statement_timeout = 10000"
# SQL comments are for the benefits of the slow queries log
sql = <<-SQL
/*
* DiscourseDataExplorer Query
* Query: /admin/plugins/explorer?id=#{query.id}
* Started by: #{opts[:current_user]}
*/
WITH query AS (
#{query.sql}
) SELECT * FROM query
LIMIT #{opts[:limit] || SiteSetting.data_explorer_query_result_limit}
SQL
time_start = Time.now
# Using MiniSql::InlineParamEncoder directly instead of DB.param_encoder because current implementation of
# DB.param_encoder is meant for SQL fragments and not an entire SQL string.
sql =
MiniSql::InlineParamEncoder.new(ActiveRecord::Base.connection.raw_connection).encode(
sql,
query_args,
)
result = ActiveRecord::Base.connection.raw_connection.async_exec(sql)
result.check # make sure it's done
time_end = Time.now
if opts[:explain]
explain =
DB
.query_hash("EXPLAIN #{query.sql}", query_args)
.map { |row| row["QUERY PLAN"] }.join "\n"
end
# All done. Issue a rollback anyways, just in case
# see test 'doesn't allow you to modify the database #1'
raise ActiveRecord::Rollback
end
rescue Exception => ex
err = ex
time_end = Time.now
end
{
error: err,
pg_result: result,
duration_secs: time_end - time_start,
explain: explain,
params_full: query_args,
}
end
def self.extra_data_pluck_fields
@extra_data_pluck_fields ||= {
user: {
class: User,
fields: %i[id username uploaded_avatar_id],
serializer: BasicUserSerializer,
},
badge: {
class: Badge,
fields: %i[id name badge_type_id description icon],
include: [:badge_type],
serializer: SmallBadgeSerializer,
},
post: {
class: Post,
fields: %i[id topic_id post_number cooked user_id],
include: [:user],
serializer: SmallPostWithExcerptSerializer,
},
topic: {
class: Topic,
fields: %i[id title slug posts_count],
serializer: BasicTopicSerializer,
},
group: {
class: Group,
ignore: true,
},
category: {
class: Category,
ignore: true,
},
reltime: {
ignore: true,
},
html: {
ignore: true,
},
}
end
def self.column_regexes
@column_regexes ||=
extra_data_pluck_fields
.map { |key, val| /(#{val[:class].to_s.downcase})_id$/ if val[:class] }
.compact
end
def self.add_extra_data(pg_result)
needed_classes = {}
ret = {}
col_map = {}
pg_result.fields.each_with_index do |col, idx|
rgx = column_regexes.find { |r| r.match col }
if rgx
cls = (rgx.match col)[1].to_sym
needed_classes[cls] ||= []
needed_classes[cls] << idx
elsif col =~ /^(\w+)\$/
cls = $1.to_sym
needed_classes[cls] ||= []
needed_classes[cls] << idx
elsif col =~ /^\w+_url$/
col_map[idx] = "url"
end
end
needed_classes.each do |cls, column_nums|
next unless column_nums.present?
support_info = extra_data_pluck_fields[cls]
next unless support_info
column_nums.each { |col_n| col_map[col_n] = cls }
if support_info[:ignore]
ret[cls] = []
next
end
ids = Set.new
column_nums.each { |col_n| ids.merge(pg_result.column_values(col_n)) }
ids.delete nil
ids.map! &:to_i
object_class = support_info[:class]
all_objs = object_class
all_objs = all_objs.with_deleted if all_objs.respond_to? :with_deleted
all_objs =
all_objs
.select(support_info[:fields])
.where(id: ids.to_a.sort)
.includes(support_info[:include])
.order(:id)
ret[cls] = ActiveModel::ArraySerializer.new(
all_objs,
each_serializer: support_info[:serializer],
)
end
[ret, col_map]
end
def self.sensitive_column_names
%w[
#_IP_Addresses
topic_views.ip_address
users.ip_address
users.registration_ip_address
incoming_links.ip_address
topic_link_clicks.ip_address
user_histories.ip_address
#_Emails
email_tokens.email
users.email
invites.email
user_histories.email
email_logs.to_address
posts.raw_email
badge_posts.raw_email
#_Secret_Tokens
email_tokens.token
email_logs.reply_key
api_keys.key
site_settings.value
users.auth_token
users.password_hash
users.salt
#_Authentication_Info
user_open_ids.email
oauth2_user_infos.uid
oauth2_user_infos.email
facebook_user_infos.facebook_user_id
facebook_user_infos.email
twitter_user_infos.twitter_user_id
github_user_infos.github_user_id
single_sign_on_records.external_email
single_sign_on_records.external_id
google_user_infos.google_user_id
google_user_infos.email
]
end
def self.schema
# No need to expire this, because the server processes get restarted on upgrade
# refer user to http://www.postgresql.org/docs/9.3/static/datatype.html
@schema ||=
begin
results = DB.query_hash <<~SQL
select
c.column_name column_name,
c.data_type data_type,
c.character_maximum_length character_maximum_length,
c.is_nullable is_nullable,
c.column_default column_default,
c.table_name table_name,
pgd.description column_desc
from INFORMATION_SCHEMA.COLUMNS c
inner join pg_catalog.pg_statio_all_tables st on (c.table_schema = st.schemaname and c.table_name = st.relname)
left outer join pg_catalog.pg_description pgd on (pgd.objoid = st.relid and pgd.objsubid = c.ordinal_position)
where c.table_schema = 'public'
ORDER BY c.table_name, c.ordinal_position
SQL
by_table = {}
# Massage the results into a nicer form
results.each do |hash|
full_col_name = "#{hash["table_name"]}.#{hash["column_name"]}"
if hash["is_nullable"] == "YES"
hash["is_nullable"] = true
else
hash.delete("is_nullable")
end
clen = hash.delete "character_maximum_length"
dt = hash["data_type"]
if hash["column_name"] == "id"
hash["data_type"] = "serial"
hash["primary"] = true
elsif dt == "character varying"
hash["data_type"] = "varchar(#{clen.to_i})"
elsif dt == "timestamp without time zone"
hash["data_type"] = "timestamp"
elsif dt == "double precision"
hash["data_type"] = "double"
end
default = hash["column_default"]
if default.nil? || default =~ /^nextval\(/
hash.delete "column_default"
elsif default =~ /^'(.*)'::(character varying|text)/
hash["column_default"] = $1
end
hash.delete("column_desc") unless hash["column_desc"]
hash["sensitive"] = true if sensitive_column_names.include? full_col_name
hash["enum"] = enum_info[full_col_name] if enum_info.include? full_col_name
if denormalized_columns.include? full_col_name
hash["denormal"] = denormalized_columns[full_col_name]
end
fkey = fkey_info(hash["table_name"], hash["column_name"])
hash["fkey_info"] = fkey if fkey
table_name = hash.delete("table_name")
by_table[table_name] ||= []
by_table[table_name] << hash
end
# this works for now, but no big loss if the tables aren't quite sorted
favored_order = %w[
posts
topics
users
categories
badges
groups
notifications
post_actions
site_settings
]
sorted_by_table = {}
favored_order.each { |tbl| sorted_by_table[tbl] = by_table[tbl] }
by_table.keys.sort.each do |tbl|
next if favored_order.include? tbl
sorted_by_table[tbl] = by_table[tbl]
end
sorted_by_table
end
end
def self.enums
return @enums if @enums
@enums = {
"application_requests.req_type": ApplicationRequest.req_types,
"badges.badge_type_id": Enum.new(:gold, :silver, :bronze, start: 1),
"bookmarks.auto_delete_preference": Bookmark.auto_delete_preferences,
"category_groups.permission_type": CategoryGroup.permission_types,
"category_users.notification_level": CategoryUser.notification_levels,
"directory_items.period_type": DirectoryItem.period_types,
"email_change_requests.change_state": EmailChangeRequest.states,
"groups.id": Group::AUTO_GROUPS,
"groups.mentionable_level": Group::ALIAS_LEVELS,
"groups.messageable_level": Group::ALIAS_LEVELS,
"groups.members_visibility_level": Group.visibility_levels,
"groups.visibility_level": Group.visibility_levels,
"groups.default_notification_level": GroupUser.notification_levels,
"group_histories.action": GroupHistory.actions,
"group_users.notification_level": GroupUser.notification_levels,
"imap_sync_logs.level": ImapSyncLog.levels,
"invites.emailed_status": Invite.emailed_status_types,
"notifications.notification_type": Notification.types,
"polls.results": Poll.results,
"polls.status": Poll.statuses,
"polls.type": Poll.types,
"polls.visibility": Poll.visibilities,
"post_action_types.id": PostActionType.types,
"post_actions.post_action_type_id": PostActionType.types,
"posts.cook_method": Post.cook_methods,
"posts.hidden_reason_id": Post.hidden_reasons,
"posts.post_type": Post.types,
"reviewables.status": Reviewable.statuses,
"reviewable_histories.reviewable_history_type": ReviewableHistory.types,
"reviewable_scores.status": ReviewableScore.statuses,
"screened_emails.action_type": ScreenedEmail.actions,
"screened_ip_addresses.action_type": ScreenedIpAddress.actions,
"screened_urls.action_type": ScreenedUrl.actions,
"search_logs.search_result_type": SearchLog.search_result_types,
"search_logs.search_type": SearchLog.search_types,
"site_settings.data_type": SiteSetting.types,
"skipped_email_logs.reason_type": SkippedEmailLog.reason_types,
"tag_group_permissions.permission_type": TagGroupPermission.permission_types,
"theme_fields.type_id": ThemeField.types,
"theme_settings.data_type": ThemeSetting.types,
"topic_timers.status_type": TopicTimer.types,
"topic_users.notification_level": TopicUser.notification_levels,
"topic_users.notifications_reason_id": TopicUser.notification_reasons,
"uploads.verification_status": Upload.verification_statuses,
"user_actions.action_type": UserAction.types,
"user_histories.action": UserHistory.actions,
"user_options.email_previous_replies": UserOption.previous_replies_type,
"user_options.like_notification_frequency": UserOption.like_notification_frequency_type,
"user_options.text_size_key": UserOption.text_sizes,
"user_options.title_count_mode_key": UserOption.title_count_modes,
"user_options.email_level": UserOption.email_level_types,
"user_options.email_messages_level": UserOption.email_level_types,
"user_second_factors.method": UserSecondFactor.methods,
"user_security_keys.factor_type": UserSecurityKey.factor_types,
"users.trust_level": TrustLevel.levels,
"watched_words.action": WatchedWord.actions,
"web_hooks.content_type": WebHook.content_types,
"web_hooks.last_delivery_status": WebHook.last_delivery_statuses,
}.with_indifferent_access
# QueuedPost is removed in recent Discourse releases
@enums["queued_posts.state"] = QueuedPost.states if defined?(QueuedPost)
@enums
end
def self.enum_info
@enum_info ||=
begin
enum_info = {}
enums.map do |key, enum|
# https://stackoverflow.com/questions/10874356/reverse-a-hash-in-ruby
enum_info[key] = Hash[enum.to_a.map(&:reverse)]
end
enum_info
end
end
def self.fkey_info(table, column)
full_name = "#{table}.#{column}"
if fkey_defaults[column]
fkey_defaults[column]
elsif column =~ /_by_id$/ || column =~ /_user_id$/
:users
elsif foreign_keys[full_name]
foreign_keys[full_name]
else
nil
end
end
def self.foreign_keys
@fkey_columns ||= {
"posts.last_editor_id": :users,
"posts.version": :"post_revisions.number",
"topics.featured_user1_id": :users,
"topics.featured_user2_id": :users,
"topics.featured_user3_id": :users,
"topics.featured_user4_id": :users,
"topics.featured_user5_id": :users,
"users.seen_notification_id": :notifications,
"users.uploaded_avatar_id": :uploads,
"users.primary_group_id": :groups,
"categories.latest_post_id": :posts,
"categories.latest_topic_id": :topics,
"categories.parent_category_id": :categories,
"badges.badge_grouping_id": :badge_groupings,
"post_actions.related_post_id": :posts,
"color_scheme_colors.color_scheme_id": :color_schemes,
"color_schemes.versioned_id": :color_schemes,
"incoming_links.incoming_referer_id": :incoming_referers,
"incoming_referers.incoming_domain_id": :incoming_domains,
"post_replies.reply_id": :posts,
"quoted_posts.quoted_post_id": :posts,
"topic_link_clicks.topic_link_id": :topic_links,
"topic_link_clicks.link_topic_id": :topics,
"topic_link_clicks.link_post_id": :posts,
"user_actions.target_topic_id": :topics,
"user_actions.target_post_id": :posts,
"user_avatars.custom_upload_id": :uploads,
"user_avatars.gravatar_upload_id": :uploads,
"user_badges.notification_id": :notifications,
"user_profiles.card_image_badge_id": :badges,
}.with_indifferent_access
end
def self.fkey_defaults
@fkey_defaults ||= {
user_id: :users,
# :*_by_id => :users,
# :*_user_id => :users,
category_id: :categories,
group_id: :groups,
post_id: :posts,
post_action_id: :post_actions,
topic_id: :topics,
upload_id: :uploads,
}.with_indifferent_access
end
def self.denormalized_columns
{
"posts.reply_count": :post_replies,
"posts.quote_count": :quoted_posts,
"posts.incoming_link_count": :topic_links,
"posts.word_count": :posts,
"posts.avg_time": :post_timings,
"posts.reads": :post_timings,
"posts.like_score": :post_actions,
"posts.like_count": :post_actions,
"posts.bookmark_count": :post_actions,
"posts.vote_count": :post_actions,
"posts.off_topic_count": :post_actions,
"posts.notify_moderators_count": :post_actions,
"posts.spam_count": :post_actions,
"posts.illegal_count": :post_actions,
"posts.inappropriate_count": :post_actions,
"posts.notify_user_count": :post_actions,
"topics.views": :topic_views,
"topics.posts_count": :posts,
"topics.reply_count": :posts,
"topics.incoming_link_count": :topic_links,
"topics.moderator_posts_count": :posts,
"topics.participant_count": :posts,
"topics.word_count": :posts,
"topics.last_posted_at": :posts,
"topics.last_post_user_idt": :posts,
"topics.avg_time": :post_timings,
"topics.highest_post_number": :posts,
"topics.image_url": :posts,
"topics.excerpt": :posts,
"topics.like_count": :post_actions,
"topics.bookmark_count": :post_actions,
"topics.vote_count": :post_actions,
"topics.off_topic_count": :post_actions,
"topics.notify_moderators_count": :post_actions,
"topics.spam_count": :post_actions,
"topics.illegal_count": :post_actions,
"topics.inappropriate_count": :post_actions,
"topics.notify_user_count": :post_actions,
"categories.topic_count": :topics,
"categories.post_count": :posts,
"categories.latest_post_id": :posts,
"categories.latest_topic_id": :topics,
"categories.description": :posts,
"categories.read_restricted": :category_groups,
"categories.topics_year": :topics,
"categories.topics_month": :topics,
"categories.topics_week": :topics,
"categories.topics_day": :topics,
"categories.posts_year": :posts,
"categories.posts_month": :posts,
"categories.posts_week": :posts,
"categories.posts_day": :posts,
"badges.grant_count": :user_badges,
"groups.user_count": :group_users,
"directory_items.likes_received": :post_actions,
"directory_items.likes_given": :post_actions,
"directory_items.topics_entered": :user_stats,
"directory_items.days_visited": :user_stats,
"directory_items.posts_read": :user_stats,
"directory_items.topic_count": :topics,
"directory_items.post_count": :posts,
"post_search_data.search_data": :posts,
"top_topics.yearly_posts_count": :posts,
"top_topics.monthly_posts_count": :posts,
"top_topics.weekly_posts_count": :posts,
"top_topics.daily_posts_count": :posts,
"top_topics.yearly_views_count": :topic_views,
"top_topics.monthly_views_count": :topic_views,
"top_topics.weekly_views_count": :topic_views,
"top_topics.daily_views_count": :topic_views,
"top_topics.yearly_likes_count": :post_actions,
"top_topics.monthly_likes_count": :post_actions,
"top_topics.weekly_likes_count": :post_actions,
"top_topics.daily_likes_count": :post_actions,
"top_topics.yearly_op_likes_count": :post_actions,
"top_topics.monthly_op_likes_count": :post_actions,
"top_topics.weekly_op_likes_count": :post_actions,
"top_topics.daily_op_likes_count": :post_actions,
"top_topics.all_score": :posts,
"top_topics.yearly_score": :posts,
"top_topics.monthly_score": :posts,
"top_topics.weekly_score": :posts,
"top_topics.daily_score": :posts,
"topic_links.clicks": :topic_link_clicks,
"topic_search_data.search_data": :topics,
"topic_users.liked": :post_actions,
"topic_users.bookmarked": :post_actions,
"user_stats.posts_read_count": :post_timings,
"user_stats.topic_reply_count": :posts,
"user_stats.first_post_created_at": :posts,
"user_stats.post_count": :posts,
"user_stats.topic_count": :topics,
"user_stats.likes_given": :post_actions,
"user_stats.likes_received": :post_actions,
"user_search_data.search_data": :user_profiles,
"users.last_posted_at": :posts,
"users.previous_visit_at": :user_visits,
}.with_indifferent_access
end
end
end
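
A sketch of how the module above might be exercised from a Rails console; the query id, parameter, and username are illustrative and assume the corresponding saved query already exists:

# Assumes the "New Topics by Category" default query (id -4) has been saved
# locally by running it at least once through the UI.
query = DiscourseDataExplorer::Query.find(-4)
result =
  DiscourseDataExplorer::DataExplorer.run_query(
    query,
    { "months_ago" => "1" },
    { current_user: "system", explain: true },
  )

if result[:error]
  puts result[:error].message
else
  puts result[:pg_result].fields.inspect              # column names
  puts "#{(result[:duration_secs] * 1000).round(1)} ms"
  puts result[:explain]
end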

@@ -1,7 +1,8 @@
# frozen_string_literal: true
module DiscourseDataExplorer
module ::DiscourseDataExplorer
class Engine < ::Rails::Engine
engine_name PLUGIN_NAME
isolate_namespace DiscourseDataExplorer
end
end

@@ -0,0 +1,255 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class Parameter
attr_accessor :identifier, :type, :default, :nullable
def initialize(identifier, type, default, nullable)
unless identifier
raise ValidationError.new("Parameter declaration error - identifier is missing")
end
raise ValidationError.new("Parameter declaration error - type is missing") unless type
# process aliases
type = type.to_sym
type = Parameter.type_aliases[type] if Parameter.type_aliases[type]
unless Parameter.types[type]
raise ValidationError.new("Parameter declaration error - unknown type #{type}")
end
@identifier = identifier
@type = type
@default = default
@nullable = nullable
begin
cast_to_ruby default unless default.blank?
rescue ValidationError
raise ValidationError.new(
"Parameter declaration error - the default value is not a valid #{type}",
)
end
end
def to_hash
{ identifier: @identifier, type: @type, default: @default, nullable: @nullable }
end
def self.types
@types ||=
Enum.new(
# Normal types
:int,
:bigint,
:boolean,
:string,
:date,
:time,
:datetime,
:double,
# Selection help
:user_id,
:post_id,
:topic_id,
:category_id,
:group_id,
:badge_id,
# Arrays
:int_list,
:string_list,
:user_list,
)
end
def self.type_aliases
@type_aliases ||= { integer: :int, text: :string, timestamp: :datetime }
end
def cast_to_ruby(string)
string = @default unless string
if string.blank?
if @nullable
return nil
else
raise ValidationError.new("Missing parameter #{identifier} of type #{type}")
end
end
return nil if string.downcase == "#null"
def invalid_format(string, msg = nil)
if msg
raise ValidationError.new("'#{string}' is an invalid #{type} - #{msg}")
else
raise ValidationError.new("'#{string}' is an invalid value for #{type}")
end
end
value = nil
case @type
when :int
invalid_format string, "Not an integer" unless string =~ /^-?\d+$/
value = string.to_i
invalid_format string, "Too large" unless Integer === value
when :bigint
invalid_format string, "Not an integer" unless string =~ /^-?\d+$/
value = string.to_i
when :boolean
value = !!(string =~ /t|true|y|yes|1/i)
when :string
value = string
when :time
begin
value = Time.parse string
rescue ArgumentError => e
invalid_format string, e.message
end
when :date
begin
value = Date.parse string
rescue ArgumentError => e
invalid_format string, e.message
end
when :datetime
begin
value = DateTime.parse string
rescue ArgumentError => e
invalid_format string, e.message
end
when :double
if string =~ /-?\d*(\.\d+)/
value = Float(string)
elsif string =~ /^(-?)Inf(inity)?$/i
if $1
value = -Float::INFINITY
else
value = Float::INFINITY
end
elsif string =~ /^(-?)NaN$/i
if $1
value = -Float::NAN
else
value = Float::NAN
end
else
invalid_format string
end
when :category_id
if string =~ %r{(.*)/(.*)}
parent_name = $1
child_name = $2
parent = Category.query_parent_category(parent_name)
invalid_format string, "Could not find category named #{parent_name}" unless parent
object = Category.query_category(child_name, parent)
unless object
invalid_format string,
"Could not find subcategory of #{parent_name} named #{child_name}"
end
else
object =
Category.where(id: string.to_i).first || Category.where(slug: string).first ||
Category.where(name: string).first
invalid_format string, "Could not find category named #{string}" unless object
end
value = object.id
when :user_id, :post_id, :topic_id, :group_id, :badge_id
if string.gsub(/[ _]/, "") =~ /^-?\d+$/
clazz_name = (/^(.*)_id$/.match(type.to_s)[1].classify.to_sym)
begin
object = Object.const_get(clazz_name).with_deleted.find(string.gsub(/[ _]/, "").to_i)
value = object.id
rescue ActiveRecord::RecordNotFound
invalid_format string, "The specified #{clazz_name} was not found"
end
elsif type == :user_id
begin
object = User.find_by_username_or_email(string)
value = object.id
rescue ActiveRecord::RecordNotFound
invalid_format string, "The user named #{string} was not found"
end
elsif type == :post_id
if string =~ %r{(\d+)/(\d+)(\?u=.*)?$}
object = Post.with_deleted.find_by(topic_id: $1, post_number: $2)
unless object
invalid_format string, "The post at topic:#{$1} post_number:#{$2} was not found"
end
value = object.id
end
elsif type == :topic_id
if string =~ %r{/t/[^/]+/(\d+)}
begin
object = Topic.with_deleted.find($1)
value = object.id
rescue ActiveRecord::RecordNotFound
invalid_format string, "The topic with id #{$1} was not found"
end
end
elsif type == :group_id
object = Group.where(name: string).first
invalid_format string, "The group named #{string} was not found" unless object
value = object.id
else
invalid_format string
end
when :int_list
value = string.split(",").map { |s| s.downcase == "#null" ? nil : s.to_i }
invalid_format string, "can't be empty" if value.length == 0
when :string_list
value = string.split(",").map { |s| s.downcase == "#null" ? nil : s }
invalid_format string, "can't be empty" if value.length == 0
when :user_list
value = string.split(",").map { |s| User.find_by_username_or_email(s) }
invalid_format string, "can't be empty" if value.length == 0
else
raise TypeError.new("unknown parameter type??? should not get here")
end
value
end
def self.create_from_sql(sql, opts = {})
in_params = false
ret_params = []
sql.lines.find do |line|
line.chomp!
if in_params
# -- (ident) :(ident) (= (ident))?
if line =~ /^\s*--\s*([a-zA-Z_ ]+)\s*:([a-z_]+)\s*(?:=\s+(.*)\s*)?$/
type = $1
ident = $2
default = $3
nullable = false
if type =~ /^(null)?(.*?)(null)?$/i
nullable = true if $1 || $3
type = $2
end
type = type.strip
begin
ret_params << Parameter.new(ident, type, default, nullable)
rescue StandardError
raise if opts[:strict]
end
false
elsif line =~ /^\s+$/
false
else
true
end
else
in_params = true if line =~ /^\s*--\s*\[params\]\s*$/
false
end
end
ret_params
end
end
end
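
A small illustration of the header format that create_from_sql above parses; the query text is hypothetical:

sql = <<~SQL
  -- [params]
  -- int :months_ago = 1
  -- null string :filter
  SELECT 1
SQL

params = DiscourseDataExplorer::Parameter.create_from_sql(sql)
params.map(&:to_hash)
# => [{ identifier: "months_ago", type: :int, default: "1", nullable: false },
#     { identifier: "filter", type: :string, default: nil, nullable: true }]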

@@ -0,0 +1,563 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class Queries
def self.default
# WARNING: Edit the query hash carefully
# For each query, add id, name and description here and add sql below
# Feel free to add new queries at the bottom of the hash in numerical order
# If any query has been run on an instance, it is then saved in the local db
# Locally stored queries are updated from the below data only when they are run again
# eg. If you update a query with id=-1 in this file and the query has been run on a site,
# you must run the query with id=-1 on the site again to update these changes in the site db
queries = {
"most-common-likers": {
id: -1,
name: "Most Common Likers",
description: "Which users like particular other users the most?",
},
"most-messages": {
id: -2,
name: "Who has been sending the most messages in the last week?",
description: "tracking down suspicious PM activity",
},
"edited-post-spam": {
id: -3,
name: "Last 500 posts that were edited by TL0/TL1 users",
description: "fighting human-driven copy-paste spam",
},
"new-topics": {
id: -4,
name: "New Topics by Category",
description:
"Lists all new topics ordered by category and creation_date. The query accepts a months_ago parameter. It defaults to 0 to give you the stats for the current month.",
},
"active-topics": {
id: -5,
name: "Top 100 Active Topics",
description:
"based on the number of replies, it accepts a months_ago parameter, defaults to 1 to give results for the last calendar month.",
},
"top-likers": {
id: -6,
name: "Top 100 Likers",
description:
"returns the top 100 likers for a given monthly period ordered by like_count. It accepts a months_ago parameter, defaults to 1 to give results for the last calendar month.",
},
"quality-users": {
id: -7,
name: "Top 50 Quality Users",
description:
"based on post score calculated using reply count, likes, incoming links, bookmarks, time spent and read count.",
},
"user-participation": {
id: -8,
name: "User Participation Statistics",
description: "Detailed statistics for the most active users.",
},
"largest-uploads": {
id: -9,
name: "Top 50 Largest Uploads",
description: "sorted by file size.",
},
"inactive-users": {
id: -10,
name: "Inactive Users with no posts",
description: "analyze pre-Discourse signups.",
},
"active-lurkers": {
id: -11,
name: "Most Active Lurkers",
description:
"active users without posts and excessive read times, it accepts a post_read_count parameter that sets the threshold for posts read.",
},
"topic-user-notification-level": {
id: -12,
name: "List of topics a user is watching/tracking/muted",
description:
"The query requires a notification_level parameter. Use 0 for muted, 1 for regular, 2 for tracked and 3 for watched topics.",
},
"assigned-topics-report": {
id: -13,
name: "List of assigned topics by user",
description: "This report requires the assign plugin, it will find all assigned topics",
},
"group-members-reply-count": {
id: -14,
name: "Group Members Reply Count",
description:
"Number of replies by members of a group over a given time period. Requires 'group_name', 'start_date', and 'end_date' parameters. Dates need to be in the form 'yyyy-mm-dd'. Accepts an 'include_pms' parameter.",
},
"total-assigned-topics-report": {
id: -15,
name: "Total topics assigned per user",
description: "Count of assigned topis per user linking to assign list",
},
"poll-results": {
id: -16,
name: "Poll results report",
description:
"Details of a poll result, including details about each vote and voter, useful for analyzing results in external software.",
},
"top-tags-per-year": {
id: -17,
name: "Top tags per year",
description: "List the top tags per year.",
},
number_of_replies_by_category: {
id: -18,
name: "Number of replies by category",
description: "List the number of replies by category.",
},
}.with_indifferent_access
queries["most-common-likers"]["sql"] = <<~SQL
WITH pairs AS (
SELECT p.user_id liked, pa.user_id liker
FROM post_actions pa
LEFT JOIN posts p ON p.id = pa.post_id
WHERE post_action_type_id = 2
)
SELECT liker liker_user_id, liked liked_user_id, count(*)
FROM pairs
GROUP BY liked, liker
ORDER BY count DESC
SQL
queries["most-messages"]["sql"] = <<~SQL
SELECT user_id, count(*) AS message_count
FROM topics
WHERE archetype = 'private_message' AND subtype = 'user_to_user'
AND age(created_at) < interval '7 days'
GROUP BY user_id
ORDER BY message_count DESC
SQL
queries["edited-post-spam"]["sql"] = <<~SQL
SELECT
p.id AS post_id,
topic_id
FROM posts p
JOIN users u
ON u.id = p.user_id
JOIN topics t
ON t.id = p.topic_id
WHERE p.last_editor_id = p.user_id
AND p.self_edits > 0
AND (u.trust_level = 0 OR u.trust_level = 1)
AND p.deleted_at IS NULL
AND t.deleted_at IS NULL
AND t.archetype = 'regular'
ORDER BY p.updated_at DESC
LIMIT 500
SQL
queries["new-topics"]["sql"] = <<~SQL
-- [params]
-- int :months_ago = 1
WITH query_period as (
SELECT
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' as period_start,
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' + INTERVAL '1 month' - INTERVAL '1 second' as period_end
)
SELECT
t.id as topic_id,
t.category_id
FROM topics t
RIGHT JOIN query_period qp
ON t.created_at >= qp.period_start
AND t.created_at <= qp.period_end
WHERE t.user_id > 0
AND t.category_id IS NOT NULL
ORDER BY t.category_id, t.created_at DESC
SQL
queries["active-topics"]["sql"] = <<~SQL
-- [params]
-- int :months_ago = 1
WITH query_period AS
(SELECT date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' AS period_start,
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' + INTERVAL '1 month' - INTERVAL '1 second' AS period_end)
SELECT t.id AS topic_id,
t.category_id,
COUNT(p.id) AS reply_count
FROM topics t
JOIN posts p ON t.id = p.topic_id
JOIN query_period qp ON p.created_at >= qp.period_start
AND p.created_at <= qp.period_end
WHERE t.archetype = 'regular'
AND t.user_id > 0
GROUP BY t.id
ORDER BY COUNT(p.id) DESC, t.score DESC
LIMIT 100
SQL
queries["top-likers"]["sql"] = <<~SQL
-- [params]
-- int :months_ago = 1
WITH query_period AS (
SELECT
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' as period_start,
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' + INTERVAL '1 month' - INTERVAL '1 second' as period_end
)
SELECT
ua.user_id,
count(1) AS like_count
FROM user_actions ua
INNER JOIN query_period qp
ON ua.created_at >= qp.period_start
AND ua.created_at <= qp.period_end
WHERE ua.action_type = 1
GROUP BY ua.user_id
ORDER BY like_count DESC
LIMIT 100
SQL
queries["quality-users"]["sql"] = <<~SQL
SELECT sum(p.score) / count(p) AS "average score per post",
count(p.id) AS post_count,
p.user_id
FROM posts p
JOIN users u ON u.id = p.user_id
WHERE p.created_at >= CURRENT_DATE - INTERVAL '6 month'
AND NOT u.admin
AND u.active
GROUP BY user_id,
u.views
HAVING count(p.id) > 50
ORDER BY sum(p.score) / count(p) DESC
LIMIT 50
SQL
queries["user-participation"]["sql"] = <<~SQL
-- [params]
-- int :from_days_ago = 0
-- int :duration_days = 30
WITH t AS (
SELECT CURRENT_TIMESTAMP - ((:from_days_ago + :duration_days) * (INTERVAL '1 days')) AS START,
CURRENT_TIMESTAMP - (:from_days_ago * (INTERVAL '1 days')) AS END
),
pr AS (
SELECT user_id, COUNT(1) AS visits,
SUM(posts_read) AS posts_read
FROM user_visits, t
WHERE posts_read > 0
AND visited_at > t.START
AND visited_at < t.
END
GROUP BY
user_id
),
pc AS (
SELECT user_id, COUNT(1) AS posts_created
FROM posts, t
WHERE
created_at > t.START
AND created_at < t.
END
GROUP BY
user_id
),
ttopics AS (
SELECT user_id, posts_count
FROM topics, t
WHERE created_at > t.START
AND created_at < t.
END
),
tc AS (
SELECT user_id, COUNT(1) AS topics_created
FROM ttopics
GROUP BY user_id
),
twr AS (
SELECT user_id, COUNT(1) AS topics_with_replies
FROM ttopics
WHERE posts_count > 1
GROUP BY user_id
),
tv AS (
SELECT user_id,
COUNT(DISTINCT(topic_id)) AS topics_viewed
FROM topic_views, t
WHERE viewed_at > t.START
AND viewed_at < t.
END
GROUP BY user_id
),
likes AS (
SELECT post_actions.user_id AS given_by_user_id,
posts.user_id AS received_by_user_id
FROM t,
post_actions
LEFT JOIN
posts
ON post_actions.post_id = posts.id
WHERE
post_actions.created_at > t.START
AND post_actions.created_at < t.
END
AND post_action_type_id = 2
),
lg AS (
SELECT given_by_user_id AS user_id,
COUNT(1) AS likes_given
FROM likes
GROUP BY user_id
),
lr AS (
SELECT received_by_user_id AS user_id,
COUNT(1) AS likes_received
FROM likes
GROUP BY user_id
),
e AS (
SELECT email, user_id
FROM user_emails u
WHERE u.PRIMARY = TRUE
)
SELECT
pr.user_id,
username,
name,
email,
visits,
COALESCE(topics_viewed, 0) AS topics_viewed,
COALESCE(posts_read, 0) AS posts_read,
COALESCE(posts_created, 0) AS posts_created,
COALESCE(topics_created, 0) AS topics_created,
COALESCE(topics_with_replies, 0) AS topics_with_replies,
COALESCE(likes_given, 0) AS likes_given,
COALESCE(likes_received, 0) AS likes_received
FROM pr
LEFT JOIN tv USING (user_id)
LEFT JOIN pc USING (user_id)
LEFT JOIN tc USING (user_id)
LEFT JOIN twr USING (user_id)
LEFT JOIN lg USING (user_id)
LEFT JOIN lr USING (user_id)
LEFT JOIN e USING (user_id)
LEFT JOIN users ON pr.user_id = users.id
ORDER BY
visits DESC,
posts_read DESC,
posts_created DESC
SQL
queries["largest-uploads"]["sql"] = <<~SQL
SELECT posts.id AS post_id,
uploads.original_filename,
ROUND(uploads.filesize / 1000000.0, 2) AS size_in_mb,
uploads.extension,
uploads.created_at,
uploads.url
FROM post_uploads
JOIN uploads ON uploads.id = post_uploads.upload_id
JOIN posts ON posts.id = post_uploads.post_id
ORDER BY uploads.filesize DESC
LIMIT 50
SQL
queries["inactive-users"]["sql"] = <<~SQL
SELECT
u.id,
u.username_lower AS "username",
u.created_at,
u.last_seen_at
FROM users u
WHERE u.active = false
ORDER BY u.id
SQL
queries["active-lurkers"]["sql"] = <<~SQL
-- [params]
-- int :post_read_count = 100
WITH posts_by_user AS (
SELECT COUNT(*) AS posts, user_id
FROM posts
GROUP BY user_id
), posts_read_by_user AS (
SELECT SUM(posts_read) AS posts_read, user_id
FROM user_visits
GROUP BY user_id
)
SELECT
u.id,
u.username_lower AS "username",
u.created_at,
u.last_seen_at,
COALESCE(pbu.posts, 0) AS "posts_created",
COALESCE(prbu.posts_read, 0) AS "posts_read"
FROM users u
LEFT JOIN posts_by_user pbu ON pbu.user_id = u.id
LEFT JOIN posts_read_by_user prbu ON prbu.user_id = u.id
WHERE u.active = true
AND posts IS NULL
AND posts_read > :post_read_count
ORDER BY u.id
SQL
queries["topic-user-notification-level"]["sql"] = <<~SQL
-- [params]
-- null int :user
-- null int :notification_level
SELECT t.category_id AS category_id, t.id AS topic_id, tu.last_visited_at AS topic_last_visited_at
FROM topics t
JOIN topic_users tu ON tu.topic_id = t.id AND tu.user_id = :user AND tu.notification_level = :notification_level
ORDER BY tu.last_visited_at DESC
SQL
queries["assigned-topics-report"]["sql"] = <<~SQL
SELECT a.assigned_to_id user_id, a.topic_id
FROM assignments a
JOIN topics t on t.id = a.topic_id
JOIN users u on u.id = a.assigned_to_id
WHERE a.assigned_to_type = 'User'
AND t.deleted_at IS NULL
ORDER BY username, topic_id
SQL
queries["group-members-reply-count"]["sql"] = <<~SQL
-- [params]
-- date :start_date
-- date :end_date
-- string :group_name
-- boolean :include_pms = false
WITH target_users AS (
SELECT
u.id AS user_id
FROM users u
JOIN group_users gu
ON gu.user_id = u.id
JOIN groups g
ON g.id = gu.group_id
WHERE g.name = :group_name
AND gu.created_at::date <= :end_date
),
target_posts AS (
SELECT
p.id,
p.user_id
FROM posts p
JOIN topics t
ON t.id = p.topic_id
WHERE CASE WHEN :include_pms THEN true ELSE t.archetype = 'regular' END
AND t.deleted_at IS NULL
AND p.deleted_at IS NULL
AND p.created_at::date >= :start_date
AND p.created_at::date <= :end_date
AND p.post_number > 1
)
SELECT
tu.user_id,
COALESCE(COUNT(tp.id), 0) AS reply_count
FROM target_users tu
LEFT OUTER JOIN target_posts tp
ON tp.user_id = tu.user_id
GROUP BY tu.user_id
ORDER BY reply_count DESC, tu.user_id
SQL
queries["total-assigned-topics-report"]["sql"] = <<~SQL
SELECT a.assigned_to_id AS user_id,
count(*)::varchar || ',/u/' || username_lower || '/activity/assigned' assigned_url
FROM assignments a
JOIN topics t on t.id = a.topic_id
JOIN users u on u.id = a.assigned_to_id
WHERE a.assigned_to_type = 'User'
AND t.deleted_at IS NULL
GROUP BY a.assigned_to_id, username_lower
ORDER BY count(*) DESC, username_lower
SQL
queries["poll-results"]["sql"] = <<~SQL
-- [params]
-- string :poll_name
-- int :post_id
SELECT
poll_votes.updated_at AS vote_time,
poll_votes.poll_option_id AS vote_option,
users.id AS user_id,
users.username,
users.name,
users.trust_level,
poll_options.html AS vote_option_full
FROM
poll_votes
INNER JOIN
polls ON polls.id = poll_votes.poll_id
INNER JOIN
users ON users.id = poll_votes.user_id
INNER JOIN
poll_options ON poll_votes.poll_id = poll_options.poll_id AND poll_votes.poll_option_id = poll_options.id
WHERE
polls.name = :poll_name AND
polls.post_id = :post_id
SQL
queries["top-tags-per-year"]["sql"] = <<~SQL
-- [params]
-- integer :rank_max = 5
WITH data AS (SELECT
tag_id,
EXTRACT(YEAR FROM created_at) AS year
FROM topic_tags)
SELECT year, rank, name, qt FROM (
SELECT
tag_id,
COUNT(tag_id) AS qt,
year,
rank() OVER (PARTITION BY year ORDER BY COUNT(tag_id) DESC) AS rank
FROM
data
GROUP BY year, tag_id) as rnk
INNER JOIN tags ON tags.id = rnk.tag_id
WHERE rank <= :rank_max
ORDER BY year DESC, qt DESC
SQL
queries["number_of_replies_by_category"]["sql"] = <<~SQL
-- [params]
-- boolean :enable_null_category = false
WITH post AS (SELECT
id AS post_id,
topic_id,
EXTRACT(YEAR FROM created_at) AS year
FROM posts
WHERE post_type = 1
AND deleted_at ISNULL
AND post_number != 1)
SELECT
p.year,
t.category_id AS id,
c.name category,
COUNT(p.post_id) AS qt
FROM post p
INNER JOIN topics t ON t.id = p.topic_id
LEFT JOIN categories c ON c.id = t.category_id
WHERE t.deleted_at ISNULL
AND (:enable_null_category = true OR t.category_id NOTNULL)
GROUP BY t.category_id, c.name, p.year
ORDER BY p.year DESC, qt DESC
SQL
# convert query ids from "most-common-likers" to "-1", "most-messages" to "-2" etc.
queries.transform_keys!.with_index { |key, idx| "-#{idx + 1}" }
queries
end
end
end
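For illustration, a minimal irb-style sketch of the key conversion performed by transform_keys!.with_index above, using a two-entry hash abbreviated from the query definitions at the top of this file (the real entries also carry description and sql):

queries = {
  "most-common-likers" => { id: -1, name: "Most Common Likers" },
  "most-messages" => { id: -2, name: "Who has been sending the most messages in the last week?" },
}
queries.transform_keys!.with_index { |key, idx| "-#{idx + 1}" }
queries.keys
# => ["-1", "-2"]   # the string keys now line up with the negative ids declared in each entry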


@ -0,0 +1,62 @@
# frozen_string_literal: true
module ::DiscourseDataExplorer
class QueryGroupBookmarkable < BaseBookmarkable
def self.model
QueryGroup
end
def self.serializer
QueryGroupBookmarkSerializer
end
def self.preload_associations
%i[data_explorer_queries groups]
end
def self.list_query(user, guardian)
group_ids = []
if !user.admin?
group_ids = user.visible_groups.pluck(:id)
return if group_ids.empty?
end
query =
user
.bookmarks_of_type("DiscourseDataExplorer::QueryGroup")
.joins(
"INNER JOIN data_explorer_query_groups ON data_explorer_query_groups.id = bookmarks.bookmarkable_id",
)
.joins(
"LEFT JOIN data_explorer_queries ON data_explorer_queries.id = data_explorer_query_groups.query_id",
)
query = query.where("data_explorer_query_groups.group_id IN (?)", group_ids) if !user.admin?
query
end
# Searchable only by data_explorer_queries name
def self.search_query(bookmarks, query, ts_query, &bookmarkable_search)
bookmarkable_search.call(bookmarks, "data_explorer_queries.name ILIKE :q")
end
def self.reminder_handler(bookmark)
send_reminder_notification(
bookmark,
data: {
title: bookmark.bookmarkable.query.name,
bookmarkable_url:
"/g/#{bookmark.bookmarkable.group.name}/reports/#{bookmark.bookmarkable.query.id}",
},
)
end
def self.reminder_conditions(bookmark)
bookmark.bookmarkable.present?
end
def self.can_see?(guardian, bookmark)
return false if !bookmark.bookmarkable.group
guardian.user_is_a_member_of_group?(bookmark.bookmarkable.group)
end
end
end


@ -1,561 +0,0 @@
# frozen_string_literal: true
class Queries
def self.default
# WARNING: Edit the query hash carefully
# For each query, add id, name and description here and add sql below
# Feel free to add new queries at the bottom of the hash in numerical order
# If any query has been run on an instance, it is then saved in the local db
# Locally stored queries are updated from the below data only when they are run again
# eg. If you update a query with id=-1 in this file and the query has been run on a site,
# you must run the query with id=-1 on the site again to update these changes in the site db
queries = {
"most-common-likers": {
id: -1,
name: "Most Common Likers",
description: "Which users like particular other users the most?",
},
"most-messages": {
id: -2,
name: "Who has been sending the most messages in the last week?",
description: "tracking down suspicious PM activity",
},
"edited-post-spam": {
id: -3,
name: "Last 500 posts that were edited by TL0/TL1 users",
description: "fighting human-driven copy-paste spam",
},
"new-topics": {
id: -4,
name: "New Topics by Category",
description:
"Lists all new topics ordered by category and creation_date. The query accepts a months_ago parameter. It defaults to 0 to give you the stats for the current month.",
},
"active-topics": {
id: -5,
name: "Top 100 Active Topics",
description:
"based on the number of replies, it accepts a months_ago parameter, defaults to 1 to give results for the last calendar month.",
},
"top-likers": {
id: -6,
name: "Top 100 Likers",
description:
"returns the top 100 likers for a given monthly period ordered by like_count. It accepts a months_ago parameter, defaults to 1 to give results for the last calendar month.",
},
"quality-users": {
id: -7,
name: "Top 50 Quality Users",
description:
"based on post score calculated using reply count, likes, incoming links, bookmarks, time spent and read count.",
},
"user-participation": {
id: -8,
name: "User Participation Statistics",
description: "Detailed statistics for the most active users.",
},
"largest-uploads": {
id: -9,
name: "Top 50 Largest Uploads",
description: "sorted by file size.",
},
"inactive-users": {
id: -10,
name: "Inactive Users with no posts",
description: "analyze pre-Discourse signups.",
},
"active-lurkers": {
id: -11,
name: "Most Active Lurkers",
description:
"active users without posts and excessive read times, it accepts a post_read_count parameter that sets the threshold for posts read.",
},
"topic-user-notification-level": {
id: -12,
name: "List of topics a user is watching/tracking/muted",
description:
"The query requires a notification_level parameter. Use 0 for muted, 1 for regular, 2 for tracked and 3 for watched topics.",
},
"assigned-topics-report": {
id: -13,
name: "List of assigned topics by user",
description: "This report requires the assign plugin, it will find all assigned topics",
},
"group-members-reply-count": {
id: -14,
name: "Group Members Reply Count",
description:
"Number of replies by members of a group over a given time period. Requires 'group_name', 'start_date', and 'end_date' parameters. Dates need to be in the form 'yyyy-mm-dd'. Accepts an 'include_pms' parameter.",
},
"total-assigned-topics-report": {
id: -15,
name: "Total topics assigned per user",
description: "Count of assigned topis per user linking to assign list",
},
"poll-results": {
id: -16,
name: "Poll results report",
description:
"Details of a poll result, including details about each vote and voter, useful for analyzing results in external software.",
},
"top-tags-per-year": {
id: -17,
name: "Top tags per year",
description: "List the top tags per year.",
},
number_of_replies_by_category: {
id: -18,
name: "Number of replies by category",
description: "List the number of replies by category.",
},
}.with_indifferent_access
queries["most-common-likers"]["sql"] = <<~SQL
WITH pairs AS (
SELECT p.user_id liked, pa.user_id liker
FROM post_actions pa
LEFT JOIN posts p ON p.id = pa.post_id
WHERE post_action_type_id = 2
)
SELECT liker liker_user_id, liked liked_user_id, count(*)
FROM pairs
GROUP BY liked, liker
ORDER BY count DESC
SQL
queries["most-messages"]["sql"] = <<~SQL
SELECT user_id, count(*) AS message_count
FROM topics
WHERE archetype = 'private_message' AND subtype = 'user_to_user'
AND age(created_at) < interval '7 days'
GROUP BY user_id
ORDER BY message_count DESC
SQL
queries["edited-post-spam"]["sql"] = <<~SQL
SELECT
p.id AS post_id,
topic_id
FROM posts p
JOIN users u
ON u.id = p.user_id
JOIN topics t
ON t.id = p.topic_id
WHERE p.last_editor_id = p.user_id
AND p.self_edits > 0
AND (u.trust_level = 0 OR u.trust_level = 1)
AND p.deleted_at IS NULL
AND t.deleted_at IS NULL
AND t.archetype = 'regular'
ORDER BY p.updated_at DESC
LIMIT 500
SQL
queries["new-topics"]["sql"] = <<~SQL
-- [params]
-- int :months_ago = 1
WITH query_period as (
SELECT
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' as period_start,
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' + INTERVAL '1 month' - INTERVAL '1 second' as period_end
)
SELECT
t.id as topic_id,
t.category_id
FROM topics t
RIGHT JOIN query_period qp
ON t.created_at >= qp.period_start
AND t.created_at <= qp.period_end
WHERE t.user_id > 0
AND t.category_id IS NOT NULL
ORDER BY t.category_id, t.created_at DESC
SQL
queries["active-topics"]["sql"] = <<~SQL
-- [params]
-- int :months_ago = 1
WITH query_period AS
(SELECT date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' AS period_start,
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' + INTERVAL '1 month' - INTERVAL '1 second' AS period_end)
SELECT t.id AS topic_id,
t.category_id,
COUNT(p.id) AS reply_count
FROM topics t
JOIN posts p ON t.id = p.topic_id
JOIN query_period qp ON p.created_at >= qp.period_start
AND p.created_at <= qp.period_end
WHERE t.archetype = 'regular'
AND t.user_id > 0
GROUP BY t.id
ORDER BY COUNT(p.id) DESC, t.score DESC
LIMIT 100
SQL
queries["top-likers"]["sql"] = <<~SQL
-- [params]
-- int :months_ago = 1
WITH query_period AS (
SELECT
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' as period_start,
date_trunc('month', CURRENT_DATE) - INTERVAL ':months_ago months' + INTERVAL '1 month' - INTERVAL '1 second' as period_end
)
SELECT
ua.user_id,
count(1) AS like_count
FROM user_actions ua
INNER JOIN query_period qp
ON ua.created_at >= qp.period_start
AND ua.created_at <= qp.period_end
WHERE ua.action_type = 1
GROUP BY ua.user_id
ORDER BY like_count DESC
LIMIT 100
SQL
queries["quality-users"]["sql"] = <<~SQL
SELECT sum(p.score) / count(p) AS "average score per post",
count(p.id) AS post_count,
p.user_id
FROM posts p
JOIN users u ON u.id = p.user_id
WHERE p.created_at >= CURRENT_DATE - INTERVAL '6 month'
AND NOT u.admin
AND u.active
GROUP BY user_id,
u.views
HAVING count(p.id) > 50
ORDER BY sum(p.score) / count(p) DESC
LIMIT 50
SQL
queries["user-participation"]["sql"] = <<~SQL
-- [params]
-- int :from_days_ago = 0
-- int :duration_days = 30
WITH t AS (
SELECT CURRENT_TIMESTAMP - ((:from_days_ago + :duration_days) * (INTERVAL '1 days')) AS START,
CURRENT_TIMESTAMP - (:from_days_ago * (INTERVAL '1 days')) AS END
),
pr AS (
SELECT user_id, COUNT(1) AS visits,
SUM(posts_read) AS posts_read
FROM user_visits, t
WHERE posts_read > 0
AND visited_at > t.START
AND visited_at < t.
END
GROUP BY
user_id
),
pc AS (
SELECT user_id, COUNT(1) AS posts_created
FROM posts, t
WHERE
created_at > t.START
AND created_at < t.
END
GROUP BY
user_id
),
ttopics AS (
SELECT user_id, posts_count
FROM topics, t
WHERE created_at > t.START
AND created_at < t.
END
),
tc AS (
SELECT user_id, COUNT(1) AS topics_created
FROM ttopics
GROUP BY user_id
),
twr AS (
SELECT user_id, COUNT(1) AS topics_with_replies
FROM ttopics
WHERE posts_count > 1
GROUP BY user_id
),
tv AS (
SELECT user_id,
COUNT(DISTINCT(topic_id)) AS topics_viewed
FROM topic_views, t
WHERE viewed_at > t.START
AND viewed_at < t.
END
GROUP BY user_id
),
likes AS (
SELECT post_actions.user_id AS given_by_user_id,
posts.user_id AS received_by_user_id
FROM t,
post_actions
LEFT JOIN
posts
ON post_actions.post_id = posts.id
WHERE
post_actions.created_at > t.START
AND post_actions.created_at < t.
END
AND post_action_type_id = 2
),
lg AS (
SELECT given_by_user_id AS user_id,
COUNT(1) AS likes_given
FROM likes
GROUP BY user_id
),
lr AS (
SELECT received_by_user_id AS user_id,
COUNT(1) AS likes_received
FROM likes
GROUP BY user_id
),
e AS (
SELECT email, user_id
FROM user_emails u
WHERE u.PRIMARY = TRUE
)
SELECT
pr.user_id,
username,
name,
email,
visits,
COALESCE(topics_viewed, 0) AS topics_viewed,
COALESCE(posts_read, 0) AS posts_read,
COALESCE(posts_created, 0) AS posts_created,
COALESCE(topics_created, 0) AS topics_created,
COALESCE(topics_with_replies, 0) AS topics_with_replies,
COALESCE(likes_given, 0) AS likes_given,
COALESCE(likes_received, 0) AS likes_received
FROM pr
LEFT JOIN tv USING (user_id)
LEFT JOIN pc USING (user_id)
LEFT JOIN tc USING (user_id)
LEFT JOIN twr USING (user_id)
LEFT JOIN lg USING (user_id)
LEFT JOIN lr USING (user_id)
LEFT JOIN e USING (user_id)
LEFT JOIN users ON pr.user_id = users.id
ORDER BY
visits DESC,
posts_read DESC,
posts_created DESC
SQL
queries["largest-uploads"]["sql"] = <<~SQL
SELECT posts.id AS post_id,
uploads.original_filename,
ROUND(uploads.filesize / 1000000.0, 2) AS size_in_mb,
uploads.extension,
uploads.created_at,
uploads.url
FROM post_uploads
JOIN uploads ON uploads.id = post_uploads.upload_id
JOIN posts ON posts.id = post_uploads.post_id
ORDER BY uploads.filesize DESC
LIMIT 50
SQL
queries["inactive-users"]["sql"] = <<~SQL
SELECT
u.id,
u.username_lower AS "username",
u.created_at,
u.last_seen_at
FROM users u
WHERE u.active = false
ORDER BY u.id
SQL
queries["active-lurkers"]["sql"] = <<~SQL
-- [params]
-- int :post_read_count = 100
WITH posts_by_user AS (
SELECT COUNT(*) AS posts, user_id
FROM posts
GROUP BY user_id
), posts_read_by_user AS (
SELECT SUM(posts_read) AS posts_read, user_id
FROM user_visits
GROUP BY user_id
)
SELECT
u.id,
u.username_lower AS "username",
u.created_at,
u.last_seen_at,
COALESCE(pbu.posts, 0) AS "posts_created",
COALESCE(prbu.posts_read, 0) AS "posts_read"
FROM users u
LEFT JOIN posts_by_user pbu ON pbu.user_id = u.id
LEFT JOIN posts_read_by_user prbu ON prbu.user_id = u.id
WHERE u.active = true
AND posts IS NULL
AND posts_read > :post_read_count
ORDER BY u.id
SQL
queries["topic-user-notification-level"]["sql"] = <<~SQL
-- [params]
-- null int :user
-- null int :notification_level
SELECT t.category_id AS category_id, t.id AS topic_id, tu.last_visited_at AS topic_last_visited_at
FROM topics t
JOIN topic_users tu ON tu.topic_id = t.id AND tu.user_id = :user AND tu.notification_level = :notification_level
ORDER BY tu.last_visited_at DESC
SQL
queries["assigned-topics-report"]["sql"] = <<~SQL
SELECT a.assigned_to_id user_id, a.topic_id
FROM assignments a
JOIN topics t on t.id = a.topic_id
JOIN users u on u.id = a.assigned_to_id
WHERE a.assigned_to_type = 'User'
AND t.deleted_at IS NULL
ORDER BY username, topic_id
SQL
queries["group-members-reply-count"]["sql"] = <<~SQL
-- [params]
-- date :start_date
-- date :end_date
-- string :group_name
-- boolean :include_pms = false
WITH target_users AS (
SELECT
u.id AS user_id
FROM users u
JOIN group_users gu
ON gu.user_id = u.id
JOIN groups g
ON g.id = gu.group_id
WHERE g.name = :group_name
AND gu.created_at::date <= :end_date
),
target_posts AS (
SELECT
p.id,
p.user_id
FROM posts p
JOIN topics t
ON t.id = p.topic_id
WHERE CASE WHEN :include_pms THEN true ELSE t.archetype = 'regular' END
AND t.deleted_at IS NULL
AND p.deleted_at IS NULL
AND p.created_at::date >= :start_date
AND p.created_at::date <= :end_date
AND p.post_number > 1
)
SELECT
tu.user_id,
COALESCE(COUNT(tp.id), 0) AS reply_count
FROM target_users tu
LEFT OUTER JOIN target_posts tp
ON tp.user_id = tu.user_id
GROUP BY tu.user_id
ORDER BY reply_count DESC, tu.user_id
SQL
queries["total-assigned-topics-report"]["sql"] = <<~SQL
SELECT a.assigned_to_id AS user_id,
count(*)::varchar || ',/u/' || username_lower || '/activity/assigned' assigned_url
FROM assignments a
JOIN topics t on t.id = a.topic_id
JOIN users u on u.id = a.assigned_to_id
WHERE a.assigned_to_type = 'User'
AND t.deleted_at IS NULL
GROUP BY a.assigned_to_id, username_lower
ORDER BY count(*) DESC, username_lower
SQL
queries["poll-results"]["sql"] = <<~SQL
-- [params]
-- string :poll_name
-- int :post_id
SELECT
poll_votes.updated_at AS vote_time,
poll_votes.poll_option_id AS vote_option,
users.id AS user_id,
users.username,
users.name,
users.trust_level,
poll_options.html AS vote_option_full
FROM
poll_votes
INNER JOIN
polls ON polls.id = poll_votes.poll_id
INNER JOIN
users ON users.id = poll_votes.user_id
INNER JOIN
poll_options ON poll_votes.poll_id = poll_options.poll_id AND poll_votes.poll_option_id = poll_options.id
WHERE
polls.name = :poll_name AND
polls.post_id = :post_id
SQL
queries["top-tags-per-year"]["sql"] = <<~SQL
-- [params]
-- integer :rank_max = 5
WITH data AS (SELECT
tag_id,
EXTRACT(YEAR FROM created_at) AS year
FROM topic_tags)
SELECT year, rank, name, qt FROM (
SELECT
tag_id,
COUNT(tag_id) AS qt,
year,
rank() OVER (PARTITION BY year ORDER BY COUNT(tag_id) DESC) AS rank
FROM
data
GROUP BY year, tag_id) as rnk
INNER JOIN tags ON tags.id = rnk.tag_id
WHERE rank <= :rank_max
ORDER BY year DESC, qt DESC
SQL
queries["number_of_replies_by_category"]["sql"] = <<~SQL
-- [params]
-- boolean :enable_null_category = false
WITH post AS (SELECT
id AS post_id,
topic_id,
EXTRACT(YEAR FROM created_at) AS year
FROM posts
WHERE post_type = 1
AND deleted_at ISNULL
AND post_number != 1)
SELECT
p.year,
t.category_id AS id,
c.name category,
COUNT(p.post_id) AS qt
FROM post p
INNER JOIN topics t ON t.id = p.topic_id
LEFT JOIN categories c ON c.id = t.category_id
WHERE t.deleted_at ISNULL
AND (:enable_null_category = true OR t.category_id NOTNULL)
GROUP BY t.category_id, c.name, p.year
ORDER BY p.year DESC, qt DESC
SQL
# convert query ids from "most-common-likers" to "-1", "most-messages" to "-2" etc.
queries.transform_keys!.with_index { |key, idx| "-#{idx + 1}" }
queries
end
end


@ -2,11 +2,10 @@
# rake data_explorer:list_hidden_queries
desc "Shows a list of hidden queries"
task("data_explorer:list_hidden_queries").clear
task "data_explorer:list_hidden_queries" => :environment do |t|
puts "\nHidden Queries\n\n"
hidden_queries = DataExplorer::Query.where(hidden: true)
hidden_queries = DiscourseDataExplorer::Query.where(hidden: true)
hidden_queries.each do |query|
puts "Name: #{query.name}"
@ -18,11 +17,10 @@ end
# rake data_explorer[-1]
# rake data_explorer[1,-2,3,-4,5]
desc "Hides one or multiple queries by ID"
task("data_explorer").clear
task "data_explorer" => :environment do |t, args|
args.extras.each do |arg|
id = arg.to_i
query = DataExplorer::Query.find_by(id: id)
query = DiscourseDataExplorer::Query.find_by(id: id)
if query
puts "\nFound query with id #{id}"
query.update!(hidden: true)
@ -37,11 +35,10 @@ end
# rake data_explorer:unhide_query[-1]
# rake data_explorer:unhide_query[1,-2,3,-4,5]
desc "Unhides one or multiple queries by ID"
task("data_explorer:unhide_query").clear
task "data_explorer:unhide_query" => :environment do |t, args|
args.extras.each do |arg|
id = arg.to_i
query = DataExplorer::Query.find_by(id: id)
query = DiscourseDataExplorer::Query.find_by(id: id)
if query
puts "\nFound query with id #{id}"
query.update!(hidden: false)
@ -56,11 +53,10 @@ end
# rake data_explorer:hard_delete[-1]
# rake data_explorer:hard_delete[1,-2,3,-4,5]
desc "Hard deletes one or multiple queries by ID"
task("data_explorer:hard_delete").clear
task "data_explorer:hard_delete" => :environment do |t, args|
args.extras.each do |arg|
id = arg.to_i
query = DataExplorer::Query.find_by(id: id)
query = DiscourseDataExplorer::Query.find_by(id: id)
if query
puts "\nFound query with id #{id}"


@ -1,8 +1,6 @@
# frozen_string_literal: true
desc "Fix query IDs to match the old ones used in the plugin store (q:id)"
task("data_explorer:fix_query_ids").clear
task "data_explorer:fix_query_ids" => :environment do
ActiveRecord::Base.transaction do
# Only queries with unique title can be fixed

plugin.rb (898 changed lines)

@ -9,10 +9,8 @@
enabled_site_setting :data_explorer_enabled
require File.expand_path("../lib/discourse_data_explorer/engine.rb", __FILE__)
register_asset "stylesheets/explorer.scss"
if respond_to?(:register_svg_icon)
register_svg_icon "caret-down"
register_svg_icon "caret-right"
register_svg_icon "chevron-left"
@ -20,26 +18,30 @@ if respond_to?(:register_svg_icon)
register_svg_icon "info"
register_svg_icon "pencil-alt"
register_svg_icon "upload"
end
# route: /admin/plugins/explorer
add_admin_route "explorer.title", "explorer"
module ::DataExplorer
# This should always match the max value for the data_explorer_query_result_limit
# site setting.
QUERY_RESULT_MAX_LIMIT = 10_000
module ::DiscourseDataExplorer
PLUGIN_NAME = "discourse-data-explorer"
def self.plugin_name
"discourse-data-explorer".freeze
end
# This should always match the max value for the
# data_explorer_query_result_limit site setting
QUERY_RESULT_MAX_LIMIT = 10_000
end
require_relative "lib/discourse_data_explorer/engine"
after_initialize do
require_relative "app/jobs/scheduled/delete_hidden_queries"
require_relative "lib/discourse_data_explorer/data_explorer"
require_relative "lib/discourse_data_explorer/parameter"
require_relative "lib/discourse_data_explorer/queries"
require_relative "lib/discourse_data_explorer/query_group_bookmarkable"
add_to_class(:guardian, :user_is_a_member_of_group?) do |group|
return false if !current_user
return true if current_user.admin?
return current_user.group_ids.include?(group.id)
current_user.group_ids.include?(group.id)
end
add_to_class(:guardian, :user_can_access_query?) do |query|
@ -51,883 +53,21 @@ after_initialize do
add_to_class(:guardian, :group_and_user_can_access_query?) do |group, query|
return false if !current_user
return true if current_user.admin?
return user_is_a_member_of_group?(group) && query.groups.exists?(id: group.id)
user_is_a_member_of_group?(group) && query.groups.exists?(id: group.id)
end
add_to_serializer(:group_show, :has_visible_data_explorer_queries, false) do
DataExplorer::Query.for_group(object).exists?
DiscourseDataExplorer::Query.for_group(object).exists?
end
add_to_serializer(:group_show, :include_has_visible_data_explorer_queries?, false) do
SiteSetting.data_explorer_enabled && scope.user_is_a_member_of_group?(object)
end
module ::DataExplorer
class Engine < ::Rails::Engine
engine_name "data_explorer"
isolate_namespace DataExplorer
end
class ValidationError < StandardError
end
# Run a data explorer query on the currently connected database.
#
# @param [DataExplorer::Query] query the Query object to run
# @param [Hash] params the colon-style query parameters for the query
# @param [Hash] opts hash of options
# explain - include a query plan in the result
# @return [Hash]
# error - any exception that was raised in the execution. Check this
# first before looking at any other fields.
# pg_result - the PG::Result object
# duration_secs - the query duration, in seconds
# explain - the query plan, if requested
def self.run_query(query, req_params = {}, opts = {})
# Safety checks
# see test 'doesn't allow you to modify the database #2'
if query.sql =~ /;/
err = DataExplorer::ValidationError.new(I18n.t("js.errors.explorer.no_semicolons"))
return { error: err, duration_nanos: 0 }
end
query_args = {}
begin
query_args = query.cast_params req_params
rescue DataExplorer::ValidationError => e
return { error: e, duration_nanos: 0 }
end
time_start, time_end, explain, err, result = nil
begin
ActiveRecord::Base.connection.transaction do
# Setting transaction to read only prevents shoot-in-foot actions like SELECT FOR UPDATE
# see test 'doesn't allow you to modify the database #1'
DB.exec "SET TRANSACTION READ ONLY"
# Set a statement timeout so we can't tie up the server
DB.exec "SET LOCAL statement_timeout = 10000"
# SQL comments are for the benefits of the slow queries log
sql = <<-SQL
/*
* DataExplorer Query
* Query: /admin/plugins/explorer?id=#{query.id}
* Started by: #{opts[:current_user]}
*/
WITH query AS (
#{query.sql}
) SELECT * FROM query
LIMIT #{opts[:limit] || SiteSetting.data_explorer_query_result_limit}
SQL
time_start = Time.now
# Using MiniSql::InlineParamEncoder directly instead of DB.param_encoder because current implementation of
# DB.param_encoder is meant for SQL fragments and not an entire SQL string.
sql =
MiniSql::InlineParamEncoder.new(ActiveRecord::Base.connection.raw_connection).encode(
sql,
query_args,
)
result = ActiveRecord::Base.connection.raw_connection.async_exec(sql)
result.check # make sure it's done
time_end = Time.now
if opts[:explain]
explain =
DB
.query_hash("EXPLAIN #{query.sql}", query_args)
.map { |row| row["QUERY PLAN"] }.join "\n"
end
# All done. Issue a rollback anyways, just in case
# see test 'doesn't allow you to modify the database #1'
raise ActiveRecord::Rollback
end
rescue Exception => ex
err = ex
time_end = Time.now
end
{
error: err,
pg_result: result,
duration_secs: time_end - time_start,
explain: explain,
params_full: query_args,
}
end
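# A usage sketch follows (hypothetical Rails console session; it assumes the
# built-in query with id -5 has already been saved on the site, and the
# parameter value is illustrative):
query = DataExplorer::Query.find(-5) # "Top 100 Active Topics"
result =
  DataExplorer.run_query(query, { "months_ago" => "1" }, { explain: true, current_user: "system" })
if result[:error]
  puts result[:error].message # check this first, as documented above
else
  puts result[:pg_result].fields.inspect # column names of the PG::Result
  puts result[:duration_secs]            # wall-clock duration, in seconds
  puts result[:explain]                  # query plan, present because explain was requested
end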
def self.extra_data_pluck_fields
@extra_data_pluck_fields ||= {
user: {
class: User,
fields: %i[id username uploaded_avatar_id],
serializer: BasicUserSerializer,
},
badge: {
class: Badge,
fields: %i[id name badge_type_id description icon],
include: [:badge_type],
serializer: SmallBadgeSerializer,
},
post: {
class: Post,
fields: %i[id topic_id post_number cooked user_id],
include: [:user],
serializer: SmallPostWithExcerptSerializer,
},
topic: {
class: Topic,
fields: %i[id title slug posts_count],
serializer: BasicTopicSerializer,
},
group: {
class: Group,
ignore: true,
},
category: {
class: Category,
ignore: true,
},
reltime: {
ignore: true,
},
html: {
ignore: true,
},
}
end
def self.column_regexes
@column_regexes ||=
extra_data_pluck_fields
.map { |key, val| /(#{val[:class].to_s.downcase})_id$/ if val[:class] }
.compact
end
def self.add_extra_data(pg_result)
needed_classes = {}
ret = {}
col_map = {}
pg_result.fields.each_with_index do |col, idx|
rgx = column_regexes.find { |r| r.match col }
if rgx
cls = (rgx.match col)[1].to_sym
needed_classes[cls] ||= []
needed_classes[cls] << idx
elsif col =~ /^(\w+)\$/
cls = $1.to_sym
needed_classes[cls] ||= []
needed_classes[cls] << idx
elsif col =~ /^\w+_url$/
col_map[idx] = "url"
end
end
needed_classes.each do |cls, column_nums|
next unless column_nums.present?
support_info = extra_data_pluck_fields[cls]
next unless support_info
column_nums.each { |col_n| col_map[col_n] = cls }
if support_info[:ignore]
ret[cls] = []
next
end
ids = Set.new
column_nums.each { |col_n| ids.merge(pg_result.column_values(col_n)) }
ids.delete nil
ids.map! &:to_i
object_class = support_info[:class]
all_objs = object_class
all_objs = all_objs.with_deleted if all_objs.respond_to? :with_deleted
all_objs =
all_objs
.select(support_info[:fields])
.where(id: ids.to_a.sort)
.includes(support_info[:include])
.order(:id)
ret[cls] = ActiveModel::ArraySerializer.new(
all_objs,
each_serializer: support_info[:serializer],
)
end
[ret, col_map]
end
def self.sensitive_column_names
%w[
#_IP_Addresses
topic_views.ip_address
users.ip_address
users.registration_ip_address
incoming_links.ip_address
topic_link_clicks.ip_address
user_histories.ip_address
#_Emails
email_tokens.email
users.email
invites.email
user_histories.email
email_logs.to_address
posts.raw_email
badge_posts.raw_email
#_Secret_Tokens
email_tokens.token
email_logs.reply_key
api_keys.key
site_settings.value
users.auth_token
users.password_hash
users.salt
#_Authentication_Info
user_open_ids.email
oauth2_user_infos.uid
oauth2_user_infos.email
facebook_user_infos.facebook_user_id
facebook_user_infos.email
twitter_user_infos.twitter_user_id
github_user_infos.github_user_id
single_sign_on_records.external_email
single_sign_on_records.external_id
google_user_infos.google_user_id
google_user_infos.email
]
end
def self.schema
# No need to expire this, because the server processes get restarted on upgrade
# refer user to http://www.postgresql.org/docs/9.3/static/datatype.html
@schema ||=
begin
results = DB.query_hash <<~SQL
select
c.column_name column_name,
c.data_type data_type,
c.character_maximum_length character_maximum_length,
c.is_nullable is_nullable,
c.column_default column_default,
c.table_name table_name,
pgd.description column_desc
from INFORMATION_SCHEMA.COLUMNS c
inner join pg_catalog.pg_statio_all_tables st on (c.table_schema = st.schemaname and c.table_name = st.relname)
left outer join pg_catalog.pg_description pgd on (pgd.objoid = st.relid and pgd.objsubid = c.ordinal_position)
where c.table_schema = 'public'
ORDER BY c.table_name, c.ordinal_position
SQL
by_table = {}
# Massage the results into a nicer form
results.each do |hash|
full_col_name = "#{hash["table_name"]}.#{hash["column_name"]}"
if hash["is_nullable"] == "YES"
hash["is_nullable"] = true
else
hash.delete("is_nullable")
end
clen = hash.delete "character_maximum_length"
dt = hash["data_type"]
if hash["column_name"] == "id"
hash["data_type"] = "serial"
hash["primary"] = true
elsif dt == "character varying"
hash["data_type"] = "varchar(#{clen.to_i})"
elsif dt == "timestamp without time zone"
hash["data_type"] = "timestamp"
elsif dt == "double precision"
hash["data_type"] = "double"
end
default = hash["column_default"]
if default.nil? || default =~ /^nextval\(/
hash.delete "column_default"
elsif default =~ /^'(.*)'::(character varying|text)/
hash["column_default"] = $1
end
hash.delete("column_desc") unless hash["column_desc"]
hash["sensitive"] = true if sensitive_column_names.include? full_col_name
hash["enum"] = enum_info[full_col_name] if enum_info.include? full_col_name
if denormalized_columns.include? full_col_name
hash["denormal"] = denormalized_columns[full_col_name]
end
fkey = fkey_info(hash["table_name"], hash["column_name"])
hash["fkey_info"] = fkey if fkey
table_name = hash.delete("table_name")
by_table[table_name] ||= []
by_table[table_name] << hash
end
# this works for now, but no big loss if the tables aren't quite sorted
favored_order = %w[
posts
topics
users
categories
badges
groups
notifications
post_actions
site_settings
]
sorted_by_table = {}
favored_order.each { |tbl| sorted_by_table[tbl] = by_table[tbl] }
by_table.keys.sort.each do |tbl|
next if favored_order.include? tbl
sorted_by_table[tbl] = by_table[tbl]
end
sorted_by_table
end
end
def self.enums
return @enums if @enums
@enums = {
"application_requests.req_type": ApplicationRequest.req_types,
"badges.badge_type_id": Enum.new(:gold, :silver, :bronze, start: 1),
"bookmarks.auto_delete_preference": Bookmark.auto_delete_preferences,
"category_groups.permission_type": CategoryGroup.permission_types,
"category_users.notification_level": CategoryUser.notification_levels,
"directory_items.period_type": DirectoryItem.period_types,
"email_change_requests.change_state": EmailChangeRequest.states,
"groups.id": Group::AUTO_GROUPS,
"groups.mentionable_level": Group::ALIAS_LEVELS,
"groups.messageable_level": Group::ALIAS_LEVELS,
"groups.members_visibility_level": Group.visibility_levels,
"groups.visibility_level": Group.visibility_levels,
"groups.default_notification_level": GroupUser.notification_levels,
"group_histories.action": GroupHistory.actions,
"group_users.notification_level": GroupUser.notification_levels,
"imap_sync_logs.level": ImapSyncLog.levels,
"invites.emailed_status": Invite.emailed_status_types,
"notifications.notification_type": Notification.types,
"polls.results": Poll.results,
"polls.status": Poll.statuses,
"polls.type": Poll.types,
"polls.visibility": Poll.visibilities,
"post_action_types.id": PostActionType.types,
"post_actions.post_action_type_id": PostActionType.types,
"posts.cook_method": Post.cook_methods,
"posts.hidden_reason_id": Post.hidden_reasons,
"posts.post_type": Post.types,
"reviewables.status": Reviewable.statuses,
"reviewable_histories.reviewable_history_type": ReviewableHistory.types,
"reviewable_scores.status": ReviewableScore.statuses,
"screened_emails.action_type": ScreenedEmail.actions,
"screened_ip_addresses.action_type": ScreenedIpAddress.actions,
"screened_urls.action_type": ScreenedUrl.actions,
"search_logs.search_result_type": SearchLog.search_result_types,
"search_logs.search_type": SearchLog.search_types,
"site_settings.data_type": SiteSetting.types,
"skipped_email_logs.reason_type": SkippedEmailLog.reason_types,
"tag_group_permissions.permission_type": TagGroupPermission.permission_types,
"theme_fields.type_id": ThemeField.types,
"theme_settings.data_type": ThemeSetting.types,
"topic_timers.status_type": TopicTimer.types,
"topic_users.notification_level": TopicUser.notification_levels,
"topic_users.notifications_reason_id": TopicUser.notification_reasons,
"uploads.verification_status": Upload.verification_statuses,
"user_actions.action_type": UserAction.types,
"user_histories.action": UserHistory.actions,
"user_options.email_previous_replies": UserOption.previous_replies_type,
"user_options.like_notification_frequency": UserOption.like_notification_frequency_type,
"user_options.text_size_key": UserOption.text_sizes,
"user_options.title_count_mode_key": UserOption.title_count_modes,
"user_options.email_level": UserOption.email_level_types,
"user_options.email_messages_level": UserOption.email_level_types,
"user_second_factors.method": UserSecondFactor.methods,
"user_security_keys.factor_type": UserSecurityKey.factor_types,
"users.trust_level": TrustLevel.levels,
"watched_words.action": WatchedWord.actions,
"web_hooks.content_type": WebHook.content_types,
"web_hooks.last_delivery_status": WebHook.last_delivery_statuses,
}.with_indifferent_access
# QueuedPost is removed in recent Discourse releases
@enums["queued_posts.state"] = QueuedPost.states if defined?(QueuedPost)
@enums
end
def self.enum_info
@enum_info ||=
begin
enum_info = {}
enums.map do |key, enum|
# https://stackoverflow.com/questions/10874356/reverse-a-hash-in-ruby
enum_info[key] = Hash[enum.to_a.map(&:reverse)]
end
enum_info
end
end
def self.fkey_info(table, column)
full_name = "#{table}.#{column}"
if fkey_defaults[column]
fkey_defaults[column]
elsif column =~ /_by_id$/ || column =~ /_user_id$/
:users
elsif foreign_keys[full_name]
foreign_keys[full_name]
else
nil
end
end
def self.foreign_keys
@fkey_columns ||= {
"posts.last_editor_id": :users,
"posts.version": :"post_revisions.number",
"topics.featured_user1_id": :users,
"topics.featured_user2_id": :users,
"topics.featured_user3_id": :users,
"topics.featured_user4_id": :users,
"topics.featured_user5_id": :users,
"users.seen_notification_id": :notifications,
"users.uploaded_avatar_id": :uploads,
"users.primary_group_id": :groups,
"categories.latest_post_id": :posts,
"categories.latest_topic_id": :topics,
"categories.parent_category_id": :categories,
"badges.badge_grouping_id": :badge_groupings,
"post_actions.related_post_id": :posts,
"color_scheme_colors.color_scheme_id": :color_schemes,
"color_schemes.versioned_id": :color_schemes,
"incoming_links.incoming_referer_id": :incoming_referers,
"incoming_referers.incoming_domain_id": :incoming_domains,
"post_replies.reply_id": :posts,
"quoted_posts.quoted_post_id": :posts,
"topic_link_clicks.topic_link_id": :topic_links,
"topic_link_clicks.link_topic_id": :topics,
"topic_link_clicks.link_post_id": :posts,
"user_actions.target_topic_id": :topics,
"user_actions.target_post_id": :posts,
"user_avatars.custom_upload_id": :uploads,
"user_avatars.gravatar_upload_id": :uploads,
"user_badges.notification_id": :notifications,
"user_profiles.card_image_badge_id": :badges,
}.with_indifferent_access
end
def self.fkey_defaults
@fkey_defaults ||= {
user_id: :users,
# :*_by_id => :users,
# :*_user_id => :users,
category_id: :categories,
group_id: :groups,
post_id: :posts,
post_action_id: :post_actions,
topic_id: :topics,
upload_id: :uploads,
}.with_indifferent_access
end
def self.denormalized_columns
{
"posts.reply_count": :post_replies,
"posts.quote_count": :quoted_posts,
"posts.incoming_link_count": :topic_links,
"posts.word_count": :posts,
"posts.avg_time": :post_timings,
"posts.reads": :post_timings,
"posts.like_score": :post_actions,
"posts.like_count": :post_actions,
"posts.bookmark_count": :post_actions,
"posts.vote_count": :post_actions,
"posts.off_topic_count": :post_actions,
"posts.notify_moderators_count": :post_actions,
"posts.spam_count": :post_actions,
"posts.illegal_count": :post_actions,
"posts.inappropriate_count": :post_actions,
"posts.notify_user_count": :post_actions,
"topics.views": :topic_views,
"topics.posts_count": :posts,
"topics.reply_count": :posts,
"topics.incoming_link_count": :topic_links,
"topics.moderator_posts_count": :posts,
"topics.participant_count": :posts,
"topics.word_count": :posts,
"topics.last_posted_at": :posts,
"topics.last_post_user_idt": :posts,
"topics.avg_time": :post_timings,
"topics.highest_post_number": :posts,
"topics.image_url": :posts,
"topics.excerpt": :posts,
"topics.like_count": :post_actions,
"topics.bookmark_count": :post_actions,
"topics.vote_count": :post_actions,
"topics.off_topic_count": :post_actions,
"topics.notify_moderators_count": :post_actions,
"topics.spam_count": :post_actions,
"topics.illegal_count": :post_actions,
"topics.inappropriate_count": :post_actions,
"topics.notify_user_count": :post_actions,
"categories.topic_count": :topics,
"categories.post_count": :posts,
"categories.latest_post_id": :posts,
"categories.latest_topic_id": :topics,
"categories.description": :posts,
"categories.read_restricted": :category_groups,
"categories.topics_year": :topics,
"categories.topics_month": :topics,
"categories.topics_week": :topics,
"categories.topics_day": :topics,
"categories.posts_year": :posts,
"categories.posts_month": :posts,
"categories.posts_week": :posts,
"categories.posts_day": :posts,
"badges.grant_count": :user_badges,
"groups.user_count": :group_users,
"directory_items.likes_received": :post_actions,
"directory_items.likes_given": :post_actions,
"directory_items.topics_entered": :user_stats,
"directory_items.days_visited": :user_stats,
"directory_items.posts_read": :user_stats,
"directory_items.topic_count": :topics,
"directory_items.post_count": :posts,
"post_search_data.search_data": :posts,
"top_topics.yearly_posts_count": :posts,
"top_topics.monthly_posts_count": :posts,
"top_topics.weekly_posts_count": :posts,
"top_topics.daily_posts_count": :posts,
"top_topics.yearly_views_count": :topic_views,
"top_topics.monthly_views_count": :topic_views,
"top_topics.weekly_views_count": :topic_views,
"top_topics.daily_views_count": :topic_views,
"top_topics.yearly_likes_count": :post_actions,
"top_topics.monthly_likes_count": :post_actions,
"top_topics.weekly_likes_count": :post_actions,
"top_topics.daily_likes_count": :post_actions,
"top_topics.yearly_op_likes_count": :post_actions,
"top_topics.monthly_op_likes_count": :post_actions,
"top_topics.weekly_op_likes_count": :post_actions,
"top_topics.daily_op_likes_count": :post_actions,
"top_topics.all_score": :posts,
"top_topics.yearly_score": :posts,
"top_topics.monthly_score": :posts,
"top_topics.weekly_score": :posts,
"top_topics.daily_score": :posts,
"topic_links.clicks": :topic_link_clicks,
"topic_search_data.search_data": :topics,
"topic_users.liked": :post_actions,
"topic_users.bookmarked": :post_actions,
"user_stats.posts_read_count": :post_timings,
"user_stats.topic_reply_count": :posts,
"user_stats.first_post_created_at": :posts,
"user_stats.post_count": :posts,
"user_stats.topic_count": :topics,
"user_stats.likes_given": :post_actions,
"user_stats.likes_received": :post_actions,
"user_search_data.search_data": :user_profiles,
"users.last_posted_at": :posts,
"users.previous_visit_at": :user_visits,
}.with_indifferent_access
end
end
class DataExplorer::Parameter
attr_accessor :identifier, :type, :default, :nullable
def initialize(identifier, type, default, nullable)
unless identifier
raise DataExplorer::ValidationError.new(
"Parameter declaration error - identifier is missing",
)
end
unless type
raise DataExplorer::ValidationError.new("Parameter declaration error - type is missing")
end
# process aliases
type = type.to_sym
if DataExplorer::Parameter.type_aliases[type]
type = DataExplorer::Parameter.type_aliases[type]
end
unless DataExplorer::Parameter.types[type]
raise DataExplorer::ValidationError.new(
"Parameter declaration error - unknown type #{type}",
)
end
@identifier = identifier
@type = type
@default = default
@nullable = nullable
begin
cast_to_ruby default unless default.blank?
rescue DataExplorer::ValidationError
raise DataExplorer::ValidationError.new(
"Parameter declaration error - the default value is not a valid #{type}",
)
end
end
def to_hash
{ identifier: @identifier, type: @type, default: @default, nullable: @nullable }
end
def self.types
@types ||=
Enum.new(
# Normal types
:int,
:bigint,
:boolean,
:string,
:date,
:time,
:datetime,
:double,
# Selection help
:user_id,
:post_id,
:topic_id,
:category_id,
:group_id,
:badge_id,
# Arrays
:int_list,
:string_list,
:user_list,
)
end
def self.type_aliases
@type_aliases ||= { integer: :int, text: :string, timestamp: :datetime }
end
def cast_to_ruby(string)
string = @default unless string
if string.blank?
if @nullable
return nil
else
raise DataExplorer::ValidationError.new("Missing parameter #{identifier} of type #{type}")
end
end
return nil if string.downcase == "#null"
def invalid_format(string, msg = nil)
if msg
raise DataExplorer::ValidationError.new("'#{string}' is an invalid #{type} - #{msg}")
else
raise DataExplorer::ValidationError.new("'#{string}' is an invalid value for #{type}")
end
end
value = nil
case @type
when :int
invalid_format string, "Not an integer" unless string =~ /^-?\d+$/
value = string.to_i
invalid_format string, "Too large" unless Integer === value
when :bigint
invalid_format string, "Not an integer" unless string =~ /^-?\d+$/
value = string.to_i
when :boolean
value = !!(string =~ /t|true|y|yes|1/i)
when :string
value = string
when :time
begin
value = Time.parse string
rescue ArgumentError => e
invalid_format string, e.message
end
when :date
begin
value = Date.parse string
rescue ArgumentError => e
invalid_format string, e.message
end
when :datetime
begin
value = DateTime.parse string
rescue ArgumentError => e
invalid_format string, e.message
end
when :double
if string =~ /-?\d*(\.\d+)/
value = Float(string)
elsif string =~ /^(-?)Inf(inity)?$/i
if $1
value = -Float::INFINITY
else
value = Float::INFINITY
end
elsif string =~ /^(-?)NaN$/i
if $1
value = -Float::NAN
else
value = Float::NAN
end
else
invalid_format string
end
when :category_id
if string =~ %r{(.*)/(.*)}
parent_name = $1
child_name = $2
parent = Category.query_parent_category(parent_name)
invalid_format string, "Could not find category named #{parent_name}" unless parent
object = Category.query_category(child_name, parent)
unless object
invalid_format string,
"Could not find subcategory of #{parent_name} named #{child_name}"
end
else
object =
Category.where(id: string.to_i).first || Category.where(slug: string).first ||
Category.where(name: string).first
invalid_format string, "Could not find category named #{string}" unless object
end
value = object.id
when :user_id, :post_id, :topic_id, :group_id, :badge_id
if string.gsub(/[ _]/, "") =~ /^-?\d+$/
clazz_name = (/^(.*)_id$/.match(type.to_s)[1].classify.to_sym)
begin
object = Object.const_get(clazz_name).with_deleted.find(string.gsub(/[ _]/, "").to_i)
value = object.id
rescue ActiveRecord::RecordNotFound
invalid_format string, "The specified #{clazz_name} was not found"
end
elsif type == :user_id
begin
object = User.find_by_username_or_email(string)
value = object.id
rescue ActiveRecord::RecordNotFound
invalid_format string, "The user named #{string} was not found"
end
elsif type == :post_id
if string =~ %r{(\d+)/(\d+)(\?u=.*)?$}
object = Post.with_deleted.find_by(topic_id: $1, post_number: $2)
unless object
invalid_format string, "The post at topic:#{$1} post_number:#{$2} was not found"
end
value = object.id
end
elsif type == :topic_id
if string =~ %r{/t/[^/]+/(\d+)}
begin
object = Topic.with_deleted.find($1)
value = object.id
rescue ActiveRecord::RecordNotFound
invalid_format string, "The topic with id #{$1} was not found"
end
end
elsif type == :group_id
object = Group.where(name: string).first
invalid_format string, "The group named #{string} was not found" unless object
value = object.id
else
invalid_format string
end
when :int_list
value = string.split(",").map { |s| s.downcase == "#null" ? nil : s.to_i }
invalid_format string, "can't be empty" if value.length == 0
when :string_list
value = string.split(",").map { |s| s.downcase == "#null" ? nil : s }
invalid_format string, "can't be empty" if value.length == 0
when :user_list
value = string.split(",").map { |s| User.find_by_username_or_email(s) }
invalid_format string, "can't be empty" if value.length == 0
else
raise TypeError.new("unknown parameter type??? should not get here")
end
value
end
def self.create_from_sql(sql, opts = {})
in_params = false
ret_params = []
sql.lines.find do |line|
line.chomp!
if in_params
# -- (ident) :(ident) (= (ident))?
if line =~ /^\s*--\s*([a-zA-Z_ ]+)\s*:([a-z_]+)\s*(?:=\s+(.*)\s*)?$/
type = $1
ident = $2
default = $3
nullable = false
if type =~ /^(null)?(.*?)(null)?$/i
nullable = true if $1 || $3
type = $2
end
type = type.strip
begin
ret_params << DataExplorer::Parameter.new(ident, type, default, nullable)
rescue StandardError
raise if opts[:strict]
end
false
elsif line =~ /^\s+$/
false
else
true
end
else
in_params = true if line =~ /^\s*--\s*\[params\]\s*$/
false
end
end
ret_params
end
end
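# A short sketch of the "-- [params]" header format that create_from_sql above
# recognises (the parameter names and defaults here are made up for illustration):
sql = <<~SQL
  -- [params]
  -- int :months_ago = 1
  -- null string :filter
  SELECT 1
SQL
DataExplorer::Parameter.create_from_sql(sql).map(&:to_hash)
# => [{ identifier: "months_ago", type: :int, default: "1", nullable: false },
#     { identifier: "filter", type: :string, default: nil, nullable: true }]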
load File.expand_path("../lib/data_explorer_query_group_bookmarkable.rb", __FILE__)
load File.expand_path(
"../app/serializers/user_data_explorer_query_group_bookmark_serializer.rb",
__FILE__,
)
# Making DataExplorer::QueryGroup Bookmarkable.
register_bookmarkable(DataExplorerQueryGroupBookmarkable)
require_dependency "application_controller"
require_dependency File.expand_path("../lib/queries.rb", __FILE__)
DataExplorer::Engine.routes.draw do
root to: "query#index"
get "queries" => "query#index"
scope "/", defaults: { format: :json } do
get "schema" => "query#schema"
get "groups" => "query#groups"
post "queries" => "query#create"
get "queries/:id" => "query#show"
put "queries/:id" => "query#update"
delete "queries/:id" => "query#destroy"
post "queries/:id/run" => "query#run", :constraints => { format: /(json|csv)/ }
end
end
Discourse::Application.routes.append do
get "/g/:group_name/reports" => "data_explorer/query#group_reports_index"
get "/g/:group_name/reports/:id" => "data_explorer/query#group_reports_show"
post "/g/:group_name/reports/:id/run" => "data_explorer/query#group_reports_run"
mount ::DataExplorer::Engine, at: "/admin/plugins/explorer"
end
register_bookmarkable(DiscourseDataExplorer::QueryGroupBookmarkable)
add_api_key_scope(
:data_explorer,
{ run_queries: { actions: %w[data_explorer/query#run], params: %i[id] } },
:discourse_data_explorer,
{ run_queries: { actions: %w[discourse_data_explorer/query#run], params: %i[id] } },
)
end
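For reference, a console sketch of minting an API key restricted to the renamed scope; it mirrors the spec fixtures further down, and the allowed_parameters value is an illustrative guess at limiting the key to a single query id:

key = ApiKey.create!
ApiKeyScope.create!(
  resource: "discourse_data_explorer",
  action: "run_queries",
  api_key_id: key.id,
  allowed_parameters: { "id" => ["-5"] },
)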


@ -1,6 +1,6 @@
# frozen_string_literal: true
describe DataExplorer do
describe DiscourseDataExplorer::DataExplorer do
describe ".run_query" do
fab!(:topic) { Fabricate(:topic) }
@ -11,7 +11,7 @@ describe DataExplorer do
) SELECT * FROM query
SQL
query = DataExplorer::Query.create!(name: "some query", sql: sql)
query = DiscourseDataExplorer::Query.create!(name: "some query", sql: sql)
result = described_class.run_query(query)
@ -26,7 +26,7 @@ describe DataExplorer do
) SELECT * FROM query
SQL
query = DataExplorer::Query.create!(name: "some query", sql: sql)
query = DiscourseDataExplorer::Query.create!(name: "some query", sql: sql)
result = described_class.run_query(query)
@ -49,7 +49,7 @@ describe DataExplorer do
) SELECT * FROM query
SQL
query = DataExplorer::Query.create!(name: "some query", sql: sql)
query = DiscourseDataExplorer::Query.create!(name: "some query", sql: sql)
result = described_class.run_query(query, { "topic_id" => topic2.id.to_s })


@ -1,13 +1,13 @@
# frozen_string_literal: true
Fabricator(:query, from: "DataExplorer::Query") do
Fabricator(:query, from: "DiscourseDataExplorer::Query") do
name
description
sql
user
end
Fabricator(:query_group, from: "DataExplorer::QueryGroup") do
Fabricator(:query_group, from: "DiscourseDataExplorer::QueryGroup") do
query
group
end


@ -7,7 +7,7 @@ describe Guardian do
def make_query(group_ids = [])
query =
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
name: "Query number #{Fabrication::Sequencer.sequence("query-id", 1)}",
sql: "SELECT 1",
)


@ -5,20 +5,28 @@ require "rails_helper"
describe "API keys scoped to query#run" do
before { SiteSetting.data_explorer_enabled = true }
fab!(:query1) { DataExplorer::Query.create!(name: "Query 1", sql: "SELECT 1 AS query1_res") }
fab!(:query2) { DataExplorer::Query.create!(name: "Query 2", sql: "SELECT 1 AS query2_res") }
fab!(:query1) do
DiscourseDataExplorer::Query.create!(name: "Query 1", sql: "SELECT 1 AS query1_res")
end
fab!(:query2) do
DiscourseDataExplorer::Query.create!(name: "Query 2", sql: "SELECT 1 AS query2_res")
end
fab!(:admin) { Fabricate(:admin) }
let(:all_queries_api_key) do
key = ApiKey.create!
ApiKeyScope.create!(resource: "data_explorer", action: "run_queries", api_key_id: key.id)
ApiKeyScope.create!(
resource: "discourse_data_explorer",
action: "run_queries",
api_key_id: key.id,
)
key
end
let(:single_query_api_key) do
key = ApiKey.create!
ApiKeyScope.create!(
resource: "data_explorer",
resource: "discourse_data_explorer",
action: "run_queries",
api_key_id: key.id,
allowed_parameters: {


@ -9,7 +9,7 @@ describe Jobs::DeleteHiddenQueries do
end
it "will correctly destroy old hidden queries" do
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
id: 1,
name: "A",
description: "A description for A",
@ -18,7 +18,7 @@ describe Jobs::DeleteHiddenQueries do
last_run_at: 2.days.ago,
updated_at: 2.days.ago,
)
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
id: 2,
name: "B",
description: "A description for B",
@ -27,7 +27,7 @@ describe Jobs::DeleteHiddenQueries do
last_run_at: 8.days.ago,
updated_at: 8.days.ago,
)
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
id: 3,
name: "C",
description: "A description for C",
@ -36,7 +36,7 @@ describe Jobs::DeleteHiddenQueries do
last_run_at: 4.days.ago,
updated_at: 4.days.ago,
)
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
id: 4,
name: "D",
description: "A description for D",
@ -45,7 +45,7 @@ describe Jobs::DeleteHiddenQueries do
last_run_at: nil,
updated_at: 10.days.ago,
)
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
id: 5,
name: "E",
description: "A description for E",
@ -54,7 +54,7 @@ describe Jobs::DeleteHiddenQueries do
last_run_at: 5.days.ago,
updated_at: 10.days.ago,
)
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
id: 6,
name: "F",
description: "A description for F",
@ -65,6 +65,6 @@ describe Jobs::DeleteHiddenQueries do
)
subject.execute(nil)
expect(DataExplorer::Query.all.length).to eq(4)
expect(DiscourseDataExplorer::Query.all.length).to eq(4)
end
end


@ -2,7 +2,7 @@
require "rails_helper"
describe DataExplorerQueryGroupBookmarkable do
describe DiscourseDataExplorer::QueryGroupBookmarkable do
fab!(:admin_user) { Fabricate(:admin) }
fab!(:user) { Fabricate(:user) }
fab!(:guardian) { Guardian.new(user) }
@ -31,7 +31,7 @@ describe DataExplorerQueryGroupBookmarkable do
before do
SiteSetting.data_explorer_enabled = true
register_test_bookmarkable(DataExplorerQueryGroupBookmarkable)
register_test_bookmarkable(DiscourseDataExplorer::QueryGroupBookmarkable)
end
after { DiscoursePluginRegistry.reset_register!(:bookmarkables) }
@ -80,7 +80,7 @@ describe DataExplorerQueryGroupBookmarkable do
Fabricate(:bookmark, user: user, bookmarkable: query_group4, name: "something i gotta do also")
end
subject { RegisteredBookmarkable.new(DataExplorerQueryGroupBookmarkable) }
subject { RegisteredBookmarkable.new(DiscourseDataExplorer::QueryGroupBookmarkable) }
describe "#perform_list_query" do
it "returns all the user's bookmarks" do


@ -6,12 +6,12 @@ describe "Data explorer group serializer additions" do
fab!(:group_user) { Fabricate(:user) }
fab!(:other_user) { Fabricate(:user) }
fab!(:group) { Fabricate(:group) }
let!(:query) { DataExplorer::Query.create!(name: "My query", sql: "") }
let!(:query) { DiscourseDataExplorer::Query.create!(name: "My query", sql: "") }
before do
SiteSetting.data_explorer_enabled = true
group.add(group_user)
DataExplorer::QueryGroup.create!(group: group, query: query)
DiscourseDataExplorer::QueryGroup.create!(group: group, query: query)
end
it "query boolean is true for group user" do


@ -2,7 +2,7 @@
require "rails_helper"
describe DataExplorer::QueryController do
describe DiscourseDataExplorer::QueryController do
def response_json
response.parsed_body
end
@ -11,7 +11,7 @@ describe DataExplorer::QueryController do
def make_query(sql, opts = {}, group_ids = [])
query =
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
name: opts[:name] || "Query number",
description: "A description for query number",
sql: sql,
@ -55,31 +55,35 @@ describe DataExplorer::QueryController do
describe "#index" do
it "behaves nicely with no user created queries" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
get "/admin/plugins/explorer/queries.json"
expect(response.status).to eq(200)
expect(response_json["queries"].count).to eq(Queries.default.count)
expect(response_json["queries"].count).to eq(DiscourseDataExplorer::Queries.default.count)
end
it "shows all available queries in alphabetical order" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", name: "B")
make_query("SELECT 1 as value", name: "A")
get "/admin/plugins/explorer/queries.json"
expect(response.status).to eq(200)
expect(response_json["queries"].length).to eq(Queries.default.count + 2)
expect(response_json["queries"].length).to eq(
DiscourseDataExplorer::Queries.default.count + 2,
)
expect(response_json["queries"][0]["name"]).to eq("A")
expect(response_json["queries"][1]["name"]).to eq("B")
end
it "doesn't show hidden/deleted queries" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", name: "A", hidden: false)
make_query("SELECT 1 as value", name: "B", hidden: true)
make_query("SELECT 1 as value", name: "C", hidden: true)
get "/admin/plugins/explorer/queries.json"
expect(response.status).to eq(200)
expect(response_json["queries"].length).to eq(Queries.default.count + 1)
expect(response_json["queries"].length).to eq(
DiscourseDataExplorer::Queries.default.count + 1,
)
end
end
@ -88,7 +92,7 @@ describe DataExplorer::QueryController do
fab!(:group2) { Fabricate(:group, users: [user2]) }
it "allows group to access system query" do
query = DataExplorer::Query.find(-4)
query = DiscourseDataExplorer::Query.find(-4)
put "/admin/plugins/explorer/queries/#{query.id}.json",
params: {
"query" => {
@ -107,7 +111,7 @@ describe DataExplorer::QueryController do
end
it "returns a proper json error for invalid updates" do
query = DataExplorer::Query.find(-4)
query = DiscourseDataExplorer::Query.find(-4)
put "/admin/plugins/explorer/queries/#{query.id}",
params: {
"query" => {
@ -209,7 +213,7 @@ describe DataExplorer::QueryController do
# Manual Test - change out the following line:
#
# module ::DataExplorer
# module ::DiscourseDataExplorer
# def self.run_query(...)
# if query.sql =~ /;/
#
@ -312,9 +316,9 @@ describe DataExplorer::QueryController do
it "should limit the results in CSV download" do
begin
original_const = DataExplorer::QUERY_RESULT_MAX_LIMIT
DataExplorer.send(:remove_const, "QUERY_RESULT_MAX_LIMIT")
DataExplorer.const_set("QUERY_RESULT_MAX_LIMIT", 2)
original_const = DiscourseDataExplorer::QUERY_RESULT_MAX_LIMIT
DiscourseDataExplorer.send(:remove_const, "QUERY_RESULT_MAX_LIMIT")
DiscourseDataExplorer.const_set("QUERY_RESULT_MAX_LIMIT", 2)
query = make_query <<~SQL
SELECT id FROM posts
@ -338,8 +342,8 @@ describe DataExplorer::QueryController do
}
expect(response.body.split("\n").count).to eq(1)
ensure
DataExplorer.send(:remove_const, "QUERY_RESULT_MAX_LIMIT")
DataExplorer.const_set("QUERY_RESULT_MAX_LIMIT", original_const)
DiscourseDataExplorer.send(:remove_const, "QUERY_RESULT_MAX_LIMIT")
DiscourseDataExplorer.const_set("QUERY_RESULT_MAX_LIMIT", original_const)
end
end
end
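
The begin/ensure block above temporarily lowers DiscourseDataExplorer::QUERY_RESULT_MAX_LIMIT and then restores it. A minimal sketch of the same pattern extracted into a reusable block helper (hypothetical, not part of the plugin or this spec):

def with_query_result_max_limit(limit)
  original = DiscourseDataExplorer::QUERY_RESULT_MAX_LIMIT
  DiscourseDataExplorer.send(:remove_const, "QUERY_RESULT_MAX_LIMIT")
  DiscourseDataExplorer.const_set("QUERY_RESULT_MAX_LIMIT", limit)
  yield
ensure
  # Restore the original value even if the example fails
  DiscourseDataExplorer.send(:remove_const, "QUERY_RESULT_MAX_LIMIT")
  DiscourseDataExplorer.const_set("QUERY_RESULT_MAX_LIMIT", original)
end

# Usage: with_query_result_max_limit(2) { <run the CSV download and assert on the row count> }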

View File

@ -10,7 +10,7 @@ describe "Data Explorer rake tasks" do
def make_query(sql, opts = {}, group_ids = [])
query =
DataExplorer::Query.create!(
DiscourseDataExplorer::Query.create!(
id: opts[:id],
name: opts[:name] || "Query number",
description: "A description for query number",
@ -22,19 +22,19 @@ describe "Data Explorer rake tasks" do
end
def hidden_queries
DataExplorer::Query.where(hidden: true).order(:id)
DiscourseDataExplorer::Query.where(hidden: true).order(:id)
end
describe "data_explorer" do
it "hides a single query" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A")
make_query("SELECT 1 as value", id: 2, name: "B")
# rake data_explorer[1] => hide query with ID 1
silence_stdout { Rake::Task["data_explorer"].invoke(1) }
# Soft deletion: PluginStoreRow should not be modified
expect(DataExplorer::Query.all.length).to eq(2)
expect(DiscourseDataExplorer::Query.all.length).to eq(2)
# Array of hidden queries should have exactly 1 element
expect(hidden_queries.length).to eq(1)
# That one element should have the same ID as the one invoked to be hidden
@ -42,7 +42,7 @@ describe "Data Explorer rake tasks" do
end
it "hides multiple queries" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A")
make_query("SELECT 1 as value", id: 2, name: "B")
make_query("SELECT 1 as value", id: 3, name: "C")
@ -51,7 +51,7 @@ describe "Data Explorer rake tasks" do
silence_stdout { Rake::Task["data_explorer"].invoke(1, 2, 4) }
# Soft deletion: PluginStoreRow should not be modified
expect(DataExplorer::Query.all.length).to eq(4)
expect(DiscourseDataExplorer::Query.all.length).to eq(4)
# Array of hidden queries should have the same number of elements as were invoked to be hidden
expect(hidden_queries.length).to eq(3)
# The elements should have the same IDs as the ones invoked to be hidden
@ -62,7 +62,7 @@ describe "Data Explorer rake tasks" do
context "when query does not exist in PluginStore" do
it "should not hide the query" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A")
make_query("SELECT 1 as value", id: 2, name: "B")
# rake data_explorer[3] => try to hide query with ID 3
@ -78,14 +78,14 @@ describe "Data Explorer rake tasks" do
describe "#unhide_query" do
it "unhides a single query" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A", hidden: true)
make_query("SELECT 1 as value", id: 2, name: "B", hidden: true)
# rake data_explorer:unhide_query[1] => unhide query with ID 1
silence_stdout { Rake::Task["data_explorer:unhide_query"].invoke(1) }
# Soft deletion: PluginStoreRow should not be modified
expect(DataExplorer::Query.all.length).to eq(2)
expect(DiscourseDataExplorer::Query.all.length).to eq(2)
# Array of hidden queries should have exactly 1 element
expect(hidden_queries.length).to eq(1)
# There should be one remaining element that is still hidden
@ -93,7 +93,7 @@ describe "Data Explorer rake tasks" do
end
it "unhides multiple queries" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A", hidden: true)
make_query("SELECT 1 as value", id: 2, name: "B", hidden: true)
make_query("SELECT 1 as value", id: 3, name: "C", hidden: true)
@ -102,7 +102,7 @@ describe "Data Explorer rake tasks" do
silence_stdout { Rake::Task["data_explorer:unhide_query"].invoke(1, 2, 4) }
# Soft deletion: PluginStoreRow should not be modified
expect(DataExplorer::Query.all.length).to eq(4)
expect(DiscourseDataExplorer::Query.all.length).to eq(4)
# Array of hidden queries should have exactly 1 element
expect(hidden_queries.length).to eq(1)
# There should be one remaining element that is still hidden
@ -111,7 +111,7 @@ describe "Data Explorer rake tasks" do
context "when query does not exist in PluginStore" do
it "should not unhide the query" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A", hidden: true)
make_query("SELECT 1 as value", id: 2, name: "B", hidden: true)
# rake data_explorer:unhide_query[3] => try to unhide query with ID 3
@ -127,14 +127,14 @@ describe "Data Explorer rake tasks" do
describe "#hard_delete" do
it "hard deletes a single query" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A", hidden: true)
make_query("SELECT 1 as value", id: 2, name: "B", hidden: true)
# rake data_explorer:hard_delete[1] => hard delete query with ID 1
silence_stdout { Rake::Task["data_explorer:hard_delete"].invoke(1) }
# Hard deletion: query list should be shorter by 1
expect(DataExplorer::Query.all.length).to eq(1)
expect(DiscourseDataExplorer::Query.all.length).to eq(1)
# Array of hidden queries should have exactly 1 element
expect(hidden_queries.length).to eq(1)
# There should be one remaining hidden element
@ -142,7 +142,7 @@ describe "Data Explorer rake tasks" do
end
it "hard deletes multiple queries" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A", hidden: true)
make_query("SELECT 1 as value", id: 2, name: "B", hidden: true)
make_query("SELECT 1 as value", id: 3, name: "C", hidden: true)
@ -151,7 +151,7 @@ describe "Data Explorer rake tasks" do
silence_stdout { Rake::Task["data_explorer:hard_delete"].invoke(1, 2, 4) }
# Hard deletion: query list should be shorter by 3
expect(DataExplorer::Query.all.length).to eq(1)
expect(DiscourseDataExplorer::Query.all.length).to eq(1)
# Array of hidden queries should have exactly 1 element
expect(hidden_queries.length).to eq(1)
# There should be one remaining hidden element
@ -160,7 +160,7 @@ describe "Data Explorer rake tasks" do
context "when query does not exist in PluginStore" do
it "should not hard delete the query" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A", hidden: true)
make_query("SELECT 1 as value", id: 2, name: "B", hidden: true)
# rake data_explorer:hard_delete[3] => try to hard delete query with ID 3
@ -175,13 +175,13 @@ describe "Data Explorer rake tasks" do
context "when query is not hidden" do
it "should not hard delete the query" do
DataExplorer::Query.destroy_all
DiscourseDataExplorer::Query.destroy_all
make_query("SELECT 1 as value", id: 1, name: "A")
# rake data_explorer:hard_delete[1] => try to hard delete query with ID 1
silence_stdout { Rake::Task["data_explorer:hard_delete"].invoke(1) }
# List of queries shouldn't change
expect(DataExplorer::Query.all.length).to eq(1)
expect(DiscourseDataExplorer::Query.all.length).to eq(1)
end
end
end
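
The rake examples above drive the tasks through Rake::Task#invoke. Rake memoizes invocation, so re-running the same task with different arguments in one process requires re-enabling it first; a minimal sketch of a helper doing that (hypothetical, not part of the spec; silence_stdout is the spec helper already used above):

def run_data_explorer_task(name, *ids)
  task = Rake::Task[name]
  task.reenable # Rake runs each task at most once unless it is re-enabled
  silence_stdout { task.invoke(*ids) }
end

# run_data_explorer_task("data_explorer", 1, 2)           # hide queries 1 and 2
# run_data_explorer_task("data_explorer:unhide_query", 1) # unhide query 1
# run_data_explorer_task("data_explorer:hard_delete", 1)  # hard delete query 1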

View File

@ -94,7 +94,7 @@ describe "fix query ids rake task" do
end
def find_query_group(id)
DataExplorer::QueryGroup.find_by(query_id: id)
DiscourseDataExplorer::QueryGroup.find_by(query_id: id)
end
end
@ -117,21 +117,22 @@ describe "fix query ids rake task" do
key = "q:#{id}"
PluginStore.set(
DataExplorer.plugin_name,
DiscourseDataExplorer::PLUGIN_NAME,
key,
attributes(name).merge(group_ids: group_ids, id: id),
)
end
def create_query(name, group_ids = [])
DataExplorer::Query
DiscourseDataExplorer::Query
.create!(attributes(name))
.tap { |query| group_ids.each { |group_id| query.query_groups.create!(group_id: group_id) } }
end
def attributes(name)
{
id: DataExplorer::Query.count == 0 ? 5 : DataExplorer::Query.maximum(:id) + 1,
id:
DiscourseDataExplorer::Query.count == 0 ? 5 : DiscourseDataExplorer::Query.maximum(:id) + 1,
name: name,
description: "A Query",
sql: "SELECT 1",
@ -142,6 +143,6 @@ describe "fix query ids rake task" do
end
def find(name)
DataExplorer::Query.find_by(name: name)
DiscourseDataExplorer::Query.find_by(name: name)
end
end
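
The legacy fixtures in this spec are written straight to the PluginStore under keys of the form "q:#{id}". A minimal sketch of reading one of those rows back (hypothetical illustration, assuming a query was stored with id 5; the value is expected to come back as a Hash with string keys):

row = PluginStore.get(DiscourseDataExplorer::PLUGIN_NAME, "q:5")
row["name"]      # query name as stored above
row["sql"]       # "SELECT 1"
row["group_ids"] # group ids the query was shared with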