FEATURE: Upgrade to mini_sql
WARNING: if you are on the stable branch of Discourse, use the stable branch of data explorer.
parent 067a43b8cd
commit 37be7a54f0

plugin.rb (33 changed lines)
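
The commit swaps the plugin's ActiveRecord::Base.exec_sql calls for Discourse's mini_sql connection, exposed as the global DB. A minimal sketch of the three mini_sql calls the hunks below rely on, assuming a Discourse context where DB is the mini_sql connection (the example query and values are illustrative only):

    # exec runs a statement and returns the number of affected rows
    DB.exec "SET LOCAL statement_timeout = 10000"

    # query_hash returns each row as a Hash keyed by column name
    rows = DB.query_hash("SELECT id, username FROM users LIMIT :n", n: 5)
    rows.first["username"]

    # query_single flattens a single-column result into a plain array
    DB.query_single("SELECT max(version) FROM schema_migrations").first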
@@ -80,18 +80,14 @@ after_initialize do
       return { error: e, duration_nanos: 0 }
     end
 
-    # If we don't include this, then queries with a % sign in them fail
-    # because AR thinks we want percent-based parametes
-    query_args[:xxdummy] = 1
-
     time_start, time_end, explain, err, result = nil
     begin
       ActiveRecord::Base.connection.transaction do
         # Setting transaction to read only prevents shoot-in-foot actions like SELECT FOR UPDATE
         # see test 'doesn't allow you to modify the database #1'
-        ActiveRecord::Base.exec_sql "SET TRANSACTION READ ONLY"
+        DB.exec "SET TRANSACTION READ ONLY"
         # Set a statement timeout so we can't tie up the server
-        ActiveRecord::Base.exec_sql "SET LOCAL statement_timeout = 10000"
+        DB.exec "SET LOCAL statement_timeout = 10000"
 
         # SQL comments are for the benefits of the slow queries log
         sql = <<-SQL
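
The SET statements keep working inside the ActiveRecord transaction because, in Discourse, DB wraps the same underlying PG connection that ActiveRecord uses. A condensed sketch of the guard pattern, not the plugin's full method (the SELECT stands in for the user-supplied query):

    ActiveRecord::Base.connection.transaction do
      DB.exec "SET TRANSACTION READ ONLY"           # reject writes for the rest of the transaction
      DB.exec "SET LOCAL statement_timeout = 10000" # abort anything running longer than 10 seconds
      DB.exec "SELECT 1"                            # the user's query would run here
      raise ActiveRecord::Rollback                  # always roll back; nothing should persist
    end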
@@ -99,7 +95,6 @@ after_initialize do
 * DataExplorer Query
 * Query: /admin/plugins/explorer?id=#{query.id}
 * Started by: #{opts[:current_user]}
-* :xxdummy
 */
 WITH query AS (
 #{query.sql}
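
This comment block is prepended to every run so the source shows up in the slow queries log; with :xxdummy gone it carries only the query URL and the user. For a hypothetical query id 7 started by 'admin' with the default limit, the generated SQL looks roughly like this (the opening /* and the outer SELECT wrapping the CTE come from surrounding code that is not part of this hunk):

    /*
     * DataExplorer Query
     * Query: /admin/plugins/explorer?id=7
     * Started by: admin
     */
    WITH query AS (
      SELECT 23 AS my_value
    ) SELECT * FROM query
    LIMIT 250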
@@ -108,12 +103,17 @@ LIMIT #{opts[:limit] || 250}
 SQL
 
         time_start = Time.now
-        result = ActiveRecord::Base.exec_sql(sql, query_args)
+
+        # we probably want to rewrite this ... but for now reuse the working
+        # code we have
+        sql = DB.param_encoder.encode(sql, query_args)
+
+        result = ActiveRecord::Base.connection.raw_connection.async_exec(sql)
         result.check # make sure it's done
         time_end = Time.now
 
         if opts[:explain]
-          explain = ActiveRecord::Base.exec_sql("-- :xxdummy \nEXPLAIN #{query.sql}", query_args)
+          explain = DB.query_hash("EXPLAIN #{query.sql}", query_args)
             .map { |row| row["QUERY PLAN"] }.join "\n"
         end
 
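
Because the result is later passed around as a raw PG::Result (pg_result: result in the next hunk), the new code encodes the named parameters into the SQL string with DB.param_encoder and then executes it directly on the raw PG connection. A hedged sketch of that two-step flow, with a made-up query and parameter:

    sql = "SELECT name FROM badges WHERE id = :badge_id"
    # interpolate and escape the named params into the SQL text
    encoded = DB.param_encoder.encode(sql, badge_id: 3)
    # run on the raw PG connection to get a PG::Result back
    result = ActiveRecord::Base.connection.raw_connection.async_exec(encoded)
    result.check   # raises if the server reported an error
    result.fields  # column names
    result.values  # rows as arrays

The EXPLAIN branch does not need the raw result, so it can go through DB.query_hash directly.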
@@ -131,7 +131,7 @@ SQL
         pg_result: result,
         duration_secs: time_end - time_start,
         explain: explain,
-        params_full: query_args.tap { |h| h.delete :xxdummy }
+        params_full: query_args
       }
     end
 
@@ -258,7 +258,7 @@ google_user_infos.email
     # No need to expire this, because the server processes get restarted on upgrade
     # refer user to http://www.postgresql.org/docs/9.3/static/datatype.html
     @schema ||= begin
-      results = ActiveRecord::Base.exec_sql <<SQL
+      results = DB.query_hash <<~SQL
 select
 c.column_name column_name,
 c.data_type data_type,
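
query_hash keeps the shape the old exec_sql results had, one Hash per row keyed by column name, and the squiggly heredoc <<~SQL strips the leading indentation that <<SQL preserves. A minimal sketch against information_schema (the returned values are illustrative):

    rows = DB.query_hash(<<~SQL)
      SELECT c.column_name, c.data_type
      FROM information_schema.columns c
      WHERE c.table_schema = 'public'
      LIMIT 1
    SQL
    rows.first  # => { "column_name" => "id", "data_type" => "integer" }, for example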
@@ -273,14 +273,7 @@ left outer join pg_catalog.pg_description pgd on (pgd.objoid = st.relid and pgd.
 where c.table_schema = 'public'
 ORDER BY c.table_name, c.ordinal_position
 SQL
-      table_comment_results = ActiveRecord::Base.exec_sql <<SQL
-SELECT
-st.relname table_name,
-pgd.description table_desc
-FROM pg_catalog.pg_statio_all_tables st
-INNER JOIN pg_catalog.pg_description pgd ON (pgd.objoid = st.relid AND pgd.objsubid = 0)
-WHERE st.schemaname = 'public'
-SQL
       by_table = {}
       # Massage the results into a nicer form
       results.each do |hash|
@@ -984,7 +977,7 @@ SQL
   end
 
   def schema
-    schema_version = ActiveRecord::Base.exec_sql("SELECT max(version) AS tag FROM schema_migrations").first['tag']
+    schema_version = DB.query_single("SELECT max(version) AS tag FROM schema_migrations").first
     if stale?(public: true, etag: schema_version, template: false)
       render json: DataExplorer.schema
     end
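
query_single flattens a single-column result into a plain array, so .first now yields the value itself rather than a row hash that needed ['tag']. A one-line sketch (the returned version is hypothetical):

    DB.query_single("SELECT max(version) AS tag FROM schema_migrations").first
    # => "20180828080000", whatever the newest migration happens to be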
@@ -58,7 +58,7 @@ describe DataExplorer::QueryController do
     it "behaves nicely with no queries" do
       DataExplorer::Query.destroy_all
       get :index, format: :json
-      expect(response).to be_success
+      expect(response.status).to eq(200)
       expect(response_json['queries']).to eq([])
     end
 
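
The spec updates are mechanical: expect(response).to be_success goes through the success? predicate, which newer Rails deprecates on test responses, so the assertions pin the exact HTTP status instead. The same substitution repeats through the rest of the file, roughly:

    expect(response).to be_success       # before
    expect(response.status).to eq(200)   # after, on the happy path
    expect(response.status).to eq(422)   # after, where the query is expected to fail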
@@ -67,7 +67,7 @@ describe DataExplorer::QueryController do
       make_query('SELECT 1 as value', name: 'B')
       make_query('SELECT 1 as value', name: 'A')
       get :index, format: :json
-      expect(response).to be_success
+      expect(response.status).to eq(200)
       expect(response_json['queries'].length).to eq(2)
       expect(response_json['queries'][0]['name']).to eq('A')
       expect(response_json['queries'][1]['name']).to eq('B')

@@ -84,7 +84,7 @@ describe DataExplorer::QueryController do
     it "can run queries" do
       q = make_query('SELECT 23 as my_value')
       run_query q.id
-      expect(response).to be_success
+      expect(response.status).to eq(200)
       expect(response_json['success']).to eq(true)
       expect(response_json['errors']).to eq([])
       expect(response_json['columns']).to eq(['my_value'])
@@ -99,14 +99,14 @@ describe DataExplorer::QueryController do
 SQL
 
       run_query q.id, foo: 23
-      expect(response).to be_success
+      expect(response.status).to eq(200)
       expect(response_json['errors']).to eq([])
       expect(response_json['success']).to eq(true)
       expect(response_json['columns']).to eq(['my_value'])
       expect(response_json['rows']).to eq([[23]])
 
       run_query q.id
-      expect(response).to be_success
+      expect(response.status).to eq(200)
       expect(response_json['errors']).to eq([])
       expect(response_json['success']).to eq(true)
       expect(response_json['columns']).to eq(['my_value'])

@@ -114,7 +114,7 @@ describe DataExplorer::QueryController do
 
       # 2.3 is not an integer
       run_query q.id, foo: '2.3'
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/ValidationError/)
@@ -132,11 +132,11 @@ describe DataExplorer::QueryController do
       p.reload
 
       # Manual Test - comment out the following lines:
-      # ActiveRecord::Base.exec_sql "SET TRANSACTION READ ONLY"
+      # DB.exec "SET TRANSACTION READ ONLY"
       # raise ActiveRecord::Rollback
       # This test should fail on the below check.
       expect(p.cooked).to_not match(/winner/)
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/read-only transaction/)

@@ -173,7 +173,7 @@ describe DataExplorer::QueryController do
       #
       # Afterwards, this test should fail on the below check.
       expect(p.cooked).to_not match(/winner/)
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/semicolon/)
@@ -185,7 +185,7 @@ describe DataExplorer::QueryController do
 SQL
 
       run_query q.id
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/read-only transaction/)

@@ -197,7 +197,7 @@ describe DataExplorer::QueryController do
 SQL
 
       run_query q.id
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/read-only transaction|syntax error/)

@@ -209,7 +209,7 @@ describe DataExplorer::QueryController do
 SQL
 
       run_query q.id
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/syntax error/)
@@ -219,7 +219,7 @@ describe DataExplorer::QueryController do
 SQL
 
       run_query q.id
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/syntax error/)

@@ -229,7 +229,7 @@ describe DataExplorer::QueryController do
 SQL
 
       run_query q.id
-      expect(response).to_not be_success
+      expect(response.status).to eq(422)
       expect(response_json['errors']).to_not eq([])
       expect(response_json['success']).to eq(false)
       expect(response_json['errors'].first).to match(/syntax error/)

@@ -238,7 +238,7 @@ describe DataExplorer::QueryController do
     it "can export data in CSV format" do
       q = make_query('SELECT 23 as my_value')
       post :run, params: { id: q.id, download: 1 }, format: :csv
-      expect(response).to be_success
+      expect(response.status).to eq(200)
     end
   end
 end