2013-04-16 16:56:18 -04:00
|
|
|
|
require_dependency 'topic_subtype'
|
|
|
|
|
|
2013-02-27 22:39:42 -05:00
|
|
|
|
class Report
|
2018-07-31 17:35:13 -04:00
|
|
|
|
# Change this line each time report format change
# and you want to ensure cache is reset
# (SCHEMA_VERSION is folded into Report.cache_key, so bumping it
# invalidates every previously cached report payload).
SCHEMA_VERSION = 3
|
2018-07-31 17:35:13 -04:00
|
|
|
|
|
2018-05-03 09:41:41 -04:00
|
|
|
|
# Report attributes serialized in #as_json.
# NOTE: the #prev_start_date and #prev_end_date readers declared here are
# overridden by method definitions further down in this class.
attr_accessor :type, :data, :total, :prev30Days, :start_date,
  :end_date, :category_id, :group_id, :filter,
  :labels, :async, :prev_period, :facets, :limit, :processing, :average, :percent,
  :higher_is_better, :icon, :modes, :category_filtering,
  :group_filtering, :prev_data, :prev_start_date, :prev_end_date,
  :dates_filtering, :error, :primary_color, :secondary_color, :filter_options
|
2014-11-04 17:08:39 -05:00
|
|
|
|
|
|
|
|
|
# Length, in days, of the default reporting window (see #initialize).
def self.default_days
  30
end
|
2013-02-27 22:39:42 -05:00
|
|
|
|
|
|
|
|
|
# Builds a report of the given type with default settings; callers
# (see Report._get) override individual attributes afterwards.
def initialize(type)
  @type = type
  # Default window: the last `Report.default_days` days, in UTC.
  @start_date ||= Report.default_days.days.ago.utc.beginning_of_day
  @end_date ||= Time.now.utc.end_of_day
  # The previous (comparison) period ends where the current one starts.
  @prev_end_date = @start_date
  @average = false
  @percent = false
  @higher_is_better = true
  @category_filtering = false
  @group_filtering = false
  @modes = [:table, :chart]
  @prev_data = nil
  @dates_filtering = true
  @filter_options = nil
  @filter = nil
  # Chart colors derive from the color scheme's "tertiary" entry,
  # falling back to 0088cc when no scheme defines one.
  tertiary = ColorScheme.hex_for_name('tertiary') || '0088cc'
  @primary_color = rgba_color(tertiary)
  @secondary_color = rgba_color(tertiary, 0.1)
end
|
|
|
|
|
|
2018-05-03 09:41:41 -04:00
|
|
|
|
# Computes the cache key for a report, built from every attribute that
# can change its output plus SCHEMA_VERSION (so bumping the version
# invalidates all cached payloads). Nil parts are dropped.
def self.cache_key(report)
  parts = [
    report.type,
    report.category_id,
    report.start_date.to_date.strftime("%Y%m%d"),
    report.end_date.to_date.strftime("%Y%m%d"),
    report.group_id,
    report.filter,
    report.facets,
    report.limit,
    SCHEMA_VERSION,
  ]
  (+"reports:") << parts.compact.map(&:to_s).join(':')
end
|
|
|
|
|
|
2018-12-14 17:14:46 -05:00
|
|
|
|
# Removes cached report payloads from the cache store.
# When `type` is given only that report's keys are purged; otherwise
# every "reports:*" entry is removed.
def self.clear_cache(type = nil)
  pattern = type ? "reports:#{type}:*" : "reports:*"

  Discourse.cache.keys(pattern).each { |key| Discourse.cache.redis.del(key) }
end
|
|
|
|
|
|
2018-07-19 14:33:11 -04:00
|
|
|
|
# Runs the given block inside a transaction with a LOCAL statement
# timeout, so a slow report query cannot tie up the database server.
#
# @param timeout [Integer] statement timeout in milliseconds
def self.wrap_slow_query(timeout = 20000)
  ActiveRecord::Base.connection.transaction do
    # Set a statement timeout so we can't tie up the server.
    # `to_i` guards the SQL interpolation against non-integer input.
    DB.exec "SET LOCAL statement_timeout = #{timeout.to_i}"
    yield
  end
end
|
|
|
|
|
|
|
|
|
|
# Start of the comparison ("previous") period: shifts the current
# window back by its own length so both periods have equal duration.
def prev_start_date
  window = self.end_date - self.start_date
  self.start_date - window
end
|
|
|
|
|
|
|
|
|
|
# End of the comparison ("previous") period: it abuts the current
# window, ending exactly where the current period starts.
def prev_end_date
  self.start_date
end
|
|
|
|
|
|
FEATURE: Exposing a way to add a generic report filter (#6816)
* FEATURE: Exposing a way to add a generic report filter
## Why do we need this change?
Part of the work discussed [here](https://meta.discourse.org/t/gain-understanding-of-file-uploads-usage/104994), and implemented a first spike [here](https://github.com/discourse/discourse/pull/6809), I am trying to expose a single generic filter selector per report.
## How does this work?
We basically expose a simple, single generic filter that is computed and displayed based on backend values passed into the report.
This would be a simple contract between the frontend and the backend.
**Backend changes:** we simply need to return a list of dropdown / select options, and enable the report's newly introduced `custom_filtering` property.
For example, for our [Top Uploads](https://github.com/discourse/discourse/pull/6809/files#diff-3f97cbb8726f3310e0b0c386dbe89e22R1423) report, it can look like this on the backend:
```ruby
report.custom_filtering = true
report.custom_filter_options = [{ id: "any", name: "Any" }, { id: "jpg", name: "JPEG" } ]
```
In our javascript report HTTP call, it will look like:
```js
{
"custom_filtering": true,
"custom_filter_options": [
{
"id": "any",
"name": "Any"
},
{
"id": "jpg",
"name": "JPG"
}
]
}
```
**Frontend changes:** We introduced a generic `filter` param and a `combo-box` which hooks up into the existing framework for fetching a report.
This works alright, with the limitation of being a single custom filter per report. If we wanted to add, for an instance a `filesize filter`, this will not work for us. _I went through with this approach because it is hard to predict and build abstractions for requirements or problems we don't have yet, or might not have._
## How does it look like?
![a1ktg1odde](https://user-images.githubusercontent.com/45508821/50485875-f17edb80-09ee-11e9-92dd-1454ab041fbb.gif)
## More on the bigger picture
The major concern here I have is the solution I introduced might serve the `think small` version of the reporting work, but I don't think it serves the `think big`, I will try to shed some light into why.
Within the current design, It is hard to maintain QueryParams for dynamically generated params (based on the idea of introducing more than one custom filter per report).
To allow ourselves to have more than one generic filter, we will need to:
a. Use the Route's model to retrieve the report's payload (we are now dependent on changes of the QueryParams via computed properties)
b. After retrieving the payload, we can use the `setupController` to define our dynamic QueryParams based on the custom filters definitions we received from the backend
c. Load a custom filter specific Ember component based on the definitions we received from the backend
2019-03-15 08:15:38 -04:00
|
|
|
|
# Parses the serialized filter string ("[k1=v1&k2=v2]") into a Hash of
# string keys/values. Returns an empty Hash when no filter is set.
def filter_values
  return {} unless self.filter.present?

  self.filter
    .delete_prefix("[")
    .delete_suffix("]")
    .split("&")
    .map { |pair| pair.split("=") }
    .to_h
end
|
|
|
|
|
|
2017-07-27 21:20:09 -04:00
|
|
|
|
# Serializes the report for the admin dashboard JSON payload.
# Always-present keys go in the literal hash; optional keys (icon,
# error, total, ...) are only added when they have a truthy value.
def as_json(options = nil)
  description = I18n.t("reports.#{type}.description", default: "")

  {
    type: type,
    title: I18n.t("reports.#{type}.title", default: nil),
    xaxis: I18n.t("reports.#{type}.xaxis", default: nil),
    yaxis: I18n.t("reports.#{type}.yaxis", default: nil),
    # presence returns the string when non-blank, nil otherwise —
    # equivalent to the previous `description.presence ? description : nil`.
    description: description.presence,
    data: data,
    start_date: start_date&.iso8601,
    end_date: end_date&.iso8601,
    prev_data: self.prev_data,
    prev_start_date: prev_start_date&.iso8601,
    prev_end_date: prev_end_date&.iso8601,
    category_id: category_id,
    group_id: group_id,
    filter: self.filter,
    prev30Days: self.prev30Days,
    dates_filtering: self.dates_filtering,
    report_key: Report.cache_key(self),
    primary_color: self.primary_color,
    secondary_color: self.secondary_color,
    # Default axis labels (day/count) unless the report set its own.
    labels: labels || [
      {
        type: :date,
        property: :x,
        title: I18n.t("reports.default.labels.day")
      },
      {
        type: :number,
        property: :y,
        title: I18n.t("reports.default.labels.count")
      },
    ],
    processing: self.processing,
    average: self.average,
    percent: self.percent,
    higher_is_better: self.higher_is_better,
    category_filtering: self.category_filtering,
    group_filtering: self.group_filtering,
    filter_options: self.filter_options,
    modes: self.modes,
  }.tap do |json|
    json[:icon] = self.icon if self.icon
    json[:error] = self.error if self.error
    json[:total] = self.total if self.total
    json[:prev_period] = self.prev_period if self.prev_period
    # NOTE: prev30Days is already in the hash above; this re-set when
    # truthy is kept for backward compatibility with the original shape.
    json[:prev30Days] = self.prev30Days if self.prev30Days
    json[:limit] = self.limit if self.limit

    # The crawler page-view report embeds the web crawlers report so the
    # dashboard can link the two.
    if type == 'page_view_crawler_reqs'
      json[:related_report] = Report.find('web_crawlers', start_date: start_date, end_date: end_date)&.as_json
    end
  end
end
|
|
|
|
|
|
2015-06-24 20:42:08 -04:00
|
|
|
|
# Registers a report builder (used by plugins): defines a class method
# named `report_<name>` whose body is the given block. Report.find
# dispatches to these methods by type.
def self.add_report(name, &block)
  # define_singleton_method is the direct equivalent of the previous
  # singleton_class.instance_eval { define_method(...) } dance.
  define_singleton_method("report_#{name}", &block)
end
|
|
|
|
|
|
2018-05-16 02:05:03 -04:00
|
|
|
|
# Instantiates a Report of the given type and applies any recognized
# options. Does not compute data — see Report.find for that.
def self._get(type, opts = nil)
  opts ||= {}

  # Load the report
  report = Report.new(type)

  # Optional attributes: only assigned when a truthy value was supplied,
  # so report defaults are preserved otherwise (all are plain accessors).
  %i[
    start_date end_date category_id group_id filter
    limit average percent higher_is_better
  ].each do |attribute|
    value = opts[attribute]
    report.public_send("#{attribute}=", value) if value
  end

  report.facets = opts[:facets] || [:total, :prev30Days]
  report.processing = false

  report
end
|
|
|
|
|
|
|
|
|
|
# Reads a previously cached payload (see Report.cache) for the report
# described by type/opts; returns nil on cache miss. A report object is
# built only to compute its cache key.
def self.find_cached(type, opts = nil)
  Discourse.cache.read(cache_key(_get(type, opts)))
end
|
|
|
|
|
|
|
|
|
|
# Caches the serialized report payload (not the Report object itself)
# under its computed cache key for `duration`.
def self.cache(report, duration)
  Discourse.cache.write(cache_key(report), report.as_json, force: true, expires_in: duration)
end
|
|
|
|
|
|
|
|
|
|
# Builds the report object for `type` and runs its computation.
#
# Dispatch order: a `report_<type>` class method (including ones added
# by plugins via Report.add_report), then the `req_report` fallback for
# "*_reqs" types; returns nil for unknown types.
#
# Failure handling: query timeouts flag the report with error = :timeout;
# any other exception flags it with error = :exception instead of
# raising (except in test mode), so a broken report cannot take down the
# whole dashboard.
def self.find(type, opts = nil)
  opts ||= {}

  begin
    report = _get(type, opts)
    report_method = :"report_#{type}"

    begin
      wrap_slow_query do
        if respond_to?(report_method)
          send(report_method, report)
        elsif type =~ /_reqs$/
          req_report(report, type.split(/_reqs$/)[0].to_sym)
        else
          return nil
        end
      end
    rescue ActiveRecord::QueryCanceled, PG::QueryCanceled => e
      report.error = :timeout
    end
  # NOTE: rescuing Exception (not StandardError) is deliberate here —
  # see the plugin-failure comment below.
  rescue Exception => e
    # In test mode, don't swallow exceptions by default to help debug errors.
    raise if Rails.env.test? && !opts[:wrap_exceptions_in_test]

    # ensures that if anything unexpected prevents us from
    # creating a report object we fail elegantly and log an error
    if !report
      Rails.logger.error("Couldn’t create report `#{type}`: <#{e.class} #{e.message}>")
      return nil
    end

    report.error = :exception

    # given reports can be added by plugins we don’t want dashboard failures
    # on report computation, however we do want to log which report is provoking
    # an error
    Rails.logger.error("Error while computing report `#{report.type}`: #{e.message}\n#{e.backtrace.join("\n")}")
  end

  report
end
|
|
|
|
|
|
2019-01-21 09:17:04 -05:00
|
|
|
|
# Stacked chart of page views split into logged-in, anonymous and crawler
# traffic: one series per request filter, colored from the color scheme.
def self.report_consolidated_page_views(report)
  filters = %w[
    page_view_logged_in
    page_view_anon
    page_view_crawler
  ]

  report.modes = [:stacked_chart]

  tertiary = ColorScheme.hex_for_name('tertiary') || '0088cc'
  danger = ColorScheme.hex_for_name('danger') || 'e45735'

  report.data = filters.map do |filter|
    # series color: anon is a faded tertiary, crawlers use the danger hue
    color =
      case filter
      when "page_view_anon" then report.rgba_color(tertiary, 0.5)
      when "page_view_crawler" then report.rgba_color(danger, 0.75)
      else report.rgba_color(tertiary)
      end

    daily_counts = ApplicationRequest
      .where(req_type: ApplicationRequest.req_types[filter])
      .where('date >= ? AND date <= ?', report.start_date, report.end_date)
      .order(date: :asc)
      .group(:date)
      .sum(:count)

    {
      req: filter,
      label: I18n.t("reports.consolidated_page_views.xaxis.#{filter}"),
      color: color,
      data: daily_counts.map { |date, count| { x: date, y: count } }
    }
  end
end
|
|
|
|
|
|
2017-07-27 21:20:09 -04:00
|
|
|
|
# Generic ApplicationRequest-backed report for a single request type, or
# for the non-mobile page-view aggregate when `filter` is :page_view_total.
def self.req_report(report, filter = nil)
  if filter == :page_view_total
    report.icon = 'file'
    # every non-mobile page_view req type rolled into a single series
    page_view_types = ApplicationRequest.req_types
      .reject { |name, _| name =~ /mobile/ }
      .map { |name, id| id if name =~ /page_view/ }
      .compact
    scoped = ApplicationRequest.where(req_type: page_view_types)
  else
    scoped = ApplicationRequest.where(req_type: ApplicationRequest.req_types[filter])
  end

  report.data = scoped
    .where('date >= ? AND date <= ?', report.start_date, report.end_date)
    .order(date: :asc)
    .group(:date)
    .sum(:count)
    .map { |date, count| { x: date, y: count } }

  report.total = scoped.sum(:count)

  report.prev30Days = scoped.where(
    'date >= ? AND date < ?',
    (report.start_date - 31.days), report.start_date
  ).sum(:count)
end
|
|
|
|
|
|
2013-02-27 22:39:42 -05:00
|
|
|
|
# Daily user visits, optionally restricted to a group.
def self.report_visits(report)
  report.group_filtering = true
  report.icon = 'user'

  basic_report_about(report, UserVisit, :by_day, report.start_date, report.end_date, report.group_id)
  add_counts(report, UserVisit, 'visited_at')

  window_start = report.start_date - 30.days
  report.prev30Days = UserVisit.where("visited_at >= ? and visited_at < ?", window_start, report.start_date).count
end
|
|
|
|
|
|
2015-07-07 12:31:07 -04:00
|
|
|
|
# Daily visits made from mobile devices.
def self.report_mobile_visits(report)
  basic_report_about(report, UserVisit, :mobile_by_day, report.start_date, report.end_date)

  mobile_visits = UserVisit.where(mobile: true)
  report.total = mobile_visits.count
  report.prev30Days = mobile_visits
    .where("visited_at >= ? and visited_at < ?", report.start_date - 30.days, report.start_date)
    .count
end
|
|
|
|
|
|
2013-03-07 11:07:59 -05:00
|
|
|
|
# Daily new-user signups, optionally restricted to a group.
def self.report_signups(report)
  report.group_filtering = true
  report.icon = 'user-plus'

  if report.group_id
    # group-scoped series needs the explicit window plus facet counters
    basic_report_about(report, User.real, :count_by_signup_date, report.start_date, report.end_date, report.group_id)
    add_counts(report, User.real, 'users.created_at')
  else
    report_about(report, User.real, :count_by_signup_date)
  end

  # add_prev_data report, User.real, :count_by_signup_date, report.prev_start_date, report.prev_end_date
end
|
|
|
|
|
|
2018-04-26 08:49:41 -04:00
|
|
|
|
# Users who made their first post each day within the report window.
def self.report_new_contributors(report)
  counts = User.real.count_by_first_post(report.start_date, report.end_date)

  if report.facets.include?(:prev30Days)
    report.prev30Days = User.real
      .count_by_first_post(report.start_date - 30.days, report.start_date)
      .sum { |_, v| v }
  end

  report.total = User.real.count_by_first_post if report.facets.include?(:total)

  if report.facets.include?(:prev_period)
    prev_period_data = User.real.count_by_first_post(report.prev_start_date, report.prev_end_date)
    report.prev_period = prev_period_data.sum { |_, v| v }
    # report.prev_data = prev_period_data.map { |k, v| { x: k, y: v } }
  end

  report.data = counts.map { |key, value| { x: key, y: value } }
end
|
|
|
|
|
|
|
|
|
|
# Count of users with at least one engagement action per day.
def self.report_daily_engaged_users(report)
  report.average = true

  counts = UserAction.count_daily_engaged_users(report.start_date, report.end_date)

  if report.facets.include?(:prev30Days)
    report.prev30Days = UserAction
      .count_daily_engaged_users(report.start_date - 30.days, report.start_date)
      .sum { |_, v| v }
  end

  report.total = UserAction.count_daily_engaged_users if report.facets.include?(:total)

  if report.facets.include?(:prev_period)
    prev = UserAction
      .count_daily_engaged_users(report.prev_start_date, report.prev_end_date)
      .sum { |_, v| v }
    # average the previous-period total over the window length in days
    prev = prev / ((report.end_date - report.start_date) / 1.day) if prev > 0
    report.prev_period = prev
  end

  report.data = counts.map { |key, value| { x: key, y: value } }
end
|
|
|
|
|
|
|
|
|
|
# Daily active users as a percentage of monthly active users (DAU/MAU).
#
# Fix: the `dau_avg` proc previously assigned to `data_points`, silently
# clobbering the enclosing method's local of the same name (procs close
# over locals). Harmless today only because `data_points` is not read
# after the proc runs — renamed to remove the latent shadowing bug.
def self.report_dau_by_mau(report)
  report.labels = [
    {
      type: :date,
      property: :x,
      title: I18n.t("reports.default.labels.day")
    },
    {
      type: :percent,
      property: :y,
      title: I18n.t("reports.default.labels.percent")
    },
  ]

  report.average = true
  report.percent = true

  data_points = UserVisit.count_by_active_users(report.start_date, report.end_date)

  report.data = []

  # percentage of monthly actives active on that day; 0 guards mau == 0
  compute_dau_by_mau = Proc.new { |data_point|
    if data_point["mau"] == 0
      0
    else
      ((data_point["dau"].to_f / data_point["mau"].to_f) * 100).ceil(2)
    end
  }

  # average DAU/MAU over an arbitrary window; nil when the window is empty
  dau_avg = Proc.new { |start_date, end_date|
    window_points = UserVisit.count_by_active_users(start_date, end_date)
    if !window_points.empty?
      sum = window_points.sum { |data_point| compute_dau_by_mau.call(data_point) }
      (sum.to_f / window_points.count.to_f).ceil(2)
    end
  }

  data_points.each do |data_point|
    report.data << { x: data_point["date"], y: compute_dau_by_mau.call(data_point) }
  end

  if report.facets.include?(:prev_period)
    report.prev_period = dau_avg.call(report.prev_start_date, report.prev_end_date)
  end

  if report.facets.include?(:prev30Days)
    report.prev30Days = dau_avg.call(report.start_date - 30.days, report.start_date)
  end
end
|
|
|
|
|
|
2015-09-14 13:30:06 -04:00
|
|
|
|
# Daily profile views, optionally restricted to a group.
def self.report_profile_views(report)
  report.group_filtering = true
  start_date = report.start_date
  end_date = report.end_date
  basic_report_about report, UserProfileView, :profile_views_by_day, start_date, end_date, report.group_id

  # all-time total comes from the denormalized counter on user profiles
  report.total = UserProfile.sum(:views)
  # NOTE(review): the upper bound `start_date + 1` includes the first day
  # of the current window; sibling reports use `< start_date` — confirm
  # whether the overlap is intentional.
  report.prev30Days = UserProfileView.where("viewed_at >= ? AND viewed_at < ?", start_date - 30.days, start_date + 1).count
end
|
|
|
|
|
|
2013-03-07 11:07:59 -05:00
|
|
|
|
# Daily listable topic creations, optionally filtered by category.
def self.report_topics(report)
  report.category_filtering = true
  basic_report_about(report, Topic, :listable_count_per_day, report.start_date, report.end_date, report.category_id)

  countable = Topic.listable_topics
  if report.category_id
    countable = countable.in_category_and_subcategories(report.category_id)
  end
  add_counts(report, countable, 'topics.created_at')
end
|
|
|
|
|
|
2013-03-07 11:07:59 -05:00
|
|
|
|
# Daily public regular-post creations, optionally filtered by category.
def self.report_posts(report)
  report.modes = [:table, :chart]
  report.category_filtering = true
  basic_report_about(report, Post, :public_posts_count_per_day, report.start_date, report.end_date, report.category_id)

  countable = Post.public_posts.where(post_type: Post.types[:regular])
  countable = countable.joins(:topic).merge(Topic.in_category_and_subcategories(report.category_id)) if report.category_id
  add_counts(report, countable, 'posts.created_at')
end
|
|
|
|
|
|
2015-06-22 13:46:51 -04:00
|
|
|
|
# Average hours until a topic received its first response, per day.
# Lower is better, hence higher_is_better = false.
def self.report_time_to_first_response(report)
  report.category_filtering = true
  report.icon = 'reply'
  report.higher_is_better = false

  rows = Topic.time_to_first_response_per_day(report.start_date, report.end_date, category_id: report.category_id)
  report.data = rows.map { |r| { x: r["date"], y: r["hours"].to_f.round(2) } }

  report.total = Topic.time_to_first_response_total(category_id: report.category_id)
  report.prev30Days = Topic.time_to_first_response_total(start_date: report.start_date - 30.days, end_date: report.start_date, category_id: report.category_id)
end
|
|
|
|
|
|
|
|
|
|
# Daily count of topics that never received a response.
def self.report_topics_with_no_response(report)
  report.category_filtering = true

  rows = Topic.with_no_response_per_day(report.start_date, report.end_date, report.category_id)
  report.data = rows.map { |r| { x: r["date"], y: r["count"].to_i } }

  report.total = Topic.with_no_response_total(category_id: report.category_id)
  report.prev30Days = Topic.with_no_response_total(start_date: report.start_date - 30.days, end_date: report.start_date, category_id: report.category_id)
end
|
|
|
|
|
|
2013-04-01 09:21:34 -04:00
|
|
|
|
# Daily count of outbound emails (EmailLog rows).
def self.report_emails(report)
  report_about report, EmailLog
end
|
|
|
|
|
|
|
|
|
|
# Standard report shape: daily series over the report window plus the
# total / prev_period / prev30Days facet counters for `subject_class`.
def self.report_about(report, subject_class, report_method = :count_per_day)
  basic_report_about report, subject_class, report_method, report.start_date, report.end_date
  add_counts report, subject_class
end
|
|
|
|
|
|
2013-04-16 16:56:18 -04:00
|
|
|
|
# Fills report.data with { x: date, y: count } points produced by
# `subject_class.public_send-style dispatch of report_method(*args)`.
def self.basic_report_about(report, subject_class, report_method, *args)
  report.data = subject_class
    .send(report_method, *args)
    .map { |date, count| { x: date, y: count } }
end
|
|
|
|
|
|
2018-07-19 14:33:11 -04:00
|
|
|
|
# Populates report.prev_data for chart reports that asked for the
# :prev_period facet; no-op otherwise.
def self.add_prev_data(report, subject_class, report_method, *args)
  return unless report.modes.include?(:chart) && report.facets.include?(:prev_period)

  prev_data = subject_class.send(report_method, *args)
  report.prev_data = prev_data.map { |k, v| { x: k, y: v } }
end
|
|
|
|
|
|
2014-12-30 09:06:15 -05:00
|
|
|
|
# Fills the total / prev_period / prev30Days facet counters for
# `subject_class`, windowing on `query_column`.
def self.add_counts(report, subject_class, query_column = 'created_at')
  facets = report.facets

  if facets.include?(:prev_period)
    report.prev_period = subject_class
      .where("#{query_column} >= ? and #{query_column} < ?",
        report.prev_start_date,
        report.prev_end_date)
      .count
  end

  report.total = subject_class.count if facets.include?(:total)

  if facets.include?(:prev30Days)
    report.prev30Days = subject_class
      .where("#{query_column} >= ? and #{query_column} < ?",
        report.start_date - 30.days,
        report.start_date)
      .count
  end
end
|
|
|
|
|
|
2013-04-18 14:27:22 -04:00
|
|
|
|
# Table of real-user counts per trust level, each row linking to the
# matching admin user list.
def self.report_users_by_trust_level(report)
  report.data = []

  report.modes = [:table]
  report.dates_filtering = false

  report.labels = [
    {
      property: :key,
      title: I18n.t("reports.users_by_trust_level.labels.level")
    },
    {
      property: :y,
      type: :number,
      title: I18n.t("reports.default.labels.count")
    }
  ]

  # hoisted out of the loop — the proc never changes between iterations
  url = Proc.new { |k| "/admin/users/list/#{k}" }

  User.real.group('trust_level').count.sort.each do |level, count|
    key = TrustLevel.levels[level.to_i]
    report.data << { url: url.call(key), key: key, x: level.to_i, y: count }
  end
end
|
|
|
|
|
|
|
|
|
|
# Post action counts:
|
2013-03-12 14:19:01 -04:00
|
|
|
|
# Daily flag counts (excluding custom flag types); fewer flags is better.
def self.report_flags(report)
  report.category_filtering = true
  report.icon = 'flag'
  report.higher_is_better = false

  basic_report_about(report, PostAction, :flag_count_by_date, report.start_date, report.end_date, report.category_id)

  countable = PostAction.where(post_action_type_id: PostActionType.flag_types_without_custom.values)
  if report.category_id
    countable = countable.joins(post: :topic).merge(Topic.in_category_and_subcategories(report.category_id))
  end
  add_counts(report, countable, 'post_actions.created_at')
end
|
|
|
|
|
|
2013-04-18 14:27:22 -04:00
|
|
|
|
# Daily like counts, optionally filtered by category.
def self.report_likes(report)
  report.category_filtering = true
  report.icon = 'heart'
  post_action_report report, PostActionType.types[:like]
end
|
|
|
|
|
|
2013-04-18 14:27:22 -04:00
|
|
|
|
# Daily bookmark counts, optionally filtered by category.
def self.report_bookmarks(report)
  report.category_filtering = true
  report.icon = 'bookmark'
  post_action_report report, PostActionType.types[:bookmark]
end
|
|
|
|
|
|
|
|
|
|
# Shared implementation for like/bookmark style reports: daily series for
# one post-action type plus the facet counters.
def self.post_action_report(report, post_action_type)
  daily = PostAction.count_per_day_for_type(post_action_type, category_id: report.category_id, start_date: report.start_date, end_date: report.end_date)
  report.data = daily.map { |date, count| { x: date, y: count } }

  # unscoped so default scopes don't hide actions from the counters
  countable = PostAction.unscoped.where(post_action_type_id: post_action_type)
  countable = countable.joins(post: :topic).merge(Topic.in_category_and_subcategories(report.category_id)) if report.category_id
  add_counts(report, countable, 'post_actions.created_at')
end
|
2013-04-16 16:56:18 -04:00
|
|
|
|
|
|
|
|
|
# Shared implementation for per-subtype private-message reports.
# `topics.user_id > 0` excludes system-generated topics from the counts.
def self.private_messages_report(report, topic_subtype)
  report.icon = 'envelope'

  chart_subject = Topic.where('topics.user_id > 0')
  basic_report_about(report, chart_subject, :private_message_topics_count_per_day, report.start_date, report.end_date, topic_subtype)

  count_subject = Topic.private_messages.where('topics.user_id > 0').with_subtype(topic_subtype)
  add_counts(report, count_subject, 'topics.created_at')
end
|
|
|
|
|
|
|
|
|
|
# PM topics sent from one user to another (excludes system messages).
def self.report_user_to_user_private_messages(report)
  report.icon = 'envelope'
  private_messages_report report, TopicSubtype.user_to_user
end
|
|
|
|
|
|
2018-03-27 04:30:08 -04:00
|
|
|
|
# User-to-user PM posts (counts replies too, not just topic creations).
# `posts.user_id > 0` excludes system-generated posts.
def self.report_user_to_user_private_messages_with_replies(report)
  report.icon = 'envelope'
  subtype = TopicSubtype.user_to_user

  chart_subject = Post.where('posts.user_id > 0')
  basic_report_about(report, chart_subject, :private_messages_count_per_day, report.start_date, report.end_date, subtype)

  count_subject = Post.private_posts.where('posts.user_id > 0').with_topic_subtype(subtype)
  add_counts(report, count_subject, 'posts.created_at')
end
|
|
|
|
|
|
2013-04-16 16:56:18 -04:00
|
|
|
|
# PM topics generated by the system (welcome messages, etc.).
def self.report_system_private_messages(report)
  report.icon = 'envelope'
  private_messages_report report, TopicSubtype.system_message
end
|
|
|
|
|
|
|
|
|
|
# PM topics that are official moderator warnings.
def self.report_moderator_warning_private_messages(report)
  report.icon = 'envelope'
  private_messages_report report, TopicSubtype.moderator_warning
end
|
|
|
|
|
|
|
|
|
|
# PM topics created via the "message the moderators" flag option.
def self.report_notify_moderators_private_messages(report)
  report.icon = 'envelope'
  private_messages_report report, TopicSubtype.notify_moderators
end
|
|
|
|
|
|
|
|
|
|
# PM topics created via the "message the user" flag option.
def self.report_notify_user_private_messages(report)
  report.icon = 'envelope'
  private_messages_report report, TopicSubtype.notify_user
end
|
2018-03-15 17:10:45 -04:00
|
|
|
|
|
|
|
|
|
# Table of the top crawler user agents by page views in the window.
def self.report_web_crawlers(report)
  report.labels = [
    {
      type: :string,
      property: :user_agent,
      title: I18n.t("reports.web_crawlers.labels.user_agent")
    },
    {
      property: :count,
      type: :number,
      title: I18n.t("reports.web_crawlers.labels.page_views")
    }
  ]
  report.modes = [:table]

  # top 200 user agents, highest page-view sums first
  sums = WebCrawlerRequest
    .where('date >= ? and date <= ?', report.start_date, report.end_date)
    .limit(200)
    .order('sum_count DESC')
    .group(:user_agent)
    .sum(:count)
  report.data = sums.map { |ua, count| { user_agent: ua, count: count } }
end
|
2018-04-16 07:03:43 -04:00
|
|
|
|
|
2018-04-18 15:30:41 -04:00
|
|
|
|
# Table of real-user counts per staff/restricted bucket (admins,
# moderators, suspended, silenced); empty buckets are omitted.
def self.report_users_by_type(report)
  report.data = []

  report.modes = [:table]
  report.dates_filtering = false

  report.labels = [
    {
      property: :x,
      title: I18n.t("reports.users_by_type.labels.type")
    },
    {
      property: :y,
      type: :number,
      title: I18n.t("reports.default.labels.count")
    }
  ]

  label = Proc.new { |x| I18n.t("reports.users_by_type.xaxis_labels.#{x}") }
  url = Proc.new { |key| "/admin/users/list/#{key}" }

  # one row per bucket, skipped entirely when the bucket is empty
  add_row = Proc.new { |key, icon, label_key, count|
    report.data << { url: url.call(key), icon: icon, key: key, x: label.call(label_key), y: count } if count > 0
  }

  add_row.call("admins", "shield-alt", "admin", User.real.admins.count)
  add_row.call("moderators", "shield-alt", "moderator", User.real.moderators.count)
  add_row.call("suspended", "ban", "suspended", User.real.suspended.count)
  add_row.call("silenced", "ban", "silenced", User.real.silenced.count)
end
|
2018-04-19 12:19:21 -04:00
|
|
|
|
|
2018-05-15 01:08:23 -04:00
|
|
|
|
# Table of the topics that received the most incoming link clicks in the
# window, delegating the heavy lifting to IncomingLinksReport.
#
# Fix: removed the dead `result = nil` pre-assignment — the local was
# always overwritten on the very next line.
def self.report_top_referred_topics(report)
  report.category_filtering = true
  report.modes = [:table]

  report.labels = [
    {
      type: :topic,
      properties: {
        title: :topic_title,
        id: :topic_id
      },
      title: I18n.t("reports.top_referred_topics.labels.topic")
    },
    {
      property: :num_clicks,
      type: :number,
      title: I18n.t("reports.top_referred_topics.labels.num_clicks")
    }
  ]

  options = {
    end_date: report.end_date,
    start_date: report.start_date,
    limit: report.limit || 8, # default row count when the caller sets none
    category_id: report.category_id
  }

  result = IncomingLinksReport.find(:top_referred_topics, options)
  report.data = result.data
end
|
|
|
|
|
|
|
|
|
|
# Table of the referring domains driving the most clicks and topics.
def self.report_top_traffic_sources(report)
  report.category_filtering = true
  report.modes = [:table]

  report.labels = [
    {
      property: :domain,
      title: I18n.t("reports.top_traffic_sources.labels.domain")
    },
    {
      property: :num_clicks,
      type: :number,
      title: I18n.t("reports.top_traffic_sources.labels.num_clicks")
    },
    {
      property: :num_topics,
      type: :number,
      title: I18n.t("reports.top_traffic_sources.labels.num_topics")
    }
  ]

  options = {
    end_date: report.end_date,
    start_date: report.start_date,
    limit: report.limit || 8,
    category_id: report.category_id
  }

  report.data = IncomingLinksReport.find(:top_traffic_sources, options).data
end
|
|
|
|
|
|
2018-11-19 06:20:05 -05:00
|
|
|
|
# Table of the users whose shared links drove the most clicks and topics.
def self.report_top_referrers(report)
  report.modes = [:table]

  report.labels = [
    {
      type: :user,
      properties: {
        username: :username,
        id: :user_id,
        avatar: :user_avatar_template,
      },
      title: I18n.t("reports.top_referrers.labels.user")
    },
    {
      property: :num_clicks,
      type: :number,
      title: I18n.t("reports.top_referrers.labels.num_clicks")
    },
    {
      property: :num_topics,
      type: :number,
      title: I18n.t("reports.top_referrers.labels.num_topics")
    }
  ]

  options = {
    end_date: report.end_date,
    start_date: report.start_date,
    limit: report.limit || 8
  }

  report.data = IncomingLinksReport.find(:top_referrers, options).data
end
|
|
|
|
|
|
2018-04-19 12:19:21 -04:00
|
|
|
|
# Table of the most popular search terms with their click-through rate.
def self.report_trending_search(report)
  report.labels = [
    {
      property: :term,
      type: :text,
      title: I18n.t("reports.trending_search.labels.term")
    },
    {
      property: :searches,
      type: :number,
      title: I18n.t("reports.trending_search.labels.searches")
    },
    {
      type: :percent,
      property: :ctr,
      title: I18n.t("reports.trending_search.labels.click_through")
    }
  ]

  report.modes = [:table]

  trends = SearchLog.trending_from(report.start_date,
    end_date: report.end_date,
    limit: report.limit
  )

  report.data = trends.map do |trend|
    {
      term: trend.term,
      searches: trend.searches,
      ctr: trend.ctr
    }
  end
end
|
|
|
|
|
|
|
|
|
|
# Per-moderator activity summary over the reporting period: flags handled,
# reading time, topics/posts/PMs created, and revisions made to other
# people's posts. One table row per moderator (ordered by username).
def self.report_moderators_activity(report)
  report.labels = [
    {
      type: :user,
      properties: {
        username: :username,
        id: :user_id,
        avatar: :user_avatar_template,
      },
      title: I18n.t("reports.moderators_activity.labels.moderator"),
    },
    {
      property: :flag_count,
      type: :number,
      title: I18n.t("reports.moderators_activity.labels.flag_count")
    },
    {
      type: :seconds,
      property: :time_read,
      title: I18n.t("reports.moderators_activity.labels.time_read")
    },
    {
      property: :topic_count,
      type: :number,
      title: I18n.t("reports.moderators_activity.labels.topic_count")
    },
    {
      property: :pm_count,
      type: :number,
      title: I18n.t("reports.moderators_activity.labels.pm_count")
    },
    {
      property: :post_count,
      type: :number,
      title: I18n.t("reports.moderators_activity.labels.post_count")
    },
    {
      property: :revision_count,
      type: :number,
      title: I18n.t("reports.moderators_activity.labels.revision_count")
    }
  ]

  report.modes = [:table]
  report.data = []

  # One CTE per metric, each keyed by user_id and restricted to the `mods`
  # CTE (all moderator accounts except the system users, id > 0). The final
  # SELECT LEFT JOINs every metric onto `mods` so moderators with no
  # activity still appear (with NULL counts).
  # NOTE(review): report.start_date / report.end_date are interpolated
  # directly into the SQL — presumably always Time objects from the report
  # framework, not user-controlled strings; verify against callers.
  query = <<~SQL
    WITH mods AS (
    SELECT
    id AS user_id,
    username_lower AS username,
    uploaded_avatar_id
    FROM users u
    WHERE u.moderator = 'true'
    AND u.id > 0
    ),
    time_read AS (
    SELECT SUM(uv.time_read) AS time_read,
    uv.user_id
    FROM mods m
    JOIN user_visits uv
    ON m.user_id = uv.user_id
    WHERE uv.visited_at >= '#{report.start_date}'
    AND uv.visited_at <= '#{report.end_date}'
    GROUP BY uv.user_id
    ),
    flag_count AS (
        WITH period_actions AS (
        SELECT agreed_by_id,
        disagreed_by_id
        FROM post_actions
        WHERE post_action_type_id IN (#{PostActionType.flag_types_without_custom.values.join(',')})
        AND created_at >= '#{report.start_date}'
        AND created_at <= '#{report.end_date}'
        ),
        agreed_flags AS (
        SELECT pa.agreed_by_id AS user_id,
        COUNT(*) AS flag_count
        FROM mods m
        JOIN period_actions pa
        ON pa.agreed_by_id = m.user_id
        GROUP BY agreed_by_id
        ),
        disagreed_flags AS (
        SELECT pa.disagreed_by_id AS user_id,
        COUNT(*) AS flag_count
        FROM mods m
        JOIN period_actions pa
        ON pa.disagreed_by_id = m.user_id
        GROUP BY disagreed_by_id
        )
    SELECT
    COALESCE(af.user_id, df.user_id) AS user_id,
    COALESCE(af.flag_count, 0) + COALESCE(df.flag_count, 0) AS flag_count
    FROM agreed_flags af
    FULL OUTER JOIN disagreed_flags df
    ON df.user_id = af.user_id
    ),
    revision_count AS (
    SELECT pr.user_id,
    COUNT(*) AS revision_count
    FROM mods m
    JOIN post_revisions pr
    ON pr.user_id = m.user_id
    JOIN posts p
    ON p.id = pr.post_id
    WHERE pr.created_at >= '#{report.start_date}'
    AND pr.created_at <= '#{report.end_date}'
    AND p.user_id <> pr.user_id
    GROUP BY pr.user_id
    ),
    topic_count AS (
    SELECT t.user_id,
    COUNT(*) AS topic_count
    FROM mods m
    JOIN topics t
    ON t.user_id = m.user_id
    WHERE t.archetype = 'regular'
    AND t.created_at >= '#{report.start_date}'
    AND t.created_at <= '#{report.end_date}'
    GROUP BY t.user_id
    ),
    post_count AS (
    SELECT p.user_id,
    COUNT(*) AS post_count
    FROM mods m
    JOIN posts p
    ON p.user_id = m.user_id
    JOIN topics t
    ON t.id = p.topic_id
    WHERE t.archetype = 'regular'
    AND p.created_at >= '#{report.start_date}'
    AND p.created_at <= '#{report.end_date}'
    GROUP BY p.user_id
    ),
    pm_count AS (
    SELECT p.user_id,
    COUNT(*) AS pm_count
    FROM mods m
    JOIN posts p
    ON p.user_id = m.user_id
    JOIN topics t
    ON t.id = p.topic_id
    WHERE t.archetype = 'private_message'
    AND p.created_at >= '#{report.start_date}'
    AND p.created_at <= '#{report.end_date}'
    GROUP BY p.user_id
    )

    SELECT
    m.user_id,
    m.username,
    m.uploaded_avatar_id,
    tr.time_read,
    fc.flag_count,
    rc.revision_count,
    tc.topic_count,
    pc.post_count,
    pmc.pm_count
    FROM mods m
    LEFT JOIN time_read tr ON tr.user_id = m.user_id
    LEFT JOIN flag_count fc ON fc.user_id = m.user_id
    LEFT JOIN revision_count rc ON rc.user_id = m.user_id
    LEFT JOIN topic_count tc ON tc.user_id = m.user_id
    LEFT JOIN post_count pc ON pc.user_id = m.user_id
    LEFT JOIN pm_count pmc ON pmc.user_id = m.user_id
    ORDER BY m.username
  SQL

  # Flatten each result row into a plain hash for the report payload.
  DB.query(query).each do |row|
    mod = {}
    mod[:username] = row.username
    mod[:user_id] = row.user_id
    mod[:user_avatar_template] = User.avatar_template(row.username, row.uploaded_avatar_id)
    mod[:time_read] = row.time_read
    mod[:flag_count] = row.flag_count
    mod[:revision_count] = row.revision_count
    mod[:topic_count] = row.topic_count
    mod[:post_count] = row.post_count
    mod[:pm_count] = row.pm_count
    report.data << mod
  end
end
|
|
|
|
|
|
|
|
|
|
# Per-flag status table for the period: the flagged post, the staff member
# who handled it, the post author, the flagger, the resolution
# (agreed/disagreed/deferred/no action) and the time to resolution.
def self.report_flags_status(report)
  report.modes = [:table]

  report.labels = [
    {
      type: :post,
      properties: {
        topic_id: :topic_id,
        number: :post_number,
        truncated_raw: :post_type
      },
      title: I18n.t("reports.flags_status.labels.flag")
    },
    {
      type: :user,
      properties: {
        username: :staff_username,
        id: :staff_id,
        avatar: :staff_avatar_template
      },
      title: I18n.t("reports.flags_status.labels.assigned")
    },
    {
      type: :user,
      properties: {
        username: :poster_username,
        id: :poster_id,
        avatar: :poster_avatar_template
      },
      title: I18n.t("reports.flags_status.labels.poster")
    },
    {
      type: :user,
      properties: {
        username: :flagger_username,
        id: :flagger_id,
        avatar: :flagger_avatar_template
      },
      title: I18n.t("reports.flags_status.labels.flagger")
    },
    {
      type: :seconds,
      property: :response_time,
      title: I18n.t("reports.flags_status.labels.time_to_resolution")
    }
  ]

  report.data = []

  # Symbol-name => post_action_type_id mapping; used both to restrict the
  # query and to translate ids back to names when building rows.
  flag_types = PostActionType.flag_types

  # `period_actions` selects the flags in range; the three `*_data` CTEs
  # resolve the post/poster, the flagger, and the responding staff member.
  # FULL OUTER JOINs keep a row even when one of these lookups is missing
  # (e.g. a flag nobody has responded to has no staff_data row).
  sql = <<~SQL
    WITH period_actions AS (
    SELECT id,
    post_action_type_id,
    created_at,
    agreed_at,
    disagreed_at,
    deferred_at,
    agreed_by_id,
    disagreed_by_id,
    deferred_by_id,
    post_id,
    user_id,
    COALESCE(disagreed_at, agreed_at, deferred_at) AS responded_at
    FROM post_actions
    WHERE post_action_type_id IN (#{flag_types.values.join(',')})
    AND created_at >= '#{report.start_date}'
    AND created_at <= '#{report.end_date}'
    ORDER BY created_at DESC
    ),
    poster_data AS (
    SELECT pa.id,
    p.user_id AS poster_id,
    p.topic_id as topic_id,
    p.post_number as post_number,
    u.username_lower AS poster_username,
    u.uploaded_avatar_id AS poster_avatar_id
    FROM period_actions pa
    JOIN posts p
    ON p.id = pa.post_id
    JOIN users u
    ON u.id = p.user_id
    ),
    flagger_data AS (
    SELECT pa.id,
    u.id AS flagger_id,
    u.username_lower AS flagger_username,
    u.uploaded_avatar_id AS flagger_avatar_id
    FROM period_actions pa
    JOIN users u
    ON u.id = pa.user_id
    ),
    staff_data AS (
    SELECT pa.id,
    u.id AS staff_id,
    u.username_lower AS staff_username,
    u.uploaded_avatar_id AS staff_avatar_id
    FROM period_actions pa
    JOIN users u
    ON u.id = COALESCE(pa.agreed_by_id, pa.disagreed_by_id, pa.deferred_by_id)
    )
    SELECT
    sd.staff_username,
    sd.staff_id,
    sd.staff_avatar_id,
    pd.poster_username,
    pd.poster_id,
    pd.poster_avatar_id,
    pd.post_number,
    pd.topic_id,
    fd.flagger_username,
    fd.flagger_id,
    fd.flagger_avatar_id,
    pa.post_action_type_id,
    pa.created_at,
    pa.agreed_at,
    pa.disagreed_at,
    pa.deferred_at,
    pa.agreed_by_id,
    pa.disagreed_by_id,
    pa.deferred_by_id,
    COALESCE(pa.disagreed_at, pa.agreed_at, pa.deferred_at) AS responded_at
    FROM period_actions pa
    FULL OUTER JOIN staff_data sd
    ON sd.id = pa.id
    FULL OUTER JOIN flagger_data fd
    ON fd.id = pa.id
    FULL OUTER JOIN poster_data pd
    ON pd.id = pa.id
  SQL

  DB.query(sql).each do |row|
    data = {}

    # Reverse-map the numeric action type id back to its symbolic name.
    data[:post_type] = flag_types.key(row.post_action_type_id).to_s
    data[:post_number] = row.post_number
    data[:topic_id] = row.topic_id

    # Staff / poster / flagger are each optional (FULL OUTER JOIN above).
    if row.staff_id
      data[:staff_username] = row.staff_username
      data[:staff_id] = row.staff_id
      data[:staff_avatar_template] = User.avatar_template(row.staff_username, row.staff_avatar_id)
    end

    if row.poster_id
      data[:poster_username] = row.poster_username
      data[:poster_id] = row.poster_id
      data[:poster_avatar_template] = User.avatar_template(row.poster_username, row.poster_avatar_id)
    end

    if row.flagger_id
      data[:flagger_id] = row.flagger_id
      data[:flagger_username] = row.flagger_username
      data[:flagger_avatar_template] = User.avatar_template(row.flagger_username, row.flagger_avatar_id)
    end

    # Resolution precedence mirrors responded_at's COALESCE order loosely:
    # agreed wins over disagreed wins over deferred; otherwise no action yet.
    if row.agreed_by_id
      data[:resolution] = I18n.t("reports.flags_status.values.agreed")
    elsif row.disagreed_by_id
      data[:resolution] = I18n.t("reports.flags_status.values.disagreed")
    elsif row.deferred_by_id
      data[:resolution] = I18n.t("reports.flags_status.values.deferred")
    else
      data[:resolution] = I18n.t("reports.flags_status.values.no_action")
    end
    # Seconds between the flag being created and first response; nil while open.
    data[:response_time] = row.responded_at ? row.responded_at - row.created_at : nil
    report.data << data
  end
end
|
|
|
|
|
|
|
|
|
|
# Table of the most recent post revisions in the period (capped at 20):
# the edited post, who edited it, the original author, and the edit reason.
# Supports optional filtering by category (including child categories).
def self.report_post_edits(report)
  report.category_filtering = true
  report.modes = [:table]

  report.labels = [
    {
      type: :post,
      properties: {
        topic_id: :topic_id,
        number: :post_number,
        truncated_raw: :post_raw
      },
      title: I18n.t("reports.post_edits.labels.post")
    },
    {
      type: :user,
      properties: {
        username: :editor_username,
        id: :editor_id,
        avatar: :editor_avatar_template,
      },
      title: I18n.t("reports.post_edits.labels.editor")
    },
    {
      type: :user,
      properties: {
        username: :author_username,
        id: :author_id,
        avatar: :author_avatar_template,
      },
      title: I18n.t("reports.post_edits.labels.author")
    },
    {
      type: :text,
      property: :edit_reason,
      title: I18n.t("reports.post_edits.labels.edit_reason")
    },
  ]

  report.data = []

  # u.id > 0 excludes revisions made by system users. The CTE limits work
  # to the 20 newest revisions in range before joining posts/authors.
  sql = <<~SQL
    WITH period_revisions AS (
    SELECT pr.user_id AS editor_id,
    pr.number AS revision_version,
    pr.created_at,
    pr.post_id,
    u.username AS editor_username,
    u.uploaded_avatar_id as editor_avatar_id
    FROM post_revisions pr
    JOIN users u
    ON u.id = pr.user_id
    WHERE u.id > 0
    AND pr.created_at >= '#{report.start_date}'
    AND pr.created_at <= '#{report.end_date}'
    ORDER BY pr.created_at DESC
    LIMIT 20
    )
    SELECT pr.editor_id,
    pr.editor_username,
    pr.editor_avatar_id,
    p.user_id AS author_id,
    u.username AS author_username,
    u.uploaded_avatar_id AS author_avatar_id,
    pr.revision_version,
    p.version AS post_version,
    pr.post_id,
    left(p.raw, 40) AS post_raw,
    p.topic_id,
    p.post_number,
    p.edit_reason,
    pr.created_at
    FROM period_revisions pr
    JOIN posts p
    ON p.id = pr.post_id
    JOIN users u
    ON u.id = p.user_id
  SQL

  # When a category filter is active, append a topics join plus a WHERE
  # matching the category itself or any of its direct children. The two `?`
  # placeholders are bound positionally below.
  if report.category_id
    sql += <<~SQL
      JOIN topics t
      ON t.id = p.topic_id
      WHERE t.category_id = ? OR t.category_id IN (SELECT id FROM categories WHERE categories.parent_category_id = ?)
    SQL
  end

  result = report.category_id ? DB.query(sql, report.category_id, report.category_id) : DB.query(sql)

  result.each do |r|
    revision = {}
    revision[:editor_id] = r.editor_id
    revision[:editor_username] = r.editor_username
    revision[:editor_avatar_template] = User.avatar_template(r.editor_username, r.editor_avatar_id)
    revision[:author_id] = r.author_id
    revision[:author_username] = r.author_username
    revision[:author_avatar_template] = User.avatar_template(r.author_username, r.author_avatar_id)
    # The stored edit reason applies to the post's latest version only, so
    # show it just when this revision IS the latest one.
    revision[:edit_reason] = r.revision_version == r.post_version ? r.edit_reason : nil
    revision[:created_at] = r.created_at
    revision[:post_raw] = r.post_raw
    revision[:topic_id] = r.topic_id
    revision[:post_number] = r.post_number

    report.data << revision
  end
end
|
2018-08-30 08:56:11 -04:00
|
|
|
|
|
2018-11-12 16:23:10 -05:00
|
|
|
|
def self.report_user_flagging_ratio(report)
  # Ranks users whose flags are disagreed with more often than agreed with,
  # scored so that a higher agreed/disagreed imbalance sorts first.
  # This is an all-time ranking, hence dates filtering is disabled.
  report.data = []
  report.modes = [:table]
  report.dates_filtering = false

  title = ->(key) { I18n.t("reports.user_flagging_ratio.labels.#{key}") }

  user_label = {
    type: :user,
    properties: {
      username: :username,
      id: :user_id,
      avatar: :avatar_template,
    },
    title: title.call("user")
  }

  count_labels = [:disagreed_flags, :agreed_flags, :ignored_flags, :score].map do |prop|
    { type: :number, property: prop, title: title.call(prop.to_s) }
  end

  report.labels = [user_label, *count_labels]

  # Flag counters live on user_stats; the WHERE clause guarantees
  # flags_disagreed > flags_agreed, so the divisor is always positive.
  sql = <<~SQL
    SELECT u.id,
    u.username,
    u.uploaded_avatar_id as avatar_id,
    CASE WHEN u.silenced_till IS NOT NULL THEN 't' ELSE 'f' END as silenced,
    us.flags_disagreed AS disagreed_flags,
    us.flags_agreed AS agreed_flags,
    us.flags_ignored AS ignored_flags,
    ROUND((1-(us.flags_agreed::numeric / us.flags_disagreed::numeric)) *
    (us.flags_disagreed - us.flags_agreed)) AS score
    FROM users AS u
    INNER JOIN user_stats AS us ON us.user_id = u.id
    WHERE u.id <> -1
    AND flags_disagreed > flags_agreed
    ORDER BY score DESC
    LIMIT 100
  SQL

  DB.query(sql).each do |row|
    report.data << {
      user_id: row.id,
      username: row.username,
      avatar_template: User.avatar_template(row.username, row.avatar_id),
      disagreed_flags: row.disagreed_flags,
      ignored_flags: row.ignored_flags,
      agreed_flags: row.agreed_flags,
      score: row.score
    }
  end
end
|
|
|
|
|
|
2018-12-14 07:47:59 -05:00
|
|
|
|
# Table of recent admin logins: who logged in, from where (GeoIP location),
# and when. One row per distinct (client_ip, user) pair, newest first.
def self.report_staff_logins(report)
  report.modes = [:table]

  report.data = []

  report.labels = [
    {
      type: :user,
      properties: {
        username: :username,
        id: :user_id,
        avatar: :avatar_template,
      },
      title: I18n.t("reports.staff_logins.labels.user")
    },
    {
      property: :location,
      title: I18n.t("reports.staff_logins.labels.location")
    },
    {
      property: :created_at,
      type: :precise_date,
      title: I18n.t("reports.staff_logins.labels.login_at")
    }
  ]

  # Hoisted out of the heredoc: interpolating an empty list would have
  # produced invalid SQL (`IN ()`), so bail out early when there are no
  # admins (report.data stays []).
  admin_ids = User.admins.pluck(:id)
  return if admin_ids.empty?

  # DISTINCT ON keeps only the most recent login per (client_ip, user_id)
  # pair; the inner ORDER BY is required for DISTINCT ON to be deterministic.
  sql = <<~SQL
    SELECT
    t1.created_at created_at,
    t1.client_ip client_ip,
    u.username username,
    u.uploaded_avatar_id uploaded_avatar_id,
    u.id user_id
    FROM (
    SELECT DISTINCT ON (t.client_ip, t.user_id) t.client_ip, t.user_id, t.created_at
    FROM user_auth_token_logs t
    WHERE t.user_id IN (#{admin_ids.join(',')})
    AND t.created_at >= :start_date
    AND t.created_at <= :end_date
    ORDER BY t.client_ip, t.user_id, t.created_at DESC
    LIMIT #{report.limit || 20}
    ) t1
    JOIN users u ON u.id = t1.user_id
    ORDER BY created_at DESC
  SQL

  DB.query(sql, start_date: report.start_date, end_date: report.end_date).each do |row|
    data = {}
    data[:avatar_template] = User.avatar_template(row.username, row.uploaded_avatar_id)
    data[:user_id] = row.user_id
    data[:username] = row.username
    # GeoIP lookup of the login IP; may be blank when the IP is unknown.
    data[:location] = DiscourseIpInfo.get(row.client_ip)[:location]
    data[:created_at] = row.created_at

    report.data << data
  end
end
|
|
|
|
|
|
2018-10-30 18:51:58 -04:00
|
|
|
|
def self.report_suspicious_logins(report)
  # Table of auth-token log entries flagged as 'suspicious' in the period,
  # enriched with GeoIP location and parsed user-agent details.
  report.modes = [:table]

  label = ->(key) { I18n.t("reports.suspicious_logins.labels.#{key}") }

  report.labels = [
    {
      type: :user,
      properties: {
        username: :username,
        id: :user_id,
        avatar: :avatar_template,
      },
      title: label.call("user")
    },
    { property: :client_ip, title: label.call("client_ip") },
    { property: :location, title: label.call("location") },
    { property: :browser, title: label.call("browser") },
    { property: :device, title: label.call("device") },
    { property: :os, title: label.call("os") },
    { type: :date, property: :login_time, title: label.call("login_time") },
  ]

  report.data = []

  sql = <<~SQL
    SELECT u.id user_id, u.username, u.uploaded_avatar_id, t.client_ip, t.user_agent, t.created_at login_time
    FROM user_auth_token_logs t
    JOIN users u ON u.id = t.user_id
    WHERE t.action = 'suspicious'
    AND t.created_at >= :start_date
    AND t.created_at <= :end_date
    ORDER BY t.created_at DESC
  SQL

  rows = DB.query(sql, start_date: report.start_date, end_date: report.end_date)

  rows.each do |row|
    # Derived fields: GeoIP for the IP, user-agent parsed three ways.
    ipinfo = DiscourseIpInfo.get(row.client_ip)
    browser = BrowserDetection.browser(row.user_agent)
    device = BrowserDetection.device(row.user_agent)
    os = BrowserDetection.os(row.user_agent)

    report.data << {
      username: row.username,
      user_id: row.user_id,
      avatar_template: User.avatar_template(row.username, row.uploaded_avatar_id),
      client_ip: row.client_ip.to_s,
      location: ipinfo[:location],
      browser: I18n.t("user_auth_tokens.browser.#{browser}"),
      device: I18n.t("user_auth_tokens.device.#{device}"),
      os: I18n.t("user_auth_tokens.os.#{os}"),
      login_time: row.login_time
    }
  end
end
|
|
|
|
|
|
2018-12-14 17:14:46 -05:00
|
|
|
|
def self.report_storage_stats(report)
  # Disk-usage snapshot: backup store statistics plus used/free byte
  # counts for the uploads volume. Not a time-series report.
  backup_stats =
    begin
      BackupRestore::BackupStore.create.stats
    rescue BackupRestore::BackupStore::StorageError
      # Backup storage unreachable — report no backup data rather than fail.
      nil
    end

  uploads_stats = {
    used_bytes: DiskSpace.uploads_used_bytes,
    free_bytes: DiskSpace.uploads_free_bytes
  }

  report.data = {
    backups: backup_stats,
    uploads: uploads_stats
  }
end
|
|
|
|
|
|
2018-12-28 14:48:54 -05:00
|
|
|
|
def self.report_top_uploads(report)
|
|
|
|
|
report.modes = [:table]
|
FEATURE: Exposing a way to add a generic report filter (#6816)
* FEATURE: Exposing a way to add a generic report filter
## Why do we need this change?
Part of the work discussed [here](https://meta.discourse.org/t/gain-understanding-of-file-uploads-usage/104994), and implemented a first spike [here](https://github.com/discourse/discourse/pull/6809), I am trying to expose a single generic filter selector per report.
## How does this work?
We basically expose a simple, single generic filter that is computed and displayed based on backend values passed into the report.
This would be a simple contract between the frontend and the backend.
**Backend changes:** we simply need to return a list of dropdown / select options, and enable the report's newly introduced `custom_filtering` property.
For example, for our [Top Uploads](https://github.com/discourse/discourse/pull/6809/files#diff-3f97cbb8726f3310e0b0c386dbe89e22R1423) report, it can look like this on the backend:
```ruby
report.custom_filtering = true
report.custom_filter_options = [{ id: "any", name: "Any" }, { id: "jpg", name: "JPEG" } ]
```
In our javascript report HTTP call, it will look like:
```js
{
"custom_filtering": true,
"custom_filter_options": [
{
"id": "any",
"name": "Any"
},
{
"id": "jpg",
"name": "JPG"
}
]
}
```
**Frontend changes:** We introduced a generic `filter` param and a `combo-box` which hooks up into the existing framework for fetching a report.
This works alright, with the limitation of being a single custom filter per report. If we wanted to add, for an instance a `filesize filter`, this will not work for us. _I went through with this approach because it is hard to predict and build abstractions for requirements or problems we don't have yet, or might not have._
## How does it look like?
![a1ktg1odde](https://user-images.githubusercontent.com/45508821/50485875-f17edb80-09ee-11e9-92dd-1454ab041fbb.gif)
## More on the bigger picture
The major concern here I have is the solution I introduced might serve the `think small` version of the reporting work, but I don't think it serves the `think big`, I will try to shed some light into why.
Within the current design, It is hard to maintain QueryParams for dynamically generated params (based on the idea of introducing more than one custom filter per report).
To allow ourselves to have more than one generic filter, we will need to:
a. Use the Route's model to retrieve the report's payload (we are now dependent on changes of the QueryParams via computed properties)
b. After retrieving the payload, we can use the `setupController` to define our dynamic QueryParams based on the custom filters definitions we received from the backend
c. Load a custom filter specific Ember component based on the definitions we received from the backend
2019-03-15 08:15:38 -04:00
|
|
|
|
report.filter_options = [
|
|
|
|
|
{
|
|
|
|
|
id: "file-extension",
|
|
|
|
|
selected: report.filter_values.fetch("file-extension", "any"),
|
|
|
|
|
choices: (SiteSetting.authorized_extensions.split("|") + report.filter_values.values).uniq,
|
|
|
|
|
allowAny: true
|
|
|
|
|
}
|
|
|
|
|
]
|
2018-12-28 14:48:54 -05:00
|
|
|
|
report.labels = [
|
|
|
|
|
{
|
|
|
|
|
type: :link,
|
|
|
|
|
properties: [
|
|
|
|
|
:file_url,
|
|
|
|
|
:file_name,
|
|
|
|
|
],
|
|
|
|
|
title: I18n.t("reports.top_uploads.labels.filename")
|
|
|
|
|
},
|
|
|
|
|
{
|
|
|
|
|
type: :user,
|
|
|
|
|
properties: {
|
|
|
|
|
username: :author_username,
|
|
|
|
|
id: :author_id,
|
|
|
|
|
avatar: :author_avatar_template,
|
|
|
|
|
},
|
|
|
|
|
title: I18n.t("reports.top_uploads.labels.author")
|
|
|
|
|
},
|
|
|
|
|
{
|
|
|
|
|
type: :text,
|
|
|
|
|
property: :extension,
|
|
|
|
|
title: I18n.t("reports.top_uploads.labels.extension")
|
|
|
|
|
},
|
|
|
|
|
{
|
|
|
|
|
type: :bytes,
|
|
|
|
|
property: :filesize,
|
|
|
|
|
title: I18n.t("reports.top_uploads.labels.filesize")
|
|
|
|
|
},
|
|
|
|
|
]
|
|
|
|
|
|
|
|
|
|
report.data = []
|
|
|
|
|
|
|
|
|
|
sql = <<~SQL
|
|
|
|
|
SELECT
|
|
|
|
|
u.id as user_id,
|
|
|
|
|
u.username,
|
|
|
|
|
u.uploaded_avatar_id,
|
|
|
|
|
up.filesize,
|
|
|
|
|
up.original_filename,
|
|
|
|
|
up.extension,
|
|
|
|
|
up.url
|
|
|
|
|
FROM uploads up
|
|
|
|
|
JOIN users u
|
|
|
|
|
ON u.id = up.user_id
|
FEATURE: Exposing a way to add a generic report filter (#6816)
* FEATURE: Exposing a way to add a generic report filter
## Why do we need this change?
Part of the work discussed [here](https://meta.discourse.org/t/gain-understanding-of-file-uploads-usage/104994), and implemented a first spike [here](https://github.com/discourse/discourse/pull/6809), I am trying to expose a single generic filter selector per report.
## How does this work?
We basically expose a simple, single generic filter that is computed and displayed based on backend values passed into the report.
This would be a simple contract between the frontend and the backend.
**Backend changes:** we simply need to return a list of dropdown / select options, and enable the report's newly introduced `custom_filtering` property.
For example, for our [Top Uploads](https://github.com/discourse/discourse/pull/6809/files#diff-3f97cbb8726f3310e0b0c386dbe89e22R1423) report, it can look like this on the backend:
```ruby
report.custom_filtering = true
report.custom_filter_options = [{ id: "any", name: "Any" }, { id: "jpg", name: "JPEG" } ]
```
In our javascript report HTTP call, it will look like:
```js
{
"custom_filtering": true,
"custom_filter_options": [
{
"id": "any",
"name": "Any"
},
{
"id": "jpg",
"name": "JPG"
}
]
}
```
**Frontend changes:** We introduced a generic `filter` param and a `combo-box` which hooks up into the existing framework for fetching a report.
This works alright, with the limitation of being a single custom filter per report. If we wanted to add, for an instance a `filesize filter`, this will not work for us. _I went through with this approach because it is hard to predict and build abstractions for requirements or problems we don't have yet, or might not have._
## How does it look like?
![a1ktg1odde](https://user-images.githubusercontent.com/45508821/50485875-f17edb80-09ee-11e9-92dd-1454ab041fbb.gif)
## More on the bigger picture
The major concern here I have is the solution I introduced might serve the `think small` version of the reporting work, but I don't think it serves the `think big`, I will try to shed some light into why.
Within the current design, It is hard to maintain QueryParams for dynamically generated params (based on the idea of introducing more than one custom filter per report).
To allow ourselves to have more than one generic filter, we will need to:
a. Use the Route's model to retrieve the report's payload (we are now dependent on changes of the QueryParams via computed properties)
b. After retrieving the payload, we can use the `setupController` to define our dynamic QueryParams based on the custom filters definitions we received from the backend
c. Load a custom filter specific Ember component based on the definitions we received from the backend
2019-03-15 08:15:38 -04:00
|
|
|
|
/*where*/
|
2018-12-28 14:48:54 -05:00
|
|
|
|
ORDER BY up.filesize DESC
|
|
|
|
|
LIMIT #{report.limit || 250}
|
|
|
|
|
SQL
|
|
|
|
|
|
FEATURE: Exposing a way to add a generic report filter (#6816)
* FEATURE: Exposing a way to add a generic report filter
## Why do we need this change?
Part of the work discussed [here](https://meta.discourse.org/t/gain-understanding-of-file-uploads-usage/104994), and implemented a first spike [here](https://github.com/discourse/discourse/pull/6809), I am trying to expose a single generic filter selector per report.
## How does this work?
We basically expose a simple, single generic filter that is computed and displayed based on backend values passed into the report.
This would be a simple contract between the frontend and the backend.
**Backend changes:** we simply need to return a list of dropdown / select options, and enable the report's newly introduced `custom_filtering` property.
For example, for our [Top Uploads](https://github.com/discourse/discourse/pull/6809/files#diff-3f97cbb8726f3310e0b0c386dbe89e22R1423) report, it can look like this on the backend:
```ruby
report.custom_filtering = true
report.custom_filter_options = [{ id: "any", name: "Any" }, { id: "jpg", name: "JPEG" } ]
```
In our javascript report HTTP call, it will look like:
```js
{
"custom_filtering": true,
"custom_filter_options": [
{
"id": "any",
"name": "Any"
},
{
"id": "jpg",
"name": "JPG"
}
]
}
```
**Frontend changes:** We introduced a generic `filter` param and a `combo-box` which hooks up into the existing framework for fetching a report.
This works alright, with the limitation of being a single custom filter per report. If we wanted to add, for an instance a `filesize filter`, this will not work for us. _I went through with this approach because it is hard to predict and build abstractions for requirements or problems we don't have yet, or might not have._
## How does it look like?
![a1ktg1odde](https://user-images.githubusercontent.com/45508821/50485875-f17edb80-09ee-11e9-92dd-1454ab041fbb.gif)
## More on the bigger picture
The major concern here I have is the solution I introduced might serve the `think small` version of the reporting work, but I don't think it serves the `think big`, I will try to shed some light into why.
Within the current design, It is hard to maintain QueryParams for dynamically generated params (based on the idea of introducing more than one custom filter per report).
To allow ourselves to have more than one generic filter, we will need to:
a. Use the Route's model to retrieve the report's payload (we are now dependent on changes of the QueryParams via computed properties)
b. After retrieving the payload, we can use the `setupController` to define our dynamic QueryParams based on the custom filters definitions we received from the backend
c. Load a custom filter specific Ember component based on the definitions we received from the backend
2019-03-15 08:15:38 -04:00
|
|
|
|
extension_filter = report.filter_values["file-extension"]
|
|
|
|
|
builder = DB.build(sql)
|
|
|
|
|
builder.where("up.id > :seeded_id_threshold", seeded_id_threshold: Upload::SEEDED_ID_THRESHOLD)
|
|
|
|
|
builder.where("up.created_at >= :start_date", start_date: report.start_date)
|
|
|
|
|
builder.where("up.created_at < :end_date", end_date: report.end_date)
|
|
|
|
|
builder.where("up.extension = :extension", extension: extension_filter) if extension_filter.present?
|
|
|
|
|
builder.query.each do |row|
|
2018-12-28 14:48:54 -05:00
|
|
|
|
data = {}
|
|
|
|
|
data[:author_id] = row.user_id
|
|
|
|
|
data[:author_username] = row.username
|
|
|
|
|
data[:author_avatar_template] = User.avatar_template(row.username, row.uploaded_avatar_id)
|
|
|
|
|
data[:filesize] = row.filesize
|
|
|
|
|
data[:extension] = row.extension
|
|
|
|
|
data[:file_url] = Discourse.store.cdn_url(row.url)
|
|
|
|
|
data[:file_name] = row.original_filename.truncate(25)
|
|
|
|
|
report.data << data
|
|
|
|
|
end
|
|
|
|
|
end
|
|
|
|
|
|
2019-03-12 12:01:58 -04:00
|
|
|
|
def self.report_top_ignored_users(report)
  report.modes = [:table]

  report.labels = [
    {
      type: :user,
      properties: {
        id: :ignored_user_id,
        username: :ignored_username,
        avatar: :ignored_user_avatar_template,
      },
      title: I18n.t("reports.top_ignored_users.labels.ignored_user")
    },
    {
      type: :number,
      properties: [
        :ignores_count,
      ],
      title: I18n.t("reports.top_ignored_users.labels.ignores_count")
    }
  ]

  report.data = []

  # Use bound parameters for the date range and limit instead of string
  # interpolation — consistent with the builder.where(..., start_date:)
  # style used by the other reports and safe against SQL injection.
  sql = <<~SQL
    SELECT
    u.id AS user_id,
    u.username,
    u.uploaded_avatar_id,
    COUNT(*) AS ignores_count
    FROM users AS u
    INNER JOIN ignored_users AS ig ON ig.ignored_user_id = u.id
    WHERE ig.created_at >= :start_date AND ig.created_at <= :end_date
    GROUP BY u.id
    ORDER BY COUNT(*) DESC
    LIMIT :limit
  SQL

  DB.query(
    sql,
    start_date: report.start_date,
    end_date: report.end_date,
    limit: report.limit || 250
  ).each do |row|
    report.data << {
      ignored_user_id: row.user_id,
      ignored_username: row.username,
      ignored_user_avatar_template: User.avatar_template(row.username, row.uploaded_avatar_id),
      ignores_count: row.ignores_count,
    }
  end
end
|
|
|
|
|
|
2018-12-14 17:14:46 -05:00
|
|
|
|
# Backup storage stats depend on these settings; invalidate the cached
# report whenever either of them changes.
DiscourseEvent.on(:site_setting_saved) do |site_setting|
  setting_name = site_setting.name.to_s
  clear_cache(:storage_stats) if %w[backup_location s3_backup_bucket].include?(setting_name)
end
|
|
|
|
|
|
2018-08-30 08:56:11 -04:00
|
|
|
|
# Builds a CSS rgba() string from a hex color.
# 3-char shorthand ("f60") is expanded by doubling each char ("ff6600");
# anything shorter is padded to 6 chars by repeating its last char.
# NOTE(review): `hex.last` on a String relies on ActiveSupport — confirm.
def rgba_color(hex, opacity = 1)
  hex = hex.scan(/\w/).map { |ch| ch * 2 }.join if hex.size == 3
  hex = hex.ljust(6, hex.last) if hex.size < 3

  "rgba(#{hex_to_rgbs(hex).join(',')},#{opacity})"
end
|
2019-01-21 09:17:04 -05:00
|
|
|
|
|
|
|
|
|
private
|
|
|
|
|
|
|
|
|
|
# Converts a hex color string (with or without a leading '#') into an
# array of three Integers [r, g, b].
#
# Fixes: String#hex already returns an Integer, so the original trailing
# `.map! { |rgb| rgb.to_i }` was a no-op; the chained destructive map!
# calls are collapsed into a single map.
def hex_to_rgbs(hex_color)
  hex_color.delete('#').scan(/../).map(&:hex)
end
|
2013-02-27 22:39:42 -05:00
|
|
|
|
end
|