# frozen_string_literal: true

require 'uri'

class TopicLink < ActiveRecord::Base

  def self.max_domain_length
    100
  end

  def self.max_url_length
    500
  end

  belongs_to :topic
  belongs_to :user
  belongs_to :post
  belongs_to :link_topic, class_name: 'Topic'
  belongs_to :link_post, class_name: 'Post'

  validates_presence_of :url
  validates_length_of :url, maximum: 500
  validates_uniqueness_of :url, scope: [:topic_id, :post_id]

  has_many :topic_link_clicks, dependent: :destroy

  validate :link_to_self

  after_commit :crawl_link_title

  # Make sure a topic can't link to itself
  def link_to_self
    errors.add(:base, "can't link to the same topic") if topic_id == link_topic_id
  end
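
  # Summarises the outbound links in a topic (the data behind the topic map).
  # Illustrative call, assuming a guardian that responds to #secure_category_ids
  # as used below:
  #
  #   TopicLink.topic_map(guardian, topic.id)
  #
  # Returns up to 50 rows ordered by clicks, excluding image links, unclicked
  # links and links inside private messages.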
  def self.topic_map(guardian, topic_id)

    # Sam: complicated reports are really hard in AR
    builder = DB.build(<<~SQL)
      SELECT ftl.url,
             COALESCE(ft.title, ftl.title) AS title,
             ftl.link_topic_id,
             ftl.reflection,
             ftl.internal,
             ftl.domain,
             MIN(ftl.user_id) AS user_id,
             SUM(clicks) AS clicks
      FROM topic_links AS ftl
      LEFT JOIN topics AS ft ON ftl.link_topic_id = ft.id
      LEFT JOIN categories AS c ON c.id = ft.category_id
      /*where*/
      GROUP BY ftl.url, ft.title, ftl.title, ftl.link_topic_id, ftl.reflection, ftl.internal, ftl.domain
      ORDER BY clicks DESC, count(*) DESC
      LIMIT 50
    SQL

    builder.where('ftl.topic_id = :topic_id', topic_id: topic_id)
    builder.where('ft.deleted_at IS NULL')
    # note that ILIKE means "case insensitive LIKE"
    builder.where("NOT(ftl.url ILIKE '%.png' OR ftl.url ILIKE '%.jpg' OR ftl.url ILIKE '%.gif')")
    builder.where("COALESCE(ft.archetype, 'regular') <> :archetype", archetype: Archetype.private_message)
    builder.where("clicks > 0")

    builder.secure_category(guardian.secure_category_ids)

    builder.query
  end
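
  # Groups link data by post id for the given posts. Illustrative shape of the
  # returned hash (keys taken from the query below):
  #
  #   TopicLink.counts_for(guardian, topic, topic.posts)
  #   # => { 123 => [{ url: "https://example.com", clicks: 2, title: "Example",
  #   #                internal: false, reflection: false }] }
  #
  # For authenticated users, links in muted topics are skipped, and secured
  # categories are filtered via the guardian.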
  def self.counts_for(guardian, topic, posts)
    return {} if posts.blank?

    # Sam: this is not tidy in AR and also happens to be a critical path
    # for topic view
    builder = DB.build("SELECT
                     l.post_id,
                     l.url,
                     l.clicks,
                     COALESCE(t.title, l.title) AS title,
                     l.internal,
                     l.reflection,
                     l.domain
              FROM topic_links l
              LEFT JOIN topics t ON t.id = l.link_topic_id
              LEFT JOIN categories AS c ON c.id = t.category_id
              /*left_join*/
              /*where*/
              ORDER BY reflection ASC, clicks DESC")

    builder.where('t.deleted_at IS NULL')
    builder.where("COALESCE(t.archetype, 'regular') <> :archetype", archetype: Archetype.private_message)

    if guardian.authenticated?
      builder.left_join("topic_users AS tu ON (t.id = tu.topic_id AND tu.user_id = #{guardian.user.id.to_i})")
      builder.where('COALESCE(tu.notification_level,1) > :muted', muted: TopicUser.notification_levels[:muted])
    end

    # not certain if pluck is right here, because it may interfere with caching
    builder.where('l.post_id in (:post_ids)', post_ids: posts.map(&:id))
    builder.secure_category(guardian.secure_category_ids)

    result = {}
    builder.query.each do |l|
      result[l.post_id] ||= []
      result[l.post_id] << { url: l.url,
                             clicks: l.clicks,
                             title: l.title,
                             internal: l.internal,
                             reflection: l.reflection }
    end
    result
  end
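
  # Extracts all links from a post's cooked HTML and keeps the topic_links rows
  # for that post in sync: links still present are ensured via ensure_entry_for,
  # and links that no longer appear are removed via cleanup_entries. Whispers
  # and mailto: links are ignored.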
  def self.extract_from(post)
    return if post.blank? || post.whisper? || post.user_id.blank? || post.deleted_at.present?

    current_urls = []
    reflected_ids = []

    PrettyText
      .extract_links(post.cooked)
      .map do |u|
        uri = UrlHelper.relaxed_parse(u.url)
        [u, uri]
      end
      .reject { |_, p| p.nil? || "mailto" == p.scheme }
      .uniq { |_, p| p }
      .each do |link, parsed|
        TopicLink.transaction do
          begin
            url, reflected_id = self.ensure_entry_for(post, link, parsed)
            current_urls << url unless url.nil?
            reflected_ids << reflected_id unless reflected_id.nil?
          rescue URI::Error
            # if the URI is invalid, don't store it.
          rescue ActionController::RoutingError
            # If we can't find the route, no big deal
          end
        end
      end

    self.cleanup_entries(post, current_urls, reflected_ids)
  end
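
  # Titles are fetched asynchronously: the after_commit callback above ends up
  # enqueueing the same :crawl_topic_link job as this class method.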
  def self.crawl_link_title(topic_link_id)
    Jobs.enqueue(:crawl_topic_link, topic_link_id: topic_link_id)
  end

  def crawl_link_title
    TopicLink.crawl_link_title(id)
  end
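
  # Builds a lookup of the last 200 direct links in a topic, keyed by the URL
  # lowercased with its scheme and any trailing slash stripped, so callers can
  # detect duplicate links. Illustrative entry:
  #
  #   "example.com/t/some-topic/42" => {
  #     domain: "example.com", username: "alice",
  #     posted_at: Time, post_number: 7
  #   }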
  def self.duplicate_lookup(topic)
    results = TopicLink
      .includes(:post, :user)
      .joins(:post, :user)
      .where("posts.id IS NOT NULL AND users.id IS NOT NULL")
      .where(topic_id: topic.id, reflection: false)
      .last(200)

    lookup = {}
    results.each do |tl|
      normalized = tl.url.downcase.sub(/^https?:\/\//, '').sub(/\/$/, '')
      lookup[normalized] = { domain: tl.domain,
                             username: tl.user.username_lower,
                             posted_at: tl.post.created_at,
                             post_number: tl.post.post_number }
    end

    lookup
  end

  private

  # This pattern is used to create topic links very efficiently with minimal
  # errors under heavy concurrent use.
  #
  # It avoids a SELECT to find out if the record is already there and minimizes
  # the work it needs to do when a record is missing.
  #
  # It handles calling the required callback and has parity with the Rails
  # implementation.
  #
  # Usually we would rely on ActiveRecord, but in this case we have had lots of
  # churn around the creation of topic links, leading to hard-to-debug log
  # messages in production.
  #
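  # The single statement below returns both the row id (freshly inserted, or the
  # existing row found after ON CONFLICT DO NOTHING) and a flag indicating
  # whether the row is new; only brand new links get a title-crawl job enqueued.
  #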
  def self.safe_create_topic_link(
    post_id:,
    user_id:,
    topic_id:,
    url:,
    domain: nil,
    internal: false,
    link_topic_id: nil,
    link_post_id: nil,
    quote: false,
    extension: nil,
    reflection: false
  )
    domain ||= Discourse.current_hostname

    sql = <<~SQL
      WITH new_row AS(
        INSERT INTO topic_links(
          post_id,
          user_id,
          topic_id,
          url,
          domain,
          internal,
          link_topic_id,
          link_post_id,
          quote,
          extension,
          reflection,
          created_at,
          updated_at
        ) VALUES (
          :post_id,
          :user_id,
          :topic_id,
          :url,
          :domain,
          :internal,
          :link_topic_id,
          :link_post_id,
          :quote,
          :extension,
          :reflection,
          :now,
          :now
        )
        ON CONFLICT DO NOTHING
        RETURNING id
      )
      SELECT COALESCE(
        (SELECT id FROM new_row),
        (SELECT id FROM topic_links WHERE post_id = :post_id AND topic_id = :topic_id AND url = :url)
      ), (SELECT id FROM new_row) IS NOT NULL
    SQL

    topic_link_id, new_record = DB.query_single(sql,
      post_id: post_id,
      user_id: user_id,
      topic_id: topic_id,
      url: url,
      domain: domain,
      internal: internal,
      link_topic_id: link_topic_id,
      link_post_id: link_post_id,
      quote: quote,
      extension: extension,
      reflection: reflection,
      now: Time.now
    )

    if new_record
      DB.after_commit do
        crawl_link_title(topic_link_id)
      end
    end

    topic_link_id
  end
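
  # Creates (or finds) the topic_links row for a single extracted link. For an
  # internal link to another visible topic it also records the backwards
  # "reflection" link on the target topic. Returns [url, reflected_id], or nil
  # when the link should not be tracked (links to user profiles, links back to
  # the same topic, or hosts longer than max_domain_length).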
  def self.ensure_entry_for(post, link, parsed)
    url = link.url
    internal = false
    topic_id = nil
    post_number = nil
    topic = nil

    if upload = Upload.get_from_url(url)
      internal = Discourse.store.internal?
      # Store the same URL that will be used in the cooked version of the post
      url = UrlHelper.cook_url(upload.url, secure: upload.secure?)
    elsif route = Discourse.route_for(parsed)
      internal = true

      # We aren't interested in tracking internal links to users
      return nil if route[:controller] == "users"

      topic_id = route[:topic_id]
      topic_slug = route[:slug]
      post_number = route[:post_number] || 1

      if route[:controller] == "topics" && route[:action] == "show"
        topic_id ||= route[:id]
        topic_slug ||= route[:id]
      end

      topic = Topic.find_by(id: topic_id) if topic_id
      topic ||= Topic.find_by(slug: topic_slug) if topic_slug.present?

      if topic.present?
        url = +"#{Discourse.base_url_no_prefix}#{topic.relative_url}"
        url << "/#{post_number}" if post_number.to_i > 1
      else
        topic_id = nil
      end
    end

    # Skip linking to ourselves
    return nil if topic&.id == post.topic_id

    reflected_post = nil
    if post_number && topic
      reflected_post = Post.find_by(topic_id: topic.id, post_number: post_number.to_i)
    end

    url = url[0...TopicLink.max_url_length]
    return nil if parsed && parsed.host && parsed.host.length > TopicLink.max_domain_length

    file_extension = File.extname(parsed.path)[1..10].downcase unless parsed.path.nil? || File.extname(parsed.path).empty?

    safe_create_topic_link(
      post_id: post.id,
      user_id: post.user_id,
      topic_id: post.topic_id,
      url: url,
      domain: parsed.host,
      internal: internal,
      link_topic_id: topic&.id,
      link_post_id: reflected_post&.id,
      quote: link.is_quote,
      extension: file_extension
    )

    reflected_id = nil

    # Create the reflection if we can
    if topic && post.topic && topic.archetype != 'private_message' && post.topic.archetype != 'private_message' && post.topic.visible?
      prefix = Discourse.base_url_no_prefix
      reflected_url = "#{prefix}#{post.topic.relative_url(post.post_number)}"

      reflected_id = safe_create_topic_link(
        user_id: post.user_id,
        topic_id: topic&.id,
        post_id: reflected_post&.id,
        url: reflected_url,
        domain: Discourse.current_hostname,
        reflection: true,
        internal: true,
        link_topic_id: post.topic_id,
        link_post_id: post.id
      )
    end

    [url, reflected_id]
  end
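
  # Removes direct links and reflection links that no longer appear in the post,
  # keeping only the URLs and reflection ids gathered during the current
  # extraction pass.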
  def self.cleanup_entries(post, current_urls, current_reflected_ids)
    # Remove links that aren't there anymore
    if current_urls.present?
      TopicLink.where(
        "(url not in (:urls)) AND (post_id = :post_id AND NOT reflection)",
        urls: current_urls, post_id: post.id
      ).delete_all

      current_reflected_ids.compact!
      if current_reflected_ids.present?
        TopicLink.where(
          "(id not in (:reflected_ids)) AND (link_post_id = :post_id AND reflection)",
          reflected_ids: current_reflected_ids, post_id: post.id
        ).delete_all
      else
        TopicLink
          .where("link_post_id = :post_id AND reflection", post_id: post.id)
          .delete_all
      end
    else
      TopicLink
        .where(
          "(post_id = :post_id AND NOT reflection) OR (link_post_id = :post_id AND reflection)",
          post_id: post.id
        )
        .delete_all
    end
  end
end

# == Schema Information
#
# Table name: topic_links
#
#  id            :integer          not null, primary key
#  topic_id      :integer          not null
#  post_id       :integer
#  user_id       :integer          not null
#  url           :string           not null
#  domain        :string(100)      not null
#  internal      :boolean          default(FALSE), not null
#  link_topic_id :integer
#  created_at    :datetime         not null
#  updated_at    :datetime         not null
#  reflection    :boolean          default(FALSE)
#  clicks        :integer          default(0), not null
#  link_post_id  :integer
#  title         :string
#  crawled_at    :datetime
#  quote         :boolean          default(FALSE), not null
#  extension     :string(10)
#
# Indexes
#
#  index_topic_links_on_extension                     (extension)
#  index_topic_links_on_link_post_id_and_reflection   (link_post_id,reflection)
#  index_topic_links_on_post_id                       (post_id)
#  index_topic_links_on_topic_id                      (topic_id)
#  index_topic_links_on_user_and_clicks               (user_id,clicks DESC,created_at DESC) WHERE ((NOT reflection) AND (NOT quote) AND (NOT internal))
#  index_topic_links_on_user_id                       (user_id)
#  unique_post_links                                  (topic_id,post_id,url) UNIQUE
#