module CrawlerDetection
  # Crawler user agents come from the pipe-delimited
  # SiteSetting.crawler_user_agents string. Notes on individual entries:
  # added 'ia_archiver' based on https://meta.discourse.org/t/unable-to-archive-discourse-pages-with-the-internet-archive/21232
  # added 'Wayback Save Page' based on https://meta.discourse.org/t/unable-to-archive-discourse-with-the-internet-archive-save-page-now-button/22875
  # added 'Swiftbot' based on https://meta.discourse.org/t/how-to-add-html-markup-or-meta-tags-for-external-search-engine/28220

  # Builds a case-sensitive regex matching any of the pipe-separated agent
  # names in +string+. Each name is escaped first so literal characters such
  # as '.' are not treated as regex metacharacters.
  #
  # @param string [String] pipe-delimited list of crawler agent substrings
  # @return [Regexp] alternation of the escaped agent names
  def self.to_matcher(string)
    parts = string.split('|').map { |token| Regexp.escape(token) }
    Regexp.new(parts.join('|'))
  end

  # Returns truthy when +user_agent+ contains any configured crawler name.
  #
  # Compiled regexes are memoized per setting value so a regex is only
  # rebuilt when SiteSetting.crawler_user_agents actually changes.
  #
  # @param user_agent [String] raw User-Agent header value
  # @return [Boolean]
  def self.crawler?(user_agent)
    agents_setting = SiteSetting.crawler_user_agents
    @matchers ||= {}
    regex = (@matchers[agents_setting] ||= to_matcher(agents_setting))
    regex.match?(user_agent)
  end
end