Merge pull request #2128 from vikhyat/crawler-detection

Improved crawler detection
Sam 2014-03-17 10:02:33 +11:00
commit 82e34eabb7
3 changed files with 6 additions and 1 deletion

View File

@@ -16,6 +16,7 @@
     <%= render :partial => "common/discourse_stylesheet" %>
     <%= discourse_csrf_tags %>
     <%= raw SiteContent.content_for(:head) %>
+    <%= yield :head %>
   </head>
   <body>
     <%- unless customization_disabled? %>

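The added <%= yield :head %> gives individual views and plugins a hook to inject markup into the document head via Rails' content_for. A minimal sketch of how a view could use that hook (the view path and the canonical-URL variable are hypothetical, not part of this commit):

<%# app/views/topics/show.html.erb -- hypothetical view using the new yield point %>
<% content_for :head do %>
  <link rel="canonical" href="<%= @canonical_url %>">
<% end %>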
View File

@@ -1,5 +1,5 @@
 module CrawlerDetection
   def self.crawler?(user_agent)
-    !/Googlebot|Mediapartners|AdsBot/.match(user_agent).nil?
+    !/Googlebot|Mediapartners|AdsBot|curl|Twitterbot|facebookexternalhit|bingbot|Baiduspider/.match(user_agent).nil?
   end
 end

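CrawlerDetection.crawler? simply matches the request's User-Agent string against a widened list of known bot signatures and returns true or false. A minimal sketch of how a caller might use it to switch to a bot-friendly layout (the controller hook and layout name below are assumptions for illustration, not taken from this commit):

# Hypothetical caller: serve a stripped-down layout to crawlers.
class ApplicationController < ActionController::Base
  layout :set_layout

  private

  def set_layout
    if CrawlerDetection.crawler?(request.user_agent)
      "crawler"       # assumed name of a lightweight, bot-friendly layout
    else
      "application"
    end
  end
end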
View File

@@ -15,6 +15,10 @@ describe CrawlerDetection do
     described_class.crawler?("(compatible; Mediapartners-Google/2.1; +http://www.google.com/bot.html)").should == true
     described_class.crawler?("Mediapartners-Google").should == true
     described_class.crawler?("AdsBot-Google (+http://www.google.com/adsbot.html)").should == true
+    described_class.crawler?("Twitterbot").should == true
+    described_class.crawler?("facebookexternalhit/1.1 (+http(s)://www.facebook.com/externalhit_uatext.php)").should == true
+    described_class.crawler?("Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)").should == true
+    described_class.crawler?("Baiduspider+(+http://www.baidu.com/search/spider.htm)").should == true
   end

   it "returns false for non-crawler user agents" do