FIX: don't block api requests when whitelisted_crawler_user_agents is set

This commit is contained in:
Neil Lalonde 2018-09-14 15:34:21 -04:00
parent 7e5000b472
commit b87a089822
2 changed files with 12 additions and 0 deletions

View File

@ -26,6 +26,8 @@ module Middleware
!@request.xhr? &&
!@request.path.ends_with?('robots.txt') &&
!@request.path.ends_with?('srv/status') &&
@request[Auth::DefaultCurrentUserProvider::API_KEY].nil? &&
@env[Auth::DefaultCurrentUserProvider::USER_API_KEY].nil? &&
CrawlerDetection.is_blocked_crawler?(@request.env['HTTP_USER_AGENT'])
end

View File

@ -189,6 +189,16 @@ describe Middleware::AnonymousCache::Helper do
expect(@status).to eq(200)
end
it "doesn't block api requests" do
  # A whitelist of crawler user agents must not cause authenticated API
  # calls to be rejected by the anonymous-cache crawler blocking.
  SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
  key = Fabricate(:api_key)
  # Build the credentials once so the request path and the rack env's
  # QUERY_STRING are guaranteed to carry identical parameters.
  query = "api_key=#{key.key}&api_username=system"
  get "/latest?#{query}", headers: { "QUERY_STRING" => query }
  expect(@status).to eq(200)
end
it "applies blacklisted_crawler_user_agents correctly" do
SiteSetting.blacklisted_crawler_user_agents = 'Googlebot'