diff --git a/lib/middleware/anonymous_cache.rb b/lib/middleware/anonymous_cache.rb
index f2539b06106..2cd57198767 100644
--- a/lib/middleware/anonymous_cache.rb
+++ b/lib/middleware/anonymous_cache.rb
@@ -26,6 +26,8 @@ module Middleware
         !@request.xhr? &&
         !@request.path.ends_with?('robots.txt') &&
         !@request.path.ends_with?('srv/status') &&
+        @request[Auth::DefaultCurrentUserProvider::API_KEY].nil? &&
+        @env[Auth::DefaultCurrentUserProvider::USER_API_KEY].nil? &&
         CrawlerDetection.is_blocked_crawler?(@request.env['HTTP_USER_AGENT'])
       end
diff --git a/spec/components/middleware/anonymous_cache_spec.rb b/spec/components/middleware/anonymous_cache_spec.rb
index 4d63d4f5e45..de52d9a205f 100644
--- a/spec/components/middleware/anonymous_cache_spec.rb
+++ b/spec/components/middleware/anonymous_cache_spec.rb
@@ -189,6 +189,16 @@ describe Middleware::AnonymousCache::Helper do
       expect(@status).to eq(200)
     end
 
+    it "doesn't block api requests" do
+      SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
+      api_key = Fabricate(:api_key)
+
+      get "/latest?api_key=#{api_key.key}&api_username=system", headers: {
+        "QUERY_STRING" => "api_key=#{api_key.key}&api_username=system"
+      }
+      expect(@status).to eq(200)
+    end
+
     it "applies blacklisted_crawler_user_agents correctly" do
       SiteSetting.blacklisted_crawler_user_agents = 'Googlebot'