# frozen_string_literal: true

require 'rails_helper'

RSpec.describe RobotsTxtController do
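
  # /robots-builder.json appears to be the JSON endpoint backing the robots.txt
  # admin UI; the expectations below pin down its payload: 'header' and
  # 'agents' are always present, and 'overridden' carries replacement content.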
  describe '#builder' do
    it "returns json information for building a robots.txt" do
      get "/robots-builder.json"
      json = response.parsed_body
      expect(json).to be_present
      expect(json['header']).to be_present
      expect(json['agents']).to be_present
    end

    it "includes overridden content if robots.txt is overridden" do
      SiteSetting.overridden_robots_txt = "something"

      get "/robots-builder.json"
      expect(response.status).to eq(200)
      json = response.parsed_body
      expect(json['header']).to be_present
      expect(json['agents']).to be_present
      expect(json['overridden']).to eq("something")
    end
  end

  describe '#index' do
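
    # OVERRIDDEN_HEADER is a banner the controller prepends when robots.txt has
    # been overridden, presumably so admins can tell the file no longer
    # reflects site settings; it must not leak to non-admin requests.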
    context "header for when the content is overridden" do
      it "is not prepended if there are no overrides" do
        sign_in(Fabricate(:admin))
        get '/robots.txt'
        expect(response.body).not_to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
      end

      it "is prepended if there are overrides and the user is admin" do
        SiteSetting.overridden_robots_txt = "overridden_content"
        sign_in(Fabricate(:admin))
        get '/robots.txt'
        expect(response.body).to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
      end

      it "is not prepended if the user is not admin" do
        SiteSetting.overridden_robots_txt = "overridden_content"
        get '/robots.txt'
        expect(response.body).not_to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
      end
    end
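
    # On a subfolder install, path rules are expected to carry the subfolder
    # prefix, e.g. "Disallow: /forum/email/" rather than "Disallow: /email/".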
    context 'subfolder' do
      it 'prefixes the rules with the directory' do
        set_subfolder "/forum"

        get '/robots.txt'
        expect(response.body).to include("\nDisallow: /forum/email/")
      end
    end
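
    # The slow_down_crawler_* settings should render one stanza per throttled
    # bot, e.g.:
    #
    #   User-agent: bingbot
    #   Crawl-delay: 17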
    context 'crawl delay' do
      it 'allows you to set crawl delay on particular bots' do
        SiteSetting.allow_index_in_robots_txt = true
        SiteSetting.slow_down_crawler_rate = 17
        SiteSetting.slow_down_crawler_user_agents = 'bingbot|googlebot'
        get '/robots.txt'
        expect(response.body).to include("\nUser-agent: bingbot\nCrawl-delay: 17")
        expect(response.body).to include("\nUser-agent: googlebot\nCrawl-delay: 17")
      end
    end

    context 'allow_index_in_robots_txt is true' do
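
      # Shared assertion: allow_index/disallow_index are byte offsets of the
      # allowing section (per-path rules like Disallow: /auth/, but no blanket
      # Disallow: /) and of the section that disallows everything; the
      # ternaries below slice the body correctly whichever section comes first.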
      def expect_allowed_and_disallowed_sections(allow_index, disallow_index)
        expect(allow_index).to be_present
        expect(disallow_index).to be_present

        allow_section = allow_index < disallow_index ?
          response.body[allow_index...disallow_index] : response.body[allow_index..-1]

        expect(allow_section).to include('Disallow: /auth/')
        expect(allow_section).to_not include("Disallow: /\n")

        disallowed_section = allow_index < disallow_index ?
          response.body[disallow_index..-1] : response.body[disallow_index...allow_index]
        expect(disallowed_section).to include("Disallow: /\n")
      end

      it "returns index when indexing is allowed" do
        SiteSetting.allow_index_in_robots_txt = true
        get '/robots.txt'

        i = response.body.index('User-agent: *')
        expect(i).to be_present
        expect(response.body[i..-1]).to include("Disallow: /auth/")
        # we have to insert Googlebot for special handling
        expect(response.body[i..-1]).to include("User-agent: Googlebot")
      end

      it "can allowlist user agents" do
        SiteSetting.allowed_crawler_user_agents = "Googlebot|Twitterbot"
        get '/robots.txt'
        expect(response.body).to include('User-agent: Googlebot')
        expect(response.body).to include('User-agent: Twitterbot')

        allowed_index = [response.body.index('User-agent: Googlebot'), response.body.index('User-agent: Twitterbot')].min
        disallow_all_index = response.body.index('User-agent: *')

        expect_allowed_and_disallowed_sections(allowed_index, disallow_all_index)
      end

      it "can blocklist user agents" do
        SiteSetting.blocked_crawler_user_agents = "Googlebot|Twitterbot"
        get '/robots.txt'
        expect(response.body).to include('User-agent: Googlebot')
        expect(response.body).to include('User-agent: Twitterbot')

        disallow_index = [response.body.index('User-agent: Googlebot'), response.body.index('User-agent: Twitterbot')].min
        allow_index = response.body.index('User-agent: *')

        expect_allowed_and_disallowed_sections(allow_index, disallow_index)
      end
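
      # The allowlist wins: once allowed_crawler_user_agents is set, entries in
      # blocked_crawler_user_agents are dropped from the output entirely.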
      it "ignores blocklist if allowlist is set" do
        SiteSetting.allowed_crawler_user_agents = "Googlebot|Twitterbot"
        SiteSetting.blocked_crawler_user_agents = "Bananabot"
        get '/robots.txt'
        expect(response.body).to_not include('Bananabot')
        expect(response.body).to include('User-agent: Googlebot')
        expect(response.body).to include('User-agent: Twitterbot')
      end
    end
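
    # With indexing disallowed the per-path rules disappear and googlebot still
    # gets an explicit Allow rule; the actual noindexing is presumably enforced
    # elsewhere (e.g. via headers or meta tags).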
    it "returns noindex when indexing is disallowed" do
      SiteSetting.allow_index_in_robots_txt = false
      get '/robots.txt'

      expect(response.body).to_not include("Disallow: /auth/")
      expect(response.body).to include("User-agent: googlebot\nAllow")
    end
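
    # Overridden content is served back verbatim.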
    it "returns overridden robots.txt if the file is overridden" do
      SiteSetting.overridden_robots_txt = "blah whatever"
      get '/robots.txt'
      expect(response.status).to eq(200)
      expect(response.body).to eq(SiteSetting.overridden_robots_txt)
    end
  end
end