From f60dc7f5b41b372f21858047a014b24ad459d705 Mon Sep 17 00:00:00 2001
From: Joshua Rosenfeld
Date: Thu, 25 Jun 2020 14:30:57 -0400
Subject: [PATCH] FIX: Broken specs

`/u/` is no longer in robots.txt, so don't test for it
---
 spec/requests/robots_txt_controller_spec.rb | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/spec/requests/robots_txt_controller_spec.rb b/spec/requests/robots_txt_controller_spec.rb
index 16327997c9c..151f9d60ba6 100644
--- a/spec/requests/robots_txt_controller_spec.rb
+++ b/spec/requests/robots_txt_controller_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe RobotsTxtController do
         allow_section = allow_index < disallow_index ?
           response.body[allow_index...disallow_index] : response.body[allow_index..-1]
 
-        expect(allow_section).to include('Disallow: /u/')
+        expect(allow_section).to include('Disallow: /auth/')
         expect(allow_section).to_not include("Disallow: /\n")
 
         disallowed_section = allow_index < disallow_index ?
@@ -90,7 +90,7 @@ RSpec.describe RobotsTxtController do
 
       i = response.body.index('User-agent: *')
       expect(i).to be_present
-      expect(response.body[i..-1]).to include("Disallow: /u/")
+      expect(response.body[i..-1]).to include("Disallow: /auth/")
     end
 
     it "can whitelist user agents" do
@@ -131,7 +131,7 @@ RSpec.describe RobotsTxtController do
       SiteSetting.allow_index_in_robots_txt = false
 
      get '/robots.txt'
 
-      expect(response.body).to_not include("Disallow: /u/")
+      expect(response.body).to_not include("Disallow: /auth/")
       expect(response.body).to include("User-agent: googlebot\nAllow")