diff --git a/spec/requests/topics_controller_spec.rb b/spec/requests/topics_controller_spec.rb
index 7825047ef50..c1d73b7506d 100644
--- a/spec/requests/topics_controller_spec.rb
+++ b/spec/requests/topics_controller_spec.rb
@@ -2677,7 +2677,6 @@ RSpec.describe TopicsController do
end
describe "crawler" do
-
context "when not a crawler" do
it "renders with the application layout" do
get topic.url
@@ -2690,14 +2689,14 @@ RSpec.describe TopicsController do
end
context "when a crawler" do
- let(:topic) { Fabricate(:topic) }
- let(:page1_time) { 3.months.ago }
- let(:page2_time) { 2.months.ago }
- let(:page3_time) { 1.month.ago }
+ it "renders with the crawler layout, and handles proper pagination" do
+ page1_time = 3.months.ago
+ page2_time = 2.months.ago
+ page3_time = 1.month.ago
- before do
freeze_time page1_time
+ topic = Fabricate(:topic)
Fabricate(:post, topic: topic)
Fabricate(:post, topic: topic)
@@ -2711,40 +2710,44 @@ RSpec.describe TopicsController do
# ugly, but no inteface to set this and we don't want to create
# 100 posts to test this thing
TopicView.stubs(:chunk_size).returns(2)
+
+ user_agent = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
+
+ get topic.url, env: { "HTTP_USER_AGENT" => user_agent }
+
+ body = response.body
+
+ expect(body).to have_tag(:body, with: { class: 'crawler' })
+ expect(body).to_not have_tag(:meta, with: { name: 'fragment' })
+ expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=2")
+
+ expect(response.headers['Last-Modified']).to eq(page1_time.httpdate)
+
+ get topic.url + "?page=2", env: { "HTTP_USER_AGENT" => user_agent }
+ body = response.body
+
+ expect(response.headers['Last-Modified']).to eq(page2_time.httpdate)
+
+ expect(body).to include('<link rel="prev" href="' + topic.relative_url)
+ expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=3")
+
+ get topic.url + "?page=3", env: { "HTTP_USER_AGENT" => user_agent }
+ body = response.body
+
+ expect(response.headers['Last-Modified']).to eq(page3_time.httpdate)
+ expect(body).to include('<link rel="prev" href="' + topic.relative_url + "?page=2")
+ end
+
- get topic.url, env: { "HTTP_USER_AGENT" => user_agent, "HTTP_VIA" => via }
-
+ context "wayback machine" do
+ it "renders crawler layout" do
+ get topic.url, env: { "HTTP_USER_AGENT" => "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36", "HTTP_VIA" => "HTTP/1.0 web.archive.org (Wayback Save Page)" }
body = response.body
expect(body).to have_tag(:body, with: { class: 'crawler' })
expect(body).to_not have_tag(:meta, with: { name: 'fragment' })
- expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=2")
-
- expect(response.headers['Last-Modified']).to eq(page1_time.httpdate)
-
- get topic.url + "?page=2", env: { "HTTP_USER_AGENT" => user_agent, "HTTP_VIA" => via }
- body = response.body
-
- expect(response.headers['Last-Modified']).to eq(page2_time.httpdate)
-
- expect(body).to include('<link rel="prev" href="' + topic.relative_url)
- expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=3")
-
- get topic.url + "?page=3", env: { "HTTP_USER_AGENT" => user_agent, "HTTP_VIA" => via }
- body = response.body
-
- expect(response.headers['Last-Modified']).to eq(page3_time.httpdate)
- expect(body).to include('<link rel="prev" href="' + topic.relative_url + "?page=2")