DEV: simpler spec for wayback machine crawler layout (#7696)

follow-up on 42809f4d

diff --git a/spec/requests/topics_controller_spec.rb b/spec/requests/topics_controller_spec.rb
index 7825047..c1d73b7 100644
--- a/spec/requests/topics_controller_spec.rb
+++ b/spec/requests/topics_controller_spec.rb
@@ -2677,7 +2677,6 @@ RSpec.describe TopicsController do
   end
 
   describe "crawler" do
-
     context "when not a crawler" do
       it "renders with the application layout" do
         get topic.url
@@ -2690,14 +2689,14 @@ RSpec.describe TopicsController do
     end
 
     context "when a crawler" do
-      let(:topic) { Fabricate(:topic) }
-      let(:page1_time) { 3.months.ago }
-      let(:page2_time) { 2.months.ago }
-      let(:page3_time) { 1.month.ago }
+      it "renders with the crawler layout, and handles proper pagination" do
+        page1_time = 3.months.ago
+        page2_time = 2.months.ago
+        page3_time = 1.month.ago
 
-      before do
         freeze_time page1_time
 
+        topic = Fabricate(:topic)
         Fabricate(:post, topic: topic)
         Fabricate(:post, topic: topic)
 
@@ -2711,40 +2710,44 @@ RSpec.describe TopicsController do
         # ugly, but no interface to set this and we don't want to create
         # 100 posts to test this thing
         TopicView.stubs(:chunk_size).returns(2)
-      end
 
-      shared_examples "crawler layout" do |user_agent, via|
-        it "renders with the crawler layout, and handles proper pagination" do
-          get topic.url, env: { "HTTP_USER_AGENT" => user_agent, "HTTP_VIA" => via }
+        user_agent = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
 
-          body = response.body
+        get topic.url, env: { "HTTP_USER_AGENT" => user_agent }
 
-          expect(body).to have_tag(:body, with: { class: 'crawler' })
-          expect(body).to_not have_tag(:meta, with: { name: 'fragment' })
-          expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=2")
+        body = response.body
 
-          expect(response.headers['Last-Modified']).to eq(page1_time.httpdate)
+        expect(body).to have_tag(:body, with: { class: 'crawler' })
+        expect(body).to_not have_tag(:meta, with: { name: 'fragment' })
+        expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=2")
 
-          get topic.url + "?page=2", env: { "HTTP_USER_AGENT" => user_agent, "HTTP_VIA" => via }
-          body = response.body
+        expect(response.headers['Last-Modified']).to eq(page1_time.httpdate)
 
-          expect(response.headers['Last-Modified']).to eq(page2_time.httpdate)
+        get topic.url + "?page=2", env: { "HTTP_USER_AGENT" => user_agent}
+        body = response.body
+
+        expect(response.headers['Last-Modified']).to eq(page2_time.httpdate)
+
+        expect(body).to include('<link rel="prev" href="' + topic.relative_url)
+        expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=3")
+
+        get topic.url + "?page=3", env: { "HTTP_USER_AGENT" => user_agent}
+        body = response.body
 
-          expect(body).to include('<link rel="prev" href="' + topic.relative_url)
-          expect(body).to include('<link rel="next" href="' + topic.relative_url + "?page=3")
+        expect(response.headers['Last-Modified']).to eq(page3_time.httpdate)
+        expect(body).to include('<link rel="prev" href="' + topic.relative_url + "?page=2")
+      end
 
-          get topic.url + "?page=3", env: { "HTTP_USER_AGENT" => user_agent, "HTTP_VIA" => via }
+      context "wayback machine" do
+        it "renders crawler layout" do
+          get topic.url, env: { "HTTP_USER_AGENT" => "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36", "HTTP_VIA" => "HTTP/1.0 web.archive.org (Wayback Save Page)" }
           body = response.body
 
-          expect(response.headers['Last-Modified']).to eq(page3_time.httpdate)
-          expect(body).to include('<link rel="prev" href="' + topic.relative_url + "?page=2")
+          expect(body).to have_tag(:body, with: { class: 'crawler' })
+          expect(body).to_not have_tag(:meta, with: { name: 'fragment' })
         end
       end
-
-      include_examples "crawler layout", "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)", nil
-      include_examples "crawler layout", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36", "HTTP/1.0 web.archive.org (Wayback Save Page)"
     end
-
   end
 
   describe "#reset_bump_date" do

GitHub sha: 9779307e
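
For context, the spec exercises the Wayback Machine path through the Via header rather than the User-Agent (the UA in the test is a plain Chrome string). A minimal sketch of that kind of check, with a hypothetical helper name (this is not Discourse's actual CrawlerDetection code):

# Hypothetical sketch: treat a request as coming from the Wayback Machine
# when the Via header mentions web.archive.org, regardless of User-Agent.
def wayback_machine_crawler?(env)
  env["HTTP_VIA"].to_s.include?("web.archive.org")
end

# Illustrative usage:
#   wayback_machine_crawler?("HTTP_VIA" => "HTTP/1.0 web.archive.org (Wayback Save Page)")
#   # => true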

DEV: lint file

@tgxworld mentioned using the HTML RSpec matchers in the PR review. If you feel that adding them makes the specs simpler, go ahead and change it; to me the specs read OK, and I'm not sure we need to change them.
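
For reference, a sketch of what the string-based link assertions might look like with the have_tag matcher the spec already uses for the body class (attribute values shown are illustrative):

expect(body).to have_tag("link", with: { rel: "next", href: "#{topic.relative_url}?page=2" })
expect(body).to have_tag("link", with: { rel: "prev", href: topic.relative_url })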
