FIX: Broken specs

`/u/` is no longer listed in robots.txt, so the specs now check for `Disallow: /auth/` instead of `Disallow: /u/`.

diff --git a/spec/requests/robots_txt_controller_spec.rb b/spec/requests/robots_txt_controller_spec.rb
index 1632799..151f9d6 100644
--- a/spec/requests/robots_txt_controller_spec.rb
+++ b/spec/requests/robots_txt_controller_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe RobotsTxtController do
         allow_section = allow_index < disallow_index ?
           response.body[allow_index...disallow_index] : response.body[allow_index..-1]
 
-        expect(allow_section).to include('Disallow: /u/')
+        expect(allow_section).to include('Disallow: /auth/')
         expect(allow_section).to_not include("Disallow: /\n")
 
         disallowed_section = allow_index < disallow_index ?
@@ -90,7 +90,7 @@ RSpec.describe RobotsTxtController do
 
         i = response.body.index('User-agent: *')
         expect(i).to be_present
-        expect(response.body[i..-1]).to include("Disallow: /u/")
+        expect(response.body[i..-1]).to include("Disallow: /auth/")
       end
 
       it "can whitelist user agents" do
@@ -131,7 +131,7 @@ RSpec.describe RobotsTxtController do
       SiteSetting.allow_index_in_robots_txt = false
       get '/robots.txt'
 
-      expect(response.body).to_not include("Disallow: /u/")
+      expect(response.body).to_not include("Disallow: /auth/")
       expect(response.body).to include("User-agent: googlebot\nAllow")
     end
 

GitHub sha: f60dc7f5