diff --git a/lib/middleware/request_tracker.rb b/lib/middleware/request_tracker.rb
index 0f2a126e664..f98e328c1fb 100644
--- a/lib/middleware/request_tracker.rb
+++ b/lib/middleware/request_tracker.rb
@@ -289,7 +289,6 @@ class Middleware::RequestTracker
   def block_crawler(request)
     request.get? &&
     !request.xhr? &&
-    request.env['HTTP_ACCEPT'] =~ /text\/html/ &&
     !request.path.ends_with?('robots.txt') &&
     CrawlerDetection.is_blocked_crawler?(request.env['HTTP_USER_AGENT'])
   end
diff --git a/spec/components/middleware/request_tracker_spec.rb b/spec/components/middleware/request_tracker_spec.rb
index a36a3f307bf..d93e4085db8 100644
--- a/spec/components/middleware/request_tracker_spec.rb
+++ b/spec/components/middleware/request_tracker_spec.rb
@@ -330,9 +330,9 @@ describe Middleware::RequestTracker do
       }.to_not change { ApplicationRequest.count }
     end
 
-    it "allows json requests" do
+    it "blocks json requests" do
       SiteSetting.blacklisted_crawler_user_agents = 'Googlebot'
-      expect_success_response(*middleware.call(env(
+      expect_blocked_response(*middleware.call(env(
        'HTTP_USER_AGENT' => 'Googlebot/2.1 (+http://www.google.com/bot.html)',
        'HTTP_ACCEPT' => 'application/json'
      )))
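
Note (reviewer sketch, not part of the patch): removing the HTTP_ACCEPT =~ /text\/html/ clause means block_crawler no longer exempts requests that do not ask for HTML, so a blacklisted crawler is blocked even when it requests JSON, which is exactly what the updated spec asserts. Below is a minimal standalone Ruby sketch of the patched predicate; the CrawlerDetection module in it is a hypothetical stand-in for Discourse's real class (which consults the blacklisted_crawler_user_agents site setting), and Rack::MockRequest is used only to fabricate a request env for the demo.

require 'rack'

# Hypothetical stand-in for Discourse's CrawlerDetection, just enough for
# this sketch; the real class matches the user agent against site settings.
module CrawlerDetection
  BLOCKED = ['Googlebot'].freeze

  def self.is_blocked_crawler?(user_agent)
    BLOCKED.any? { |ua| user_agent.to_s.include?(ua) }
  end
end

# Fabricate a GET request for a JSON resource from a blacklisted crawler.
env = Rack::MockRequest.env_for(
  '/latest.json',
  'HTTP_USER_AGENT' => 'Googlebot/2.1 (+http://www.google.com/bot.html)',
  'HTTP_ACCEPT' => 'application/json'
)
request = Rack::Request.new(env)

# The patched predicate. Plain Ruby's end_with? stands in for ActiveSupport's
# ends_with? so the sketch runs without Rails loaded.
blocked = request.get? &&
  !request.xhr? &&
  !request.path.end_with?('robots.txt') &&
  CrawlerDetection.is_blocked_crawler?(request.env['HTTP_USER_AGENT'])

puts blocked # => true; before this patch the Accept-header clause made it false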