diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb
index 039f5d7c934..e2c4be1ad66 100644
--- a/app/controllers/application_controller.rb
+++ b/app/controllers/application_controller.rb
@@ -56,12 +56,17 @@ class ApplicationController < ActionController::Base
     SiteSetting.enable_escaped_fragments? && params.key?("_escaped_fragment_")
   end
 
+  def show_browser_update?
+    @show_browser_update ||= CrawlerDetection.show_browser_update?(request.user_agent)
+  end
+  helper_method :show_browser_update?
+
   def use_crawler_layout?
     @use_crawler_layout ||=
       request.user_agent &&
       (request.content_type.blank? || request.content_type.include?('html')) &&
       !['json', 'rss'].include?(params[:format]) &&
-      (has_escaped_fragment? || params.key?("print") ||
+      (has_escaped_fragment? || params.key?("print") || show_browser_update? ||
       CrawlerDetection.crawler?(request.user_agent, request.headers["HTTP_VIA"])
       )
   end
diff --git a/app/views/layouts/crawler.html.erb b/app/views/layouts/crawler.html.erb
index 8848af5cf4e..4e0a7e7bdd4 100644
--- a/app/views/layouts/crawler.html.erb
+++ b/app/views/layouts/crawler.html.erb
@@ -16,7 +16,9 @@
     <%= theme_lookup("head_tag") %>
     <%= render_google_universal_analytics_code %>
     <%= yield :head %>
-
+    <% if show_browser_update? %>
+
+    <% end %>
 
     <%= build_plugin_html 'server:before-head-close-crawler' %>
   </head>
@@ -67,6 +69,9 @@
     <%= theme_lookup("footer") %>
     <%= theme_lookup("body_tag") %>
 
+    <% if show_browser_update? %>
+      <%= I18n.t("js.browser_update").html_safe %>
+    <% end %>
     <%= yield :after_body %>
   </body>
 </html>
diff --git a/config/site_settings.yml b/config/site_settings.yml
index a2baeccf36e..c3c2801e1ff 100644
--- a/config/site_settings.yml
+++ b/config/site_settings.yml
@@ -1539,6 +1539,11 @@ security:
     default: "rss|bot|spider|crawler|facebook|archive|wayback|ping|monitor|lighthouse"
     type: list
     list_type: compact
+  browser_update_user_agents:
+    hidden: true
+    default: "MSIE 6|MSIE 7|MSIE 8|MSIE 9"
+    type: list
+    list_type: compact
   crawler_check_bypass_agents:
     hidden: true
     default: "cubot"
diff --git a/lib/crawler_detection.rb b/lib/crawler_detection.rb
index 36fb61b63e5..0b90dc0acb6 100644
--- a/lib/crawler_detection.rb
+++ b/lib/crawler_detection.rb
@@ -37,6 +37,14 @@ module CrawlerDetection
 
   end
 
+  def self.show_browser_update?(user_agent)
+    return false if SiteSetting.browser_update_user_agents.blank?
+
+    @browser_update_matchers ||= {}
+    matcher = @browser_update_matchers[SiteSetting.browser_update_user_agents] ||= to_matcher(SiteSetting.browser_update_user_agents)
+    user_agent.match?(matcher)
+  end
+
   # Given a user_agent that returns true from crawler?, should its request be allowed?
   def self.allow_crawler?(user_agent)
     return true if SiteSetting.allowed_crawler_user_agents.blank? &&
diff --git a/spec/components/crawler_detection_spec.rb b/spec/components/crawler_detection_spec.rb
index c5f2a2056b9..58d7926b138 100644
--- a/spec/components/crawler_detection_spec.rb
+++ b/spec/components/crawler_detection_spec.rb
@@ -69,6 +69,22 @@ describe CrawlerDetection do
 
   end
 
+  describe 'show_browser_update?' do
+    it 'always returns false if setting is empty' do
+      SiteSetting.browser_update_user_agents = ""
+
+      expect(CrawlerDetection.show_browser_update?('Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)')).to eq(false)
+      expect(CrawlerDetection.show_browser_update?('Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)')).to eq(false)
+    end
+
+    it 'returns true if setting matches user agent' do
+      SiteSetting.browser_update_user_agents = "MSIE 6|MSIE 7|MSIE 8|MSIE 9"
+
+      expect(CrawlerDetection.show_browser_update?('Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)')).to eq(false)
+      expect(CrawlerDetection.show_browser_update?('Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)')).to eq(true)
+    end
+  end
+
   describe 'allow_crawler?' do
     it 'returns true if allowlist and blocklist are blank' do
       expect(CrawlerDetection.allow_crawler?('Googlebot/2.1 (+http://www.google.com/bot.html)')).to eq(true)
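
For context: the matcher cache in `CrawlerDetection.show_browser_update?` is keyed by the raw value of the `browser_update_user_agents` setting, so editing the setting transparently compiles a fresh regexp on the next request without a restart. Below is a minimal standalone sketch of that behaviour. It assumes `to_matcher` escapes each pipe-delimited fragment and ORs them into one case-insensitive regexp; `BrowserUpdateSketch` and its `to_matcher` are illustrative stand-ins, not Discourse's actual helper, which may differ in detail.

```ruby
# Standalone sketch of the memoized user-agent matching above.
module BrowserUpdateSketch
  @matchers = {}

  # Assumed behaviour of to_matcher: escape each pipe-delimited
  # fragment and OR them into a single case-insensitive regexp.
  def self.to_matcher(setting)
    escaped = setting.split('|').map { |fragment| Regexp.escape(fragment) }.join('|')
    Regexp.new(escaped, Regexp::IGNORECASE)
  end

  def self.show_browser_update?(user_agent, setting)
    return false if setting.nil? || setting.strip.empty?

    # The cache key is the raw setting string: a changed setting misses
    # the cache and compiles a new regexp; the old entry is orphaned.
    matcher = (@matchers[setting] ||= to_matcher(setting))
    user_agent.match?(matcher)
  end
end

default = 'MSIE 6|MSIE 7|MSIE 8|MSIE 9'

# MSIE 7 matches the default list; MSIE 10 does not.
puts BrowserUpdateSketch.show_browser_update?(
  'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1)', default
)  # => true
puts BrowserUpdateSketch.show_browser_update?(
  'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1)', default
)  # => false
```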