diff --git a/app/assets/javascripts/admin/controllers/admin-customize-robots-txt.js.es6 b/app/assets/javascripts/admin/controllers/admin-customize-robots-txt.js.es6
new file mode 100644
index 00000000000..9cb38c75ea0
--- /dev/null
+++ b/app/assets/javascripts/admin/controllers/admin-customize-robots-txt.js.es6
@@ -0,0 +1,45 @@
+import { ajax } from "discourse/lib/ajax";
+import { bufferedProperty } from "discourse/mixins/buffered-content";
+import { propertyEqual } from "discourse/lib/computed";
+
+export default Ember.Controller.extend(bufferedProperty("model"), {
+ saved: false,
+ isSaving: false,
+ saveDisabled: propertyEqual("model.robots_txt", "buffered.robots_txt"),
+ resetDisabled: Ember.computed.not("model.overridden"),
+
+ actions: {
+ save() {
+ this.setProperties({
+ isSaving: true,
+ saved: false
+ });
+
+ ajax("robots.json", {
+ method: "PUT",
+ data: { robots_txt: this.buffered.get("robots_txt") }
+ })
+ .then(data => {
+ this.commitBuffer();
+ this.set("saved", true);
+ this.set("model.overridden", data.overridden);
+ })
+ .finally(() => this.set("isSaving", false));
+ },
+
+ reset() {
+ this.setProperties({
+ isSaving: true,
+ saved: false
+ });
+ ajax("robots.json", { method: "DELETE" })
+ .then(data => {
+ this.buffered.set("robots_txt", data.robots_txt);
+ this.commitBuffer();
+ this.set("saved", true);
+ this.set("model.overridden", false);
+ })
+ .finally(() => this.set("isSaving", false));
+ }
+ }
+});
diff --git a/app/assets/javascripts/admin/routes/admin-customize-robots-txt.js.es6 b/app/assets/javascripts/admin/routes/admin-customize-robots-txt.js.es6
new file mode 100644
index 00000000000..50acd6cac1d
--- /dev/null
+++ b/app/assets/javascripts/admin/routes/admin-customize-robots-txt.js.es6
@@ -0,0 +1,7 @@
+import { ajax } from "discourse/lib/ajax";
+
+export default Ember.Route.extend({
+ model() {
+ return ajax("/admin/customize/robots");
+ }
+});
diff --git a/app/assets/javascripts/admin/routes/admin-route-map.js.es6 b/app/assets/javascripts/admin/routes/admin-route-map.js.es6
index 9ae0063ffea..a20165db02e 100644
--- a/app/assets/javascripts/admin/routes/admin-route-map.js.es6
+++ b/app/assets/javascripts/admin/routes/admin-route-map.js.es6
@@ -86,6 +86,10 @@ export default function() {
this.route("edit", { path: "/:id" });
}
);
+ this.route("adminCustomizeRobotsTxt", {
+ path: "/robots",
+ resetNamespace: true
+ });
}
);
diff --git a/app/assets/javascripts/admin/templates/customize-robots-txt.hbs b/app/assets/javascripts/admin/templates/customize-robots-txt.hbs
new file mode 100644
index 00000000000..b556f0c537c
--- /dev/null
+++ b/app/assets/javascripts/admin/templates/customize-robots-txt.hbs
@@ -0,0 +1,20 @@
+<div class="robots-txt-edit">
+  <h3>{{i18n "admin.customize.robots.title"}}</h3>
+  <p>{{i18n "admin.customize.robots.warning"}}</p>
+  {{#if model.overridden}}
+    <div class="overridden">
+      {{i18n "admin.customize.robots.overridden"}}
+    </div>
+  {{/if}}
+  {{textarea
+    value=buffered.robots_txt
+    class="robots-txt-input"}}
+  {{#save-controls model=this action=(action "save") saved=saved saveDisabled=saveDisabled}}
+    {{d-button
+      class="btn-default"
+      disabled=resetDisabled
+      icon="undo"
+      action=(action "reset")
+      label="admin.settings.reset"}}
+  {{/save-controls}}
+</div>
diff --git a/app/assets/stylesheets/common/admin/customize.scss b/app/assets/stylesheets/common/admin/customize.scss
index 86290bcf031..c68f895b6a2 100644
--- a/app/assets/stylesheets/common/admin/customize.scss
+++ b/app/assets/stylesheets/common/admin/customize.scss
@@ -777,3 +777,16 @@
margin-left: 1em;
}
}
+
+.robots-txt-edit {
+ div.overridden {
+ background: $highlight-medium;
+ padding: 7px;
+ margin-bottom: 7px;
+ }
+ .robots-txt-input {
+ width: 100%;
+ box-sizing: border-box;
+ height: 600px;
+ }
+}
diff --git a/app/controllers/admin/robots_txt_controller.rb b/app/controllers/admin/robots_txt_controller.rb
new file mode 100644
index 00000000000..b269a6c9ec0
--- /dev/null
+++ b/app/controllers/admin/robots_txt_controller.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+class Admin::RobotsTxtController < Admin::AdminController
+
+ def show
+ render json: { robots_txt: current_robots_txt, overridden: @overridden }
+ end
+
+ def update
+ params.require(:robots_txt)
+ SiteSetting.overridden_robots_txt = params[:robots_txt]
+
+ render json: { robots_txt: current_robots_txt, overridden: @overridden }
+ end
+
+ def reset
+ SiteSetting.overridden_robots_txt = ""
+ render json: { robots_txt: original_robots_txt, overridden: false }
+ end
+
+ private
+
+ def current_robots_txt
+ robots_txt = SiteSetting.overridden_robots_txt.presence
+ @overridden = robots_txt.present?
+ robots_txt ||= original_robots_txt
+ robots_txt
+ end
+
+ def original_robots_txt
+ if SiteSetting.allow_index_in_robots_txt?
+ @robots_info = ::RobotsTxtController.fetch_default_robots_info
+ render_to_string "robots_txt/index"
+ else
+ render_to_string "robots_txt/no_index"
+ end
+ end
+end
diff --git a/app/controllers/robots_txt_controller.rb b/app/controllers/robots_txt_controller.rb
index 6d66579fa14..9c99cb5c1dc 100644
--- a/app/controllers/robots_txt_controller.rb
+++ b/app/controllers/robots_txt_controller.rb
@@ -4,6 +4,8 @@ class RobotsTxtController < ApplicationController
layout false
skip_before_action :preload_json, :check_xhr, :redirect_to_login_if_required
+ OVERRIDDEN_HEADER = "# This robots.txt file has been customized at /admin/customize/robots\n"
+
# NOTE: order is important!
DISALLOWED_PATHS ||= %w{
/auth/
@@ -33,8 +35,13 @@ class RobotsTxtController < ApplicationController
}
def index
+ if (overridden = SiteSetting.overridden_robots_txt.dup).present?
+ overridden.prepend(OVERRIDDEN_HEADER) if guardian.is_admin? && !is_api?
+ render plain: overridden
+ return
+ end
if SiteSetting.allow_index_in_robots_txt?
- @robots_info = fetch_robots_info
+ @robots_info = self.class.fetch_default_robots_info
render :index, content_type: 'text/plain'
else
render :no_index, content_type: 'text/plain'
@@ -46,12 +53,13 @@ class RobotsTxtController < ApplicationController
# JSON that can be used by a script to create a robots.txt that works well with your
# existing site.
def builder
- render json: fetch_robots_info
+ result = self.class.fetch_default_robots_info
+ overridden = SiteSetting.overridden_robots_txt
+ result[:overridden] = overridden if overridden.present?
+ render json: result
end
-protected
-
- def fetch_robots_info
+ def self.fetch_default_robots_info
deny_paths = DISALLOWED_PATHS.map { |p| Discourse.base_uri + p }
deny_all = [ "#{Discourse.base_uri}/" ]
@@ -87,5 +95,4 @@ protected
result
end
-
end
diff --git a/config/locales/client.en.yml b/config/locales/client.en.yml
index fd5a24d2562..5414bf769ad 100644
--- a/config/locales/client.en.yml
+++ b/config/locales/client.en.yml
@@ -3625,7 +3625,10 @@ en:
love:
name: "love"
description: "The like button's color."
-
+ robots:
+ title: "Override your site's robots.txt file:"
+ warning: "Warning: overriding the robots.txt file will prevent all future changes to the site settings that modify robots.txt from being applied."
+ overridden: Your site's default robots.txt file is overridden.
email:
title: "Emails"
settings: "Settings"
diff --git a/config/routes.rb b/config/routes.rb
index 44a26637ffc..48f93e5d9a3 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -235,6 +235,10 @@ Discourse::Application.routes.draw do
get 'email_templates/(:id)' => 'email_templates#show', constraints: { id: /[0-9a-z_.]+/ }
put 'email_templates/(:id)' => 'email_templates#update', constraints: { id: /[0-9a-z_.]+/ }
delete 'email_templates/(:id)' => 'email_templates#revert', constraints: { id: /[0-9a-z_.]+/ }
+
+ get 'robots' => 'robots_txt#show'
+ put 'robots.json' => 'robots_txt#update'
+ delete 'robots.json' => 'robots_txt#reset'
end
resources :embeddable_hosts, constraints: AdminConstraint.new
diff --git a/config/site_settings.yml b/config/site_settings.yml
index 6e17b3ca335..ec69826d76f 100644
--- a/config/site_settings.yml
+++ b/config/site_settings.yml
@@ -1929,6 +1929,10 @@ uncategorized:
default: 50000
hidden: true
+ overridden_robots_txt:
+ default: ""
+ hidden: true
+
user_preferences:
default_email_digest_frequency:
enum: "DigestEmailSiteSetting"
diff --git a/spec/requests/admin/robots_txt_controller_spec.rb b/spec/requests/admin/robots_txt_controller_spec.rb
new file mode 100644
index 00000000000..046914017a9
--- /dev/null
+++ b/spec/requests/admin/robots_txt_controller_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe Admin::RobotsTxtController do
+ it "is a subclass of AdminController" do
+ expect(described_class < Admin::AdminController).to eq(true)
+ end
+
+ fab!(:admin) { Fabricate(:admin) }
+ fab!(:user) { Fabricate(:user) }
+
+ describe "non-admin users" do
+ before { sign_in(user) }
+
+ it "can't see #show" do
+ get "/admin/customize/robots.json"
+ expect(response.status).to eq(404)
+ end
+
+ it "can't perform #update" do
+ put "/admin/customize/robots.json", params: { robots_txt: "adasdasd" }
+ expect(response.status).to eq(404)
+ expect(SiteSetting.overridden_robots_txt).to eq("")
+ end
+
+ it "can't perform #reset" do
+ SiteSetting.overridden_robots_txt = "overridden_content"
+ delete "/admin/customize/robots.json"
+ expect(response.status).to eq(404)
+ expect(SiteSetting.overridden_robots_txt).to eq("overridden_content")
+ end
+ end
+
+ describe "#show" do
+ before { sign_in(admin) }
+
+ it "returns default content if there are no overrides" do
+ get "/admin/customize/robots.json"
+ expect(response.status).to eq(200)
+ json = JSON.parse(response.body)
+ expect(json["robots_txt"]).to be_present
+ expect(json["overridden"]).to eq(false)
+ end
+
+ it "returns overridden content if there are overrides" do
+ SiteSetting.overridden_robots_txt = "something"
+ get "/admin/customize/robots.json"
+ expect(response.status).to eq(200)
+ json = JSON.parse(response.body)
+ expect(json["robots_txt"]).to eq("something")
+ expect(json["overridden"]).to eq(true)
+ end
+ end
+
+ describe "#update" do
+ before { sign_in(admin) }
+
+ it "overrides the site's default robots.txt" do
+ put "/admin/customize/robots.json", params: { robots_txt: "new_content" }
+ expect(response.status).to eq(200)
+ json = JSON.parse(response.body)
+ expect(json["robots_txt"]).to eq("new_content")
+ expect(json["overridden"]).to eq(true)
+ expect(SiteSetting.overridden_robots_txt).to eq("new_content")
+
+ get "/robots.txt"
+ expect(response.body).to include("new_content")
+ end
+
+ it "requires `robots_txt` param to be present" do
+ SiteSetting.overridden_robots_txt = "overridden_content"
+ put "/admin/customize/robots.json", params: { robots_txt: "" }
+ expect(response.status).to eq(400)
+ end
+ end
+
+ describe "#reset" do
+ before { sign_in(admin) }
+
+ it "resets robots.txt file to the default version" do
+ SiteSetting.overridden_robots_txt = "overridden_content"
+ delete "/admin/customize/robots.json"
+ expect(response.status).to eq(200)
+ json = JSON.parse(response.body)
+ expect(json["robots_txt"]).not_to include("overridden_content")
+ expect(json["overridden"]).to eq(false)
+ expect(SiteSetting.overridden_robots_txt).to eq("")
+ end
+ end
+end
diff --git a/spec/requests/robots_txt_controller_spec.rb b/spec/requests/robots_txt_controller_spec.rb
index d18d3d4967e..413e806a3d5 100644
--- a/spec/requests/robots_txt_controller_spec.rb
+++ b/spec/requests/robots_txt_controller_spec.rb
@@ -11,10 +11,42 @@ RSpec.describe RobotsTxtController do
expect(json['header']).to be_present
expect(json['agents']).to be_present
end
+
+ it "includes overridden content if robots.txt is overridden" do
+ SiteSetting.overridden_robots_txt = "something"
+
+ get "/robots-builder.json"
+ expect(response.status).to eq(200)
+ json = ::JSON.parse(response.body)
+ expect(json['header']).to be_present
+ expect(json['agents']).to be_present
+ expect(json['overridden']).to eq("something")
+ end
end
describe '#index' do
+ context "header for when the content is overridden" do
+ it "is not prepended if there are no overrides" do
+ sign_in(Fabricate(:admin))
+ get '/robots.txt'
+ expect(response.body).not_to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
+ end
+
+ it "is prepended if there are overrides and the user is admin" do
+ SiteSetting.overridden_robots_txt = "overridden_content"
+ sign_in(Fabricate(:admin))
+ get '/robots.txt'
+ expect(response.body).to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
+ end
+
+ it "is not prepended if the user is not admin" do
+ SiteSetting.overridden_robots_txt = "overridden_content"
+ get '/robots.txt'
+ expect(response.body).not_to start_with(RobotsTxtController::OVERRIDDEN_HEADER)
+ end
+ end
+
context 'subfolder' do
it 'prefixes the rules with the directory' do
Discourse.stubs(:base_uri).returns('/forum')
@@ -101,5 +133,12 @@ RSpec.describe RobotsTxtController do
expect(response.body).to_not include("Disallow: /u/")
end
+
+ it "returns overridden robots.txt if the file is overridden" do
+ SiteSetting.overridden_robots_txt = "blah whatever"
+ get '/robots.txt'
+ expect(response.status).to eq(200)
+ expect(response.body).to eq(SiteSetting.overridden_robots_txt)
+ end
end
end