diff --git a/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6 b/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6
index 6448c2565a6..49e8d9d4412 100644
--- a/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6
+++ b/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6
@@ -1,15 +1,9 @@
 import { ajax } from "discourse/lib/ajax";
-import computed from "ember-addons/ember-computed-decorators";
 
 export default Ember.Controller.extend({
   adminBackups: Ember.inject.controller(),
   status: Ember.computed.alias("adminBackups.model"),
 
-  @computed
-  localBackupStorage() {
-    return this.siteSettings.backup_location === "local";
-  },
-
   uploadLabel: function() {
     return I18n.t("admin.backups.upload.label");
   }.property(),
diff --git a/app/assets/javascripts/admin/models/backup.js.es6 b/app/assets/javascripts/admin/models/backup.js.es6
index 0f6663065dc..dd6e8046e15 100644
--- a/app/assets/javascripts/admin/models/backup.js.es6
+++ b/app/assets/javascripts/admin/models/backup.js.es6
@@ -1,4 +1,5 @@
 import { ajax } from "discourse/lib/ajax";
+import PreloadStore from "preload-store";
 
 const Backup = Discourse.Model.extend({
   destroy() {
@@ -15,9 +16,9 @@ const Backup = Discourse.Model.extend({
 
 Backup.reopenClass({
   find() {
-    return ajax("/admin/backups.json").then(backups =>
-      backups.map(backup => Backup.create(backup))
-    );
+    return PreloadStore.getAndRemove("backups", () =>
+      ajax("/admin/backups.json")
+    ).then(backups => backups.map(backup => Backup.create(backup)));
   },
 
   start(withUploads) {
diff --git a/app/assets/javascripts/admin/routes/admin-backups.js.es6 b/app/assets/javascripts/admin/routes/admin-backups.js.es6
index 4514eec2054..a0b9342c619 100644
--- a/app/assets/javascripts/admin/routes/admin-backups.js.es6
+++ b/app/assets/javascripts/admin/routes/admin-backups.js.es6
@@ -151,15 +151,6 @@ export default Discourse.Route.extend({
           message: message
         })
       );
-    },
-
-    remoteUploadSuccess() {
-      Backup.find().then(backups => {
-        this.controllerFor("adminBackupsIndex").set(
-          "model",
-          backups.map(backup => Backup.create(backup))
-        );
-      });
     }
   }
 });
diff --git a/app/assets/javascripts/admin/templates/backups-index.hbs b/app/assets/javascripts/admin/templates/backups-index.hbs
index bcf8dccaa4d..3b20c7935e6 100644
--- a/app/assets/javascripts/admin/templates/backups-index.hbs
+++ b/app/assets/javascripts/admin/templates/backups-index.hbs
@@ -1,10 +1,5 @@
-  {{#if localBackupStorage}}
-    {{resumable-upload target="/admin/backups/upload" success="uploadSuccess" error="uploadError" uploadText=uploadLabel title="admin.backups.upload.title"}}
-  {{else}}
-    {{backup-uploader done="remoteUploadSuccess"}}
-  {{/if}}
-
+  {{resumable-upload target="/admin/backups/upload" success="uploadSuccess" error="uploadError" uploadText=uploadLabel title="admin.backups.upload.title"}}
   {{#if site.isReadOnly}}
     {{d-button icon="eye" action="toggleReadOnlyMode" disabled=status.isOperationRunning title="admin.backups.read_only.disable.title" label="admin.backups.read_only.disable.label"}}
   {{else}}
@@ -15,7 +10,7 @@
       {{i18n 'admin.backups.columns.filename'}}
       {{i18n 'admin.backups.columns.size'}}
-
+
   {{#each model as |backup|}}
diff --git a/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs b/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs
index e69083494fb..caa74a86152 100644
--- a/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs
+++ b/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs
@@ -1,7 +1,7 @@
 One word per line
diff --git a/app/assets/javascripts/discourse/components/backup-uploader.js.es6 b/app/assets/javascripts/discourse/components/backup-uploader.js.es6
deleted file mode 100644
index 3b54695e3d0..00000000000
--- a/app/assets/javascripts/discourse/components/backup-uploader.js.es6
+++ /dev/null
@@ -1,51 +0,0 @@
-import { ajax } from "discourse/lib/ajax";
-import computed from "ember-addons/ember-computed-decorators";
-import UploadMixin from "discourse/mixins/upload";
-
-export default Em.Component.extend(UploadMixin, {
-  tagName: "span",
-
-  @computed("uploading", "uploadProgress")
-  uploadButtonText(uploading, progress) {
-    return uploading
-      ? I18n.t("admin.backups.upload.uploading_progress", { progress })
-      : I18n.t("admin.backups.upload.label");
-  },
-
-  validateUploadedFilesOptions() {
-    return { skipValidation: true };
-  },
-
-  uploadDone() {
-    this.sendAction("done");
-  },
-
-  calculateUploadUrl() {
-    return "";
-  },
-
-  uploadOptions() {
-    return {
-      type: "PUT",
-      dataType: "xml",
-      autoUpload: false
-    };
-  },
-
-  _init: function() {
-    const $upload = this.$();
-
-    $upload.on("fileuploadadd", (e, data) => {
-      ajax("/admin/backups/upload_url", {
-        data: { filename: data.files[0].name }
-      }).then(result => {
-        if (!result.success) {
-          bootbox.alert(result.message);
-        } else {
-          data.url = result.url;
-          data.submit();
-        }
-      });
-    });
-  }.on("didInsertElement")
-});
diff --git a/app/assets/javascripts/discourse/lib/utilities.js.es6 b/app/assets/javascripts/discourse/lib/utilities.js.es6
index 0ab5c4c933e..9ab42d57806 100644
--- a/app/assets/javascripts/discourse/lib/utilities.js.es6
+++ b/app/assets/javascripts/discourse/lib/utilities.js.es6
@@ -225,7 +225,6 @@ export function validateUploadedFiles(files, opts) {
 }
 
 export function validateUploadedFile(file, opts) {
-  if (opts.skipValidation) return true;
   if (!authorizesOneOrMoreExtensions()) return false;
 
   opts = opts || {};
diff --git a/app/assets/javascripts/discourse/mixins/upload.js.es6 b/app/assets/javascripts/discourse/mixins/upload.js.es6
index 6ff81b8ec05..69925003069 100644
--- a/app/assets/javascripts/discourse/mixins/upload.js.es6
+++ b/app/assets/javascripts/discourse/mixins/upload.js.es6
@@ -2,7 +2,6 @@ import {
   displayErrorForUpload,
   validateUploadedFiles
 } from "discourse/lib/utilities";
-import getUrl from "discourse-common/lib/get-url";
 
 export default Em.Mixin.create({
   uploading: false,
@@ -16,25 +15,13 @@
     return {};
   },
 
-  calculateUploadUrl() {
-    return (
-      getUrl(this.getWithDefault("uploadUrl", "/uploads")) +
-      ".json?client_id=" +
-      this.messageBus.clientId +
-      "&authenticity_token=" +
-      encodeURIComponent(Discourse.Session.currentProp("csrfToken"))
-    );
-  },
-
-  uploadOptions() {
-    return {};
-  },
-
   _initialize: function() {
-    const $upload = this.$();
-    const reset = () =>
-      this.setProperties({ uploading: false, uploadProgress: 0 });
-    const maxFiles = this.getWithDefault("maxFiles", 10);
+    const $upload = this.$(),
+      csrf = Discourse.Session.currentProp("csrfToken"),
+      uploadUrl = Discourse.getURL(
+        this.getWithDefault("uploadUrl", "/uploads")
+      ),
+      reset = () => this.setProperties({ uploading: false, uploadProgress: 0 });
 
     $upload.on("fileuploaddone", (e, data) => {
       let upload = data.result;
       reset();
     });
 
-    $upload.fileupload(
-      _.merge(
-        {
-          url: this.calculateUploadUrl(),
-          dataType: "json",
-          replaceFileInput: false,
-          dropZone: $upload,
-          pasteZone: $upload
-        },
-        this.uploadOptions()
-      )
-    );
+    $upload.fileupload({
+      url:
+        uploadUrl +
+        ".json?client_id=" +
+        this.messageBus.clientId +
+        "&authenticity_token=" +
+        encodeURIComponent(csrf),
+      dataType: "json",
+      dropZone: $upload,
+      pasteZone: $upload
+    });
 
     $upload.on("fileuploaddrop", (e, data) => {
-      if (data.files.length > maxFiles) {
+      if (data.files.length > 10) {
        bootbox.alert(I18n.t("post.errors.too_many_dragged_and_dropped_files"));
         return false;
       } else {
@@ -70,8 +56,7 @@
       this.validateUploadedFilesOptions()
     );
     const isValid = validateUploadedFiles(data.files, opts);
-    const type = this.get("type");
-    let form = type ? { type } : {};
+    let form = { type: this.get("type") };
     if (this.get("data")) {
       form = $.extend(form, this.get("data"));
     }
diff --git a/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs b/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs
index f6b2102c92a..b43cbb0d567 100644
--- a/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs
+++ b/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs
@@ -1,6 +1,6 @@
 {{#if uploading}}
   {{i18n 'upload_selector.uploading'}} {{uploadProgress}}%
diff --git a/app/assets/javascripts/discourse/templates/components/backup-uploader.hbs b/app/assets/javascripts/discourse/templates/components/backup-uploader.hbs
deleted file mode 100644
index affa124962b..00000000000
--- a/app/assets/javascripts/discourse/templates/components/backup-uploader.hbs
+++ /dev/null
@@ -1,4 +0,0 @@
-
diff --git a/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs b/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs
index ad3120827f5..859433546c3 100644
--- a/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs
+++ b/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs
@@ -1,6 +1,6 @@
 {{#if uploading}}
   {{i18n 'upload_selector.uploading'}} {{uploadProgress}}%
diff --git a/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs b/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs
index 18ddcaef5a6..85ad4157bb3 100644
--- a/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs
+++ b/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs
@@ -2,5 +2,5 @@
diff --git a/app/assets/javascripts/discourse/templates/components/image-uploader.hbs b/app/assets/javascripts/discourse/templates/components/image-uploader.hbs
index 8eb03a01457..5a12cff39bc 100644
--- a/app/assets/javascripts/discourse/templates/components/image-uploader.hbs
+++ b/app/assets/javascripts/discourse/templates/components/image-uploader.hbs
@@ -2,7 +2,7 @@
 {{#if backgroundStyle}}
diff --git a/app/assets/javascripts/discourse/templates/components/images-uploader.hbs b/app/assets/javascripts/discourse/templates/components/images-uploader.hbs
index 6c53367c14b..a8a3f7a3812 100644
--- a/app/assets/javascripts/discourse/templates/components/images-uploader.hbs
+++ b/app/assets/javascripts/discourse/templates/components/images-uploader.hbs
@@ -1,6 +1,6 @@
 {{#if uploading}}
   {{i18n 'upload_selector.uploading'}} {{uploadProgress}}%
diff --git a/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs b/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs
index 1e8fbc61f73..05f1cb29517 100644
--- a/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs
+++ b/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs
@@ -10,5 +10,5 @@
     {{d-icon "picture-o"}}
   {{/if}}
-
+
diff --git a/app/assets/stylesheets/common/base/upload.scss b/app/assets/stylesheets/common/base/upload.scss
index ddaf0d1d4ed..dc2a87aa106 100644
--- a/app/assets/stylesheets/common/base/upload.scss
+++ b/app/assets/stylesheets/common/base/upload.scss
@@ -14,8 +14,3 @@
     background-size: contain;
   }
 }
-
-.hidden-upload-field {
-  visibility: hidden;
-  position: absolute;
-}
diff --git a/app/assets/stylesheets/wizard.scss b/app/assets/stylesheets/wizard.scss
index a35b7830b18..7b4526453a5 100644
--- a/app/assets/stylesheets/wizard.scss
+++ b/app/assets/stylesheets/wizard.scss
@@ -328,11 +328,6 @@ body.wizard {
     }
   }
 
-  .wizard-hidden-upload-field {
-    visibility: hidden;
-    position: absolute;
-  }
-
   .wizard-step-footer {
     display: flex;
     flex-direction: row;
diff --git a/app/controllers/admin/backups_controller.rb b/app/controllers/admin/backups_controller.rb
index 2b4ffd3512f..ef8950fd5b5 100644
--- a/app/controllers/admin/backups_controller.rb
+++ b/app/controllers/admin/backups_controller.rb
@@ -1,26 +1,20 @@
 require "backup_restore/backup_restore"
-require "backup_restore/backup_store"
 
 class Admin::BackupsController < Admin::AdminController
+  before_action :ensure_backups_enabled
   skip_before_action :check_xhr, only: [:index, :show, :logs, :check_backup_chunk, :upload_backup_chunk]
 
   def index
     respond_to do |format|
       format.html do
+        store_preloaded("backups", MultiJson.dump(serialize_data(Backup.all, BackupSerializer)))
         store_preloaded("operations_status", MultiJson.dump(BackupRestore.operations_status))
         store_preloaded("logs", MultiJson.dump(BackupRestore.logs))
         render "default/empty"
       end
 
       format.json do
-        store = BackupRestore::BackupStore.create
-
-        begin
-          render_serialized(store.files, BackupFileSerializer)
-        rescue BackupRestore::BackupStore::StorageError => e
-          render_json_error(e)
-        end
+        render_serialized(Backup.all, BackupSerializer)
       end
     end
   end
@@ -52,11 +46,8 @@ class Admin::BackupsController < Admin::AdminController
   end
 
   def email
-    store = BackupRestore::BackupStore.create
-
-    if store.file(params.fetch(:id)).present?
-      Jobs.enqueue(
-        :download_backup_email,
+    if backup = Backup[params.fetch(:id)]
+      Jobs.enqueue(:download_backup_email,
         user_id: current_user.id,
         backup_file_path: url_for(controller: 'backups', action: 'show')
       )
@@ -68,34 +59,28 @@
   def show
     if !EmailBackupToken.compare(current_user.id, params.fetch(:token))
       @error = I18n.t('download_backup_mailer.no_token')
-      return render template: 'admin/backups/show.html.erb', layout: 'no_ember', status: 422
     end
-
-    store = BackupRestore::BackupStore.create
-
-    if backup = store.file(params.fetch(:id), include_download_source: true)
+    if !@error && backup = Backup[params.fetch(:id)]
       EmailBackupToken.del(current_user.id)
       StaffActionLogger.new(current_user).log_backup_download(backup)
-
-      if store.remote?
-        redirect_to backup.source
-      else
-        headers['Content-Length'] = File.size(backup.source).to_s
-        send_file backup.source
-      end
+      headers['Content-Length'] = File.size(backup.path).to_s
+      send_file backup.path
     else
-      render body: nil, status: 404
+      if @error
+        render template: 'admin/backups/show.html.erb', layout: 'no_ember', status: 422
+      else
+        render body: nil, status: 404
+      end
     end
   end
 
   def destroy
-    store = BackupRestore::BackupStore.create
-
-    if backup = store.file(params.fetch(:id))
+    if backup = Backup[params.fetch(:id)]
       StaffActionLogger.new(current_user).log_backup_destroy(backup)
-      store.delete_file(backup.filename)
+      backup.remove
       render body: nil
     else
       render body: nil, status: 404
@@ -146,13 +131,13 @@ class Admin::BackupsController < Admin::AdminController
   end
 
   def check_backup_chunk
-    identifier = params.fetch(:resumableIdentifier)
-    filename = params.fetch(:resumableFilename)
-    chunk_number = params.fetch(:resumableChunkNumber)
+    identifier         = params.fetch(:resumableIdentifier)
+    filename           = params.fetch(:resumableFilename)
+    chunk_number       = params.fetch(:resumableChunkNumber)
     current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i
 
     # path to chunk file
-    chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
+    chunk = Backup.chunk_path(identifier, filename, chunk_number)
 
     # check chunk upload status
     status = HandleChunkUpload.check_chunk(chunk, current_chunk_size: current_chunk_size)
@@ -160,21 +145,21 @@ class Admin::BackupsController < Admin::AdminController
   end
 
   def upload_backup_chunk
-    filename = params.fetch(:resumableFilename)
+    filename   = params.fetch(:resumableFilename)
     total_size = params.fetch(:resumableTotalSize).to_i
 
-    return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename)
-    return render status: 415, plain: I18n.t("backup.not_enough_space_on_disk") unless has_enough_space_on_disk?(total_size)
-    return render status: 415, plain: I18n.t("backup.invalid_filename") unless valid_filename?(filename)
+    return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless /\.(tar\.gz|t?gz)$/i =~ filename
+    return render status: 415, plain: I18n.t("backup.not_enough_space_on_disk") unless has_enough_space_on_disk?(total_size)
+    return render status: 415, plain: I18n.t("backup.invalid_filename") unless !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename)
 
-    file = params.fetch(:file)
-    identifier = params.fetch(:resumableIdentifier)
-    chunk_number = params.fetch(:resumableChunkNumber).to_i
-    chunk_size = params.fetch(:resumableChunkSize).to_i
+    file               = params.fetch(:file)
+    identifier         = params.fetch(:resumableIdentifier)
+    chunk_number       = params.fetch(:resumableChunkNumber).to_i
+    chunk_size         = params.fetch(:resumableChunkSize).to_i
     current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i
 
     # path to chunk file
-    chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
+    chunk = Backup.chunk_path(identifier, filename, chunk_number)
 
     # upload chunk
     HandleChunkUpload.upload_chunk(chunk, file: file)
@@ -188,24 +173,6 @@
     render body: nil
   end
 
-  def create_upload_url
-    params.require(:filename)
-    filename = params.fetch(:filename)
-
-    return render_error("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename)
-    return render_error("backup.invalid_filename") unless valid_filename?(filename)
-
-    store = BackupRestore::BackupStore.create
-
-    begin
-      upload_url = store.generate_upload_url(filename)
-    rescue BackupRestore::BackupStore::BackupFileExists
-      return render_error("backup.file_exists")
-    end
-
-    render json: success_json.merge(url: upload_url)
-  end
-
   private
 
   def has_enough_space_on_disk?(size)
@@ -216,15 +183,4 @@
     raise Discourse::InvalidAccess.new unless SiteSetting.enable_backups?
   end
 
-  def valid_extension?(filename)
-    /\.(tar\.gz|t?gz)$/i =~ filename
-  end
-
-  def valid_filename?(filename)
-    !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename)
-  end
-
-  def render_error(message_key)
-    render json: failed_json.merge(message: I18n.t(message_key))
-  end
 end
diff --git a/app/controllers/admin/dashboard_next_controller.rb b/app/controllers/admin/dashboard_next_controller.rb
index 8da41c075db..aaeb001a157 100644
--- a/app/controllers/admin/dashboard_next_controller.rb
+++ b/app/controllers/admin/dashboard_next_controller.rb
@@ -27,12 +27,8 @@
   private
 
   def last_backup_taken_at
-    store = BackupRestore::BackupStore.create
-
-    begin
-      store.latest_file&.last_modified
-    rescue BackupRestore::BackupStore::StorageError
-      nil
+    if last_backup = Backup.all.first
+      File.ctime(last_backup.path).utc
     end
   end
 end
diff --git a/app/jobs/regular/backup_chunks_merger.rb b/app/jobs/regular/backup_chunks_merger.rb
index 0fe070b801b..23cda4d2640 100644
--- a/app/jobs/regular/backup_chunks_merger.rb
+++ b/app/jobs/regular/backup_chunks_merger.rb
@@ -1,6 +1,3 @@
-require_dependency "backup_restore/local_backup_store"
-require_dependency "backup_restore/backup_store"
-
 module Jobs
 
   class BackupChunksMerger < Jobs::Base
@@ -15,24 +12,16 @@ module Jobs
       raise Discourse::InvalidParameters.new(:identifier) if identifier.blank?
       raise Discourse::InvalidParameters.new(:chunks) if chunks <= 0
 
-      backup_path = "#{BackupRestore::LocalBackupStore.base_directory}/#{filename}"
+      backup_path     = "#{Backup.base_directory}/#{filename}"
       tmp_backup_path = "#{backup_path}.tmp"
 
       # path to tmp directory
-      tmp_directory = File.dirname(BackupRestore::LocalBackupStore.chunk_path(identifier, filename, 0))
+      tmp_directory = File.dirname(Backup.chunk_path(identifier, filename, 0))
 
       # merge all chunks
-      HandleChunkUpload.merge_chunks(
-        chunks,
-        upload_path: backup_path,
-        tmp_upload_path: tmp_backup_path,
-        identifier: identifier,
-        filename: filename,
-        tmp_directory: tmp_directory
-      )
+      HandleChunkUpload.merge_chunks(chunks, upload_path: backup_path, tmp_upload_path: tmp_backup_path, model: Backup, identifier: identifier, filename: filename, tmp_directory: tmp_directory)
 
       # push an updated list to the clients
-      store = BackupRestore::BackupStore.create
-      data = ActiveModel::ArraySerializer.new(store.files, each_serializer: BackupFileSerializer).as_json
+      data = ActiveModel::ArraySerializer.new(Backup.all, each_serializer: BackupSerializer).as_json
       MessageBus.publish("/admin/backups", data, user_ids: User.staff.pluck(:id))
     end
diff --git a/app/jobs/scheduled/schedule_backup.rb b/app/jobs/scheduled/schedule_backup.rb
index b2e9129811a..feca26cb658 100644
--- a/app/jobs/scheduled/schedule_backup.rb
+++ b/app/jobs/scheduled/schedule_backup.rb
@@ -7,9 +7,8 @@ module Jobs
     def execute(args)
       return unless SiteSetting.enable_backups? && SiteSetting.automatic_backups_enabled?
 
-      store = BackupRestore::BackupStore.create
-      if latest_backup = store.latest_file
-        date = latest_backup.last_modified.to_date
+      if latest_backup = Backup.all[0]
+        date = File.ctime(latest_backup.path).getutc.to_date
         return if (date + SiteSetting.backup_frequency.days) > Time.now.utc.to_date
       end
 
diff --git a/app/models/admin_dashboard_data.rb b/app/models/admin_dashboard_data.rb
index 33d2c87858d..8295934a729 100644
--- a/app/models/admin_dashboard_data.rb
+++ b/app/models/admin_dashboard_data.rb
@@ -209,7 +209,7 @@ class AdminDashboardData
       bad_keys = (SiteSetting.s3_access_key_id.blank? || SiteSetting.s3_secret_access_key.blank?) && !SiteSetting.s3_use_iam_profile
 
       return I18n.t('dashboard.s3_config_warning') if SiteSetting.enable_s3_uploads && (bad_keys || SiteSetting.s3_upload_bucket.blank?)
-      return I18n.t('dashboard.s3_backup_config_warning') if SiteSetting.backup_location == BackupLocationSiteSetting::S3 && (bad_keys || SiteSetting.s3_backup_bucket.blank?)
+      return I18n.t('dashboard.s3_backup_config_warning') if SiteSetting.enable_s3_backups && (bad_keys || SiteSetting.s3_backup_bucket.blank?)
     end
     nil
   end
diff --git a/app/models/backup.rb b/app/models/backup.rb
new file mode 100644
index 00000000000..8dc6ae2e739
--- /dev/null
+++ b/app/models/backup.rb
@@ -0,0 +1,92 @@
+require 'disk_space'
+
+class Backup
+  include ActiveModel::SerializerSupport
+
+  attr_reader :filename
+  attr_accessor :size, :path, :link
+
+  def initialize(filename)
+    @filename = filename
+  end
+
+  def self.all
+    Dir.glob(File.join(Backup.base_directory, "*.{gz,tgz}"))
+      .sort_by { |file| File.mtime(file) }
+      .reverse
+      .map { |backup| Backup.create_from_filename(File.basename(backup)) }
+  end
+
+  def self.[](filename)
+    path = File.join(Backup.base_directory, filename)
+    if File.exists?(path)
+      Backup.create_from_filename(filename)
+    else
+      nil
+    end
+  end
+
+  def remove
+    File.delete(@path) if File.exists?(path)
+    after_remove_hook
+  end
+
+  def after_create_hook
+    upload_to_s3 if SiteSetting.enable_s3_backups?
+    DiscourseEvent.trigger(:backup_created)
+  end
+
+  def after_remove_hook
+    remove_from_s3 if SiteSetting.enable_s3_backups? && !SiteSetting.s3_disable_cleanup?
+    DiskSpace.reset_cached_stats unless SiteSetting.enable_s3_backups?
+  end
+
+  def s3_bucket
+    return @s3_bucket if @s3_bucket
+    raise Discourse::SiteSettingMissing.new("s3_backup_bucket") if SiteSetting.s3_backup_bucket.blank?
+    @s3_bucket = SiteSetting.s3_backup_bucket.downcase
+  end
+
+  def s3
+    require "s3_helper" unless defined? S3Helper
+    @s3_helper ||= S3Helper.new(s3_bucket, '', S3Helper.s3_options(SiteSetting))
+  end
+
+  def upload_to_s3
+    return unless s3
+    File.open(@path) do |file|
+      s3.upload(file, @filename)
+    end
+  end
+
+  def remove_from_s3
+    return unless s3
+    s3.remove(@filename)
+  end
+
+  def self.base_directory
+    base_directory = File.join(Rails.root, "public", "backups", RailsMultisite::ConnectionManagement.current_db)
+    FileUtils.mkdir_p(base_directory) unless Dir.exists?(base_directory)
+    base_directory
+  end
+
+  def self.chunk_path(identifier, filename, chunk_number)
+    File.join(Backup.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
+  end
+
+  def self.create_from_filename(filename)
+    Backup.new(filename).tap do |b|
+      b.path = File.join(Backup.base_directory, b.filename)
+      b.link = UrlHelper.schemaless "#{Discourse.base_url}/admin/backups/#{b.filename}"
+      b.size = File.size(b.path)
+    end
+  end
+
+  def self.remove_old
+    return if Rails.env.development?
+    all_backups = Backup.all
+    return if all_backups.size <= SiteSetting.maximum_backups
+    all_backups[SiteSetting.maximum_backups..-1].each(&:remove)
+  end
+
+end
diff --git a/app/models/backup_file.rb b/app/models/backup_file.rb
deleted file mode 100644
index 86482f687d5..00000000000
--- a/app/models/backup_file.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-class BackupFile
-  include ActiveModel::SerializerSupport
-
-  attr_reader :filename,
-              :size,
-              :last_modified,
-              :source
-
-  def initialize(filename:, size:, last_modified:, source: nil)
-    @filename = filename
-    @size = size
-    @last_modified = last_modified
-    @source = source
-  end
-
-  def ==(other)
-    attributes == other.attributes
-  end
-
-  protected
-
-  def attributes
-    [@filename, @size, @last_modified, @source]
-  end
-end
diff --git a/app/models/backup_location_site_setting.rb b/app/models/backup_location_site_setting.rb
deleted file mode 100644
index aef3c89fec3..00000000000
--- a/app/models/backup_location_site_setting.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require_dependency 'enum_site_setting'
-
-class BackupLocationSiteSetting < EnumSiteSetting
-  LOCAL ||= "local"
-  S3 ||= "s3"
-
-  def self.valid_value?(val)
-    values.any? { |v| v[:value] == val }
-  end
-
-  def self.values
-    @values ||= [
-      { name: "admin.backups.location.local", value: LOCAL },
-      { name: "admin.backups.location.s3", value: S3 }
-    ]
-  end
-
-  def self.translate_names?
-    true
-  end
-end
diff --git a/app/serializers/backup_file_serializer.rb b/app/serializers/backup_file_serializer.rb
deleted file mode 100644
index f4cada57082..00000000000
--- a/app/serializers/backup_file_serializer.rb
+++ /dev/null
@@ -1,5 +0,0 @@
-class BackupFileSerializer < ApplicationSerializer
-  attributes :filename,
-             :size,
-             :last_modified
-end
diff --git a/app/serializers/backup_serializer.rb b/app/serializers/backup_serializer.rb
new file mode 100644
index 00000000000..3d4356b09f4
--- /dev/null
+++ b/app/serializers/backup_serializer.rb
@@ -0,0 +1,3 @@
+class BackupSerializer < ApplicationSerializer
+  attributes :filename, :size, :link
+end
diff --git a/app/services/handle_chunk_upload.rb b/app/services/handle_chunk_upload.rb
index 93aee6122fc..75362e54e59 100644
--- a/app/services/handle_chunk_upload.rb
+++ b/app/services/handle_chunk_upload.rb
@@ -36,6 +36,7 @@ class HandleChunkUpload
   def merge_chunks
     upload_path = @params[:upload_path]
     tmp_upload_path = @params[:tmp_upload_path]
+    model = @params[:model]
     identifier = @params[:identifier]
     filename = @params[:filename]
     tmp_directory = @params[:tmp_directory]
@@ -51,7 +52,7 @@ class HandleChunkUpload
     File.open(tmp_upload_path, "a") do |file|
       (1..@chunk).each do |chunk_number|
         # path to chunk
-        chunk_path = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
+        chunk_path = model.chunk_path(identifier, filename, chunk_number)
         # add chunk to file
         file << File.open(chunk_path).read
       end
diff --git a/config/locales/client.en.yml b/config/locales/client.en.yml
index c9bbccf1aa3..63e17921742 100644
--- a/config/locales/client.en.yml
+++ b/config/locales/client.en.yml
@@ -3133,7 +3133,6 @@ en:
         label: "Upload"
         title: "Upload a backup to this instance"
         uploading: "Uploading..."
-        uploading_progress: "Uploading... {{progress}}%"
        success: "'{{filename}}' has successfully been uploaded. The file is now being processed and will take up to a minute to show up in the list."
error: "There has been an error while uploading '{{filename}}': {{message}}" operations: @@ -3164,9 +3163,6 @@ en: label: "Rollback" title: "Rollback the database to previous working state" confirm: "Are you sure you want to rollback the database to the previous working state?" - location: - local: "Local" - s3: "Amazon S3" export_csv: success: "Export initiated, you will be notified via message when the process is complete." diff --git a/config/locales/server.en.yml b/config/locales/server.en.yml index 6a5e45265a3..f450f85e75a 100644 --- a/config/locales/server.en.yml +++ b/config/locales/server.en.yml @@ -166,7 +166,6 @@ en: other: 'You specified the invalid choices %{name}' default_categories_already_selected: "You cannot select a category used in another list." s3_upload_bucket_is_required: "You cannot enable uploads to S3 unless you've provided the 's3_upload_bucket'." - s3_backup_requires_s3_settings: "You cannot use S3 as backup location unless you've provided the '%{setting_name}'." conflicting_google_user_id: 'The Google Account ID for this account has changed; staff intervention is required for security reasons. Please contact staff and point them to
https://meta.discourse.org/t/76575' activemodel: @@ -195,10 +194,6 @@ en: backup_file_should_be_tar_gz: "The backup file should be a .tar.gz archive." not_enough_space_on_disk: "There is not enough space on disk to upload this backup." invalid_filename: "The backup filename contains invalid characters. Valid characters are a-z 0-9 . - _." - file_exists: "The file you are trying to upload already exists." - location: - local: "Local" - s3: "Amazon S3" invalid_params: "You supplied invalid parameters to the request: %{message}" not_logged_in: "You need to be logged in to do that." @@ -1364,6 +1359,7 @@ en: maximum_backups: "The maximum amount of backups to keep on disk. Older backups are automatically deleted" automatic_backups_enabled: "Run automatic backups as defined in backup frequency" backup_frequency: "The number of days between backups." + enable_s3_backups: "Upload backups to S3 when complete. IMPORTANT: requires valid S3 credentials entered in Files settings." s3_backup_bucket: "The remote bucket to hold backups. WARNING: Make sure it is a private bucket." s3_endpoint: "The endpoint can be modified to backup to an S3 compatible service like DigitalOcean Spaces or Minio. WARNING: Use default if using AWS S3" s3_force_path_style: "Enforce path-style addressing for your custom endpoint. IMPORTANT: Required for using Minio uploads and backups." @@ -1371,7 +1367,6 @@ en: s3_disable_cleanup: "Disable the removal of backups from S3 when removed locally." backup_time_of_day: "Time of day UTC when the backup should occur." backup_with_uploads: "Include uploads in scheduled backups. Disabling this will only backup the database." - backup_location: "Location where backups are stored. IMPORTANT: S3 requires valid S3 credentials entered in Files settings." active_user_rate_limit_secs: "How frequently we update the 'last_seen_at' field, in seconds" verbose_localization: "Show extended localization tips in the UI" diff --git a/config/routes.rb b/config/routes.rb index 1d6caab2b07..11a6106c46f 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -280,7 +280,6 @@ Discourse::Application.routes.draw do put "readonly" => "backups#readonly" get "upload" => "backups#check_backup_chunk" post "upload" => "backups#upload_backup_chunk" - get "upload_url" => "backups#create_upload_url" end end diff --git a/config/site_settings.yml b/config/site_settings.yml index be1f8308521..6b932a524c6 100644 --- a/config/site_settings.yml +++ b/config/site_settings.yml @@ -1404,12 +1404,6 @@ backups: allow_restore: default: false shadowed_by_global: true - backup_location: - default: 'local' - type: enum - enum: 'BackupLocationSiteSetting' - shadowed_by_global: true - client: true maximum_backups: client: true default: 5 @@ -1422,6 +1416,9 @@ backups: max: 30 default: 7 shadowed_by_global: true + enable_s3_backups: + default: false + shadowed_by_global: true s3_backup_bucket: default: '' regex: '^[a-z0-9\-\/]+$' # can't use '.' 
when using HTTPS diff --git a/db/migrate/20180916195601_migrate_s3_backup_site_settings.rb b/db/migrate/20180916195601_migrate_s3_backup_site_settings.rb deleted file mode 100644 index 064a4046276..00000000000 --- a/db/migrate/20180916195601_migrate_s3_backup_site_settings.rb +++ /dev/null @@ -1,33 +0,0 @@ -class MigrateS3BackupSiteSettings < ActiveRecord::Migration[5.2] - def up - execute <<~SQL - UPDATE site_settings - SET name = 'backup_location', - data_type = 7, - value = 's3' - WHERE name = 'enable_s3_backups' AND value = 't'; - SQL - - execute <<~SQL - DELETE - FROM site_settings - WHERE name = 'enable_s3_backups'; - SQL - end - - def down - execute <<~SQL - UPDATE site_settings - SET name = 'enable_s3_backups', - data_type = 5, - value = 't' - WHERE name = 'backup_location' AND value = 's3'; - SQL - - execute <<~SQL - DELETE - FROM site_settings - WHERE name = 'backup_location'; - SQL - end -end diff --git a/lib/backup_restore/backup_restore.rb b/lib/backup_restore/backup_restore.rb index 8e69ed997a5..0591b31fe11 100644 --- a/lib/backup_restore/backup_restore.rb +++ b/lib/backup_restore/backup_restore.rb @@ -1,5 +1,5 @@ -require_dependency "backup_restore/backuper" -require_dependency "backup_restore/restorer" +require "backup_restore/backuper" +require "backup_restore/restorer" module BackupRestore diff --git a/lib/backup_restore/backup_store.rb b/lib/backup_restore/backup_store.rb deleted file mode 100644 index 3ca9d705292..00000000000 --- a/lib/backup_restore/backup_store.rb +++ /dev/null @@ -1,74 +0,0 @@ -module BackupRestore - # @abstract - class BackupStore - class BackupFileExists < RuntimeError; end - class StorageError < RuntimeError; end - - # @return [BackupStore] - def self.create(opts = {}) - case SiteSetting.backup_location - when BackupLocationSiteSetting::LOCAL - require_dependency "backup_restore/local_backup_store" - BackupRestore::LocalBackupStore.new(opts) - when BackupLocationSiteSetting::S3 - require_dependency "backup_restore/s3_backup_store" - BackupRestore::S3BackupStore.new(opts) - end - end - - # @return [Array] - def files - unsorted_files.sort_by { |file| -file.last_modified.to_i } - end - - # @return [BackupFile] - def latest_file - files.first - end - - def delete_old - return unless cleanup_allowed? - return if (backup_files = files).size <= SiteSetting.maximum_backups - - backup_files[SiteSetting.maximum_backups..-1].each do |file| - delete_file(file.filename) - end - end - - def remote? - fail NotImplementedError - end - - # @return [BackupFile] - def file(filename, include_download_source: false) - fail NotImplementedError - end - - def delete_file(filename) - fail NotImplementedError - end - - def download_file(filename, destination, failure_message = nil) - fail NotImplementedError - end - - def upload_file(filename, source_path, content_type) - fail NotImplementedError - end - - def generate_upload_url(filename) - fail NotImplementedError - end - - private - - # @return [Array] - def unsorted_files - fail NotImplementedError - end - - def cleanup_allowed? - true - end - end -end diff --git a/lib/backup_restore/backuper.rb b/lib/backup_restore/backuper.rb index f03d3dd50f3..e7fdc04ed6d 100644 --- a/lib/backup_restore/backuper.rb +++ b/lib/backup_restore/backuper.rb @@ -1,5 +1,4 @@ -require "disk_space" -require "mini_mime" +require 'disk_space' module BackupRestore @@ -11,7 +10,6 @@ module BackupRestore @client_id = opts[:client_id] @publish_to_message_bus = opts[:publish_to_message_bus] || false @with_uploads = opts[:with_uploads].nil? ? 
true : opts[:with_uploads] - @filename_override = opts[:filename] ensure_no_operation_is_running ensure_we_have_a_user @@ -44,7 +42,6 @@ module BackupRestore log "Finalizing backup..." @with_uploads ? create_archive : move_dump_backup - upload_archive after_create_hook rescue SystemExit @@ -55,9 +52,9 @@ module BackupRestore @success = false else @success = true - @backup_filename + File.join(@archive_directory, @backup_filename) ensure - delete_old + remove_old clean_up notify_user log "Finished!" @@ -78,14 +75,12 @@ module BackupRestore def initialize_state @success = false - @store = BackupRestore::BackupStore.create @current_db = RailsMultisite::ConnectionManagement.current_db @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S") @tmp_directory = File.join(Rails.root, "tmp", "backups", @current_db, @timestamp) @dump_filename = File.join(@tmp_directory, BackupRestore::DUMP_FILE) - @archive_directory = BackupRestore::LocalBackupStore.base_directory(@current_db) - filename = @filename_override || "#{SiteSetting.title.parameterize}-#{@timestamp}" - @archive_basename = File.join(@archive_directory, "#{filename}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}") + @archive_directory = File.join(Rails.root, "public", "backups", @current_db) + @archive_basename = File.join(@archive_directory, "#{SiteSetting.title.parameterize}-#{@timestamp}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}") @backup_filename = if @with_uploads @@ -200,10 +195,8 @@ module BackupRestore def move_dump_backup log "Finalizing database dump file: #{@backup_filename}" - archive_filename = File.join(@archive_directory, @backup_filename) - Discourse::Utils.execute_command( - 'mv', @dump_filename, archive_filename, + 'mv', @dump_filename, File.join(@archive_directory, @backup_filename), failure_message: "Failed to move database dump file." ) @@ -250,30 +243,17 @@ module BackupRestore Discourse::Utils.execute_command('gzip', '-5', tar_filename, failure_message: "Failed to gzip archive.") end - def upload_archive - return unless @store.remote? - - log "Uploading archive..." - content_type = MiniMime.lookup_by_filename(@backup_filename).content_type - archive_path = File.join(@archive_directory, @backup_filename) - @store.upload_file(@backup_filename, archive_path, content_type) - ensure - log "Removing archive from local storage..." - FileUtils.remove_file(archive_path, force: true) - end - def after_create_hook log "Executing the after_create_hook for the backup..." - DiscourseEvent.trigger(:backup_created) + backup = Backup.create_from_filename(@backup_filename) + backup.after_create_hook end - def delete_old - return if Rails.env.development? - - log "Deleting old backups..." - @store.delete_old + def remove_old + log "Removing old backups..." 
+      Backup.remove_old
     rescue => ex
-      log "Something went wrong while deleting old backups.", ex
+      log "Something went wrong while removing old backups.", ex
     end
 
     def notify_user
diff --git a/lib/backup_restore/local_backup_store.rb b/lib/backup_restore/local_backup_store.rb
deleted file mode 100644
index 8b0148ee3c3..00000000000
--- a/lib/backup_restore/local_backup_store.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-require_dependency "backup_restore/backup_store"
-require_dependency "disk_space"
-
-module BackupRestore
-  class LocalBackupStore < BackupStore
-    def self.base_directory(current_db = nil)
-      current_db ||= RailsMultisite::ConnectionManagement.current_db
-      base_directory = File.join(Rails.root, "public", "backups", current_db)
-      FileUtils.mkdir_p(base_directory) unless Dir.exists?(base_directory)
-      base_directory
-    end
-
-    def self.chunk_path(identifier, filename, chunk_number)
-      File.join(LocalBackupStore.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
-    end
-
-    def initialize(opts = {})
-      @base_directory = opts[:base_directory] || LocalBackupStore.base_directory
-    end
-
-    def remote?
-      false
-    end
-
-    def file(filename, include_download_source: false)
-      path = path_from_filename(filename)
-      create_file_from_path(path, include_download_source) if File.exists?(path)
-    end
-
-    def delete_file(filename)
-      path = path_from_filename(filename)
-
-      if File.exists?(path)
-        FileUtils.remove_file(path, force: true)
-        DiskSpace.reset_cached_stats
-      end
-    end
-
-    def download_file(filename, destination, failure_message = "")
-      path = path_from_filename(filename)
-      Discourse::Utils.execute_command('cp', path, destination, failure_message: failure_message)
-    end
-
-    private
-
-    def unsorted_files
-      files = Dir.glob(File.join(@base_directory, "*.{gz,tgz}"))
-      files.map! { |filename| create_file_from_path(filename) }
-      files
-    end
-
-    def path_from_filename(filename)
-      File.join(@base_directory, filename)
-    end
-
-    def create_file_from_path(path, include_download_source = false)
-      BackupFile.new(
-        filename: File.basename(path),
-        size: File.size(path),
-        last_modified: File.mtime(path).utc,
-        source: include_download_source ? path : nil
-      )
-    end
-  end
-end
diff --git a/lib/backup_restore/restorer.rb b/lib/backup_restore/restorer.rb
index 62cfd24b672..e8a98f0581c 100644
--- a/lib/backup_restore/restorer.rb
+++ b/lib/backup_restore/restorer.rb
@@ -133,12 +133,12 @@ module BackupRestore
     def initialize_state
       @success = false
-      @store = BackupRestore::BackupStore.create
       @db_was_changed = false
       @current_db = RailsMultisite::ConnectionManagement.current_db
       @current_version = BackupRestore.current_version
       @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
      @tmp_directory = File.join(Rails.root, "tmp", "restores", @current_db, @timestamp)
+      @source_filename = File.join(Backup.base_directory, @filename)
      @archive_filename = File.join(@tmp_directory, @filename)
       @tar_filename = @archive_filename[0...-3]
       @meta_filename = File.join(@tmp_directory, BackupRestore::METADATA_FILE)
@@ -195,15 +195,8 @@ module BackupRestore
     end
 
     def copy_archive_to_tmp_directory
-      if @store.remote?
-        log "Downloading archive to tmp directory..."
-        failure_message = "Failed to download archive to tmp directory."
-      else
-        log "Copying archive to tmp directory..."
-        failure_message = "Failed to copy archive to tmp directory."
-      end
-
-      @store.download_file(@filename, @archive_filename, failure_message)
+      log "Copying archive to tmp directory..."
+      Discourse::Utils.execute_command('cp', @source_filename, @archive_filename, failure_message: "Failed to copy archive to tmp directory.")
     end
 
     def unzip_archive
diff --git a/lib/backup_restore/s3_backup_store.rb b/lib/backup_restore/s3_backup_store.rb
deleted file mode 100644
index cbbc916df08..00000000000
--- a/lib/backup_restore/s3_backup_store.rb
+++ /dev/null
@@ -1,95 +0,0 @@
-require_dependency "backup_restore/backup_store"
-require_dependency "s3_helper"
-
-module BackupRestore
-  class S3BackupStore < BackupStore
-    DOWNLOAD_URL_EXPIRES_AFTER_SECONDS ||= 15
-    UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600 # 6 hours
-
-    def initialize(opts = {})
-      s3_options = S3Helper.s3_options(SiteSetting)
-      s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
-      @s3_helper = S3Helper.new(SiteSetting.s3_backup_bucket, '', s3_options)
-    end
-
-    def remote?
-      true
-    end
-
-    def file(filename, include_download_source: false)
-      obj = @s3_helper.object(filename)
-      create_file_from_object(obj, include_download_source) if obj.exists?
-    end
-
-    def delete_file(filename)
-      obj = @s3_helper.object(filename)
-      obj.delete if obj.exists?
-    end
-
-    def download_file(filename, destination_path, failure_message = nil)
-      unless @s3_helper.object(filename).download_file(destination_path)
-        raise failure_message&.to_s || "Failed to download file"
-      end
-    end
-
-    def upload_file(filename, source_path, content_type)
-      obj = @s3_helper.object(filename)
-      raise BackupFileExists.new if obj.exists?
-
-      obj.upload_file(source_path, content_type: content_type)
-    end
-
-    def generate_upload_url(filename)
-      obj = @s3_helper.object(filename)
-      raise BackupFileExists.new if obj.exists?
-
-      presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS)
-    end
-
-    private
-
-    def unsorted_files
-      objects = []
-
-      @s3_helper.list.each do |obj|
-        if obj.key.match?(/\.t?gz$/i)
-          objects << create_file_from_object(obj)
-        end
-      end
-
-      objects
-    rescue Aws::Errors::ServiceError => e
-      Rails.logger.warn("Failed to list backups from S3: #{e.message.presence || e.class.name}")
-      raise StorageError
-    end
-
-    def create_file_from_object(obj, include_download_source = false)
-      BackupFile.new(
-        filename: File.basename(obj.key),
-        size: obj.size,
-        last_modified: obj.last_modified,
-        source: include_download_source ? presigned_url(obj, :get, DOWNLOAD_URL_EXPIRES_AFTER_SECONDS) : nil
-      )
-    end
-
-    def presigned_url(obj, method, expires_in_seconds)
-      ensure_cors!
-      obj.presigned_url(method, expires_in: expires_in_seconds)
-    end
-
-    def ensure_cors!
-      rule = {
-        allowed_headers: ["*"],
-        allowed_methods: ["PUT"],
-        allowed_origins: [Discourse.base_url_no_prefix],
-        max_age_seconds: 3000
-      }
-
-      @s3_helper.ensure_cors!([rule])
-    end
-
-    def cleanup_allowed?
-      !SiteSetting.s3_disable_cleanup
-    end
-  end
-end
diff --git a/lib/disk_space.rb b/lib/disk_space.rb
index bf9bb0db0f3..8c5e385cec6 100644
--- a/lib/disk_space.rb
+++ b/lib/disk_space.rb
@@ -2,8 +2,8 @@
 class DiskSpace
   extend ActionView::Helpers::NumberHelper
 
-  DISK_SPACE_STATS_CACHE_KEY ||= 'disk_space_stats'.freeze
-  DISK_SPACE_STATS_UPDATED_CACHE_KEY ||= 'disk_space_stats_updated'.freeze
+  DISK_SPACE_STATS_CACHE_KEY = 'disk_space_stats'.freeze
+  DISK_SPACE_STATS_UPDATED_CACHE_KEY = 'disk_space_stats_updated'.freeze
 
   def self.uploads_used_bytes
     # used(uploads_path)
@@ -24,7 +24,7 @@ class DiskSpace
   end
 
   def self.backups_path
-    BackupRestore::LocalBackupStore.base_directory
+    Backup.base_directory
   end
 
   def self.uploads_path
diff --git a/lib/json_error.rb b/lib/json_error.rb
index 7d763a23085..1675ac562b2 100644
--- a/lib/json_error.rb
+++ b/lib/json_error.rb
@@ -24,12 +24,6 @@ module JsonError
     # If we're passed an array, it's an array of error messages
     return { errors: obj.map(&:to_s) } if obj.is_a?(Array) && obj.present?
 
-    if obj.is_a?(Exception)
-      message = obj.cause.message.presence || obj.cause.class.name if obj.cause
-      message = obj.message.presence || obj.class.name if message.blank?
-      return { errors: [message] } if message.present?
-    end
-
     # Log a warning (unless obj is nil)
    Rails.logger.warn("create_errors_json called with unrecognized type: #{obj.inspect}") if obj
diff --git a/lib/s3_helper.rb b/lib/s3_helper.rb
index b4bcb445197..a46fdc2d22d 100644
--- a/lib/s3_helper.rb
+++ b/lib/s3_helper.rb
@@ -51,7 +51,7 @@ class S3Helper
 
   # make sure we have a cors config for assets
   # otherwise we will have no fonts
-  def ensure_cors!(rules = nil)
+  def ensure_cors!
     rule = nil
 
     begin
@@ -63,17 +63,17 @@ class S3Helper
     end
 
     unless rule
-      rules = [{
-        allowed_headers: ["Authorization"],
-        allowed_methods: ["GET", "HEAD"],
-        allowed_origins: ["*"],
-        max_age_seconds: 3000
-      }] if rules.nil?
+      puts "installing CORS rule"
 
       s3_resource.client.put_bucket_cors(
         bucket: @s3_bucket_name,
         cors_configuration: {
-          cors_rules: rules
+          cors_rules: [{
+            allowed_headers: ["Authorization"],
+            allowed_methods: ["GET", "HEAD"],
+            allowed_origins: ["*"],
+            max_age_seconds: 3000
+          }]
         }
       )
     end
@@ -137,7 +137,10 @@ class S3Helper
   end
 
   def list(prefix = "")
-    prefix = get_path_for_s3_upload(prefix)
+    if @s3_bucket_folder_path.present?
+      prefix = File.join(@s3_bucket_folder_path, prefix)
+    end
+
     s3_bucket.objects(prefix: prefix)
   end
 
@@ -156,11 +159,6 @@ class S3Helper
     )
   end
 
-  def object(path)
-    path = get_path_for_s3_upload(path)
-    s3_bucket.object(path)
-  end
-
   def self.s3_options(obj)
     opts = {
       region: obj.s3_region,
      endpoint: SiteSetting.s3_endpoint,
diff --git a/lib/site_settings/validations.rb b/lib/site_settings/validations.rb
index 0d50f488b3d..ae38285cc82 100644
--- a/lib/site_settings/validations.rb
+++ b/lib/site_settings/validations.rb
@@ -1,8 +1,8 @@
 module SiteSettings; end
 
 module SiteSettings::Validations
-  def validate_error(key, opts = {})
-    raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}", opts))
+  def validate_error(key)
+    raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}"))
   end
 
   def validate_default_categories(new_val, default_categories_selected)
@@ -53,13 +53,4 @@ module SiteSettings::Validations
    validate_error :s3_upload_bucket_is_required if new_val == "t" && SiteSetting.s3_upload_bucket.blank?
   end
 
-  def validate_backup_location(new_val)
-    return unless new_val == BackupLocationSiteSetting::S3
-    validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_backup_bucket") if SiteSetting.s3_backup_bucket.blank?
-
-    unless SiteSetting.s3_use_iam_profile
-      validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_access_key_id") if SiteSetting.s3_access_key_id.blank?
-      validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_secret_access_key") if SiteSetting.s3_secret_access_key.blank?
-    end
-  end
 end
diff --git a/lib/tasks/s3.rake b/lib/tasks/s3.rake
index b19da1de3a1..7b2889ae938 100644
--- a/lib/tasks/s3.rake
+++ b/lib/tasks/s3.rake
@@ -84,8 +84,6 @@ end
 task 's3:upload_assets' => :environment do
   ensure_s3_configured!
-
-  puts "installing CORS rule"
   helper.ensure_cors!
 
   assets.each do |asset|
diff --git a/script/discourse b/script/discourse
index ddfc6181933..d0fccded04c 100755
--- a/script/discourse
+++ b/script/discourse
@@ -62,20 +62,18 @@ class DiscourseCLI < Thor
     require "backup_restore/backuper"
 
     puts "Starting backup..."
-    backuper = BackupRestore::Backuper.new(Discourse.system_user.id, filename: filename)
-    backup_filename = backuper.run
-    puts "Backup done."
-
-    store = BackupRestore::BackupStore.create
-
-    if store.remote?
-      location = BackupLocationSiteSetting.values.find { |v| v[:value] == SiteSetting.backup_location }
-      location = I18n.t("admin_js.#{location[:name]}") if location
-      puts "Output file is stored on #{location} as #{backup_filename}", ""
-    else
-      backup = store.file(backup_filename, include_download_source: true)
-      puts "Output file is in: #{backup.source}", ""
+    backuper = BackupRestore::Backuper.new(Discourse.system_user.id)
+    backup = backuper.run
+    if filename.present?
+      puts "Moving '#{backup}' to '#{filename}'"
+      puts "Including version number into '#{filename}'"
+      version_string = File.basename(backup)[/-#{BackupRestore::VERSION_PREFIX}\d{14}/]
+      filename = filename.dup.insert(filename.index('.'), version_string)
+      FileUtils.mv(backup, filename)
+      backup = filename
     end
+    puts "Backup done."
+    puts "Output file is in: #{backup}", ""
 
     exit(1) unless backuper.success
   end
@@ -94,22 +92,20 @@ class DiscourseCLI < Thor
       discourse = './script/discourse'
     end
 
-    load_rails
-    require "backup_restore/backup_restore"
-    require "backup_restore/restorer"
-    require "backup_restore/backup_store"
-
     if !filename
       puts "You must provide a filename to restore. Did you mean one of the following?\n\n"
 
-      store = BackupRestore::BackupStore.create
-      store.files.each do |file|
-        puts "#{discourse} restore #{file.filename}"
+      Dir["public/backups/default/*"].sort_by { |path| File.mtime(path) }.reverse.each do |f|
+        puts "#{discourse} restore #{File.basename(f)}"
       end
 
       return
     end
 
+    load_rails
+    require "backup_restore/backup_restore"
+    require "backup_restore/restorer"
+
     begin
       puts "Starting restore: #{filename}"
       restorer = BackupRestore::Restorer.new(Discourse.system_user.id, filename: filename)
diff --git a/spec/lib/backup_restore/local_backup_store_spec.rb b/spec/lib/backup_restore/local_backup_store_spec.rb
deleted file mode 100644
index efe87583d90..00000000000
--- a/spec/lib/backup_restore/local_backup_store_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-require 'rails_helper'
-require 'backup_restore/local_backup_store'
-require_relative 'shared_examples_for_backup_store'
-
-describe BackupRestore::LocalBackupStore do
-  before(:all) do
-    @base_directory = Dir.mktmpdir
-    @paths = []
-  end
-
-  after(:all) do
-    FileUtils.remove_dir(@base_directory, true)
-  end
-
-  before do
-    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
-  end
-
-  subject(:store) { BackupRestore::BackupStore.create(base_directory: @base_directory) }
-  let(:expected_type) { BackupRestore::LocalBackupStore }
-
-  it_behaves_like "backup store"
-
-  it "is not a remote store" do
-    expect(store.remote?).to eq(false)
-  end
-
-  def create_backups
-    create_file(filename: "b.tar.gz", last_modified: "2018-09-13T15:10:00Z", size_in_bytes: 17)
-    create_file(filename: "a.tgz", last_modified: "2018-02-11T09:27:00Z", size_in_bytes: 29)
-    create_file(filename: "r.sql.gz", last_modified: "2017-12-20T03:48:00Z", size_in_bytes: 11)
-    create_file(filename: "no-backup.txt", last_modified: "2018-09-05T14:27:00Z", size_in_bytes: 12)
-  end
-
-  def remove_backups
-    @paths.each { |path| File.delete(path) if File.exists?(path) }
-    @paths.clear
-  end
-
-  def create_file(filename:, last_modified:, size_in_bytes:)
-    path = File.join(@base_directory, filename)
-    return if File.exists?(path)
-
-    @paths << path
-    FileUtils.touch(path)
-    File.truncate(path, size_in_bytes)
-
-    time = Time.parse(last_modified)
-    File.utime(time, time, path)
-  end
-
-  def source_regex(filename)
-    path = File.join(@base_directory, filename)
-    /^#{Regexp.escape(path)}$/
-  end
-end
diff --git a/spec/lib/backup_restore/s3_backup_store_spec.rb b/spec/lib/backup_restore/s3_backup_store_spec.rb
deleted file mode 100644
index 33cfa945b00..00000000000
--- a/spec/lib/backup_restore/s3_backup_store_spec.rb
+++ /dev/null
@@ -1,109 +0,0 @@
-require 'rails_helper'
-require 'backup_restore/s3_backup_store'
-require_relative 'shared_examples_for_backup_store'
-
-describe BackupRestore::S3BackupStore do
-  before(:all) do
-    @s3_client = Aws::S3::Client.new(stub_responses: true)
-    @s3_options = { client: @s3_client }
-
-    @objects = []
-
-    @s3_client.stub_responses(:list_objects, -> (context) do
-      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
-      expect(context.params[:prefix]).to be_blank
-
-      { contents: @objects }
-    end)
-
-    @s3_client.stub_responses(:delete_object, -> (context) do
-      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
-
-      expect do
-        @objects.delete_if { |obj| obj[:key] == context.params[:key] }
-      end.to change { @objects }
-    end)
-
-    @s3_client.stub_responses(:head_object, -> (context) do
-      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
-
-      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
-        { content_length: object[:size], last_modified: object[:last_modified] }
-      else
-        { status_code: 404, headers: {}, body: "", }
-      end
-    end)
-
-    @s3_client.stub_responses(:get_object, -> (context) do
-      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
-
-      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
-        { content_length: object[:size], body: "A" * object[:size] }
-      else
-        { status_code: 404, headers: {}, body: "", }
-      end
-    end)
-
-    @s3_client.stub_responses(:put_object, -> (context) do
-      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
-
-      @objects << {
-        key: context.params[:key],
-        size: context.params[:body].size,
-        last_modified: Time.zone.now
-      }
-    end)
-  end
-
-  before do
-    SiteSetting.s3_backup_bucket = "s3-backup-bucket"
-    SiteSetting.s3_access_key_id = "s3-access-key-id"
-    SiteSetting.s3_secret_access_key = "s3-secret-access-key"
-    SiteSetting.backup_location = BackupLocationSiteSetting::S3
-  end
-
-  subject(:store) { BackupRestore::BackupStore.create(s3_options: @s3_options) }
-  let(:expected_type) { BackupRestore::S3BackupStore }
-
-  it_behaves_like "backup store"
-  it_behaves_like "remote backup store"
-
-  context "S3 specific behavior" do
-    before { create_backups }
-    after(:all) { remove_backups }
-
-    it "doesn't delete files when cleanup is disabled" do
-      SiteSetting.maximum_backups = 1
-      SiteSetting.s3_disable_cleanup = true
-
-      expect { store.delete_old }.to_not change { store.files }
-    end
-  end
-
-  def create_backups
-    @objects.clear
-
-    @objects << { key: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
-    @objects << { key: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z") }
-    @objects << { key: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z") }
-    @objects << { key: "no-backup.txt", size: 12, last_modified: Time.parse("2018-09-05T14:27:00Z") }
-  end
-
-  def remove_backups
-    @objects.clear
-  end
-
-  def source_regex(filename)
-    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
-    filename = Regexp.escape(filename)
-    expires = BackupRestore::S3BackupStore::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS
-
-    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
-  end
-
-  def upload_url_regex(filename)
-    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
-    filename = Regexp.escape(filename)
-    expires = BackupRestore::S3BackupStore::UPLOAD_URL_EXPIRES_AFTER_SECONDS
-
-    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
-  end
-end
diff --git a/spec/lib/backup_restore/shared_examples_for_backup_store.rb b/spec/lib/backup_restore/shared_examples_for_backup_store.rb
deleted file mode 100644
index 2ab716b6030..00000000000
--- a/spec/lib/backup_restore/shared_examples_for_backup_store.rb
+++ /dev/null
@@ -1,176 +0,0 @@
-shared_context "backups" do
-  before { create_backups }
-  after(:all) { remove_backups }
-
-  let(:backup1) { BackupFile.new(filename: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z")) }
-  let(:backup2) { BackupFile.new(filename: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z")) }
-  let(:backup3) { BackupFile.new(filename: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z")) }
-end
-
-shared_examples "backup store" do
-  it "creates the correct backup store" do
-    expect(store).to be_a(expected_type)
-  end
-
-  context "without backup files" do
-    describe "#files" do
-      it "returns an empty array when there are no files" do
-        expect(store.files).to be_empty
-      end
-    end
-
-    describe "#latest_file" do
-      it "returns nil when there are no files" do
-        expect(store.latest_file).to be_nil
-      end
-    end
-  end
-
-  context "with backup files" do
-    include_context "backups"
-
-    describe "#files" do
-      it "sorts files by last modified date in descending order" do
-        expect(store.files).to eq([backup1, backup2, backup3])
-      end
-
-      it "returns only *.gz and *.tgz files" do
-        files = store.files
-        expect(files).to_not be_empty
-        expect(files.map(&:filename)).to contain_exactly(backup1.filename, backup2.filename, backup3.filename)
-      end
-    end
-
-    describe "#latest_file" do
-      it "returns the most recent backup file" do
-        expect(store.latest_file).to eq(backup1)
-      end
-
-      it "returns nil when there are no files" do
-        store.files.each { |file| store.delete_file(file.filename) }
-        expect(store.latest_file).to be_nil
-      end
-    end
-
-    describe "#delete_old" do
-      it "does nothing if the number of files is <= maximum_backups" do
-        SiteSetting.maximum_backups = 3
-
-        store.delete_old
-        expect(store.files).to eq([backup1, backup2, backup3])
-      end
-
-      it "deletes files starting by the oldest" do
-        SiteSetting.maximum_backups = 1
-
-        store.delete_old
-        expect(store.files).to eq([backup1])
-      end
-    end
-
-    describe "#file" do
-      it "returns information about the file when the file exists" do
-        expect(store.file(backup1.filename)).to eq(backup1)
-      end
-
-      it "returns nil when the file doesn't exist" do
-        expect(store.file("foo.gz")).to be_nil
-      end
-
-      it "includes the file's source location if it is requested" do
-        file = store.file(backup1.filename, include_download_source: true)
-        expect(file.source).to match(source_regex(backup1.filename))
-      end
-    end
-
-    describe "#delete_file" do
-      it "deletes file when the file exists" do
-        expect(store.files).to include(backup1)
-
-        store.delete_file(backup1.filename)
-        expect(store.files).to_not include(backup1)
-
-        expect(store.file(backup1.filename)).to be_nil
-      end
-
-      it "does nothing when the file doesn't exist" do
-        expect { store.delete_file("foo.gz") }.to_not change { store.files }
-      end
-    end
-
-    describe "#download_file" do
-      it "downloads file to the destination" do
-        filename = backup1.filename
-
-        Dir.mktmpdir do |path|
-          destination_path = File.join(path, File.basename(filename))
-          store.download_file(filename, destination_path)
-
-          expect(File.exists?(destination_path)).to eq(true)
-          expect(File.size(destination_path)).to eq(backup1.size)
-        end
-      end
-
-      it "raises an exception when the download fails" do
-        filename = backup1.filename
-        destination_path = Dir.mktmpdir { |path| File.join(path, File.basename(filename)) }
-
-        expect { store.download_file(filename, destination_path) }.to raise_exception(StandardError)
-      end
-    end
-  end
-end
-
-shared_examples "remote backup store" do
-  it "is a remote store" do
-    expect(store.remote?).to eq(true)
-  end
-
-  context "with backups" do
-    include_context "backups"
-
-    describe "#upload_file" do
-      it "uploads file into store" do
-        freeze_time
-
-        backup = BackupFile.new(
-          filename: "foo.tar.gz",
-          size: 33,
-          last_modified: Time.zone.now
-        )
-
-        expect(store.files).to_not include(backup)
-
-        Tempfile.create(backup.filename) do |file|
-          file.write("A" * backup.size)
-          file.close
-
-          store.upload_file(backup.filename, file.path, "application/gzip")
-        end
-
-        expect(store.files).to include(backup)
-        expect(store.file(backup.filename)).to eq(backup)
-      end
-
-      it "raises an exception when a file with same filename
exists" do - Tempfile.create(backup1.filename) do |file| - expect { store.upload_file(backup1.filename, file.path, "application/gzip") } - .to raise_exception(BackupRestore::BackupStore::BackupFileExists) - end - end - end - - describe "#generate_upload_url" do - it "generates upload URL" do - filename = "foo.tar.gz" - url = store.generate_upload_url(filename) - - expect(url).to match(upload_url_regex(filename)) - end - - it "raises an exeption when a file with same filename exists" do - expect { store.generate_upload_url(backup1.filename) } - .to raise_exception(BackupRestore::BackupStore::BackupFileExists) - end - end - end -end diff --git a/spec/models/admin_dashboard_data_spec.rb b/spec/models/admin_dashboard_data_spec.rb index 690ed831e88..6d81f623dd5 100644 --- a/spec/models/admin_dashboard_data_spec.rb +++ b/spec/models/admin_dashboard_data_spec.rb @@ -210,9 +210,9 @@ describe AdminDashboardData do end context 'when setting is enabled' do + let(:setting_enabled) { true } before do - all_setting_keys.each { |key| SiteSetting.public_send("#{key}=", 'foo') } - SiteSetting.public_send("#{setting[:key]}=", setting[:enabled_value]) + SiteSetting.public_send("#{setting_key}=", setting_enabled) SiteSetting.public_send("#{bucket_key}=", bucket_value) end @@ -229,7 +229,7 @@ describe AdminDashboardData do context 'when bucket is filled in' do let(:bucket_value) { 'a' } before do - SiteSetting.s3_use_iam_profile = use_iam_profile + SiteSetting.public_send("s3_use_iam_profile=", use_iam_profile) end context 'when using iam profile' do @@ -260,7 +260,7 @@ describe AdminDashboardData do context 'when setting is not enabled' do before do - SiteSetting.public_send("#{setting[:key]}=", setting[:disabled_value]) + SiteSetting.public_send("#{setting_key}=", false) end it "always returns nil" do @@ -272,25 +272,13 @@ describe AdminDashboardData do end describe 'uploads' do - let(:setting) do - { - key: :enable_s3_uploads, - enabled_value: true, - disabled_value: false - } - end + let(:setting_key) { :enable_s3_uploads } let(:bucket_key) { :s3_upload_bucket } include_examples 'problem detection for s3-dependent setting' end describe 'backups' do - let(:setting) do - { - key: :backup_location, - enabled_value: BackupLocationSiteSetting::S3, - disabled_value: BackupLocationSiteSetting::LOCAL - } - end + let(:setting_key) { :enable_s3_backups } let(:bucket_key) { :s3_backup_bucket } include_examples 'problem detection for s3-dependent setting' end diff --git a/spec/models/backup_spec.rb b/spec/models/backup_spec.rb new file mode 100644 index 00000000000..1247ff3d577 --- /dev/null +++ b/spec/models/backup_spec.rb @@ -0,0 +1,130 @@ +require 'rails_helper' +require "s3_helper" + +require_dependency 'backup' + +describe Backup do + + let(:b1) { Backup.new('backup1') } + let(:b2) { Backup.new('backup2') } + let(:b3) { Backup.new('backup3') } + + before do + Backup.stubs(:all).returns([b1, b2, b3]) + end + + context '#remove_old' do + it "does nothing if there aren't more backups than the setting" do + SiteSetting.maximum_backups = 3 + Backup.any_instance.expects(:remove).never + Backup.remove_old + end + + it "calls remove on the backups over our limit" do + SiteSetting.maximum_backups = 1 + b1.expects(:remove).never + b2.expects(:remove).once + b3.expects(:remove).once + Backup.remove_old + end + end + + shared_context 's3 helpers' do + let(:client) { Aws::S3::Client.new(stub_responses: true) } + let(:resource) { Aws::S3::Resource.new(client: client) } + let!(:s3_bucket) { resource.bucket("s3-upload-bucket") } 
+    let(:s3_helper) { b1.s3 }
+
+    before(:each) do
+      SiteSetting.s3_backup_bucket = "s3-upload-bucket"
+      SiteSetting.s3_access_key_id = "s3-access-key-id"
+      SiteSetting.s3_secret_access_key = "s3-secret-access-key"
+    end
+  end
+
+  context "#after_create_hook" do
+    context "when SiteSetting is true" do
+      include_context "s3 helpers"
+
+      before do
+        SiteSetting.enable_s3_backups = true
+      end
+
+      it "should upload the backup to S3 with the right paths" do
+        b1.path = 'some/path/backup.gz'
+        File.expects(:open).with(b1.path).yields(stub)
+
+        s3_helper.expects(:s3_bucket).returns(s3_bucket)
+        s3_object = stub
+
+        s3_bucket.expects(:object).with(b1.filename).returns(s3_object)
+        s3_object.expects(:upload_file)
+
+        b1.after_create_hook
+      end
+
+      context "when s3_backup_bucket includes folders path" do
+        before do
+          SiteSetting.s3_backup_bucket = "s3-upload-bucket/discourse-backups"
+        end
+
+        it "should upload the backup to S3 with the right paths" do
+          b1.path = 'some/path/backup.gz'
+          File.expects(:open).with(b1.path).yields(stub)
+
+          s3_helper.expects(:s3_bucket).returns(s3_bucket)
+          s3_object = stub
+
+          s3_bucket.expects(:object).with("discourse-backups/#{b1.filename}").returns(s3_object)
+          s3_object.expects(:upload_file)
+
+          b1.after_create_hook
+        end
+      end
+    end
+
+    it "doesn't call upload_to_s3 if the SiteSetting is false" do
+      SiteSetting.enable_s3_backups = false
+      b1.expects(:upload_to_s3).never
+      b1.after_create_hook
+    end
+  end
+
+  context "#after_remove_hook" do
+    include_context "s3 helpers"
+
+    context "when SiteSetting is true" do
+      before do
+        SiteSetting.enable_s3_backups = true
+      end
+
+      context "when s3_backup_bucket includes folders path" do
+        before do
+          SiteSetting.s3_backup_bucket = "s3-upload-bucket/discourse-backups"
+        end
+
+        it "should remove the backup from S3 with the right paths" do
+          s3_helper.expects(:s3_bucket).returns(s3_bucket)
+          s3_object = stub
+
+          s3_bucket.expects(:object).with("discourse-backups/#{b1.filename}").returns(s3_object)
+          s3_object.expects(:delete)
+
+          b1.after_remove_hook
+        end
+      end
+    end
+
+    context "when SiteSetting is false" do
+      before do
+        SiteSetting.enable_s3_backups = false
+      end
+
+      it "doesn't call remove_from_s3" do
+        b1.expects(:remove_from_s3).never
+        b1.after_remove_hook
+      end
+    end
+  end
+
+end
diff --git a/spec/requests/admin/backups_controller_spec.rb b/spec/requests/admin/backups_controller_spec.rb
index f7344138746..4224bcb07b1 100644
--- a/spec/requests/admin/backups_controller_spec.rb
+++ b/spec/requests/admin/backups_controller_spec.rb
@@ -4,25 +4,6 @@ RSpec.describe Admin::BackupsController do
   let(:admin) { Fabricate(:admin) }
   let(:backup_filename) { "2014-02-10-065935.tar.gz" }
   let(:backup_filename2) { "2014-02-11-065935.tar.gz" }
-  let(:store) { BackupRestore::LocalBackupStore.new }
-
-  def create_backup_files(*filenames)
-    @paths = filenames.map do |filename|
-      path = backup_path(filename)
-      File.open(path, "w") { |f| f.write("test backup") }
-      path
-    end
-  end
-
-  def backup_path(filename)
-    File.join(BackupRestore::LocalBackupStore.base_directory, filename)
-  end
-
-  def map_preloaded
-    controller.instance_variable_get("@preloaded").map do |key, value|
-      [key, JSON.parse(value)]
-    end.to_h
-  end

   it "is a subclass of AdminController" do
     expect(Admin::BackupsController < Admin::AdminController).to eq(true)
   end
@@ -30,14 +11,10 @@ RSpec.describe Admin::BackupsController do

   before do
     sign_in(admin)
-    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
   end

   after do
     $redis.flushall
-
-    @paths&.each { |path| File.delete(path) if File.exists?(path) }
-    @paths = nil
   end

   describe "#index" do
@@ -52,7 +29,11 @@ RSpec.describe Admin::BackupsController do
       get "/admin/backups.html"
       expect(response.status).to eq(200)

-      preloaded = map_preloaded
+      preloaded = controller.instance_variable_get("@preloaded").map do |key, value|
+        [key, JSON.parse(value)]
+      end.to_h
+
+      expect(preloaded["backups"].size).to eq(Backup.all.size)
       expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
       expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
     end
@@ -61,14 +42,23 @@ RSpec.describe Admin::BackupsController do
     context "json format" do
       it "returns a list of all the backups" do
         begin
-          create_backup_files(backup_filename, backup_filename2)
+          paths = []
+          [backup_filename, backup_filename2].each do |name|
+            path = File.join(Backup.base_directory, name)
+            paths << path
+            File.open(path, "w") { |f| f.write("hello") }
+            Backup.create_from_filename(name)
+          end

           get "/admin/backups.json"
+
           expect(response.status).to eq(200)

-          filenames = JSON.parse(response.body).map { |backup| backup["filename"] }
-          expect(filenames).to include(backup_filename)
-          expect(filenames).to include(backup_filename2)
+          json = JSON.parse(response.body).map { |backup| backup["filename"] }
+          expect(json).to include(backup_filename)
+          expect(json).to include(backup_filename2)
+        ensure
+          paths.each { |path| File.delete(path) }
         end
       end
     end
@@ -98,23 +88,36 @@ RSpec.describe Admin::BackupsController do
     it "uses send_file to transmit the backup" do
       begin
         token = EmailBackupToken.set(admin.id)
-        create_backup_files(backup_filename)
+        path = File.join(Backup.base_directory, backup_filename)
+        File.open(path, "w") { |f| f.write("hello") }
+
+        Backup.create_from_filename(backup_filename)

         expect do
           get "/admin/backups/#{backup_filename}.json", params: { token: token }
         end.to change { UserHistory.where(action: UserHistory.actions[:backup_download]).count }.by(1)

-        expect(response.headers['Content-Length']).to eq("11")
+        expect(response.headers['Content-Length']).to eq("5")
         expect(response.headers['Content-Disposition']).to match(/attachment; filename/)
+      ensure
+        File.delete(path)
+        EmailBackupToken.del(admin.id)
       end
     end

     it "returns 422 when token is bad" do
       begin
+        path = File.join(Backup.base_directory, backup_filename)
+        File.open(path, "w") { |f| f.write("hello") }
+
+        Backup.create_from_filename(backup_filename)
+
        get "/admin/backups/#{backup_filename}.json", params: { token: "bad_value" }

         expect(response.status).to eq(422)
         expect(response.headers['Content-Disposition']).not_to match(/attachment; filename/)
+      ensure
+        File.delete(path)
       end
     end
@@ -122,16 +125,20 @@ RSpec.describe Admin::BackupsController do
       token = EmailBackupToken.set(admin.id)

       get "/admin/backups/#{backup_filename}.json", params: { token: token }
+      EmailBackupToken.del(admin.id)

       expect(response.status).to eq(404)
     end
   end

   describe '#destroy' do
+    let(:b) { Backup.new(backup_filename) }
+
     it "removes the backup if found" do
       begin
-        path = backup_path(backup_filename)
-        create_backup_files(backup_filename)
-        expect(File.exists?(path)).to eq(true)
+        path = File.join(Backup.base_directory, backup_filename)
+        File.open(path, "w") { |f| f.write("hello") }
+
+        Backup.create_from_filename(backup_filename)

         expect do
           delete "/admin/backups/#{backup_filename}.json"
         end.to change { UserHistory.where(action: UserHistory.actions[:backup_destroy]).count }.by(1)

         expect(response.status).to eq(200)
         expect(File.exists?(path)).to eq(false)
+      ensure
+        File.delete(path) if File.exists?(path)
       end
     end
@@ -153,7 +162,9 @@ RSpec.describe Admin::BackupsController do
       get "/admin/backups/logs.html"
       expect(response.status).to eq(200)

-      preloaded = map_preloaded
+      preloaded = controller.instance_variable_get("@preloaded").map do |key, value|
+        [key, JSON.parse(value)]
+      end.to_h

       expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
       expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
@@ -217,7 +228,6 @@ RSpec.describe Admin::BackupsController do
       described_class.any_instance.expects(:has_enough_space_on_disk?).returns(true)

       filename = 'test_Site-0123456789.tar.gz'
-      @paths = [backup_path(File.join('tmp', 'test', "#{filename}.part1"))]

       post "/admin/backups/upload.json", params: {
         resumableFilename: filename,
@@ -231,6 +241,13 @@ RSpec.describe Admin::BackupsController do

       expect(response.status).to eq(200)
       expect(response.body).to eq("")
+    ensure
+      begin
+        File.delete(
+          File.join(Backup.base_directory, 'tmp', 'test', "#{filename}.part1")
+        )
+      rescue Errno::ENOENT
+      end
     end
   end
@@ -267,16 +284,18 @@ RSpec.describe Admin::BackupsController do
   end

   describe "#email" do
+    let(:backup_filename) { "test.tar.gz" }
+    let(:backup) { Backup.new(backup_filename) }
+
     it "enqueues email job" do
-      create_backup_files(backup_filename)
+      Backup.expects(:[]).with(backup_filename).returns(backup)

-      expect {
-        put "/admin/backups/#{backup_filename}.json"
-      }.to change { Jobs::DownloadBackupEmail.jobs.size }.by(1)
+      Jobs.expects(:enqueue).with(:download_backup_email,
+        user_id: admin.id,
+        backup_file_path: 'http://www.example.com/admin/backups/test.tar.gz'
+      )

-      job_args = Jobs::DownloadBackupEmail.jobs.last["args"].first
-      expect(job_args["user_id"]).to eq(admin.id)
-      expect(job_args["backup_file_path"]).to eq("http://www.example.com/admin/backups/#{backup_filename}")
+      put "/admin/backups/#{backup_filename}.json"

       expect(response.status).to eq(200)
     end
diff --git a/test/javascripts/lib/utilities-test.js.es6 b/test/javascripts/lib/utilities-test.js.es6
index 56804e0aca9..ba2448b35c0 100644
--- a/test/javascripts/lib/utilities-test.js.es6
+++ b/test/javascripts/lib/utilities-test.js.es6
@@ -109,14 +109,6 @@ QUnit.test("ensures an authorized upload", assert => {
   );
 });

-QUnit.test("skipping validation works", assert => {
-  const files = [{ name: "backup.tar.gz" }];
-  sandbox.stub(bootbox, "alert");
-
-  assert.not(validUpload(files, { skipValidation: false }));
-  assert.ok(validUpload(files, { skipValidation: true }));
-});
-
 QUnit.test("staff can upload anything in PM", assert => {
   const files = [{ name: "some.docx" }];
   Discourse.SiteSettings.authorized_extensions = "jpeg";
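
A note on the restored spec/models/backup_spec.rb above: it pins down Backup.remove_old by stubbing Backup.all and expecting #remove to be called only on backups past the SiteSetting.maximum_backups limit. A minimal sketch of the pruning logic those expectations imply is below; the method body is an assumption reconstructed from the spec (it presumes Backup.all returns backups sorted newest-first), not the actual implementation:

    # Sketch only: keep the newest `maximum_backups` files, remove the rest.
    def self.remove_old
      all_backups = Backup.all # assumed newest-first, matching the spec's stub order
      return if all_backups.size <= SiteSetting.maximum_backups

      all_backups[SiteSetting.maximum_backups..-1].each(&:remove)
    end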