mirror of
https://github.com/discourse/discourse.git
synced 2025-05-31 15:28:30 +08:00
REFACTOR: Restoring of backups and migration of uploads to S3
This commit is contained in:
77
spec/lib/backup_restore/backup_file_handler_spec.rb
Normal file
77
spec/lib/backup_restore/backup_file_handler_spec.rb
Normal file
@@ -0,0 +1,77 @@
|
||||
# frozen_string_literal: true

require 'rails_helper'
require_relative 'shared_context_for_backup_restore'

describe BackupRestore::BackupFileHandler do
  include_context "shared stuff"

  # Copies a fixture backup into the local backup store, decompresses it and
  # asserts on the extracted layout, then verifies clean_up removes the
  # temporary directory. Runs inside a throwaway root directory so nothing
  # leaks between examples.
  def expect_decompress_and_clean_up_to_work(backup_filename:, expected_dump_filename: "dump.sql",
                                             require_metadata_file:, require_uploads:)
    # Frozen time makes the timestamped tmp path below deterministic.
    freeze_time(DateTime.parse('2019-12-24 14:31:48'))

    source_file = File.join(Rails.root, "spec/fixtures/backups", backup_filename)
    target_directory = BackupRestore::LocalBackupStore.base_directory
    target_file = File.join(target_directory, backup_filename)
    FileUtils.copy_file(source_file, target_file)

    Dir.mktmpdir do |root_directory|
      current_db = RailsMultisite::ConnectionManagement.current_db
      file_handler = BackupRestore::BackupFileHandler.new(logger, backup_filename, current_db, root_directory)
      tmp_directory, db_dump_path = file_handler.decompress

      # Matches the frozen time set above (2019-12-24 14:31:48).
      expected_tmp_path = File.join(root_directory, "tmp/restores", current_db, "2019-12-24-143148")
      expect(tmp_directory).to eq(expected_tmp_path)
      expect(db_dump_path).to eq(File.join(expected_tmp_path, expected_dump_filename))

      expect(Dir.exist?(tmp_directory)).to eq(true)
      expect(File.exist?(db_dump_path)).to eq(true)

      expect(File.exist?(File.join(tmp_directory, "meta.json"))).to eq(require_metadata_file)

      if require_uploads
        upload_filename = "uploads/default/original/3X/b/d/bd269860bb508aebcb6f08fe7289d5f117830383.png"
        expect(File.exist?(File.join(tmp_directory, upload_filename))).to eq(true)
      else
        expect(Dir.exist?(File.join(tmp_directory, "uploads"))).to eq(false)
      end

      file_handler.clean_up
      expect(Dir.exist?(tmp_directory)).to eq(false)
    end
  ensure
    FileUtils.rm(target_file)

    # We don't want to delete the directory unless it is empty, otherwise this could be annoying
    # when tests run for the "default" database in a development environment.
    FileUtils.rmdir(target_directory) rescue nil
  end

  it "works with old backup file format", type: :multisite do
    test_multisite_connection("second") do
      expect_decompress_and_clean_up_to_work(
        backup_filename: "backup_till_v1.5.tar.gz",
        require_metadata_file: true,
        require_uploads: true
      )
    end
  end

  it "works with current backup file format" do
    expect_decompress_and_clean_up_to_work(
      backup_filename: "backup_since_v1.6.tar.gz",
      require_metadata_file: false,
      require_uploads: true
    )
  end

  it "works with SQL only backup file" do
    # A bare .sql.gz archive: no metadata file and no uploads directory.
    expect_decompress_and_clean_up_to_work(
      backup_filename: "sql_only_backup.sql.gz",
      expected_dump_filename: "sql_only_backup.sql",
      require_metadata_file: false,
      require_uploads: false
    )
  end
end
|
188
spec/lib/backup_restore/database_restorer_spec.rb
Normal file
188
spec/lib/backup_restore/database_restorer_spec.rb
Normal file
@@ -0,0 +1,188 @@
|
||||
# frozen_string_literal: true

require 'rails_helper'
require_relative 'shared_context_for_backup_restore'

describe BackupRestore::DatabaseRestorer do
  include_context "shared stuff"

  let(:current_db) { RailsMultisite::ConnectionManagement.current_db }
  subject { BackupRestore::DatabaseRestorer.new(logger, current_db) }

  # --- Mocha expectation helpers; each stubs one step of the restore flow. ---

  def expect_create_readonly_functions
    Migration::BaseDropper.expects(:create_readonly_function).at_least_once
  end

  def expect_table_move
    BackupRestore.expects(:move_tables_between_schemas).with("public", "backup").once
  end

  # Stubs the psql subprocess: its IO, exit status, and (optionally) the
  # thread the restorer spawns around it.
  def expect_psql(output_lines: ["output from psql"], exit_status: 0, stub_thread: false)
    status = mock("psql status")
    status.expects(:exitstatus).returns(exit_status).once
    Process.expects(:last_status).returns(status).once

    if stub_thread
      thread = mock("thread")
      thread.stubs(:join)
      Thread.stubs(:new).returns(thread)
    end

    # Trailing nil terminates the readline loop.
    output_lines << nil
    psql_io = mock("psql")
    psql_io.expects(:readline).returns(*output_lines).times(output_lines.size)
    IO.expects(:popen).yields(psql_io).once
  end

  def expect_db_migrate
    Discourse::Utils.expects(:execute_command).with do |env, command, options|
      env["SKIP_POST_DEPLOYMENT_MIGRATIONS"] == "0" &&
        command == "rake db:migrate" &&
        options[:chdir] == Rails.root
    end.once
  end

  def expect_db_reconnect
    RailsMultisite::ConnectionManagement.expects(:establish_connection).once
  end

  # Runs subject.restore with the selected steps stubbed out; pass
  # stub_*: false to let a step execute for real.
  def execute_stubbed_restore(stub_readonly_functions: true, stub_psql: true, stub_migrate: true,
                              dump_file_path: "foo.sql")
    expect_table_move
    expect_create_readonly_functions if stub_readonly_functions
    expect_psql if stub_psql
    expect_db_migrate if stub_migrate
    subject.restore(dump_file_path)
  end

  describe "#restore" do
    it "executes everything in the correct order" do
      restore = sequence("restore")
      expect_table_move.in_sequence(restore)
      expect_create_readonly_functions.in_sequence(restore)
      expect_psql(stub_thread: true).in_sequence(restore)
      expect_db_migrate.in_sequence(restore)
      expect_db_reconnect.in_sequence(restore)

      subject.restore("foo.sql")
    end

    context "with real psql" do
      after do
        DB.exec <<~SQL
          -- Drop table and execute a commit to make the drop stick,
          -- otherwise rspec will rollback the drop at the end of each test.
          -- The tests in this context do not change the DB, so this should be safe.
          DROP TABLE IF EXISTS foo;
          COMMIT;

          -- Start a new transaction in order to suppress the
          -- "there is no transaction in progress" warnings from rspec.
          BEGIN TRANSACTION;
        SQL
      end

      # Restores a fixture SQL dump through the real psql binary.
      def restore(filename, stub_migrate: true)
        path = File.join(Rails.root, "spec/fixtures/db/restore", filename)
        execute_stubbed_restore(stub_psql: false, stub_migrate: stub_migrate, dump_file_path: path)
      end

      # Each fixture creates a table named "foo"; its existence proves the
      # dump was applied.
      def expect_restore_to_work(filename)
        restore(filename, stub_migrate: true)
        expect(ActiveRecord::Base.connection.table_exists?("foo")).to eq(true)
      end

      it "restores from PostgreSQL 9.3" do
        # this covers the defaults of Discourse v1.0 up to v1.5
        expect_restore_to_work("postgresql_9.3.11.sql")
      end

      it "restores from PostgreSQL 9.5.5" do
        # it uses a slightly different header than later 9.5.x versions
        expect_restore_to_work("postgresql_9.5.5.sql")
      end

      it "restores from PostgreSQL 9.5" do
        # this covers the defaults of Discourse v1.6 up to v1.9
        expect_restore_to_work("postgresql_9.5.10.sql")
      end

      it "restores from PostgreSQL 10" do
        # this covers the defaults of Discourse v1.7 up to v2.4
        expect_restore_to_work("postgresql_10.11.sql")
      end

      it "restores from PostgreSQL 11" do
        expect_restore_to_work("postgresql_11.6.sql")
      end

      it "restores from PostgreSQL 12" do
        expect_restore_to_work("postgresql_12.1.sql")
      end

      it "detects error during restore" do
        expect { restore("error.sql", stub_migrate: false) }
          .to raise_error(BackupRestore::DatabaseRestoreError)
      end
    end

    context "database connection" do
      it 'reconnects to the correct database', type: :multisite do
        RailsMultisite::ConnectionManagement.establish_connection(db: 'second')
        execute_stubbed_restore
        expect(RailsMultisite::ConnectionManagement.current_db).to eq('second')
      end

      it 'it is not erroring for non-multisite' do
        expect { execute_stubbed_restore }.not_to raise_error
      end
    end
  end

  describe "#rollback" do
    it "moves tables back when tables were moved" do
      # Before a restore has run there is nothing to roll back, so no
      # schema move may happen.
      BackupRestore.stubs(:can_rollback?).returns(true)
      BackupRestore.expects(:move_tables_between_schemas).with("backup", "public").never
      subject.rollback

      execute_stubbed_restore

      # After a restore, rollback must move the backed-up tables back.
      BackupRestore.expects(:move_tables_between_schemas).with("backup", "public").once
      subject.rollback
    end
  end

  context "readonly functions" do
    before do
      Migration::SafeMigrate.stubs(:post_migration_path).returns("spec/fixtures/db/post_migrate")
    end

    it "doesn't try to drop function when no functions have been created" do
      Migration::BaseDropper.expects(:drop_readonly_function).never
      subject.clean_up
    end

    it "creates and drops all functions when none exist" do
      Migration::BaseDropper.expects(:create_readonly_function).with(:email_logs, nil)
      Migration::BaseDropper.expects(:create_readonly_function).with(:posts, :via_email)
      Migration::BaseDropper.expects(:create_readonly_function).with(:posts, :raw_email)
      execute_stubbed_restore(stub_readonly_functions: false)

      Migration::BaseDropper.expects(:drop_readonly_function).with(:email_logs, nil)
      Migration::BaseDropper.expects(:drop_readonly_function).with(:posts, :via_email)
      Migration::BaseDropper.expects(:drop_readonly_function).with(:posts, :raw_email)
      subject.clean_up
    end

    it "creates and drops only missing functions during restore" do
      # Two of the three functions already exist, so only the posts.via_email
      # one should be created and later dropped.
      Migration::BaseDropper.stubs(:existing_discourse_function_names)
        .returns(%w(raise_email_logs_readonly raise_posts_raw_email_readonly))

      Migration::BaseDropper.expects(:create_readonly_function).with(:posts, :via_email)
      execute_stubbed_restore(stub_readonly_functions: false)

      Migration::BaseDropper.expects(:drop_readonly_function).with(:posts, :via_email)
      subject.clean_up
    end
  end
end
|
81
spec/lib/backup_restore/meta_data_handler_spec.rb
Normal file
81
spec/lib/backup_restore/meta_data_handler_spec.rb
Normal file
@@ -0,0 +1,81 @@
|
||||
# frozen_string_literal: true

require 'rails_helper'
require_relative 'shared_context_for_backup_restore'

describe BackupRestore::MetaDataHandler do
  include_context "shared stuff"

  # Filename embedding both a timestamp and a migration version (v20191108000414).
  let!(:backup_filename) { 'discourse-2019-11-18-143242-v20191108000414.tar.gz' }

  # Yields a temporary directory, optionally containing a metadata file with
  # the given content. Passing nil skips creating the file entirely.
  def with_metadata_file(content)
    Dir.mktmpdir do |directory|
      if !content.nil?
        path = File.join(directory, BackupRestore::MetaDataHandler::METADATA_FILE)
        File.write(path, content)
      end

      yield(directory)
    end
  end

  def validate_metadata(filename, tmp_directory)
    BackupRestore::MetaDataHandler.new(logger, filename, tmp_directory).validate
  end

  it "extracts metadata from file when metadata file exists" do
    metadata = '{"source":"discourse","version":20160329101122}'

    with_metadata_file(metadata) do |dir|
      expect(validate_metadata(backup_filename, dir))
        .to include(version: 20160329101122)
    end
  end

  it "extracts metadata from filename when metadata file does not exist" do
    with_metadata_file(nil) do |dir|
      expect(validate_metadata(backup_filename, dir))
        .to include(version: 20191108000414)
    end
  end

  it "raises an exception when the metadata file contains invalid JSON" do
    # FIX: renamed misspelled local variable ("currupt" -> "corrupt").
    # The JSON is intentionally truncated (missing closing brace).
    corrupt_metadata = '{"version":20160329101122'

    with_metadata_file(corrupt_metadata) do |dir|
      expect { validate_metadata(backup_filename, dir) }
        .to raise_error(BackupRestore::MetaDataError)
    end
  end

  it "raises an exception when the metadata file is empty" do
    with_metadata_file('') do |dir|
      expect { validate_metadata(backup_filename, dir) }
        .to raise_error(BackupRestore::MetaDataError)
    end
  end

  it "raises an exception when the filename contains no version number" do
    filename = 'discourse-2019-11-18-143242.tar.gz'

    expect { validate_metadata(filename, nil) }
      .to raise_error(BackupRestore::MetaDataError)
  end

  it "raises an exception when the filename contains an invalid version number" do
    filename = 'discourse-2019-11-18-143242-v123456789.tar.gz'

    expect { validate_metadata(filename, nil) }
      .to raise_error(BackupRestore::MetaDataError)
  end

  it "raises an exception when the backup's version is newer than the current version" do
    new_backup_filename = 'discourse-2019-11-18-143242-v20191113193141.sql.gz'

    # Pretend the running site is older than the backup's version.
    BackupRestore.expects(:current_version)
      .returns(20191025005204).once

    expect { validate_metadata(new_backup_filename, nil) }
      .to raise_error(BackupRestore::MigrationRequiredError)
  end
end
|
@@ -2,135 +2,6 @@
|
||||
|
||||
require 'rails_helper'

# Causes flakiness
describe BackupRestore::Restorer do
  it 'detects which pg_dump output is restorable to different schemas' do
    # Maps pg_dump versions to whether their output can be remapped into a
    # different schema; portability was broken by 9.6.8 / 10.3.
    {
      "9.6.7" => true,
      "9.6.8" => false,
      "9.6.9" => false,
      "10.2" => true,
      "10.3" => false,
      "10.3.1" => false,
      "10.4" => false,
      "11" => false,
      "11.4" => false,
      "21" => false,
    }.each do |key, value|
      expect(described_class.pg_produces_portable_dump?(key)).to eq(value)
    end
  end

  describe 'Decompressing a backup' do
    let!(:admin) { Fabricate(:admin) }

    before do
      SiteSetting.allow_restore = true
      @restore_path = File.join(Rails.root, "public", "backups", RailsMultisite::ConnectionManagement.current_db)
    end

    after do
      FileUtils.rm_rf @restore_path
      FileUtils.rm_rf @restorer.tmp_directory
    end

    context 'When there are uploads' do
      before do
        # Builds a <folder>.tar.gz archive containing dump.sql.gz plus an
        # uploads directory, then points a restorer at it.
        @restore_folder = "backup-#{SecureRandom.hex}"
        @temp_folder = "#{@restore_path}/#{@restore_folder}"
        FileUtils.mkdir_p("#{@temp_folder}/uploads")

        Dir.chdir(@restore_path) do
          File.write("#{@restore_folder}/dump.sql", 'This is a dump')
          Compression::Gzip.new.compress(@restore_folder, 'dump.sql')
          FileUtils.rm_rf("#{@restore_folder}/dump.sql")
          File.write("#{@restore_folder}/uploads/upload.txt", 'This is an upload')

          Compression::Tar.new.compress(@restore_path, @restore_folder)
        end

        Compression::Gzip.new.compress(@restore_path, "#{@restore_folder}.tar")
        FileUtils.rm_rf @temp_folder

        build_restorer("#{@restore_folder}.tar.gz")
      end

      it '#decompress_archive works correctly' do
        @restorer.decompress_archive

        expect(exists?("dump.sql.gz")).to eq(true)
        expect(exists?("uploads", directory: true)).to eq(true)
      end

      it '#extract_dump works correctly' do
        @restorer.decompress_archive
        @restorer.extract_dump

        expect(exists?('dump.sql')).to eq(true)
      end
    end

    context 'When restoring a single file' do
      before do
        FileUtils.mkdir_p(@restore_path)

        Dir.chdir(@restore_path) do
          File.write('dump.sql', 'This is a dump')
          Compression::Gzip.new.compress(@restore_path, 'dump.sql')
          FileUtils.rm_rf('dump.sql')
        end

        build_restorer('dump.sql.gz')
      end

      it '#extract_dump works correctly with a single file' do
        @restorer.extract_dump

        expect(exists?("dump.sql")).to eq(true)
      end
    end

    # True when the given path exists inside the restorer's tmp directory.
    def exists?(relative_path, directory: false)
      full_path = "#{@restorer.tmp_directory}/#{relative_path}"
      # FIX: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
      directory ? File.directory?(full_path) : File.exist?(full_path)
    end

    def build_restorer(filename)
      @restorer = described_class.new(admin.id, filename: filename)
      @restorer.ensure_directory_exists(@restorer.tmp_directory)
      @restorer.copy_archive_to_tmp_directory
    end
  end

  context 'Database connection' do
    let!(:admin) { Fabricate(:admin) }
    before do
      SiteSetting.allow_restore = true
      described_class.any_instance.stubs(ensure_we_have_a_filename: true)
      described_class.any_instance.stubs(initialize_state: true)
    end

    after do
      SiteSetting.allow_restore = false
      described_class.any_instance.unstub(:ensure_we_have_a_filename)
      described_class.any_instance.unstub(:initialize_state)
    end

    let(:conn) { RailsMultisite::ConnectionManagement }
    let(:restorer) { described_class.new(admin.id) }

    it 'correctly reconnects to database', type: :multisite do
      restorer.instance_variable_set(:@current_db, 'second')
      conn.establish_connection(db: 'second')
      expect(RailsMultisite::ConnectionManagement.current_db).to eq('second')
      # Corrupt the pool config to prove reconnect_database restores 'second'.
      ActiveRecord::Base.connection_pool.spec.config[:db_key] = "incorrect_db"
      restorer.send(:reconnect_database)
      expect(RailsMultisite::ConnectionManagement.current_db).to eq('second')
    end

    it 'it is not erroring for non multisite', type: :multisite do
      # FIX: call the method with dot notation instead of the scope-resolution
      # operator (`::clear_settings!`), which is discouraged for method calls.
      RailsMultisite::ConnectionManagement.clear_settings!
      expect { restorer.send(:reconnect_database) }.not_to raise_error
    end
  end
end
|
||||
|
@@ -0,0 +1,9 @@
|
||||
# frozen_string_literal: true
#
# Shared setup for the backup/restore specs: provides a no-op logger object
# responding to #log(message, exception = nil).
shared_context "shared stuff" do
  let!(:logger) do
    # Anonymous null-object logger; specs assert behavior, not log output.
    null_logger_class = Class.new do
      def log(_message, _exception = nil)
      end
    end
    null_logger_class.new
  end
end
|
152
spec/lib/backup_restore/system_interface_spec.rb
Normal file
152
spec/lib/backup_restore/system_interface_spec.rb
Normal file
@@ -0,0 +1,152 @@
|
||||
# frozen_string_literal: true

require 'rails_helper'
require_relative 'shared_context_for_backup_restore'

describe BackupRestore::SystemInterface do
  include_context "shared stuff"

  subject { BackupRestore::SystemInterface.new(logger) }

  context "readonly mode" do
    after do
      # Remove any readonly flags left in Redis so examples stay isolated.
      Discourse::READONLY_KEYS.each { |key| $redis.del(key) }
    end

    describe "#enable_readonly_mode" do
      it "enables readonly mode" do
        Discourse.expects(:enable_readonly_mode).once
        subject.enable_readonly_mode
      end

      it "does not enable readonly mode when it is already in readonly mode" do
        Discourse.enable_readonly_mode
        Discourse.expects(:enable_readonly_mode).never
        subject.enable_readonly_mode
      end
    end

    describe "#disable_readonly_mode" do
      it "disables readonly mode" do
        Discourse.expects(:disable_readonly_mode).once
        subject.disable_readonly_mode
      end

      it "does not disable readonly mode when readonly mode was explicitly enabled" do
        # An explicitly enabled readonly mode must survive the restore.
        Discourse.enable_readonly_mode
        Discourse.expects(:disable_readonly_mode).never
        subject.disable_readonly_mode
      end
    end
  end

  describe "#mark_restore_as_running" do
    it "calls mark_restore_as_running" do
      BackupRestore.expects(:mark_as_running!).once
      subject.mark_restore_as_running
    end
  end

  describe "#mark_restore_as_not_running" do
    it "calls mark_restore_as_not_running" do
      BackupRestore.expects(:mark_as_not_running!).once
      subject.mark_restore_as_not_running
    end
  end

  describe "#listen_for_shutdown_signal" do
    before { BackupRestore.mark_as_running! }

    after do
      BackupRestore.clear_shutdown_signal!
      BackupRestore.mark_as_not_running!
    end

    it "exits the process when shutdown signal is set" do
      expect do
        thread = subject.listen_for_shutdown_signal
        BackupRestore.set_shutdown_signal!
        thread.join
      end.to raise_error(SystemExit)
    end
  end

  describe "#pause_sidekiq" do
    it "calls pause!" do
      Sidekiq.expects(:pause!).once
      subject.pause_sidekiq
    end
  end

  describe "#unpause_sidekiq" do
    it "calls unpause!" do
      Sidekiq.expects(:unpause!).once
      subject.unpause_sidekiq
    end
  end

  describe "#wait_for_sidekiq" do
    it "waits 6 seconds even when there are no running Sidekiq jobs" do
      subject.expects(:sleep).with(6).once
      subject.wait_for_sidekiq
    end

    context "with Sidekiq workers" do
      before { $redis.flushall }
      after { $redis.flushall }

      # Fakes a running Sidekiq worker by writing process/worker entries
      # directly into Redis, with a job payload targeting either one site
      # (site_id / current site) or all sites.
      def create_workers(site_id: nil, all_sites: false)
        $redis.flushall

        payload = Sidekiq::Testing.fake! do
          data = { post_id: 1 }

          if all_sites
            data[:all_sites] = true
          else
            data[:current_site_id] = site_id || RailsMultisite::ConnectionManagement.current_db
          end

          Jobs.enqueue(:process_post, data)
          Jobs::ProcessPost.jobs.last
        end

        Sidekiq.redis do |conn|
          hostname = "localhost"
          pid = 7890
          key = "#{hostname}:#{pid}"
          process = { pid: pid, hostname: hostname }

          conn.sadd('processes', key)
          conn.hmset(key, 'info', Sidekiq.dump_json(process))

          data = Sidekiq.dump_json(
            queue: 'default',
            run_at: Time.now.to_i,
            payload: Sidekiq.dump_json(payload)
          )
          conn.hmset("#{key}:workers", '444', data)
        end
      end

      it "waits up to 60 seconds for jobs running for the current site to finish" do
        # 10 polls x 6 seconds = the 60 second cap.
        subject.expects(:sleep).with(6).times(10)
        create_workers
        expect { subject.wait_for_sidekiq }.to raise_error(BackupRestore::RunningSidekiqJobsError)
      end

      it "waits up to 60 seconds for jobs running on all sites to finish" do
        subject.expects(:sleep).with(6).times(10)
        create_workers(all_sites: true)
        expect { subject.wait_for_sidekiq }.to raise_error(BackupRestore::RunningSidekiqJobsError)
      end

      it "ignores jobs of other sites" do
        subject.expects(:sleep).with(6).once
        create_workers(site_id: "another_site")

        subject.wait_for_sidekiq
      end
    end
  end
end
|
566
spec/lib/backup_restore/uploads_restorer_spec.rb
Normal file
566
spec/lib/backup_restore/uploads_restorer_spec.rb
Normal file
@@ -0,0 +1,566 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
require_relative 'shared_context_for_backup_restore'
|
||||
|
||||
describe BackupRestore::UploadsRestorer do
|
||||
include_context "shared stuff"
|
||||
|
||||
subject { BackupRestore::UploadsRestorer.new(logger) }
|
||||
|
||||
def with_temp_uploads_directory(name: "default", with_optimized: false)
|
||||
Dir.mktmpdir do |directory|
|
||||
path = File.join(directory, "uploads", name)
|
||||
FileUtils.mkdir_p(path)
|
||||
FileUtils.mkdir(File.join(path, "optimized")) if with_optimized
|
||||
yield(directory, path)
|
||||
end
|
||||
end
|
||||
|
||||
def expect_no_remap(source_site_name: nil, target_site_name:, metadata: [])
|
||||
expect_remaps(
|
||||
source_site_name: source_site_name,
|
||||
target_site_name: target_site_name,
|
||||
metadata: metadata
|
||||
)
|
||||
end
|
||||
|
||||
def expect_remap(source_site_name: nil, target_site_name:, metadata: [], from:, to:, &block)
|
||||
expect_remaps(
|
||||
source_site_name: source_site_name,
|
||||
target_site_name: target_site_name,
|
||||
metadata: metadata,
|
||||
remaps: [{ from: from, to: to }],
|
||||
&block
|
||||
)
|
||||
end
|
||||
|
||||
def expect_remaps(source_site_name: nil, target_site_name:, metadata: [], remaps: [], &block)
|
||||
source_site_name ||= metadata.find { |d| d[:name] == "db_name" }&.dig(:value) || "default"
|
||||
|
||||
if source_site_name != target_site_name
|
||||
site_rename = { from: "/uploads/#{source_site_name}/", to: uploads_path(target_site_name) }
|
||||
remaps << site_rename unless remaps.last == site_rename
|
||||
end
|
||||
|
||||
with_temp_uploads_directory(name: source_site_name, with_optimized: true) do |directory, path|
|
||||
yield(directory) if block_given?
|
||||
|
||||
Discourse.store.class.any_instance.expects(:copy_from).with(path).once
|
||||
|
||||
if remaps.blank?
|
||||
DbHelper.expects(:remap).never
|
||||
else
|
||||
DbHelper.expects(:remap).with do |from, to, args|
|
||||
args[:excluded_tables]&.include?("backup_metadata")
|
||||
remaps.shift == { from: from, to: to }
|
||||
end.times(remaps.size)
|
||||
end
|
||||
|
||||
if target_site_name == "default"
|
||||
setup_and_restore(directory, metadata)
|
||||
else
|
||||
test_multisite_connection(target_site_name) { setup_and_restore(directory, metadata) }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def setup_and_restore(directory, metadata)
|
||||
metadata.each { |d| BackupMetadata.create!(d) }
|
||||
subject.restore(directory)
|
||||
end
|
||||
|
||||
# Builds the site-relative uploads prefix (with leading and trailing slash)
# for the given database name, e.g. "/uploads/default/".
# In parallel test runs the per-worker number is appended so workers don't
# collide on the same directory.
def uploads_path(database)
  segments = ["uploads", database]
  segments << (ENV['TEST_ENV_NUMBER'].presence || '1') if Discourse.is_parallel_test?
  "/#{File.join(*segments)}/"
end
|
||||
|
||||
context "uploads" do
|
||||
let!(:multisite) { { name: "multisite", value: true } }
|
||||
let!(:no_multisite) { { name: "multisite", value: false } }
|
||||
let!(:source_db_name) { { name: "db_name", value: "foo" } }
|
||||
let!(:base_url) { { name: "base_url", value: "https://www.example.com/forum" } }
|
||||
let!(:no_cdn_url) { { name: "cdn_url", value: nil } }
|
||||
let!(:cdn_url) { { name: "cdn_url", value: "https://some-cdn.example.com" } }
|
||||
let(:target_site_name) { target_site_type == multisite ? "second" : "default" }
|
||||
let(:target_hostname) { target_site_type == multisite ? "test2.localhost" : "test.localhost" }
|
||||
|
||||
# Shared examples: restoring is a no-op when the backup ships no uploads.
shared_context "no uploads" do
  it "does nothing when temporary uploads directory is missing or empty" do
    # The store must never be asked to copy anything.
    store_class.any_instance.expects(:copy_from).never

    Dir.mktmpdir do |tmpdir|
      # Case 1: no "uploads" directory at all.
      subject.restore(tmpdir)

      # Case 2: an "uploads" directory exists but is empty.
      FileUtils.mkdir(File.join(tmpdir, "uploads"))
      subject.restore(tmpdir)
    end
  end
end
|
||||
|
||||
# Remapping behaviour when the backup carries no backup_metadata at all.
shared_examples "without metadata" do
  it "correctly remaps uploads" do
    # Same site name on both ends => nothing to remap.
    expect_no_remap(target_site_name: "default")
  end

  it "correctly remaps when site name is different" do
    # Paths under the old site name must be rewritten to the new one.
    expect_remap(
      source_site_name: "foo",
      target_site_name: "default",
      from: "/uploads/foo/",
      to: uploads_path("default")
    )
  end
end
|
||||
|
||||
# Shared examples covering a full uploads restore (file copy, post rebaking,
# avatar thumbnail jobs) for whichever store_class the including context sets.
shared_context "restores uploads" do
  before do
    Upload.where("id > 0").destroy_all
    Fabricate(:optimized_image)

    # NOTE(review): `upload` is never referenced afterwards and the post's raw
    # is empty — the original raw probably embedded the upload's URL and the
    # markup may have been lost in extraction; verify against upstream.
    upload = Fabricate(:upload_s3)
    post = Fabricate(:post, raw: "")
    post.link_post_uploads

    FileHelper.stubs(:download).returns(file_from_fixtures("logo.png"))
    # NOTE(review): mocha's `returns` ignores an attached block, so this stub
    # returns nil; presumably the File.join value was meant to be passed as an
    # argument instead — confirm intent.
    FileStore::S3Store.any_instance.stubs(:store_upload).returns do
      File.join(
        "//s3-upload-bucket.s3.dualstack.us-east-1.amazonaws.com",
        target_site_type == multisite ? "/uploads/#{target_site_name}" : "",
        "original/1X/bc975735dfc6409c1c2aa5ebf2239949bcbdbd65.png"
      )
    end
    UserAvatar.import_url_for_user("logo.png", Fabricate(:user))
  end

  it "successfully restores uploads" do
    # Optimized images were wiped above, so site icons must be regenerated.
    SiteIconManager.expects(:ensure_optimized!).once

    with_temp_uploads_directory do |directory, path|
      store_class.any_instance.expects(:copy_from).with(path).once

      expect { subject.restore(directory) }
        .to change { OptimizedImage.count }.by_at_most(-1)
        .and change { Jobs::CreateAvatarThumbnails.jobs.size }.by(1)
        .and change { Post.where(baked_version: nil).count }.by(1)
    end
  end

  it "doesn't generate optimized images when backup contains optimized images" do
    SiteIconManager.expects(:ensure_optimized!).never

    with_temp_uploads_directory(with_optimized: true) do |directory, path|
      store_class.any_instance.expects(:copy_from).with(path).once

      # Optimized images come straight from the backup, so none are created
      # and no thumbnail jobs are enqueued — but posts are still rebaked.
      expect { subject.restore(directory) }
        .to change { OptimizedImage.count }.by(0)
        .and change { Jobs::CreateAvatarThumbnails.jobs.size }.by(0)
        .and change { Post.where(baked_version: nil).count }.by(1)
    end
  end
end
|
||||
|
||||
# Remaps that apply regardless of where uploads were previously stored.
shared_examples "common remaps" do
  it "remaps when `base_url` changes" do
    Discourse.expects(:base_url).returns("http://localhost").at_least_once

    expect_remap(
      target_site_name: target_site_name,
      metadata: [source_site_type, base_url],
      from: "https://www.example.com/forum",
      to: "http://localhost"
    )
  end

  it "doesn't remap when `cdn_url` in `backup_metadata` is empty" do
    expect_no_remap(
      target_site_name: target_site_name,
      metadata: [source_site_type, no_cdn_url]
    )
  end

  it "remaps to new `cdn_url` when `cdn_url` changes to a different value" do
    Discourse.expects(:asset_host).returns("https://new-cdn.example.com").at_least_once

    expect_remaps(
      target_site_name: target_site_name,
      metadata: [source_site_type, cdn_url],
      remaps: [
        # Full URLs first, then the bare hostname occurrences.
        { from: "https://some-cdn.example.com/", to: "https://new-cdn.example.com/" },
        { from: "some-cdn.example.com", to: "new-cdn.example.com" }
      ]
    )
  end

  it "remaps to `base_url` when `cdn_url` changes to an empty value" do
    Discourse.expects(:base_url).returns("http://example.com/discourse").at_least_once
    Discourse.expects(:asset_host).returns(nil).at_least_once

    expect_remaps(
      target_site_name: target_site_name,
      metadata: [source_site_type, cdn_url],
      remaps: [
        # Protocol-relative replacement keeps http and https references valid.
        { from: "https://some-cdn.example.com/", to: "//example.com/discourse/" },
        { from: "some-cdn.example.com", to: "example.com" }
      ]
    )
  end
end
|
||||
|
||||
# When the backup was taken from a site storing uploads locally, the S3
# metadata values are nil and must not trigger any remapping.
shared_examples "remaps from local storage" do
  it "doesn't remap when `s3_base_url` in `backup_metadata` is empty" do
    expect_no_remap(
      target_site_name: target_site_name,
      metadata: [source_site_type, s3_base_url]
    )
  end

  it "doesn't remap when `s3_cdn_url` in `backup_metadata` is empty" do
    expect_no_remap(
      target_site_name: target_site_name,
      metadata: [source_site_type, s3_cdn_url]
    )
  end
end
|
||||
|
||||
# Restoring into a site that currently keeps its uploads on local storage.
context "currently stored locally" do
  before do
    SiteSetting.enable_s3_uploads = false
  end

  let!(:store_class) { FileStore::LocalStore }

  include_context "no uploads"
  include_context "restores uploads"

  context "remaps" do
    include_examples "without metadata"

    context "uploads previously stored locally" do
      # Backups from a locally-stored site carry no S3 metadata.
      let!(:s3_base_url) { { name: "s3_base_url", value: nil } }
      let!(:s3_cdn_url) { { name: "s3_cdn_url", value: nil } }

      context "from regular site" do
        let!(:source_site_type) { no_multisite }

        context "to regular site" do
          let!(:target_site_type) { no_multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end

        context "to multisite", type: :multisite do
          let!(:target_site_type) { multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end
      end

      context "from multisite" do
        let!(:source_site_type) { multisite }

        context "to regular site" do
          let!(:target_site_type) { no_multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end

        context "to multisite", type: :multisite do
          let!(:target_site_type) { multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end
      end
    end

    context "uploads previously stored on S3" do
      let!(:s3_base_url) { { name: "s3_base_url", value: "//old-bucket.s3-us-east-1.amazonaws.com" } }
      let!(:s3_cdn_url) { { name: "s3_cdn_url", value: "https://s3-cdn.example.com" } }

      shared_examples "regular site remaps from S3" do
        it "remaps when `s3_base_url` changes" do
          expect_remap(
            target_site_name: target_site_name,
            metadata: [no_multisite, s3_base_url],
            from: "//old-bucket.s3-us-east-1.amazonaws.com/",
            to: uploads_path(target_site_name)
          )
        end

        it "remaps when `s3_cdn_url` changes" do
          expect_remaps(
            target_site_name: target_site_name,
            metadata: [no_multisite, s3_cdn_url],
            remaps: [
              { from: "https://s3-cdn.example.com/", to: "//#{target_hostname}#{uploads_path(target_site_name)}" },
              { from: "s3-cdn.example.com", to: target_hostname }
            ]
          )
        end
      end

      shared_examples "multisite remaps from S3" do
        it "remaps when `s3_base_url` changes" do
          expect_remap(
            target_site_name: target_site_name,
            metadata: [source_db_name, multisite, s3_base_url],
            from: "//old-bucket.s3-us-east-1.amazonaws.com/",
            to: "/"
          )
        end

        it "remaps when `s3_cdn_url` changes" do
          expect_remaps(
            target_site_name: target_site_name,
            metadata: [source_db_name, multisite, s3_cdn_url],
            remaps: [
              { from: "https://s3-cdn.example.com/", to: "//#{target_hostname}/" },
              { from: "s3-cdn.example.com", to: target_hostname }
            ]
          )
        end
      end

      context "from regular site" do
        let!(:source_site_type) { no_multisite }

        context "to regular site" do
          let!(:target_site_type) { no_multisite }

          include_examples "common remaps"
          include_examples "regular site remaps from S3"
        end

        context "to multisite", type: :multisite do
          let!(:target_site_type) { multisite }

          include_examples "common remaps"
          include_examples "regular site remaps from S3"
        end
      end

      context "from multisite" do
        let!(:source_site_type) { multisite }

        context "to regular site" do
          let!(:target_site_type) { no_multisite }

          include_examples "common remaps"
          include_examples "multisite remaps from S3"
        end

        context "to multisite", type: :multisite do
          # FIX: was `no_multisite` — copy-paste error. A "to multisite"
          # target must use the multisite site type (as the parallel
          # "to multisite" contexts above do), otherwise this block merely
          # duplicated the "to regular site" expectations.
          let!(:target_site_type) { multisite }

          include_examples "common remaps"
          include_examples "multisite remaps from S3"
        end
      end
    end
  end
end
|
||||
|
||||
# Restoring into a site that currently keeps its uploads on S3.
context "currently stored on S3" do
  before do
    SiteSetting.s3_upload_bucket = "s3-upload-bucket"
    SiteSetting.s3_access_key_id = "s3-access-key-id"
    SiteSetting.s3_secret_access_key = "s3-secret-access-key"
    SiteSetting.enable_s3_uploads = true
  end

  let!(:store_class) { FileStore::S3Store }

  include_context "no uploads"
  include_context "restores uploads"

  context "remaps" do
    include_examples "without metadata"

    context "uploads previously stored locally" do
      # Backups from a locally-stored site carry no S3 metadata.
      let!(:s3_base_url) { { name: "s3_base_url", value: nil } }
      let!(:s3_cdn_url) { { name: "s3_cdn_url", value: nil } }

      context "from regular site" do
        let!(:source_site_type) { no_multisite }

        context "to regular site" do
          let!(:target_site_type) { no_multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end

        context "to multisite", type: :multisite do
          # FIX: was `no_multisite` — copy-paste error. A "to multisite"
          # target must use the multisite site type (matches the parallel
          # "to multisite" contexts elsewhere in this file), otherwise this
          # block merely duplicated the "to regular site" expectations.
          let!(:target_site_type) { multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end
      end

      context "from multisite" do
        let!(:source_site_type) { multisite }

        context "to regular site" do
          let!(:target_site_type) { no_multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end

        context "to multisite", type: :multisite do
          let!(:target_site_type) { multisite }

          include_examples "common remaps"
          include_examples "remaps from local storage"
        end
      end
    end

    context "uploads previously stored on S3" do
      let!(:s3_base_url) { { name: "s3_base_url", value: "//old-bucket.s3-us-east-1.amazonaws.com" } }
      let!(:s3_cdn_url) { { name: "s3_cdn_url", value: "https://s3-cdn.example.com" } }

      shared_examples "regular site remaps from S3" do
        it "remaps when `s3_base_url` changes" do
          expect_remap(
            target_site_name: target_site_name,
            metadata: [no_multisite, s3_base_url],
            from: "//old-bucket.s3-us-east-1.amazonaws.com/",
            to: uploads_path(target_site_name)
          )
        end

        it "remaps when `s3_cdn_url` changes" do
          SiteSetting::Upload.expects(:s3_cdn_url).returns("https://new-s3-cdn.example.com").at_least_once

          expect_remaps(
            target_site_name: target_site_name,
            metadata: [no_multisite, s3_cdn_url],
            remaps: [
              { from: "https://s3-cdn.example.com/", to: "https://new-s3-cdn.example.com#{uploads_path(target_site_name)}" },
              { from: "s3-cdn.example.com", to: "new-s3-cdn.example.com" }
            ]
          )
        end
      end

      shared_examples "multisite remaps from S3" do
        it "remaps when `s3_base_url` changes" do
          expect_remap(
            target_site_name: target_site_name,
            metadata: [source_db_name, multisite, s3_base_url],
            from: "//old-bucket.s3-us-east-1.amazonaws.com/",
            to: "/"
          )
        end

        context "when `s3_cdn_url` is configured" do
          it "remaps when `s3_cdn_url` changes" do
            SiteSetting::Upload.expects(:s3_cdn_url).returns("http://new-s3-cdn.example.com").at_least_once

            expect_remaps(
              target_site_name: target_site_name,
              metadata: [source_db_name, multisite, s3_cdn_url],
              remaps: [
                { from: "https://s3-cdn.example.com/", to: "//new-s3-cdn.example.com/" },
                { from: "s3-cdn.example.com", to: "new-s3-cdn.example.com" }
              ]
            )
          end
        end

        context "when `s3_cdn_url` is not configured" do
          it "remaps to `base_url` when `s3_cdn_url` changes" do
            SiteSetting::Upload.expects(:s3_cdn_url).returns(nil).at_least_once

            expect_remaps(
              target_site_name: target_site_name,
              metadata: [source_db_name, multisite, s3_cdn_url],
              remaps: [
                { from: "https://s3-cdn.example.com/", to: "//#{target_hostname}/" },
                { from: "s3-cdn.example.com", to: target_hostname }
              ]
            )
          end
        end
      end

      context "from regular site" do
        let!(:source_site_type) { no_multisite }

        # NOTE(review): these two contexts override target_site_name and
        # target_hostname directly instead of target_site_type; behaviourally
        # equivalent to the pattern used elsewhere, kept as-is.
        context "to regular site" do
          let!(:target_site_name) { "default" }
          let!(:target_hostname) { "test.localhost" }

          include_examples "common remaps"
          include_examples "regular site remaps from S3"
        end

        context "to multisite", type: :multisite do
          let!(:target_site_name) { "second" }
          let!(:target_hostname) { "test2.localhost" }

          include_examples "common remaps"
          include_examples "regular site remaps from S3"
        end
      end

      context "from multisite" do
        let!(:source_site_type) { multisite }

        context "to regular site" do
          let!(:target_site_type) { no_multisite }

          include_examples "common remaps"
          include_examples "multisite remaps from S3"
        end

        context "to multisite", type: :multisite do
          let!(:target_site_type) { multisite }

          include_examples "common remaps"
          include_examples "multisite remaps from S3"
        end
      end
    end
  end
end
|
||||
end
|
||||
|
||||
it "raises an exception when the store doesn't support the copy_from method" do
  # A bare Object has no #copy_from, so the restorer must fail loudly.
  store_without_copy_from = Object.new
  Discourse.stubs(:store).returns(store_without_copy_from)

  with_temp_uploads_directory do |tmp_directory|
    expect { subject.restore(tmp_directory) }
      .to raise_error(BackupRestore::UploadsRestoreError)
  end
end
|
||||
|
||||
it "raises an exception when there are multiple folders in the uploads directory" do
  with_temp_uploads_directory do |tmp_directory|
    # A second top-level folder makes the backup layout ambiguous.
    FileUtils.mkdir_p(File.join(tmp_directory, "uploads", "foo"))

    expect { subject.restore(tmp_directory) }
      .to raise_error(BackupRestore::UploadsRestoreError)
  end
end
|
||||
|
||||
it "ignores 'PaxHeaders' and hidden directories within the uploads directory" do
  expect_remap(
    source_site_name: "xylan",
    target_site_name: "default",
    from: "/uploads/xylan/",
    to: uploads_path("default")
  ) do |directory|
    # tar extraction artifacts and dotfiles must not be mistaken for site folders.
    FileUtils.mkdir_p(File.join(directory, "uploads", "PaxHeaders.27134"))
    FileUtils.mkdir_p(File.join(directory, "uploads", ".hidden"))
  end
end
|
||||
end
|
Reference in New Issue
Block a user