FEATURE: Support backup uploads/downloads directly to/from S3.

This reverts commit 3c59106bac4d79f39981bda3ff9db7786c1a78a0.
This commit is contained in:
Guo Xiang Tan
2018-10-15 09:43:31 +08:00
parent 6a59187ae8
commit 84d4c81a26
52 changed files with 1079 additions and 420 deletions

View File

@@ -0,0 +1,56 @@
require 'rails_helper'
require 'backup_restore/local_backup_store'
require_relative 'shared_examples_for_backup_store'
describe BackupRestore::LocalBackupStore do
  # Shared scratch directory for all examples; created once and removed at the end.
  before(:all) do
    @base_directory = Dir.mktmpdir
    @paths = []
  end

  after(:all) do
    FileUtils.remove_dir(@base_directory, true)
  end

  before do
    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
  end

  subject(:store) { BackupRestore::BackupStore.create(base_directory: @base_directory) }
  let(:expected_type) { BackupRestore::LocalBackupStore }

  it_behaves_like "backup store"

  it "is not a remote store" do
    expect(store.remote?).to eq(false)
  end

  # Creates the fixture files expected by the shared "backup store" examples.
  # "no-backup.txt" must be ignored by the store because of its extension.
  def create_backups
    create_file(filename: "b.tar.gz", last_modified: "2018-09-13T15:10:00Z", size_in_bytes: 17)
    create_file(filename: "a.tgz", last_modified: "2018-02-11T09:27:00Z", size_in_bytes: 29)
    create_file(filename: "r.sql.gz", last_modified: "2017-12-20T03:48:00Z", size_in_bytes: 11)
    create_file(filename: "no-backup.txt", last_modified: "2018-09-05T14:27:00Z", size_in_bytes: 12)
  end

  def remove_backups
    # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
    @paths.each { |path| File.delete(path) if File.exist?(path) }
    @paths.clear
  end

  # Creates an empty file of the given size with the given mtime so that
  # size/last_modified assertions in the shared examples hold.
  def create_file(filename:, last_modified:, size_in_bytes:)
    path = File.join(@base_directory, filename)
    return if File.exist?(path)

    @paths << path
    FileUtils.touch(path)
    File.truncate(path, size_in_bytes)

    time = Time.parse(last_modified)
    File.utime(time, time, path)
  end

  # For a local store the download "source" is simply the absolute file path.
  def source_regex(filename)
    path = File.join(@base_directory, filename)
    /^#{Regexp.escape(path)}$/
  end
end

View File

@@ -0,0 +1,109 @@
require 'rails_helper'
require 'backup_restore/s3_backup_store'
require_relative 'shared_examples_for_backup_store'
describe BackupRestore::S3BackupStore do
  before(:all) do
    # Stubbed S3 client: no network calls. Every stub asserts that requests
    # target the configured backup bucket and emulates S3 against the
    # in-memory @objects list.
    @s3_client = Aws::S3::Client.new(stub_responses: true)
    @s3_options = { client: @s3_client }
    @objects = []

    @s3_client.stub_responses(:list_objects, -> (context) do
      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
      expect(context.params[:prefix]).to be_blank
      { contents: @objects }
    end)

    @s3_client.stub_responses(:delete_object, -> (context) do
      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
      # Deleting must actually remove an object; the `change` matcher guards
      # against deletes of non-existent keys silently passing.
      expect do
        @objects.delete_if { |obj| obj[:key] == context.params[:key] }
      end.to change { @objects }
    end)

    @s3_client.stub_responses(:head_object, -> (context) do
      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
        { content_length: object[:size], last_modified: object[:last_modified] }
      else
        { status_code: 404, headers: {}, body: "", }
      end
    end)

    @s3_client.stub_responses(:get_object, -> (context) do
      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
        { content_length: object[:size], body: "A" * object[:size] }
      else
        { status_code: 404, headers: {}, body: "", }
      end
    end)

    @s3_client.stub_responses(:put_object, -> (context) do
      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
      @objects << {
        key: context.params[:key],
        size: context.params[:body].size,
        last_modified: Time.zone.now
      }
    end)
  end

  before do
    SiteSetting.s3_backup_bucket = "s3-backup-bucket"
    SiteSetting.s3_access_key_id = "s3-access-key-id"
    SiteSetting.s3_secret_access_key = "s3-secret-access-key"
    SiteSetting.backup_location = BackupLocationSiteSetting::S3
  end

  subject(:store) { BackupRestore::BackupStore.create(s3_options: @s3_options) }
  let(:expected_type) { BackupRestore::S3BackupStore }

  it_behaves_like "backup store"
  it_behaves_like "remote backup store"

  context "S3 specific behavior" do
    before { create_backups }
    after(:all) { remove_backups }

    it "doesn't delete files when cleanup is disabled" do
      SiteSetting.maximum_backups = 1
      SiteSetting.s3_disable_cleanup = true
      expect { store.delete_old }.to_not change { store.files }
    end
  end

  # Seeds the in-memory object list with the fixtures expected by the shared
  # examples; "no-backup.txt" must be filtered out by the store.
  def create_backups
    @objects.clear
    @objects << { key: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
    @objects << { key: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z") }
    @objects << { key: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z") }
    @objects << { key: "no-backup.txt", size: 12, last_modified: Time.parse("2018-09-05T14:27:00Z") }
  end

  def remove_backups
    @objects.clear
  end

  # Presigned download URLs must point at the bucket and filename and carry
  # the expected X-Amz-Expires / X-Amz-Signature query parameters.
  # (Restored `#{filename}` interpolation that was garbled in the source.)
  def source_regex(filename)
    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
    filename = Regexp.escape(filename)
    expires = BackupRestore::S3BackupStore::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS

    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
  end

  # Same as source_regex, but with the (longer) upload expiry.
  def upload_url_regex(filename)
    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
    filename = Regexp.escape(filename)
    expires = BackupRestore::S3BackupStore::UPLOAD_URL_EXPIRES_AFTER_SECONDS

    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
  end
end

View File

@@ -0,0 +1,176 @@
shared_context "backups" do
  before { create_backups }
  after(:all) { remove_backups }

  # Fixture files known to every store implementation, newest first.
  let(:backup1) do
    BackupFile.new(
      filename: "b.tar.gz",
      size: 17,
      last_modified: Time.parse("2018-09-13T15:10:00Z")
    )
  end

  let(:backup2) do
    BackupFile.new(
      filename: "a.tgz",
      size: 29,
      last_modified: Time.parse("2018-02-11T09:27:00Z")
    )
  end

  let(:backup3) do
    BackupFile.new(
      filename: "r.sql.gz",
      size: 11,
      last_modified: Time.parse("2017-12-20T03:48:00Z")
    )
  end
end
# Contract shared by every backup store implementation (local and remote).
# Including specs must provide: `store`, `expected_type`, `create_backups`,
# `remove_backups` and `source_regex`.
shared_examples "backup store" do
  it "creates the correct backup store" do
    expect(store).to be_a(expected_type)
  end

  context "without backup files" do
    describe "#files" do
      it "returns an empty array when there are no files" do
        expect(store.files).to be_empty
      end
    end

    describe "#latest_file" do
      it "returns nil when there are no files" do
        expect(store.latest_file).to be_nil
      end
    end
  end

  context "with backup files" do
    include_context "backups"

    describe "#files" do
      it "sorts files by last modified date in descending order" do
        expect(store.files).to eq([backup1, backup2, backup3])
      end

      it "returns only *.gz and *.tgz files" do
        files = store.files
        expect(files).to_not be_empty
        expect(files.map(&:filename)).to contain_exactly(backup1.filename, backup2.filename, backup3.filename)
      end
    end

    describe "#latest_file" do
      it "returns the most recent backup file" do
        expect(store.latest_file).to eq(backup1)
      end

      it "returns nil when there are no files" do
        store.files.each { |file| store.delete_file(file.filename) }
        expect(store.latest_file).to be_nil
      end
    end

    describe "#delete_old" do
      it "does nothing if the number of files is <= maximum_backups" do
        SiteSetting.maximum_backups = 3
        store.delete_old
        expect(store.files).to eq([backup1, backup2, backup3])
      end

      it "deletes files starting by the oldest" do
        SiteSetting.maximum_backups = 1
        store.delete_old
        expect(store.files).to eq([backup1])
      end
    end

    describe "#file" do
      it "returns information about the file when the file exists" do
        expect(store.file(backup1.filename)).to eq(backup1)
      end

      it "returns nil when the file doesn't exist" do
        expect(store.file("foo.gz")).to be_nil
      end

      it "includes the file's source location if it is requested" do
        file = store.file(backup1.filename, include_download_source: true)
        expect(file.source).to match(source_regex(backup1.filename))
      end
    end

    describe "#delete_file" do
      it "deletes file when the file exists" do
        expect(store.files).to include(backup1)
        store.delete_file(backup1.filename)
        expect(store.files).to_not include(backup1)
        expect(store.file(backup1.filename)).to be_nil
      end

      it "does nothing when the file doesn't exist" do
        expect { store.delete_file("foo.gz") }.to_not change { store.files }
      end
    end

    describe "#download_file" do
      it "downloads file to the destination" do
        filename = backup1.filename

        Dir.mktmpdir do |path|
          destination_path = File.join(path, File.basename(filename))
          store.download_file(filename, destination_path)

          # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
          expect(File.exist?(destination_path)).to eq(true)
          expect(File.size(destination_path)).to eq(backup1.size)
        end
      end

      it "raises an exception when the download fails" do
        filename = backup1.filename
        # The tmpdir is deleted when the block returns, so the destination
        # directory no longer exists and the download must fail.
        destination_path = Dir.mktmpdir { |path| File.join(path, File.basename(filename)) }
        expect { store.download_file(filename, destination_path) }.to raise_exception(StandardError)
      end
    end
  end
end
# Additional contract for remote stores. Including specs must also provide
# `upload_url_regex`.
shared_examples "remote backup store" do
  it "is a remote store" do
    expect(store.remote?).to eq(true)
  end

  context "with backups" do
    include_context "backups"

    describe "#upload_file" do
      it "uploads file into store" do
        freeze_time

        backup = BackupFile.new(
          filename: "foo.tar.gz",
          size: 33,
          last_modified: Time.zone.now
        )

        expect(store.files).to_not include(backup)

        Tempfile.create(backup.filename) do |file|
          file.write("A" * backup.size)
          file.close
          store.upload_file(backup.filename, file.path, "application/gzip")
        end

        expect(store.files).to include(backup)
        expect(store.file(backup.filename)).to eq(backup)
      end

      it "raises an exception when a file with same filename exists" do
        Tempfile.create(backup1.filename) do |file|
          expect { store.upload_file(backup1.filename, file.path, "application/gzip") }
            .to raise_exception(BackupRestore::BackupStore::BackupFileExists)
        end
      end
    end

    describe "#generate_upload_url" do
      it "generates upload URL" do
        filename = "foo.tar.gz"
        url = store.generate_upload_url(filename)

        expect(url).to match(upload_url_regex(filename))
      end

      # Fixed typo in the example description: "exeption" -> "exception".
      it "raises an exception when a file with same filename exists" do
        expect { store.generate_upload_url(backup1.filename) }
          .to raise_exception(BackupRestore::BackupStore::BackupFileExists)
      end
    end
  end
end