Mirror of https://github.com/discourse/discourse.git, synced 2025-06-01 17:40:43 +08:00
FIX: S3 inventory data can be split into multiple CSV files
@@ -17,8 +17,7 @@ describe "S3Inventory" do
     SiteSetting.enable_s3_inventory = true

     client.stub_responses(:list_objects, -> (context) {
-      expect(context.params[:prefix]).to eq("#{S3Inventory::INVENTORY_PREFIX}/#{S3Inventory::INVENTORY_VERSION}/bucket/original/hive")
+      inventory_data_path = "#{S3Inventory::INVENTORY_PREFIX}/#{S3Inventory::INVENTORY_VERSION}/bucket/original/data"
+      expect(context.params[:prefix]).to eq(inventory_data_path)

       {
         contents: [
@@ -50,10 +49,6 @@ describe "S3Inventory" do
     })
   end

-  it "should return the latest inventory file name" do
-    expect(inventory.file.key).to eq("example1.csv.gz")
-  end
-
   it "should raise error if an inventory file is not found" do
     client.stub_responses(:list_objects, contents: [])
     output = capture_stdout { inventory.list_missing }
@@ -69,14 +64,14 @@ describe "S3Inventory" do
     upload = Fabricate(:upload, etag: "ETag", created_at: 1.days.ago)
     Fabricate(:upload, etag: "ETag2", created_at: Time.now)

-    inventory.expects(:decompress_inventory_file)
-    inventory.expects(:csv_filename).returns(csv_filename)
-    inventory.file.expects(:last_modified).returns(Time.now)
+    inventory.expects(:decompress_inventory_files)
+    inventory.expects(:files).returns([{ key: "Key", filename: "#{csv_filename}.gz" }]).at_least(1)
+    inventory.expects(:last_modified).returns(Time.now)

     output = capture_stdout do
       inventory.list_missing
     end

-    expect(output).to eq("Downloading inventory file to tmp directory...\n#{upload.url}\n1 of 4 uploads are missing\n")
+    expect(output).to eq("Downloading inventory file 'Key' to tmp directory...\n#{upload.url}\n1 of 4 uploads are missing\n")
   end
 end
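Context for the change: the spec no longer assumes a single inventory file. It stubs inventory.files to return a list of file descriptors and expects decompress_inventory_files (plural) to be called, since an S3 inventory export can be split across several gzipped CSV chunks. As a rough illustration only, not the actual Discourse implementation, iterating over such chunks could look like the sketch below; the helper name each_inventory_row and the assumption that the chunks are already on local disk are hypothetical, though the { filename: ... } shape mirrors what the spec stubs.

require "zlib"
require "csv"

# Hypothetical sketch: read every gzipped CSV chunk of an inventory export
# and yield each row. Each entry is expected to look like
# { key: "...", filename: "/path/to/chunk.csv.gz" }.
def each_inventory_row(files)
  files.each do |file|
    Zlib::GzipReader.open(file[:filename]) do |gz|
      CSV.new(gz).each { |row| yield row }
    end
  end
end

The updated output assertion ("Downloading inventory file 'Key' to tmp directory...") likewise names the individual file being downloaded, since there can now be more than one.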