Skip to content

Commit

Permalink
Add Paginator to Recursively Delete All Objects Before Delete Bucket (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
zliang-akamai authored Aug 5, 2024
1 parent 80c54a7 commit 8949bb8
Show file tree
Hide file tree
Showing 4 changed files with 79 additions and 18 deletions.
3 changes: 2 additions & 1 deletion linodecli/plugins/obj/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,8 @@ def get_client():
COMMAND_MAP[parsed.command](
get_client, args, suppress_warnings=parsed.suppress_warnings
)
except ClientError:
except ClientError as e:
print(e)
sys.exit(ExitCodes.REQUEST_FAILED)
elif parsed.command == "regenerate-keys":
regenerate_s3_credentials(
Expand Down
17 changes: 2 additions & 15 deletions linodecli/plugins/obj/buckets.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from linodecli.exit_codes import ExitCodes
from linodecli.plugins import inherit_plugin_args
from linodecli.plugins.obj.config import PLUGIN_BASE
from linodecli.plugins.obj.helpers import _delete_all_objects


def create_bucket(
Expand Down Expand Up @@ -61,23 +62,9 @@ def delete_bucket(
bucket_name = parsed.name

if parsed.recursive:
objects = [
{"Key": obj.get("Key")}
for obj in client.list_objects_v2(Bucket=bucket_name).get(
"Contents", []
)
if obj.get("Key")
]
client.delete_objects(
Bucket=bucket_name,
Delete={
"Objects": objects,
"Quiet": False,
},
)
_delete_all_objects(client, bucket_name)

client.delete_bucket(Bucket=bucket_name)

print(f"Bucket {parsed.name} removed")

sys.exit(ExitCodes.SUCCESS)
47 changes: 47 additions & 0 deletions linodecli/plugins/obj/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,3 +142,50 @@ def flip_to_page(iterable: Iterable, page: int = 1):
sys.exit(ExitCodes.REQUEST_FAILED)

return next(iterable)


def _get_objects_for_deletion_from_page(object_type, page, versioned=False):
return [
(
{"Key": obj["Key"], "VersionId": obj["VersionId"]}
if versioned
else {"Key": obj["Key"]}
)
for obj in page.get(object_type, [])
]


def _delete_all_objects(client, bucket_name):
    """
    Delete every current object, object version, and delete marker in a
    bucket, paginating so buckets with more than 1000 keys are fully
    emptied one batch at a time.

    :param client: A boto3 S3 client for the target cluster.
    :param bucket_name: Name of the bucket to empty.
    """

    def _batch_delete(objects):
        # S3 rejects a delete_objects request whose Objects list is
        # empty (MalformedXML), so skip pages that yielded no keys —
        # e.g. an empty bucket, or a versions page containing only
        # delete markers (and vice versa).
        if objects:
            client.delete_objects(
                Bucket=bucket_name,
                Delete={
                    "Objects": objects,
                    "Quiet": False,
                },
            )

    # First pass: remove all current objects.
    for page in client.get_paginator("list_objects_v2").paginate(
        Bucket=bucket_name, PaginationConfig={"PageSize": 1000}
    ):
        _batch_delete(_get_objects_for_deletion_from_page("Contents", page))

    # Second pass: remove noncurrent versions and delete markers, which
    # would otherwise keep a versioned bucket non-empty.
    for page in client.get_paginator("list_object_versions").paginate(
        Bucket=bucket_name, PaginationConfig={"PageSize": 1000}
    ):
        _batch_delete(
            _get_objects_for_deletion_from_page("Versions", page, True)
        )
        _batch_delete(
            _get_objects_for_deletion_from_page("DeleteMarkers", page, True)
        )
30 changes: 28 additions & 2 deletions tests/integration/obj/test_obj_plugin.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
import logging
from concurrent.futures import ThreadPoolExecutor, wait
from dataclasses import dataclass
from typing import Callable, Optional

Expand Down Expand Up @@ -94,8 +95,8 @@ def _create_bucket(bucket_name: Optional[str] = None):
for bk in created_buckets:
try:
delete_bucket(bk)
except:
logging.exception(f"Failed to cleanup bucket: {bk}")
except Exception as e:
logging.exception(f"Failed to cleanup bucket: {bk}, {e}")


def delete_bucket(bucket_name: str, force: bool = True):
Expand Down Expand Up @@ -210,6 +211,31 @@ def test_multi_files_multi_bucket(
assert "Done" in output


@pytest.mark.parametrize("num_files", [1005])
def test_large_number_of_files_single_bucket_parallel(
    create_bucket: Callable[[Optional[str]], str],
    generate_test_files: GetTestFilesType,
    keys: Keys,
    monkeypatch: MonkeyPatch,
    num_files: int,
):
    """
    Upload more than 1000 files so the recursive bucket deletion in
    cleanup must paginate; uploads run in parallel to keep the test fast.
    """
    patch_keys(keys, monkeypatch)

    bucket_name = create_bucket()
    file_paths = generate_test_files(num_files)

    with ThreadPoolExecutor(50) as executor:
        futures = [
            executor.submit(
                exec_test_command,
                BASE_CMD + ["put", str(file.resolve()), bucket_name],
            )
            for file in file_paths
        ]

        # result() re-raises any exception from the worker thread, so a
        # failed upload fails the test instead of being silently dropped
        # by a bare wait(futures).
        for future in futures:
            future.result()


def test_all_rows(
create_bucket: Callable[[Optional[str]], str],
generate_test_files: GetTestFilesType,
Expand Down

0 comments on commit 8949bb8

Please sign in to comment.