Fix multipart upload listing API and remove duplicate upload notification
@@ -921,6 +921,7 @@ def _maybe_handle_bucket_subresource(bucket_name: str) -> Response | None:
         "object-lock": _bucket_object_lock_handler,
         "notification": _bucket_notification_handler,
         "logging": _bucket_logging_handler,
+        "uploads": _bucket_uploads_handler,
     }
     requested = [key for key in handlers if key in request.args]
     if not requested:
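Note: with the "uploads" entry registered above, a bare GET /<bucket>?uploads request is now routed to _bucket_uploads_handler. A minimal sketch of exercising the new route through Flask's test client follows; the app import path and the bearer-token header are assumptions for illustration, not part of this commit.

    # Sketch only: the import path and credential scheme below are assumed.
    from app import app  # hypothetical module exposing the Flask app

    client = app.test_client()
    resp = client.get(
        "/my-bucket",
        query_string={"uploads": "", "max-uploads": 10},
        headers={"Authorization": "Bearer dev-token"},  # placeholder credential
    )
    print(resp.status_code)    # 200 when the bucket exists and listing is allowed
    print(resp.data.decode())  # ListMultipartUploadsResult XML
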
@@ -1813,6 +1814,72 @@ def _bucket_logging_handler(bucket_name: str) -> Response:
     return Response(status=200)
 
 
+def _bucket_uploads_handler(bucket_name: str) -> Response:
+    if request.method != "GET":
+        return _method_not_allowed(["GET"])
+
+    principal, error = _require_principal()
+    if error:
+        return error
+    try:
+        _authorize_action(principal, bucket_name, "list")
+    except IamError as exc:
+        return _error_response("AccessDenied", str(exc), 403)
+
+    storage = _storage()
+    if not storage.bucket_exists(bucket_name):
+        return _error_response("NoSuchBucket", "Bucket does not exist", 404)
+
+    key_marker = request.args.get("key-marker", "")
+    upload_id_marker = request.args.get("upload-id-marker", "")
+    prefix = request.args.get("prefix", "")
+    delimiter = request.args.get("delimiter", "")
+    try:
+        max_uploads = max(1, min(int(request.args.get("max-uploads", 1000)), 1000))
+    except ValueError:
+        return _error_response("InvalidArgument", "max-uploads must be an integer", 400)
+
+    uploads = storage.list_multipart_uploads(bucket_name, include_orphaned=True)
+
+    if prefix:
+        uploads = [u for u in uploads if u["object_key"].startswith(prefix)]
+    if key_marker:
+        uploads = [u for u in uploads if u["object_key"] > key_marker or
+                   (u["object_key"] == key_marker and upload_id_marker and u["upload_id"] > upload_id_marker)]
+
+    uploads.sort(key=lambda u: (u["object_key"], u["upload_id"]))
+
+    is_truncated = len(uploads) > max_uploads
+    if is_truncated:
+        uploads = uploads[:max_uploads]
+
+    root = Element("ListMultipartUploadsResult", xmlns="http://s3.amazonaws.com/doc/2006-03-01/")
+    SubElement(root, "Bucket").text = bucket_name
+    SubElement(root, "KeyMarker").text = key_marker
+    SubElement(root, "UploadIdMarker").text = upload_id_marker
+    if prefix:
+        SubElement(root, "Prefix").text = prefix
+    if delimiter:
+        SubElement(root, "Delimiter").text = delimiter
+    SubElement(root, "MaxUploads").text = str(max_uploads)
+    SubElement(root, "IsTruncated").text = "true" if is_truncated else "false"
+
+    if is_truncated and uploads:
+        SubElement(root, "NextKeyMarker").text = uploads[-1]["object_key"]
+        SubElement(root, "NextUploadIdMarker").text = uploads[-1]["upload_id"]
+
+    for upload in uploads:
+        upload_el = SubElement(root, "Upload")
+        SubElement(upload_el, "Key").text = upload["object_key"]
+        SubElement(upload_el, "UploadId").text = upload["upload_id"]
+        if upload.get("created_at"):
+            SubElement(upload_el, "Initiated").text = upload["created_at"]
+        if upload.get("orphaned"):
+            SubElement(upload_el, "StorageClass").text = "ORPHANED"
+
+    return _xml_response(root)
+
+
 def _object_retention_handler(bucket_name: str, object_key: str) -> Response:
     if request.method not in {"GET", "PUT"}:
         return _method_not_allowed(["GET", "PUT"])
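The handler above emits S3-compatible ListMultipartUploadsResult XML, so stock S3 tooling can list in-progress (and now orphaned) uploads. A hedged example with boto3 follows; the endpoint URL, credentials, and bucket name are placeholders, and it assumes the server accepts standard SigV4-signed requests.

    # Sketch only: endpoint, credentials, and bucket name are placeholders.
    import boto3

    s3 = boto3.client(
        "s3",
        endpoint_url="http://localhost:5000",   # assumed local server address
        aws_access_key_id="dev-access-key",     # placeholder credentials
        aws_secret_access_key="dev-secret-key",
    )
    resp = s3.list_multipart_uploads(Bucket="my-bucket", MaxUploads=10)
    for upload in resp.get("Uploads", []):
        # Orphaned uploads surface with StorageClass "ORPHANED" per the handler above.
        print(upload["Key"], upload["UploadId"], upload.get("StorageClass"))
    if resp.get("IsTruncated"):
        print("continue from", resp.get("NextKeyMarker"), resp.get("NextUploadIdMarker"))
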
@@ -1148,47 +1148,57 @@ class ObjectStorage:
         parts.sort(key=lambda x: x["PartNumber"])
         return parts
 
-    def list_multipart_uploads(self, bucket_name: str) -> List[Dict[str, Any]]:
-        """List all active multipart uploads for a bucket."""
+    def list_multipart_uploads(self, bucket_name: str, include_orphaned: bool = False) -> List[Dict[str, Any]]:
+        """List all active multipart uploads for a bucket.
+
+        Args:
+            bucket_name: The bucket to list uploads for.
+            include_orphaned: If True, also include upload directories that have
+                files but no valid manifest.json (orphaned/interrupted uploads).
+        """
         bucket_path = self._bucket_path(bucket_name)
         if not bucket_path.exists():
             raise BucketNotFoundError("Bucket does not exist")
         bucket_id = bucket_path.name
         uploads = []
-        multipart_root = self._multipart_bucket_root(bucket_id)
-        if multipart_root.exists():
-            for upload_dir in multipart_root.iterdir():
-                if not upload_dir.is_dir():
-                    continue
-                manifest_path = upload_dir / "manifest.json"
-                if not manifest_path.exists():
-                    continue
-                try:
-                    manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
-                    uploads.append({
-                        "upload_id": manifest.get("upload_id", upload_dir.name),
-                        "object_key": manifest.get("object_key", ""),
-                        "created_at": manifest.get("created_at", ""),
-                    })
-                except (OSError, json.JSONDecodeError):
-                    continue
-        legacy_root = self._legacy_multipart_bucket_root(bucket_id)
-        if legacy_root.exists():
-            for upload_dir in legacy_root.iterdir():
-                if not upload_dir.is_dir():
-                    continue
-                manifest_path = upload_dir / "manifest.json"
-                if not manifest_path.exists():
-                    continue
-                try:
-                    manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
-                    uploads.append({
-                        "upload_id": manifest.get("upload_id", upload_dir.name),
-                        "object_key": manifest.get("object_key", ""),
-                        "created_at": manifest.get("created_at", ""),
-                    })
-                except (OSError, json.JSONDecodeError):
-                    continue
+        for multipart_root in (
+            self._multipart_bucket_root(bucket_id),
+            self._legacy_multipart_bucket_root(bucket_id),
+        ):
+            if not multipart_root.exists():
+                continue
+            for upload_dir in multipart_root.iterdir():
+                if not upload_dir.is_dir():
+                    continue
+                manifest_path = upload_dir / "manifest.json"
+                if manifest_path.exists():
+                    try:
+                        manifest = json.loads(manifest_path.read_text(encoding="utf-8"))
+                        uploads.append({
+                            "upload_id": manifest.get("upload_id", upload_dir.name),
+                            "object_key": manifest.get("object_key", ""),
+                            "created_at": manifest.get("created_at", ""),
+                        })
+                    except (OSError, json.JSONDecodeError):
+                        if include_orphaned:
+                            has_files = any(upload_dir.rglob("*"))
+                            if has_files:
+                                uploads.append({
+                                    "upload_id": upload_dir.name,
+                                    "object_key": "(unknown)",
+                                    "created_at": "",
+                                    "orphaned": True,
+                                })
+                elif include_orphaned:
+                    has_files = any(f.is_file() for f in upload_dir.rglob("*"))
+                    if has_files:
+                        uploads.append({
+                            "upload_id": upload_dir.name,
+                            "object_key": "(unknown)",
+                            "created_at": "",
+                            "orphaned": True,
+                        })
+
         return uploads
 
     def _bucket_path(self, bucket_name: str) -> Path:
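The rewrite above walks both the current and legacy multipart roots and, when include_orphaned is True, also reports upload directories whose manifest.json is missing or unreadable but that still contain files. For illustration only, the orphan check is restated below as a standalone helper; the helper name and example path are hypothetical and not part of this commit.

    # Simplified restatement of the orphan check; the unreadable-manifest case
    # handled in the except branch above is folded in here for brevity.
    import json
    from pathlib import Path

    def looks_orphaned(upload_dir: Path) -> bool:
        manifest = upload_dir / "manifest.json"
        if manifest.exists():
            try:
                json.loads(manifest.read_text(encoding="utf-8"))
                return False  # readable manifest: a normal in-progress upload
            except (OSError, json.JSONDecodeError):
                pass  # unreadable manifest: treat like a missing one
        return any(f.is_file() for f in upload_dir.rglob("*"))

    print(looks_orphaned(Path("/tmp/multipart/bucket-id/upload-123")))  # example path
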
@@ -2527,7 +2527,6 @@
 
         if (uploadModal) uploadModal.hide();
         showFloatingProgress();
-        showMessage({ title: 'Upload started', body: `Uploading ${files.length} file(s)...`, variant: 'info' });
     }
 
     const fileCount = files.length;