Fix SSE/KMS not encrypting files
@@ -95,6 +95,11 @@ def create_app(
     kms_manager = KMSManager(kms_keys_path, kms_master_key_path)
     encryption_manager.set_kms_provider(kms_manager)

+    # Wrap storage with encryption layer if encryption is enabled
+    if app.config.get("ENCRYPTION_ENABLED", False):
+        from .encrypted_storage import EncryptedObjectStorage
+        storage = EncryptedObjectStorage(storage, encryption_manager)
+
     app.extensions["object_storage"] = storage
     app.extensions["iam"] = iam
     app.extensions["bucket_policies"] = bucket_policies
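Note (not part of the diff): the wiring above assumes EncryptedObjectStorage exposes the same API as the plain storage while encrypting on write and decrypting on read. The class body is not shown in this commit; the following is a minimal sketch only, with the encryption manager's encrypt/decrypt method names and put_object signature assumed for illustration.

# Hypothetical sketch; the real EncryptedObjectStorage in this repo may use
# different method names and metadata handling.
class EncryptedObjectStorageSketch:
    def __init__(self, storage, encryption_manager):
        self.storage = storage                # plain filesystem-backed storage
        self.encryption = encryption_manager  # resolves SSE settings and providers

    def put_object(self, bucket_name, key, data: bytes, metadata=None):
        # Assumed manager API: returns ciphertext plus SSE metadata headers.
        ciphertext, enc_metadata = self.encryption.encrypt(bucket_name, data)
        merged = {**(metadata or {}), **enc_metadata}  # e.g. x-amz-server-side-encryption
        return self.storage.put_object(bucket_name, key, ciphertext, metadata=merged)

    def get_object_data(self, bucket_name, key):
        # get_object_path / get_object_metadata are the accessors used elsewhere in this diff.
        path = self.storage.get_object_path(bucket_name, key)
        metadata = self.storage.get_object_metadata(bucket_name, key)
        plaintext = self.encryption.decrypt(path.read_bytes(), metadata)  # assumed manager API
        return plaintext, metadata

    def __getattr__(self, name):
        # Everything else (list_buckets, delete_object, ...) falls through unchanged.
        return getattr(self.storage, name)

The hasattr(storage, 'get_object_data') checks in the route changes below are what keep this wrapping optional: plain storage simply lacks that method, so the unencrypted paths stay untouched.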
@@ -54,8 +54,10 @@ class EncryptedObjectStorage:
            encryption_config = self.storage.get_bucket_encryption(bucket_name)
            if encryption_config and encryption_config.get("Rules"):
                rule = encryption_config["Rules"][0]
-                algorithm = rule.get("SSEAlgorithm", "AES256")
-                kms_key_id = rule.get("KMSMasterKeyID")
+                # AWS format: Rules[].ApplyServerSideEncryptionByDefault.SSEAlgorithm
+                sse_default = rule.get("ApplyServerSideEncryptionByDefault", {})
+                algorithm = sse_default.get("SSEAlgorithm", "AES256")
+                kms_key_id = sse_default.get("KMSMasterKeyID")
                return True, algorithm, kms_key_id
        except StorageError:
            pass
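Review note on the hunk above: the old code read SSEAlgorithm directly off the rule, but in the AWS ServerSideEncryptionConfiguration document those fields live one level deeper, under ApplyServerSideEncryptionByDefault, so the configured algorithm and KMS key were never picked up. A small illustration of the shape the fixed code expects:

# Shape returned by GetBucketEncryption (AWS ServerSideEncryptionConfiguration).
encryption_config = {
    "Rules": [
        {
            "ApplyServerSideEncryptionByDefault": {
                "SSEAlgorithm": "aws:kms",       # or "AES256"
                "KMSMasterKeyID": "my-key-id",   # only meaningful for aws:kms
            }
        }
    ]
}

rule = encryption_config["Rules"][0]
sse_default = rule.get("ApplyServerSideEncryptionByDefault", {})
assert sse_default.get("SSEAlgorithm", "AES256") == "aws:kms"
assert sse_default.get("KMSMasterKeyID") == "my-key-id"

# The old lookup never finds the keys at the rule's top level, so it silently
# fell back to "AES256" with no KMS key id.
assert rule.get("SSEAlgorithm", "AES256") == "AES256"
assert rule.get("KMSMasterKeyID") is None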
@@ -91,7 +91,8 @@ class KMSEncryptionProvider(EncryptionProvider):
     def decrypt(self, ciphertext: bytes, nonce: bytes, encrypted_data_key: bytes,
                 key_id: str, context: Dict[str, str] | None = None) -> bytes:
         """Decrypt data using envelope encryption with KMS."""
-        data_key = self.kms.decrypt_data_key(key_id, encrypted_data_key, context)
+        # Note: Data key is encrypted without context (AAD), so we decrypt without context
+        data_key = self.kms.decrypt_data_key(key_id, encrypted_data_key, context=None)

         aesgcm = AESGCM(data_key)
         try:
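Review note on the decrypt change: with AEAD, whatever associated data (the encryption context) was supplied when the data key was wrapped must be supplied byte for byte when it is unwrapped. Since the data key here was wrapped without a context, decryption must pass context=None or the tag check fails. A standalone sketch of that round trip using the same AESGCM primitive, with the KMS master key simulated locally (not the project's KMSManager):

import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

master_key = AESGCM.generate_key(bit_length=256)   # stands in for the KMS master key
data_key = AESGCM.generate_key(bit_length=256)     # per-object data key

# "Wrap" the data key with the master key, *without* an encryption context (AAD=None).
wrap_nonce = os.urandom(12)
encrypted_data_key = AESGCM(master_key).encrypt(wrap_nonce, data_key, None)

# Encrypt the object payload with the data key.
nonce = os.urandom(12)
ciphertext = AESGCM(data_key).encrypt(nonce, b"hello object", None)

# Unwrapping must use the same AAD the key was wrapped with -- here, None.
# Supplying a non-matching context raises InvalidTag, which is the failure
# mode the context=None change above avoids.
recovered_key = AESGCM(master_key).decrypt(wrap_nonce, encrypted_data_key, None)
assert AESGCM(recovered_key).decrypt(nonce, ciphertext, None) == b"hello object"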
@@ -784,8 +784,9 @@ def _apply_object_headers(
     metadata: Dict[str, str] | None,
     etag: str,
 ) -> None:
-    response.headers["Content-Length"] = str(file_stat.st_size)
-    response.headers["Last-Modified"] = http_date(file_stat.st_mtime)
+    if file_stat is not None:
+        response.headers["Content-Length"] = str(file_stat.st_size)
+        response.headers["Last-Modified"] = http_date(file_stat.st_mtime)
     response.headers["ETag"] = f'"{etag}"'
     response.headers["Accept-Ranges"] = "bytes"
     for key, value in (metadata or {}).items():
@@ -1779,19 +1780,48 @@ def object_handler(bucket_name: str, object_key: str):
     except StorageError as exc:
         return _error_response("NoSuchKey", str(exc), 404)
     metadata = storage.get_object_metadata(bucket_name, object_key)
-    stat = path.stat()
-    mimetype = mimetypes.guess_type(path.name)[0] or "application/octet-stream"
-    etag = storage._compute_etag(path)
-
+    mimetype = mimetypes.guess_type(object_key)[0] or "application/octet-stream"
+
+    # Check if object is encrypted and needs decryption
+    is_encrypted = "x-amz-server-side-encryption" in metadata
+
     if request.method == "GET":
-        response = Response(_stream_file(path), mimetype=mimetype, direct_passthrough=True)
-        logged_bytes = stat.st_size
+        if is_encrypted and hasattr(storage, 'get_object_data'):
+            # Use encrypted storage to decrypt
+            try:
+                data, clean_metadata = storage.get_object_data(bucket_name, object_key)
+                response = Response(data, mimetype=mimetype)
+                logged_bytes = len(data)
+                # Use decrypted size for Content-Length
+                response.headers["Content-Length"] = len(data)
+                etag = hashlib.md5(data).hexdigest()
+            except StorageError as exc:
+                return _error_response("InternalError", str(exc), 500)
+        else:
+            # Stream unencrypted file directly
+            stat = path.stat()
+            response = Response(_stream_file(path), mimetype=mimetype, direct_passthrough=True)
+            logged_bytes = stat.st_size
+            etag = storage._compute_etag(path)
     else:
-        response = Response(status=200)
+        # HEAD request
+        if is_encrypted and hasattr(storage, 'get_object_data'):
+            # For encrypted objects, we need to report decrypted size
+            try:
+                data, _ = storage.get_object_data(bucket_name, object_key)
+                response = Response(status=200)
+                response.headers["Content-Length"] = len(data)
+                etag = hashlib.md5(data).hexdigest()
+            except StorageError as exc:
+                return _error_response("InternalError", str(exc), 500)
+        else:
+            stat = path.stat()
+            response = Response(status=200)
+            etag = storage._compute_etag(path)
         response.headers["Content-Type"] = mimetype
         logged_bytes = 0

-    _apply_object_headers(response, file_stat=stat, metadata=metadata, etag=etag)
+    _apply_object_headers(response, file_stat=path.stat() if not is_encrypted else None, metadata=metadata, etag=etag)
     action = "Object read" if request.method == "GET" else "Object head"
     current_app.logger.info(action, extra={"bucket": bucket_name, "key": object_key, "bytes": logged_bytes})
     return response
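Review note on the handler change: for encrypted objects, both Content-Length and the ETag are now derived from the decrypted bytes rather than the ciphertext file on disk, and _apply_object_headers is told file_stat=None so it does not overwrite them. A hypothetical pytest-style smoke test of that behavior; the client fixture, the /<bucket>/<key> route shape, and the pre-stored encrypted object my-bucket/secret.txt are all assumptions, not part of this commit:

import hashlib

def test_get_encrypted_object_reports_plaintext_size(client):
    resp = client.get("/my-bucket/secret.txt")
    assert resp.status_code == 200
    # Content-Length and ETag come from the decrypted payload, matching the GET branch above.
    assert resp.headers["Content-Length"] == str(len(resp.data))
    assert resp.headers["ETag"] == f'"{hashlib.md5(resp.data).hexdigest()}"'

    head = client.head("/my-bucket/secret.txt")
    assert head.headers["Content-Length"] == resp.headers["Content-Length"]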
app/ui.py
@@ -686,9 +686,18 @@ def bulk_download_objects(bucket_name: str):
            # But strictly we should check. Let's check.
            _authorize_ui(principal, bucket_name, "read", object_key=key)

-            path = storage.get_object_path(bucket_name, key)
-            # Use the key as the filename in the zip
-            zf.write(path, arcname=key)
+            # Check if object is encrypted
+            metadata = storage.get_object_metadata(bucket_name, key)
+            is_encrypted = "x-amz-server-side-encryption" in metadata
+
+            if is_encrypted and hasattr(storage, 'get_object_data'):
+                # Decrypt and add to zip
+                data, _ = storage.get_object_data(bucket_name, key)
+                zf.writestr(key, data)
+            else:
+                # Add unencrypted file directly
+                path = storage.get_object_path(bucket_name, key)
+                zf.write(path, arcname=key)
        except (StorageError, IamError):
            # Skip files we can't read or don't exist
            continue
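Review note on the bulk-download change: the split between the two branches comes down to the zipfile API, where ZipFile.write copies a file from disk into the archive and ZipFile.writestr takes an in-memory bytes payload, which is all we have after decryption. A minimal standalone illustration:

import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
    # From disk: used for unencrypted objects, where we have a path.
    # zf.write("/tmp/example.bin", arcname="photos/example.bin")

    # From memory: used for decrypted objects, where we only have bytes.
    zf.writestr("docs/report.txt", b"decrypted contents")

with zipfile.ZipFile(io.BytesIO(buf.getvalue())) as zf:
    assert zf.read("docs/report.txt") == b"decrypted contents"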
@@ -730,13 +739,34 @@ def purge_object_versions(bucket_name: str, object_key: str):
 @ui_bp.get("/buckets/<bucket_name>/objects/<path:object_key>/preview")
 def object_preview(bucket_name: str, object_key: str) -> Response:
     principal = _current_principal()
+    storage = _storage()
     try:
         _authorize_ui(principal, bucket_name, "read", object_key=object_key)
-        path = _storage().get_object_path(bucket_name, object_key)
+        path = storage.get_object_path(bucket_name, object_key)
+        metadata = storage.get_object_metadata(bucket_name, object_key)
     except (StorageError, IamError) as exc:
         status = 403 if isinstance(exc, IamError) else 404
         return Response(str(exc), status=status)

     download = request.args.get("download") == "1"

+    # Check if object is encrypted and needs decryption
+    is_encrypted = "x-amz-server-side-encryption" in metadata
+    if is_encrypted and hasattr(storage, 'get_object_data'):
+        try:
+            data, _ = storage.get_object_data(bucket_name, object_key)
+            import io
+            import mimetypes
+            mimetype = mimetypes.guess_type(object_key)[0] or "application/octet-stream"
+            return send_file(
+                io.BytesIO(data),
+                mimetype=mimetype,
+                as_attachment=download,
+                download_name=path.name
+            )
+        except StorageError as exc:
+            return Response(f"Decryption failed: {exc}", status=500)
+
     return send_file(path, as_attachment=download, download_name=path.name)
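Review note on the preview change: when send_file is given a file object such as io.BytesIO instead of a filesystem path, Flask cannot infer the MIME type or a download name on its own, which is why the encrypted branch guesses the mimetype from the object key and passes download_name explicitly. A minimal illustration in a hypothetical view (the route, data, and object_key are stand-ins, not part of this commit):

import io
import mimetypes
from flask import Flask, send_file

app = Flask(__name__)

@app.get("/demo-preview")
def demo_preview():
    # Stand-in for decrypted object bytes; in the real route this comes from
    # storage.get_object_data(...).
    data = b"%PDF-1.4 ... fake document bytes ..."
    object_key = "reports/q3.pdf"
    mimetype = mimetypes.guess_type(object_key)[0] or "application/octet-stream"
    # With a BytesIO source, both the mimetype and download_name must be
    # supplied explicitly.
    return send_file(
        io.BytesIO(data),
        mimetype=mimetype,
        as_attachment=False,
        download_name=object_key.rsplit("/", 1)[-1],
    )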