Add new metrics function
This commit is contained in:
@@ -1,10 +1,17 @@
|
|||||||
"""Application-wide extension instances."""
|
"""Application-wide extension instances."""
|
||||||
|
from flask import g
|
||||||
from flask_limiter import Limiter
|
from flask_limiter import Limiter
|
||||||
from flask_limiter.util import get_remote_address
|
from flask_limiter.util import get_remote_address
|
||||||
from flask_wtf import CSRFProtect
|
from flask_wtf import CSRFProtect
|
||||||
|
|
||||||
|
def get_rate_limit_key():
|
||||||
|
"""Generate rate limit key based on authenticated user."""
|
||||||
|
if hasattr(g, 'principal') and g.principal:
|
||||||
|
return g.principal.access_key
|
||||||
|
return get_remote_address()
|
||||||
|
|
||||||
# Shared rate limiter instance; configured in app factory.
|
# Shared rate limiter instance; configured in app factory.
|
||||||
limiter = Limiter(key_func=get_remote_address)
|
limiter = Limiter(key_func=get_rate_limit_key)
|
||||||
|
|
||||||
# Global CSRF protection for UI routes.
|
# Global CSRF protection for UI routes.
|
||||||
csrf = CSRFProtect()
|
csrf = CSRFProtect()
|
||||||
|
|||||||
101
app/s3_api.py
101
app/s3_api.py
@@ -11,7 +11,7 @@ from typing import Any, Dict
|
|||||||
from urllib.parse import quote, urlencode, urlparse
|
from urllib.parse import quote, urlencode, urlparse
|
||||||
from xml.etree.ElementTree import Element, SubElement, tostring, fromstring, ParseError
|
from xml.etree.ElementTree import Element, SubElement, tostring, fromstring, ParseError
|
||||||
|
|
||||||
from flask import Blueprint, Response, current_app, jsonify, request
|
from flask import Blueprint, Response, current_app, jsonify, request, g
|
||||||
from werkzeug.http import http_date
|
from werkzeug.http import http_date
|
||||||
|
|
||||||
from .bucket_policies import BucketPolicyStore
|
from .bucket_policies import BucketPolicyStore
|
||||||
@@ -127,14 +127,33 @@ def _verify_sigv4_header(req: Any, auth_header: str) -> Principal | None:
|
|||||||
if not amz_date:
|
if not amz_date:
|
||||||
raise IamError("Missing Date header")
|
raise IamError("Missing Date header")
|
||||||
|
|
||||||
|
try:
|
||||||
|
request_time = datetime.strptime(amz_date, "%Y%m%dT%H%M%SZ").replace(tzinfo=timezone.utc)
|
||||||
|
except ValueError:
|
||||||
|
raise IamError("Invalid X-Amz-Date format")
|
||||||
|
|
||||||
|
now = datetime.now(timezone.utc)
|
||||||
|
time_diff = abs((now - request_time).total_seconds())
|
||||||
|
if time_diff > 900: # 15 minutes
|
||||||
|
raise IamError("Request timestamp too old or too far in the future")
|
||||||
|
|
||||||
|
required_headers = {'host', 'x-amz-date'}
|
||||||
|
signed_headers_set = set(signed_headers_str.split(';'))
|
||||||
|
if not required_headers.issubset(signed_headers_set):
|
||||||
|
# Some clients might sign 'date' instead of 'x-amz-date'
|
||||||
|
if 'date' in signed_headers_set:
|
||||||
|
required_headers.remove('x-amz-date')
|
||||||
|
required_headers.add('date')
|
||||||
|
|
||||||
|
if not required_headers.issubset(signed_headers_set):
|
||||||
|
raise IamError("Required headers not signed")
|
||||||
|
|
||||||
credential_scope = f"{date_stamp}/{region}/{service}/aws4_request"
|
credential_scope = f"{date_stamp}/{region}/{service}/aws4_request"
|
||||||
string_to_sign = f"AWS4-HMAC-SHA256\n{amz_date}\n{credential_scope}\n{hashlib.sha256(canonical_request.encode('utf-8')).hexdigest()}"
|
string_to_sign = f"AWS4-HMAC-SHA256\n{amz_date}\n{credential_scope}\n{hashlib.sha256(canonical_request.encode('utf-8')).hexdigest()}"
|
||||||
|
|
||||||
# Calculate Signature
|
|
||||||
signing_key = _get_signature_key(secret_key, date_stamp, region, service)
|
signing_key = _get_signature_key(secret_key, date_stamp, region, service)
|
||||||
calculated_signature = hmac.new(signing_key, string_to_sign.encode("utf-8"), hashlib.sha256).hexdigest()
|
calculated_signature = hmac.new(signing_key, string_to_sign.encode("utf-8"), hashlib.sha256).hexdigest()
|
||||||
|
|
||||||
if calculated_signature != signature:
|
if not hmac.compare_digest(calculated_signature, signature):
|
||||||
raise IamError("SignatureDoesNotMatch")
|
raise IamError("SignatureDoesNotMatch")
|
||||||
|
|
||||||
return _iam().get_principal(access_key)
|
return _iam().get_principal(access_key)
|
||||||
@@ -155,7 +174,6 @@ def _verify_sigv4_query(req: Any) -> Principal | None:
|
|||||||
except ValueError:
|
except ValueError:
|
||||||
raise IamError("Invalid Credential format")
|
raise IamError("Invalid Credential format")
|
||||||
|
|
||||||
# Check expiration
|
|
||||||
try:
|
try:
|
||||||
req_time = datetime.strptime(amz_date, "%Y%m%dT%H%M%SZ").replace(tzinfo=timezone.utc)
|
req_time = datetime.strptime(amz_date, "%Y%m%dT%H%M%SZ").replace(tzinfo=timezone.utc)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
@@ -190,7 +208,6 @@ def _verify_sigv4_query(req: Any) -> Principal | None:
|
|||||||
canonical_headers_parts = []
|
canonical_headers_parts = []
|
||||||
for header in signed_headers_list:
|
for header in signed_headers_list:
|
||||||
val = req.headers.get(header, "").strip()
|
val = req.headers.get(header, "").strip()
|
||||||
# Collapse multiple spaces
|
|
||||||
val = " ".join(val.split())
|
val = " ".join(val.split())
|
||||||
canonical_headers_parts.append(f"{header}:{val}\n")
|
canonical_headers_parts.append(f"{header}:{val}\n")
|
||||||
canonical_headers = "".join(canonical_headers_parts)
|
canonical_headers = "".join(canonical_headers_parts)
|
||||||
@@ -240,7 +257,6 @@ def _verify_sigv4(req: Any) -> Principal | None:
|
|||||||
|
|
||||||
|
|
||||||
def _require_principal():
|
def _require_principal():
|
||||||
# Try SigV4 first
|
|
||||||
if ("Authorization" in request.headers and request.headers["Authorization"].startswith("AWS4-HMAC-SHA256")) or \
|
if ("Authorization" in request.headers and request.headers["Authorization"].startswith("AWS4-HMAC-SHA256")) or \
|
||||||
(request.args.get("X-Amz-Algorithm") == "AWS4-HMAC-SHA256"):
|
(request.args.get("X-Amz-Algorithm") == "AWS4-HMAC-SHA256"):
|
||||||
try:
|
try:
|
||||||
@@ -1132,6 +1148,9 @@ def object_handler(bucket_name: str, object_key: str):
|
|||||||
return response
|
return response
|
||||||
|
|
||||||
if request.method in {"GET", "HEAD"}:
|
if request.method in {"GET", "HEAD"}:
|
||||||
|
if request.method == "GET" and "uploadId" in request.args:
|
||||||
|
return _list_parts(bucket_name, object_key)
|
||||||
|
|
||||||
_, error = _object_principal("read", bucket_name, object_key)
|
_, error = _object_principal("read", bucket_name, object_key)
|
||||||
if error:
|
if error:
|
||||||
return error
|
return error
|
||||||
@@ -1157,7 +1176,6 @@ def object_handler(bucket_name: str, object_key: str):
|
|||||||
current_app.logger.info(action, extra={"bucket": bucket_name, "key": object_key, "bytes": logged_bytes})
|
current_app.logger.info(action, extra={"bucket": bucket_name, "key": object_key, "bytes": logged_bytes})
|
||||||
return response
|
return response
|
||||||
|
|
||||||
# DELETE
|
|
||||||
if "uploadId" in request.args:
|
if "uploadId" in request.args:
|
||||||
return _abort_multipart_upload(bucket_name, object_key)
|
return _abort_multipart_upload(bucket_name, object_key)
|
||||||
|
|
||||||
@@ -1175,6 +1193,51 @@ def object_handler(bucket_name: str, object_key: str):
|
|||||||
return Response(status=204)
|
return Response(status=204)
|
||||||
|
|
||||||
|
|
||||||
|
def _list_parts(bucket_name: str, object_key: str) -> Response:
|
||||||
|
principal, error = _require_principal()
|
||||||
|
if error:
|
||||||
|
return error
|
||||||
|
try:
|
||||||
|
_authorize_action(principal, bucket_name, "read", object_key=object_key)
|
||||||
|
except IamError as exc:
|
||||||
|
return _error_response("AccessDenied", str(exc), 403)
|
||||||
|
|
||||||
|
upload_id = request.args.get("uploadId")
|
||||||
|
if not upload_id:
|
||||||
|
return _error_response("InvalidArgument", "uploadId is required", 400)
|
||||||
|
|
||||||
|
try:
|
||||||
|
parts = _storage().list_multipart_parts(bucket_name, upload_id)
|
||||||
|
except StorageError as exc:
|
||||||
|
return _error_response("NoSuchUpload", str(exc), 404)
|
||||||
|
|
||||||
|
root = Element("ListPartsResult")
|
||||||
|
SubElement(root, "Bucket").text = bucket_name
|
||||||
|
SubElement(root, "Key").text = object_key
|
||||||
|
SubElement(root, "UploadId").text = upload_id
|
||||||
|
|
||||||
|
initiator = SubElement(root, "Initiator")
|
||||||
|
SubElement(initiator, "ID").text = principal.access_key
|
||||||
|
SubElement(initiator, "DisplayName").text = principal.display_name
|
||||||
|
|
||||||
|
owner = SubElement(root, "Owner")
|
||||||
|
SubElement(owner, "ID").text = principal.access_key
|
||||||
|
SubElement(owner, "DisplayName").text = principal.display_name
|
||||||
|
|
||||||
|
SubElement(root, "StorageClass").text = "STANDARD"
|
||||||
|
SubElement(root, "PartNumberMarker").text = "0"
|
||||||
|
SubElement(root, "NextPartNumberMarker").text = str(parts[-1]["PartNumber"]) if parts else "0"
|
||||||
|
SubElement(root, "MaxParts").text = "1000"
|
||||||
|
SubElement(root, "IsTruncated").text = "false"
|
||||||
|
|
||||||
|
for part in parts:
|
||||||
|
p = SubElement(root, "Part")
|
||||||
|
SubElement(p, "PartNumber").text = str(part["PartNumber"])
|
||||||
|
SubElement(p, "LastModified").text = part["LastModified"].isoformat()
|
||||||
|
SubElement(p, "ETag").text = f'"{part["ETag"]}"'
|
||||||
|
SubElement(p, "Size").text = str(part["Size"])
|
||||||
|
|
||||||
|
return _xml_response(root)
|
||||||
|
|
||||||
|
|
||||||
@s3_api_bp.route("/bucket-policy/<bucket_name>", methods=["GET", "PUT", "DELETE"])
|
@s3_api_bp.route("/bucket-policy/<bucket_name>", methods=["GET", "PUT", "DELETE"])
|
||||||
@@ -1504,3 +1567,25 @@ def _abort_multipart_upload(bucket_name: str, object_key: str) -> Response:
|
|||||||
return _error_response("NoSuchBucket", str(exc), 404)
|
return _error_response("NoSuchBucket", str(exc), 404)
|
||||||
|
|
||||||
return Response(status=204)
|
return Response(status=204)
|
||||||
|
|
||||||
|
|
||||||
|
@s3_api_bp.before_request
|
||||||
|
def resolve_principal():
|
||||||
|
g.principal = None
|
||||||
|
# Try SigV4
|
||||||
|
try:
|
||||||
|
if ("Authorization" in request.headers and request.headers["Authorization"].startswith("AWS4-HMAC-SHA256")) or \
|
||||||
|
(request.args.get("X-Amz-Algorithm") == "AWS4-HMAC-SHA256"):
|
||||||
|
g.principal = _verify_sigv4(request)
|
||||||
|
return
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Try simple auth headers (internal/testing)
|
||||||
|
access_key = request.headers.get("X-Access-Key")
|
||||||
|
secret_key = request.headers.get("X-Secret-Key")
|
||||||
|
if access_key and secret_key:
|
||||||
|
try:
|
||||||
|
g.principal = _iam().authenticate(access_key, secret_key)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|||||||
@@ -120,10 +120,22 @@ class ObjectStorage:
|
|||||||
self._system_bucket_root(bucket_path.name).mkdir(parents=True, exist_ok=True)
|
self._system_bucket_root(bucket_path.name).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
def bucket_stats(self, bucket_name: str) -> dict[str, int]:
|
def bucket_stats(self, bucket_name: str) -> dict[str, int]:
|
||||||
"""Return object count and total size for the bucket without hashing files."""
|
"""Return object count and total size for the bucket (cached)."""
|
||||||
bucket_path = self._bucket_path(bucket_name)
|
bucket_path = self._bucket_path(bucket_name)
|
||||||
if not bucket_path.exists():
|
if not bucket_path.exists():
|
||||||
raise StorageError("Bucket does not exist")
|
raise StorageError("Bucket does not exist")
|
||||||
|
|
||||||
|
# Try to read from cache
|
||||||
|
cache_path = self._system_bucket_root(bucket_name) / "stats.json"
|
||||||
|
if cache_path.exists():
|
||||||
|
try:
|
||||||
|
# Check if cache is fresh (e.g., < 60 seconds old)
|
||||||
|
if time.time() - cache_path.stat().st_mtime < 60:
|
||||||
|
return json.loads(cache_path.read_text(encoding="utf-8"))
|
||||||
|
except (OSError, json.JSONDecodeError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Calculate fresh stats
|
||||||
object_count = 0
|
object_count = 0
|
||||||
total_bytes = 0
|
total_bytes = 0
|
||||||
for path in bucket_path.rglob("*"):
|
for path in bucket_path.rglob("*"):
|
||||||
@@ -134,7 +146,17 @@ class ObjectStorage:
|
|||||||
stat = path.stat()
|
stat = path.stat()
|
||||||
object_count += 1
|
object_count += 1
|
||||||
total_bytes += stat.st_size
|
total_bytes += stat.st_size
|
||||||
return {"objects": object_count, "bytes": total_bytes}
|
|
||||||
|
stats = {"objects": object_count, "bytes": total_bytes}
|
||||||
|
|
||||||
|
# Write to cache
|
||||||
|
try:
|
||||||
|
cache_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
cache_path.write_text(json.dumps(stats), encoding="utf-8")
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return stats
|
||||||
|
|
||||||
def delete_bucket(self, bucket_name: str) -> None:
|
def delete_bucket(self, bucket_name: str) -> None:
|
||||||
bucket_path = self._bucket_path(bucket_name)
|
bucket_path = self._bucket_path(bucket_name)
|
||||||
@@ -239,7 +261,6 @@ class ObjectStorage:
|
|||||||
rel = path.relative_to(bucket_path)
|
rel = path.relative_to(bucket_path)
|
||||||
self._safe_unlink(path)
|
self._safe_unlink(path)
|
||||||
self._delete_metadata(bucket_id, rel)
|
self._delete_metadata(bucket_id, rel)
|
||||||
# Clean up now empty parents inside the bucket.
|
|
||||||
for parent in path.parents:
|
for parent in path.parents:
|
||||||
if parent == bucket_path:
|
if parent == bucket_path:
|
||||||
break
|
break
|
||||||
@@ -592,6 +613,33 @@ class ObjectStorage:
|
|||||||
if legacy_root.exists():
|
if legacy_root.exists():
|
||||||
shutil.rmtree(legacy_root, ignore_errors=True)
|
shutil.rmtree(legacy_root, ignore_errors=True)
|
||||||
|
|
||||||
|
def list_multipart_parts(self, bucket_name: str, upload_id: str) -> List[Dict[str, Any]]:
|
||||||
|
"""List uploaded parts for a multipart upload."""
|
||||||
|
bucket_path = self._bucket_path(bucket_name)
|
||||||
|
manifest, upload_root = self._load_multipart_manifest(bucket_path.name, upload_id)
|
||||||
|
|
||||||
|
parts = []
|
||||||
|
parts_map = manifest.get("parts", {})
|
||||||
|
for part_num_str, record in parts_map.items():
|
||||||
|
part_num = int(part_num_str)
|
||||||
|
part_filename = record.get("filename")
|
||||||
|
if not part_filename:
|
||||||
|
continue
|
||||||
|
part_path = upload_root / part_filename
|
||||||
|
if not part_path.exists():
|
||||||
|
continue
|
||||||
|
|
||||||
|
stat = part_path.stat()
|
||||||
|
parts.append({
|
||||||
|
"PartNumber": part_num,
|
||||||
|
"Size": stat.st_size,
|
||||||
|
"ETag": record.get("etag"),
|
||||||
|
"LastModified": datetime.fromtimestamp(stat.st_mtime, timezone.utc)
|
||||||
|
})
|
||||||
|
|
||||||
|
parts.sort(key=lambda x: x["PartNumber"])
|
||||||
|
return parts
|
||||||
|
|
||||||
# ---------------------- internal helpers ----------------------
|
# ---------------------- internal helpers ----------------------
|
||||||
def _bucket_path(self, bucket_name: str) -> Path:
|
def _bucket_path(self, bucket_name: str) -> Path:
|
||||||
safe_name = self._sanitize_bucket_name(bucket_name)
|
safe_name = self._sanitize_bucket_name(bucket_name)
|
||||||
@@ -886,7 +934,11 @@ class ObjectStorage:
|
|||||||
normalized = unicodedata.normalize("NFC", object_key)
|
normalized = unicodedata.normalize("NFC", object_key)
|
||||||
if normalized != object_key:
|
if normalized != object_key:
|
||||||
raise StorageError("Object key must use normalized Unicode")
|
raise StorageError("Object key must use normalized Unicode")
|
||||||
|
|
||||||
candidate = Path(normalized)
|
candidate = Path(normalized)
|
||||||
|
if ".." in candidate.parts:
|
||||||
|
raise StorageError("Object key contains parent directory references")
|
||||||
|
|
||||||
if candidate.is_absolute():
|
if candidate.is_absolute():
|
||||||
raise StorageError("Absolute object keys are not allowed")
|
raise StorageError("Absolute object keys are not allowed")
|
||||||
if getattr(candidate, "drive", ""):
|
if getattr(candidate, "drive", ""):
|
||||||
|
|||||||
52
app/ui.py
52
app/ui.py
@@ -3,6 +3,8 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import json
|
import json
|
||||||
import uuid
|
import uuid
|
||||||
|
import psutil
|
||||||
|
import shutil
|
||||||
from typing import Any
|
from typing import Any
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
@@ -469,8 +471,6 @@ def complete_multipart_upload(bucket_name: str, upload_id: str):
|
|||||||
normalized.append({"part_number": number, "etag": etag})
|
normalized.append({"part_number": number, "etag": etag})
|
||||||
try:
|
try:
|
||||||
result = _storage().complete_multipart_upload(bucket_name, upload_id, normalized)
|
result = _storage().complete_multipart_upload(bucket_name, upload_id, normalized)
|
||||||
|
|
||||||
# Trigger replication
|
|
||||||
_replication().trigger_replication(bucket_name, result["key"])
|
_replication().trigger_replication(bucket_name, result["key"])
|
||||||
|
|
||||||
return jsonify(result)
|
return jsonify(result)
|
||||||
@@ -1209,6 +1209,54 @@ def connections_dashboard():
|
|||||||
return render_template("connections.html", connections=connections, principal=principal)
|
return render_template("connections.html", connections=connections, principal=principal)
|
||||||
|
|
||||||
|
|
||||||
|
@ui_bp.get("/metrics")
|
||||||
|
def metrics_dashboard():
|
||||||
|
principal = _current_principal()
|
||||||
|
|
||||||
|
cpu_percent = psutil.cpu_percent(interval=None)
|
||||||
|
memory = psutil.virtual_memory()
|
||||||
|
|
||||||
|
storage_root = current_app.config["STORAGE_ROOT"]
|
||||||
|
disk = psutil.disk_usage(storage_root)
|
||||||
|
|
||||||
|
storage = _storage()
|
||||||
|
buckets = storage.list_buckets()
|
||||||
|
total_buckets = len(buckets)
|
||||||
|
|
||||||
|
total_objects = 0
|
||||||
|
total_bytes_used = 0
|
||||||
|
|
||||||
|
# Note: Uses cached stats from storage layer to improve performance
|
||||||
|
for bucket in buckets:
|
||||||
|
stats = storage.bucket_stats(bucket.name)
|
||||||
|
total_objects += stats["objects"]
|
||||||
|
total_bytes_used += stats["bytes"]
|
||||||
|
|
||||||
|
return render_template(
|
||||||
|
"metrics.html",
|
||||||
|
principal=principal,
|
||||||
|
cpu_percent=cpu_percent,
|
||||||
|
memory={
|
||||||
|
"total": _format_bytes(memory.total),
|
||||||
|
"available": _format_bytes(memory.available),
|
||||||
|
"used": _format_bytes(memory.used),
|
||||||
|
"percent": memory.percent,
|
||||||
|
},
|
||||||
|
disk={
|
||||||
|
"total": _format_bytes(disk.total),
|
||||||
|
"free": _format_bytes(disk.free),
|
||||||
|
"used": _format_bytes(disk.used),
|
||||||
|
"percent": disk.percent,
|
||||||
|
},
|
||||||
|
app={
|
||||||
|
"buckets": total_buckets,
|
||||||
|
"objects": total_objects,
|
||||||
|
"storage_used": _format_bytes(total_bytes_used),
|
||||||
|
"storage_raw": total_bytes_used,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@ui_bp.app_errorhandler(404)
|
@ui_bp.app_errorhandler(404)
|
||||||
def ui_not_found(error): # type: ignore[override]
|
def ui_not_found(error): # type: ignore[override]
|
||||||
prefix = ui_bp.url_prefix or ""
|
prefix = ui_bp.url_prefix or ""
|
||||||
|
|||||||
@@ -6,3 +6,4 @@ pytest>=7.4
|
|||||||
requests>=2.31
|
requests>=2.31
|
||||||
boto3>=1.34
|
boto3>=1.34
|
||||||
waitress>=2.1.2
|
waitress>=2.1.2
|
||||||
|
psutil>=5.9.0
|
||||||
|
|||||||
@@ -63,6 +63,9 @@
|
|||||||
{% if not can_manage_iam %}<span class="badge ms-2 text-bg-warning">Restricted</span>{% endif %}
|
{% if not can_manage_iam %}<span class="badge ms-2 text-bg-warning">Restricted</span>{% endif %}
|
||||||
</a>
|
</a>
|
||||||
</li>
|
</li>
|
||||||
|
<li class="nav-item">
|
||||||
|
<a class="nav-link" href="{{ url_for('ui.metrics_dashboard') }}">Metrics</a>
|
||||||
|
</li>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% if principal %}
|
{% if principal %}
|
||||||
<li class="nav-item">
|
<li class="nav-item">
|
||||||
|
|||||||
173
templates/metrics.html
Normal file
173
templates/metrics.html
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
{% block content %}
|
||||||
|
<div class="d-flex justify-content-between align-items-center mb-4">
|
||||||
|
<div>
|
||||||
|
<h1 class="h3 mb-0 fw-semibold">System Metrics</h1>
|
||||||
|
<p class="text-muted mb-0">Real-time server performance and storage usage.</p>
|
||||||
|
</div>
|
||||||
|
<button class="btn btn-outline-secondary btn-sm" onclick="window.location.reload()">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-arrow-clockwise me-1" viewBox="0 0 16 16">
|
||||||
|
<path fill-rule="evenodd" d="M8 3a5 5 0 1 0 4.546 2.914.5.5 0 0 1 .908-.417A6 6 0 1 1 8 2v1z"/>
|
||||||
|
<path d="M8 4.466V.534a.25.25 0 0 1 .41-.192l2.36 1.966c.12.1.12.284 0 .384L8.41 4.658A.25.25 0 0 1 8 4.466z"/>
|
||||||
|
</svg>
|
||||||
|
Refresh
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row g-4 mb-4">
|
||||||
|
<div class="col-md-6 col-xl-3">
|
||||||
|
<div class="card shadow-sm h-100 border-0">
|
||||||
|
<div class="card-body">
|
||||||
|
<div class="d-flex align-items-center justify-content-between mb-3">
|
||||||
|
<h6 class="card-subtitle text-muted text-uppercase small fw-bold">CPU Usage</h6>
|
||||||
|
<div class="icon-box bg-primary-subtle text-primary rounded-circle p-2">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" fill="currentColor" class="bi bi-cpu" viewBox="0 0 16 16">
|
||||||
|
<path d="M5 0a.5.5 0 0 1 .5.5V2h1V.5a.5.5 0 0 1 1 0V2h1V.5a.5.5 0 0 1 1 0V2h1V.5a.5.5 0 0 1 1 0V2A2.5 2.5 0 0 1 14 4.5h1.5a.5.5 0 0 1 0 1H14v1h1.5a.5.5 0 0 1 0 1H14v1h1.5a.5.5 0 0 1 0 1H14v1h1.5a.5.5 0 0 1 0 1H14a2.5 2.5 0 0 1-2.5 2.5v1.5a.5.5 0 0 1-1 0V14h-1v1.5a.5.5 0 0 1-1 0V14h-1v1.5a.5.5 0 0 1-1 0V14h-1v1.5a.5.5 0 0 1-1 0V14A2.5 2.5 0 0 1 2 11.5H.5a.5.5 0 0 1 0-1H2v-1H.5a.5.5 0 0 1 0-1H2v-1H.5a.5.5 0 0 1 0-1H2v-1H.5a.5.5 0 0 1 0-1H2A2.5 2.5 0 0 1 4.5 2V.5a.5.5 0 0 1 .5-.5zm6 4H5v8h6V4zM5 13h6v-1h-1v1H5v-1H4v1zm0-12v1h1V1H5zm0 1h1v1H5V2zm0 1h1v1H5V3zm-1 1H3v1h1V4zm-1 1H2v1h1V5zm-1 1H1v1h1V6zm1 2H2v1h1V8zm1 1H3v1h1V9zm1 1H4v1h1v-1zm2 0h1v1h-1v-1zm2 0h1v1h-1v-1zm2-1h1v-1h-1v1zm0-2h1V6h-1v1zm0-2h1V4h-1v1z"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<h2 class="display-6 fw-bold mb-2">{{ cpu_percent }}%</h2>
|
||||||
|
<div class="progress" style="height: 6px;">
|
||||||
|
<div class="progress-bar {% if cpu_percent > 80 %}bg-danger{% elif cpu_percent > 50 %}bg-warning{% else %}bg-success{% endif %}" role="progressbar" style="width: {{ cpu_percent }}%" aria-valuenow="{{ cpu_percent }}" aria-valuemin="0" aria-valuemax="100"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-6 col-xl-3">
|
||||||
|
<div class="card shadow-sm h-100 border-0">
|
||||||
|
<div class="card-body">
|
||||||
|
<div class="d-flex align-items-center justify-content-between mb-3">
|
||||||
|
<h6 class="card-subtitle text-muted text-uppercase small fw-bold">Memory</h6>
|
||||||
|
<div class="icon-box bg-info-subtle text-info rounded-circle p-2">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" fill="currentColor" class="bi bi-memory" viewBox="0 0 16 16">
|
||||||
|
<path d="M1 3a1 1 0 0 0-1 1v8a1 1 0 0 0 1 1h4.586a1 1 0 0 0 .707-.293l.353-.353a.5.5 0 0 1 .708 0l.353.353a1 1 0 0 0 .707.293H15a1 1 0 0 0 1-1V4a1 1 0 0 0-1-1H1Zm.5 1h3a.5.5 0 0 1 .5.5v4a.5.5 0 0 1-.5.5h-3a.5.5 0 0 1-.5-.5v-4a.5.5 0 0 1 .5-.5Zm5 0h3a.5.5 0 0 1 .5.5v4a.5.5 0 0 1-.5.5h-3a.5.5 0 0 1-.5-.5v-4a.5.5 0 0 1 .5-.5Zm4.5.5a.5.5 0 0 1 .5-.5h3a.5.5 0 0 1 .5.5v4a.5.5 0 0 1-.5.5h-3a.5.5 0 0 1-.5-.5v-4ZM2 10v2H1v-2h1Zm2 0v2H3v-2h1Zm2 0v2H5v-2h1Zm3 0v2H8v-2h1Zm2 0v2h-1v-2h1Zm2 0v2h-1v-2h1Z"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<h2 class="display-6 fw-bold mb-2">{{ memory.percent }}%</h2>
|
||||||
|
<div class="progress" style="height: 6px;">
|
||||||
|
<div class="progress-bar bg-info" role="progressbar" style="width: {{ memory.percent }}%" aria-valuenow="{{ memory.percent }}" aria-valuemin="0" aria-valuemax="100"></div>
|
||||||
|
</div>
|
||||||
|
<div class="mt-2 small text-muted">
|
||||||
|
{{ memory.used }} used of {{ memory.total }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-6 col-xl-3">
|
||||||
|
<div class="card shadow-sm h-100 border-0">
|
||||||
|
<div class="card-body">
|
||||||
|
<div class="d-flex align-items-center justify-content-between mb-3">
|
||||||
|
<h6 class="card-subtitle text-muted text-uppercase small fw-bold">Server Disk</h6>
|
||||||
|
<div class="icon-box bg-warning-subtle text-warning rounded-circle p-2">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" fill="currentColor" class="bi bi-hdd" viewBox="0 0 16 16">
|
||||||
|
<path d="M4.5 11a.5.5 0 1 0 0-1 .5.5 0 0 0 0 1zM3 10.5a.5.5 0 1 1-1 0 .5.5 0 0 1 1 0z"/>
|
||||||
|
<path d="M16 11a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2V9.51c0-.418.105-.83.305-1.197l2.472-4.531A1.5 1.5 0 0 1 4.094 3h7.812a1.5 1.5 0 0 1 1.317.782l2.472 4.53c.2.368.305.78.305 1.198V11zM3.655 4.26 1.592 8.043C1.724 8.014 1.86 8 2 8h12c.14 0 .276.014.408.042L12.345 4.26a.5.5 0 0 0-.439-.26H4.094a.5.5 0 0 0-.439.26zM1 10v1a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-1a1 1 0 0 0-1-1H2a1 1 0 0 0-1 1z"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<h2 class="display-6 fw-bold mb-2">{{ disk.percent }}%</h2>
|
||||||
|
<div class="progress" style="height: 6px;">
|
||||||
|
<div class="progress-bar bg-warning" role="progressbar" style="width: {{ disk.percent }}%" aria-valuenow="{{ disk.percent }}" aria-valuemin="0" aria-valuemax="100"></div>
|
||||||
|
</div>
|
||||||
|
<div class="mt-2 small text-muted">
|
||||||
|
{{ disk.free }} free of {{ disk.total }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-6 col-xl-3">
|
||||||
|
<div class="card shadow-sm h-100 border-0">
|
||||||
|
<div class="card-body">
|
||||||
|
<div class="d-flex align-items-center justify-content-between mb-3">
|
||||||
|
<h6 class="card-subtitle text-muted text-uppercase small fw-bold">App Storage</h6>
|
||||||
|
<div class="icon-box bg-success-subtle text-success rounded-circle p-2">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" fill="currentColor" class="bi bi-database" viewBox="0 0 16 16">
|
||||||
|
<path d="M12.096 6.223A4.92 4.92 0 0 0 13 5.698V7c0 .289-.213.654-.753 1.007-1.724 1.124-5.803 1.124-7.528 0C4.213 7.654 4 7.289 4 7v-1.302c.343.512.773.927 1.218 1.177 1.724 1.124 5.803 1.124 7.528 0 .12-.078.233-.16.34-.246z"/>
|
||||||
|
<path d="M12.096 9.223A4.92 4.92 0 0 0 13 8.698V10c0 .289-.213.654-.753 1.007-1.724 1.124-5.803 1.124-7.528 0C4.213 10.654 4 10.289 4 10v-1.302c.343.512.773.927 1.218 1.177 1.724 1.124 5.803 1.124 7.528 0 .12-.078.233-.16.34-.246z"/>
|
||||||
|
<path d="M12.096 12.223A4.92 4.92 0 0 0 13 11.698V13c0 .289-.213.654-.753 1.007-1.724 1.124-5.803 1.124-7.528 0C4.213 13.654 4 13.289 4 13v-1.302c.343.512.773.927 1.218 1.177 1.724 1.124 5.803 1.124 7.528 0 .12-.078.233-.16.34-.246z"/>
|
||||||
|
<path d="M12.096 3.223A4.92 4.92 0 0 0 13 2.698V4c0 .289-.213.654-.753 1.007-1.724 1.124-5.803 1.124-7.528 0C4.213 4.654 4 4.289 4 4v-1.302c.343.512.773.927 1.218 1.177 1.724 1.124 5.803 1.124 7.528 0 .12-.078.233-.16.34-.246z"/>
|
||||||
|
<path d="M12.096.223A4.92 4.92 0 0 0 13-.302V1c0 .289-.213.654-.753 1.007-1.724 1.124-5.803 1.124-7.528 0C4.213 1.654 4 1.289 4 1v-1.302c.343.512.773.927 1.218 1.177 1.724 1.124 5.803 1.124 7.528 0 .12-.078.233-.16.34-.246z"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<h2 class="display-6 fw-bold mb-2">{{ app.storage_used }}</h2>
|
||||||
|
<div class="mt-2 small text-muted">
|
||||||
|
{{ app.objects }} objects in {{ app.buckets }} buckets
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="row g-4">
|
||||||
|
<div class="col-lg-8">
|
||||||
|
<div class="card shadow-sm border-0">
|
||||||
|
<div class="card-header bg-transparent border-0 pt-4 px-4">
|
||||||
|
<h5 class="card-title mb-0">Storage Distribution</h5>
|
||||||
|
</div>
|
||||||
|
<div class="card-body p-4">
|
||||||
|
<div class="table-responsive">
|
||||||
|
<table class="table table-hover align-middle">
|
||||||
|
<thead class="table-light">
|
||||||
|
<tr>
|
||||||
|
<th>Metric</th>
|
||||||
|
<th>Value</th>
|
||||||
|
<th>Status</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr>
|
||||||
|
<td>Total Server Capacity</td>
|
||||||
|
<td class="fw-medium">{{ disk.total }}</td>
|
||||||
|
<td><span class="badge bg-secondary">Hardware</span></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Available Space</td>
|
||||||
|
<td class="fw-medium">{{ disk.free }}</td>
|
||||||
|
<td>
|
||||||
|
{% if disk.percent > 90 %}
|
||||||
|
<span class="badge bg-danger">Critical</span>
|
||||||
|
{% elif disk.percent > 75 %}
|
||||||
|
<span class="badge bg-warning text-dark">Low</span>
|
||||||
|
{% else %}
|
||||||
|
<span class="badge bg-success">Good</span>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>MyFSIO Data</td>
|
||||||
|
<td class="fw-medium">{{ app.storage_used }}</td>
|
||||||
|
<td><span class="badge bg-primary">Application</span></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Object Count</td>
|
||||||
|
<td class="fw-medium">{{ app.objects }}</td>
|
||||||
|
<td><span class="badge bg-info">Count</span></td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-lg-4">
|
||||||
|
<div class="card shadow-sm border-0 bg-primary text-white h-100">
|
||||||
|
<div class="card-body p-4 d-flex flex-column justify-content-center text-center">
|
||||||
|
<div class="mb-3">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="48" height="48" fill="currentColor" class="bi bi-cloud-check" viewBox="0 0 16 16">
|
||||||
|
<path fill-rule="evenodd" d="M10.354 6.146a.5.5 0 0 1 0 .708l-3 3a.5.5 0 0 1-.708 0l-1.5-1.5a.5.5 0 1 1 .708-.708L7 8.793l2.646-2.647a.5.5 0 0 1 .708 0z"/>
|
||||||
|
<path d="M4.406 3.342A5.53 5.53 0 0 1 8 2c2.69 0 4.923 2 5.166 4.579C14.758 6.804 16 8.137 16 9.773 16 11.569 14.502 13 12.687 13H3.781C1.708 13 0 11.366 0 9.318c0-1.763 1.266-3.223 2.942-3.593.143-.863.698-1.723 1.464-2.383zm.653.757c-.757.653-1.153 1.44-1.153 2.056v.448l-.445.049C2.064 6.805 1 7.952 1 9.318 1 10.785 2.23 12 3.781 12h8.906C13.98 12 15 10.988 15 9.773c0-1.216-1.02-2.228-2.313-2.228h-.5v-.5C12.188 4.825 10.328 3 8 3a4.53 4.53 0 0 0-2.941 1.1z"/>
|
||||||
|
</svg>
|
||||||
|
</div>
|
||||||
|
<h4 class="card-title">System Health</h4>
|
||||||
|
<p class="card-text opacity-75">All systems are running smoothly. Storage capacity is within healthy limits.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
186
tests/test_security.py
Normal file
186
tests/test_security.py
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import pytest
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
def _sign(key, msg):
|
||||||
|
return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()
|
||||||
|
|
||||||
|
def _get_signature_key(key, date_stamp, region_name, service_name):
|
||||||
|
k_date = _sign(("AWS4" + key).encode("utf-8"), date_stamp)
|
||||||
|
k_region = _sign(k_date, region_name)
|
||||||
|
k_service = _sign(k_region, service_name)
|
||||||
|
k_signing = _sign(k_service, "aws4_request")
|
||||||
|
return k_signing
|
||||||
|
|
||||||
|
def create_signed_headers(
    method,
    path,
    headers=None,
    body=None,
    access_key="test",
    secret_key="secret",
    region="us-east-1",
    service="s3",
    timestamp=None
):
    """Build a header dict carrying a valid AWS SigV4 ``Authorization`` header.

    Mutates and returns *headers* (a fresh dict when None).  ``X-Amz-Date``
    and ``Host`` are injected before canonicalization; ``X-Amz-Content-Sha256``
    is added only afterwards, so it is deliberately excluded from the signed
    header set (matching the original helper's behavior).
    """
    headers = {} if headers is None else headers
    now = datetime.now(timezone.utc) if timestamp is None else timestamp

    amz_date = now.strftime("%Y%m%dT%H%M%SZ")
    date_stamp = now.strftime("%Y%m%d")

    headers["X-Amz-Date"] = amz_date
    headers["Host"] = "testserver"

    # Canonicalize exactly the headers present at this point, sorted by
    # lowercased name per the SigV4 specification.
    ordered = sorted(headers.items(), key=lambda kv: kv[0].lower())
    canonical_headers = "".join(f"{name.lower()}:{value.strip()}\n" for name, value in ordered)
    signed_headers = ";".join(name.lower() for name, _ in ordered)

    # Hash of the request payload (empty body hashes the empty byte string).
    payload_hash = hashlib.sha256(body or b"").hexdigest()
    headers["X-Amz-Content-Sha256"] = payload_hash

    canonical_request = "\n".join([
        method,
        quote(path, safe="/-_.~"),
        "",  # this helper never signs a query string
        canonical_headers,
        signed_headers,
        payload_hash,
    ])

    credential_scope = f"{date_stamp}/{region}/{service}/aws4_request"
    request_digest = hashlib.sha256(canonical_request.encode("utf-8")).hexdigest()
    string_to_sign = "\n".join([
        "AWS4-HMAC-SHA256",
        amz_date,
        credential_scope,
        request_digest,
    ])

    signing_key = _get_signature_key(secret_key, date_stamp, region, service)
    signature = hmac.new(signing_key, string_to_sign.encode("utf-8"), hashlib.sha256).hexdigest()

    headers["Authorization"] = (
        f"AWS4-HMAC-SHA256 Credential={access_key}/{credential_scope}, "
        f"SignedHeaders={signed_headers}, Signature={signature}"
    )
    return headers
|
||||||
|
|
||||||
|
def test_sigv4_old_date(client):
    """A request signed 20 minutes in the past is rejected as stale (403)."""
    stale = datetime.now(timezone.utc) - timedelta(minutes=20)
    response = client.get("/", headers=create_signed_headers("GET", "/", timestamp=stale))

    assert response.status_code == 403
    assert b"Request timestamp too old" in response.data
|
||||||
|
|
||||||
|
def test_sigv4_future_date(client):
    """A request signed 20 minutes ahead of now is also rejected (403)."""
    ahead = datetime.now(timezone.utc) + timedelta(minutes=20)
    response = client.get("/", headers=create_signed_headers("GET", "/", timestamp=ahead))

    assert response.status_code == 403
    # The server reuses the stale-timestamp message for future-dated requests.
    assert b"Request timestamp too old" in response.data
|
||||||
|
|
||||||
|
def test_path_traversal_in_key(client, signer):
    """Smoke-exercise '..' object keys through the HTTP client.

    Whether a traversal key even reaches the application depends on how the
    WSGI stack normalizes '..' path segments, so no reliable assertion can be
    made at this level.  The storage-layer guard is verified directly in
    test_storage_path_traversal; this test only ensures the requests are
    handled without an unhandled exception, then skips explicitly instead of
    silently passing.
    """
    headers = signer("PUT", "/test-bucket")
    client.put("/test-bucket", headers=headers)

    # Fire both traversal shapes: a leading '..' and an embedded 'folder/..'.
    for attack_path in ("/test-bucket/../secret.txt", "/test-bucket/folder/../file.txt"):
        headers = signer("PUT", attack_path, body=b"attack")
        client.put(attack_path, headers=headers, data=b"attack")

    pytest.skip(
        "client-level traversal outcome depends on WSGI path normalization; "
        "storage-layer check is covered by test_storage_path_traversal"
    )
|
||||||
|
|
||||||
|
def test_storage_path_traversal(app):
    """The storage layer must refuse object keys with parent-dir references."""
    from app.storage import StorageError

    storage = app.extensions["object_storage"]

    for bad_key in ("folder/../file.txt", ".."):
        with pytest.raises(StorageError, match="Object key contains parent directory references"):
            storage._sanitize_object_key(bad_key)
|
||||||
|
|
||||||
|
def test_head_bucket(client, signer):
    """HEAD on an existing bucket returns 200; on a missing bucket, 404."""
    client.put("/head-test", headers=signer("PUT", "/head-test"))

    found = client.head("/head-test", headers=signer("HEAD", "/head-test"))
    assert found.status_code == 200

    missing = client.head("/non-existent", headers=signer("HEAD", "/non-existent"))
    assert missing.status_code == 404
|
||||||
|
|
||||||
|
def test_head_object(client, signer):
    """HEAD on an object exposes ETag and Content-Length; missing key is 404."""
    client.put("/head-obj-test", headers=signer("PUT", "/head-obj-test"))
    client.put(
        "/head-obj-test/obj",
        headers=signer("PUT", "/head-obj-test/obj", body=b"content"),
        data=b"content",
    )

    found = client.head("/head-obj-test/obj", headers=signer("HEAD", "/head-obj-test/obj"))
    assert found.status_code == 200
    assert found.headers["ETag"]
    # b"content" is 7 bytes long.
    assert found.headers["Content-Length"] == "7"

    missing = client.head(
        "/head-obj-test/missing", headers=signer("HEAD", "/head-obj-test/missing")
    )
    assert missing.status_code == 404
|
||||||
|
|
||||||
|
def test_list_parts(client, signer):
    """ListParts returns every uploaded part of a multipart upload, in order."""
    from xml.etree.ElementTree import fromstring

    client.put("/multipart-test", headers=signer("PUT", "/multipart-test"))

    # Start a multipart upload and capture its id from the XML response.
    initiate = client.post(
        "/multipart-test/obj?uploads",
        headers=signer("POST", "/multipart-test/obj?uploads"),
    )
    assert initiate.status_code == 200
    upload_id = fromstring(initiate.data).find("UploadId").text

    # Upload two small parts under the same upload id.
    for number, payload in ((1, b"part1"), (2, b"part2")):
        part_url = f"/multipart-test/obj?partNumber={number}&uploadId={upload_id}"
        client.put(part_url, headers=signer("PUT", part_url, body=payload), data=payload)

    # Listing the upload must report both parts with their numbers in order.
    list_url = f"/multipart-test/obj?uploadId={upload_id}"
    listing = client.get(list_url, headers=signer("GET", list_url))
    assert listing.status_code == 200

    root = fromstring(listing.data)
    assert root.tag == "ListPartsResult"
    parts = root.findall("Part")
    assert len(parts) == 2
    assert [part.find("PartNumber").text for part in parts] == ["1", "2"]
|
||||||
Reference in New Issue
Block a user