diff --git a/app/ui.py b/app/ui.py
index 4baf5c3..3cc6c23 100644
--- a/app/ui.py
+++ b/app/ui.py
@@ -415,7 +415,7 @@ def list_bucket_objects(bucket_name: str):
    except IamError as exc:
        return jsonify({"error": str(exc)}), 403
-    max_keys = min(int(request.args.get("max_keys", 1000)), 10000)
+    max_keys = min(int(request.args.get("max_keys", 1000)), 100000)
    continuation_token = request.args.get("continuation_token") or None
    prefix = request.args.get("prefix") or None
@@ -434,6 +434,14 @@ def list_bucket_objects(bucket_name: str):
    except StorageError:
        versioning_enabled = False
+    # Pre-compute URL templates once (not per-object) for performance
+    # Frontend will construct actual URLs by replacing KEY_PLACEHOLDER
+    preview_template = url_for("ui.object_preview", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    delete_template = url_for("ui.delete_object", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    presign_template = url_for("ui.object_presign", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    versions_template = url_for("ui.object_versions", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    restore_template = url_for("ui.restore_object_version", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER", version_id="VERSION_ID_PLACEHOLDER")
+
    objects_data = []
    for obj in result.objects:
        objects_data.append({
@@ -442,13 +450,6 @@ def list_bucket_objects(bucket_name: str):
            "last_modified": obj.last_modified.isoformat(),
            "last_modified_display": obj.last_modified.strftime("%b %d, %Y %H:%M"),
            "etag": obj.etag,
-            "metadata": obj.metadata or {},
-            "preview_url": url_for("ui.object_preview", bucket_name=bucket_name, object_key=obj.key),
-            "download_url": url_for("ui.object_preview", bucket_name=bucket_name, object_key=obj.key) + "?download=1",
-            "presign_endpoint": url_for("ui.object_presign", bucket_name=bucket_name, object_key=obj.key),
-            "delete_endpoint": url_for("ui.delete_object", bucket_name=bucket_name, object_key=obj.key),
-            "versions_endpoint": url_for("ui.object_versions", bucket_name=bucket_name, object_key=obj.key),
-            "restore_template": url_for("ui.restore_object_version", bucket_name=bucket_name, object_key=obj.key, version_id="VERSION_ID_PLACEHOLDER"),
        })
    return jsonify({
@@ -457,6 +458,14 @@ def list_bucket_objects(bucket_name: str):
        "next_continuation_token": result.next_continuation_token,
        "total_count": result.total_count,
        "versioning_enabled": versioning_enabled,
+        "url_templates": {
+            "preview": preview_template,
+            "download": preview_template + "?download=1",
+            "presign": presign_template,
+            "delete": delete_template,
+            "versions": versions_template,
+            "restore": restore_template,
+        },
    })
@@ -1458,11 +1467,17 @@ def update_bucket_replication(bucket_name: str):
        else:
            flash("No replication configuration to pause", "warning")
    elif action == "resume":
+        from .replication import REPLICATION_MODE_ALL
        rule = _replication().get_rule(bucket_name)
        if rule:
            rule.enabled = True
            _replication().set_rule(rule)
-            flash("Replication resumed", "success")
+            # When resuming, sync any pending objects that accumulated while paused
+            if rule.mode == REPLICATION_MODE_ALL:
+                _replication().replicate_existing_objects(bucket_name)
+                flash("Replication resumed. Syncing pending objects in background.", "success")
+            else:
+                flash("Replication resumed", "success")
        else:
            flash("No replication configuration to resume", "warning")
    elif action == "create":
diff --git a/templates/bucket_detail.html b/templates/bucket_detail.html
index daa8f45..6db7a77 100644
--- a/templates/bucket_detail.html
+++ b/templates/bucket_detail.html
@@ -173,14 +173,16 @@
- Common operations using boto3.
-
-import boto3
+ Common operations using popular SDKs and tools.
+
+ Python (boto3)
+import boto3
+
+s3 = boto3.client(
+ 's3',
+ endpoint_url='{{ api_base }}',
+ aws_access_key_id='<access_key>',
+ aws_secret_access_key='<secret_key>'
+)
+
+# List buckets
+buckets = s3.list_buckets()['Buckets']
+
+# Create bucket
+s3.create_bucket(Bucket='mybucket')
+
+# Upload file
+s3.upload_file('local.txt', 'mybucket', 'remote.txt')
+
+# Download file
+s3.download_file('mybucket', 'remote.txt', 'downloaded.txt')
+
+# Generate presigned URL (valid 1 hour)
+url = s3.generate_presigned_url(
+ 'get_object',
+ Params={'Bucket': 'mybucket', 'Key': 'remote.txt'},
+ ExpiresIn=3600
+)
+
+ JavaScript (AWS SDK v3)
+import { S3Client, ListBucketsCommand, PutObjectCommand } from '@aws-sdk/client-s3';
+
+const s3 = new S3Client({
+ endpoint: '{{ api_base }}',
+ region: 'us-east-1',
+ credentials: {
+ accessKeyId: '<access_key>',
+ secretAccessKey: '<secret_key>'
+ },
+ forcePathStyle: true // Required for S3-compatible services
+});
+
+// List buckets
+const { Buckets } = await s3.send(new ListBucketsCommand({}));
+
+// Upload object
+await s3.send(new PutObjectCommand({
+ Bucket: 'mybucket',
+ Key: 'hello.txt',
+ Body: 'Hello, World!'
+}));
+
+ Multipart Upload (Python)
+import boto3
s3 = boto3.client('s3', endpoint_url='{{ api_base }}')
@@ -418,9 +470,9 @@ s3 = boto3.client('s3', endpoint_url='{{ api_base }}')
response = s3.create_multipart_upload(Bucket='mybucket', Key='large.bin')
upload_id = response['UploadId']
-# Upload parts
+# Upload parts (minimum 5MB each, except last part)
parts = []
-chunks = [b'chunk1', b'chunk2'] # Example data chunks
+chunks = [b'chunk1...', b'chunk2...']
for part_number, chunk in enumerate(chunks, start=1):
response = s3.upload_part(
Bucket='mybucket',
@@ -438,6 +490,19 @@ s3.complete_multipart_upload(
UploadId=upload_id,
MultipartUpload={'Parts': parts}
)
+
+ Presigned URLs for Sharing
+# Generate a download link valid for 15 minutes
+curl -X POST "{{ api_base }}/presign/mybucket/photo.jpg" \
+ -H "Content-Type: application/json" \
+ -H "X-Access-Key: <key>" -H "X-Secret-Key: <secret>" \
+ -d '{"method": "GET", "expires_in": 900}'
+
+# Generate an upload link (PUT) valid for 1 hour
+curl -X POST "{{ api_base }}/presign/mybucket/upload.bin" \
+ -H "Content-Type: application/json" \
+ -H "X-Access-Key: <key>" -H "X-Secret-Key: <secret>" \
+ -d '{"method": "PUT", "expires_in": 3600}'
@@ -487,6 +552,86 @@ s3.complete_multipart_upload(
+
+
+
+ 09
+ Object Versioning
+
+ Keep multiple versions of objects to protect against accidental deletions and overwrites. Restore previous versions at any time.
+
+ Enabling Versioning
+
+ - Navigate to your bucket's Properties tab.
+ - Find the Versioning card and click Enable.
+ - All subsequent uploads will create new versions instead of overwriting.
+
+
+ Version Operations
+
+
+
+
+ Operation
+ Description
+
+
+
+
+ View Versions
+ Click the version icon on any object to see all historical versions with timestamps and sizes.
+
+
+ Restore Version
+ Click Restore on any version to make it the current version (creates a copy).
+
+
+ Delete Current
+ Deleting an object archives it. Previous versions remain accessible.
+
+
+ Purge All
+ Permanently delete an object and all its versions. This cannot be undone.
+
+
+
+
+
+ Archived Objects
+ When you delete a versioned object, it becomes "archived" - the current version is removed but historical versions remain. The Archived tab shows these objects so you can restore them.
+
+ API Usage
+# Enable versioning
+curl -X PUT "{{ api_base }}/<bucket>?versioning" \
+ -H "Content-Type: application/json" \
+ -H "X-Access-Key: <key>" -H "X-Secret-Key: <secret>" \
+ -d '{"Status": "Enabled"}'
+
+# Get versioning status
+curl "{{ api_base }}/<bucket>?versioning" \
+ -H "X-Access-Key: <key>" -H "X-Secret-Key: <secret>"
+
+# List object versions
+curl "{{ api_base }}/<bucket>?versions" \
+ -H "X-Access-Key: <key>" -H "X-Secret-Key: <secret>"
+
+# Get specific version
+curl "{{ api_base }}/<bucket>/<key>?versionId=<version-id>" \
+ -H "X-Access-Key: <key>" -H "X-Secret-Key: <secret>"
+
+
+
+
+
+
+
+
+ Storage Impact: Each version consumes storage. Enable quotas to limit total bucket size including all versions.
+
+
+
+
+
@@ -709,6 +854,7 @@ curl -X DELETE "{{ api_base }}/kms/keys/{key-id}?waiting_period_days=30" \
REST endpoints
API Examples
Site Replication
+ Object Versioning
Bucket Quotas
Encryption
Troubleshooting
diff --git a/tests/test_ui_pagination.py b/tests/test_ui_pagination.py
index 5e42d60..683f527 100644
--- a/tests/test_ui_pagination.py
+++ b/tests/test_ui_pagination.py
@@ -150,16 +150,21 @@ class TestPaginatedObjectListing:
assert len(data["objects"]) == 1
obj = data["objects"][0]
-
+
# Check all expected fields
assert obj["key"] == "test.txt"
assert obj["size"] == 12 # len("test content")
assert "last_modified" in obj
assert "last_modified_display" in obj
assert "etag" in obj
- assert "preview_url" in obj
- assert "download_url" in obj
- assert "delete_endpoint" in obj
+
+ # URLs are now returned as templates (not per-object) for performance
+ assert "url_templates" in data
+ templates = data["url_templates"]
+ assert "preview" in templates
+ assert "download" in templates
+ assert "delete" in templates
+ assert "KEY_PLACEHOLDER" in templates["preview"]
def test_bucket_detail_page_loads_without_objects(self, tmp_path):
"""Bucket detail page should load even with many objects."""