Optimize object listing for 100K+ objects with streaming and compression

2026-01-12 14:25:07 +08:00
parent 0d1fe05fd0
commit 546d51af9a
8 changed files with 438 additions and 29 deletions

View File

@@ -16,6 +16,7 @@ from flask_wtf.csrf import CSRFError
 from werkzeug.middleware.proxy_fix import ProxyFix
 from .access_logging import AccessLoggingService
+from .compression import GzipMiddleware
 from .acl import AclService
 from .bucket_policies import BucketPolicyStore
 from .config import AppConfig
@@ -89,13 +90,24 @@ def create_app(
     # Trust X-Forwarded-* headers from proxies
     app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_prefix=1)
+    # Enable gzip compression for responses (10-20x smaller JSON payloads)
+    if app.config.get("ENABLE_GZIP", True):
+        app.wsgi_app = GzipMiddleware(app.wsgi_app, compression_level=6)
     _configure_cors(app)
     _configure_logging(app)
     limiter.init_app(app)
     csrf.init_app(app)
-    storage = ObjectStorage(Path(app.config["STORAGE_ROOT"]))
+    storage = ObjectStorage(
+        Path(app.config["STORAGE_ROOT"]),
+        cache_ttl=app.config.get("OBJECT_CACHE_TTL", 5),
+    )
+    if app.config.get("WARM_CACHE_ON_STARTUP", True) and not app.config.get("TESTING"):
+        storage.warm_cache_async()
     iam = IamService(
         Path(app.config["IAM_CONFIG"]),
         auth_max_attempts=app.config.get("AUTH_MAX_ATTEMPTS", 5),

app/compression.py Normal file
View File

@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import gzip
+import io
+from typing import Callable, Iterable, List, Optional, Tuple
+
+COMPRESSIBLE_MIMES = frozenset([
+    'application/json',
+    'application/javascript',
+    'application/xml',
+    'text/html',
+    'text/css',
+    'text/plain',
+    'text/xml',
+    'text/javascript',
+    'application/x-ndjson',
+])
+
+MIN_SIZE_FOR_COMPRESSION = 500
+
+
+class GzipMiddleware:
+    """WSGI middleware that gzip-compresses compressible responses."""
+
+    def __init__(self, app: Callable, compression_level: int = 6,
+                 min_size: int = MIN_SIZE_FOR_COMPRESSION):
+        self.app = app
+        self.compression_level = compression_level
+        self.min_size = min_size
+
+    def __call__(self, environ: dict, start_response: Callable) -> Iterable[bytes]:
+        accept_encoding = environ.get('HTTP_ACCEPT_ENCODING', '')
+        if 'gzip' not in accept_encoding.lower():
+            return self.app(environ, start_response)
+
+        response_started = False
+        status_line: Optional[str] = None
+        response_headers: List[Tuple[str, str]] = []
+        should_compress = False
+        exc_info_holder = [None]
+        buffered_writes: List[bytes] = []
+
+        def custom_start_response(status: str, headers: List[Tuple[str, str]], exc_info=None):
+            # Capture the response instead of starting it: the real
+            # start_response is deferred until the body is buffered, because
+            # compression rewrites Content-Length and Content-Encoding.
+            nonlocal response_started, status_line, response_headers, should_compress
+            response_started = True
+            status_line = status
+            response_headers = list(headers)
+            exc_info_holder[0] = exc_info
+            content_type = None
+            content_length = None
+            already_encoded = False
+            for name, value in headers:
+                name_lower = name.lower()
+                if name_lower == 'content-type':
+                    content_type = value.split(';')[0].strip().lower()
+                elif name_lower == 'content-length':
+                    content_length = int(value)
+                elif name_lower == 'content-encoding':
+                    already_encoded = True
+            if not already_encoded and content_type in COMPRESSIBLE_MIMES:
+                if content_length is None or content_length >= self.min_size:
+                    should_compress = True
+            # WSGI requires start_response to return a write() callable.
+            return buffered_writes.append
+
+        body = b''.join(self.app(environ, custom_start_response))
+        response_body = b''.join(buffered_writes) + body
+        if not response_started:
+            return [response_body]
+
+        if should_compress and len(response_body) >= self.min_size:
+            buf = io.BytesIO()
+            with gzip.GzipFile(fileobj=buf, mode='wb',
+                               compresslevel=self.compression_level) as gz:
+                gz.write(response_body)
+            compressed = buf.getvalue()
+            if len(compressed) < len(response_body):
+                response_body = compressed
+                response_headers = [
+                    (name, value) for name, value in response_headers
+                    if name.lower() not in ('content-length', 'content-encoding')
+                ]
+                response_headers.append(('Content-Encoding', 'gzip'))
+                response_headers.append(('Content-Length', str(len(response_body))))
+                response_headers.append(('Vary', 'Accept-Encoding'))
+
+        start_response(status_line, response_headers, exc_info_holder[0])
+        return [response_body]
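
A minimal sketch of exercising the middleware without a server, by invoking it with a hand-built WSGI environ. The fake app, environ, and assertions below are illustrative only; the import path follows this commit's layout.

    import gzip
    from app.compression import GzipMiddleware

    def json_app(environ, start_response):
        body = b'{"objects": [' + b'{"key": "a"}, ' * 200 + b'{"key": "z"}]}'
        start_response('200 OK', [('Content-Type', 'application/json'),
                                  ('Content-Length', str(len(body)))])
        return [body]

    captured = {}
    def fake_start_response(status, headers, exc_info=None):
        captured['status'] = status
        captured['headers'] = dict(headers)
        return lambda data: None

    app = GzipMiddleware(json_app)
    body = b''.join(app({'HTTP_ACCEPT_ENCODING': 'gzip'}, fake_start_response))
    assert captured['headers'].get('Content-Encoding') == 'gzip'
    assert gzip.decompress(body).startswith(b'{"objects"')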

View File

@@ -67,6 +67,7 @@ class AppConfig:
     stream_chunk_size: int
     multipart_min_part_size: int
     bucket_stats_cache_ttl: int
+    object_cache_ttl: int
     encryption_enabled: bool
     encryption_master_key_path: Path
     kms_enabled: bool
@@ -161,8 +162,9 @@ class AppConfig:
         cors_allow_headers = _csv(str(_get("CORS_ALLOW_HEADERS", "*")), ["*"])
         cors_expose_headers = _csv(str(_get("CORS_EXPOSE_HEADERS", "*")), ["*"])
         session_lifetime_days = int(_get("SESSION_LIFETIME_DAYS", 30))
         bucket_stats_cache_ttl = int(_get("BUCKET_STATS_CACHE_TTL", 60))
+        object_cache_ttl = int(_get("OBJECT_CACHE_TTL", 5))
         encryption_enabled = str(_get("ENCRYPTION_ENABLED", "0")).lower() in {"1", "true", "yes", "on"}
         encryption_keys_dir = storage_root / ".myfsio.sys" / "keys"
         encryption_master_key_path = Path(_get("ENCRYPTION_MASTER_KEY_PATH", encryption_keys_dir / "master.key")).resolve()
@@ -200,6 +202,7 @@ class AppConfig:
             stream_chunk_size=stream_chunk_size,
             multipart_min_part_size=multipart_min_part_size,
             bucket_stats_cache_ttl=bucket_stats_cache_ttl,
+            object_cache_ttl=object_cache_ttl,
             encryption_enabled=encryption_enabled,
             encryption_master_key_path=encryption_master_key_path,
             kms_enabled=kms_enabled,
@@ -315,6 +318,7 @@ class AppConfig:
"STREAM_CHUNK_SIZE": self.stream_chunk_size, "STREAM_CHUNK_SIZE": self.stream_chunk_size,
"MULTIPART_MIN_PART_SIZE": self.multipart_min_part_size, "MULTIPART_MIN_PART_SIZE": self.multipart_min_part_size,
"BUCKET_STATS_CACHE_TTL": self.bucket_stats_cache_ttl, "BUCKET_STATS_CACHE_TTL": self.bucket_stats_cache_ttl,
"OBJECT_CACHE_TTL": self.object_cache_ttl,
"LOG_LEVEL": self.log_level, "LOG_LEVEL": self.log_level,
"LOG_TO_FILE": self.log_to_file, "LOG_TO_FILE": self.log_to_file,
"LOG_FILE": str(self.log_path), "LOG_FILE": str(self.log_path),

View File

@@ -137,10 +137,10 @@ class ObjectStorage:
     BUCKET_VERSIONS_DIR = "versions"
     MULTIPART_MANIFEST = "manifest.json"
     BUCKET_CONFIG_FILE = ".bucket.json"
-    KEY_INDEX_CACHE_TTL = 30
+    DEFAULT_CACHE_TTL = 5
     OBJECT_CACHE_MAX_SIZE = 100
 
-    def __init__(self, root: Path) -> None:
+    def __init__(self, root: Path, cache_ttl: int = DEFAULT_CACHE_TTL) -> None:
         self.root = Path(root)
         self.root.mkdir(parents=True, exist_ok=True)
         self._ensure_system_roots()
@@ -150,6 +150,7 @@ class ObjectStorage:
         self._cache_version: Dict[str, int] = {}
         self._bucket_config_cache: Dict[str, tuple[dict[str, Any], float]] = {}
         self._bucket_config_cache_ttl = 30.0
+        self._cache_ttl = cache_ttl
 
     def _get_bucket_lock(self, bucket_id: str) -> threading.Lock:
         """Get or create a lock for a specific bucket. Reduces global lock contention."""
@@ -1398,7 +1399,7 @@ class ObjectStorage:
         cached = self._object_cache.get(bucket_id)
         if cached:
             objects, timestamp = cached
-            if now - timestamp < self.KEY_INDEX_CACHE_TTL:
+            if now - timestamp < self._cache_ttl:
                 self._object_cache.move_to_end(bucket_id)
                 return objects
         cache_version = self._cache_version.get(bucket_id, 0)
@@ -1409,7 +1410,7 @@ class ObjectStorage:
         cached = self._object_cache.get(bucket_id)
         if cached:
             objects, timestamp = cached
-            if now - timestamp < self.KEY_INDEX_CACHE_TTL:
+            if now - timestamp < self._cache_ttl:
                 self._object_cache.move_to_end(bucket_id)
                 return objects
         objects = self._build_object_cache(bucket_path)
@@ -1455,6 +1456,36 @@ class ObjectStorage:
             else:
                 objects[key] = meta
 
+    def warm_cache(self, bucket_names: Optional[List[str]] = None) -> None:
+        """Pre-warm the object cache for specified buckets or all buckets.
+
+        This is called on startup to ensure the first request is fast.
+        """
+        if bucket_names is None:
+            bucket_names = [b.name for b in self.list_buckets()]
+        for bucket_name in bucket_names:
+            try:
+                bucket_path = self._bucket_path(bucket_name)
+                if bucket_path.exists():
+                    self._get_object_cache(bucket_path.name, bucket_path)
+            except Exception:
+                # Warming is best-effort; a bad bucket must not block startup.
+                pass
+
+    def warm_cache_async(self, bucket_names: Optional[List[str]] = None) -> threading.Thread:
+        """Start cache warming in a background thread.
+
+        Returns the thread object so the caller can optionally wait for it.
+        """
+        thread = threading.Thread(
+            target=self.warm_cache,
+            args=(bucket_names,),
+            daemon=True,
+            name="cache-warmer",
+        )
+        thread.start()
+        return thread
+
     def _ensure_system_roots(self) -> None:
         for path in (
             self._system_root_path(),
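
Because warm_cache_async hands back the Thread, a deploy script or smoke test can block until the cache is hot. A short sketch; the app.storage import path and the data directory are assumptions for illustration:

    from pathlib import Path

    from app.storage import ObjectStorage  # import path assumed

    storage = ObjectStorage(Path("/srv/myfsio/data"), cache_ttl=10)
    warmer = storage.warm_cache_async()  # returns the daemon thread immediately
    warmer.join(timeout=30)              # optionally wait for warm-up to finish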

View File

@@ -399,6 +399,7 @@ def bucket_detail(bucket_name: str):
         pass
 
     objects_api_url = url_for("ui.list_bucket_objects", bucket_name=bucket_name)
+    objects_stream_url = url_for("ui.stream_bucket_objects", bucket_name=bucket_name)
     lifecycle_url = url_for("ui.bucket_lifecycle", bucket_name=bucket_name)
     cors_url = url_for("ui.bucket_cors", bucket_name=bucket_name)
@@ -410,6 +411,7 @@ def bucket_detail(bucket_name: str):
"bucket_detail.html", "bucket_detail.html",
bucket_name=bucket_name, bucket_name=bucket_name,
objects_api_url=objects_api_url, objects_api_url=objects_api_url,
objects_stream_url=objects_stream_url,
lifecycle_url=lifecycle_url, lifecycle_url=lifecycle_url,
cors_url=cors_url, cors_url=cors_url,
acl_url=acl_url, acl_url=acl_url,
@@ -506,6 +508,100 @@ def list_bucket_objects(bucket_name: str):
     })
 
+
+@ui_bp.get("/buckets/<bucket_name>/objects/stream")
+def stream_bucket_objects(bucket_name: str):
+    """Streaming NDJSON endpoint for progressive object listing.
+
+    Streams objects as newline-delimited JSON for fast progressive rendering.
+    The first line is metadata; subsequent lines are objects.
+    """
+    principal = _current_principal()
+    storage = _storage()
+    try:
+        _authorize_ui(principal, bucket_name, "list")
+    except IamError as exc:
+        return jsonify({"error": str(exc)}), 403
+    prefix = request.args.get("prefix") or None
+    try:
+        versioning_enabled = storage.is_versioning_enabled(bucket_name)
+    except StorageError:
+        versioning_enabled = False
+    preview_template = url_for("ui.object_preview", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    delete_template = url_for("ui.delete_object", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    presign_template = url_for("ui.object_presign", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    versions_template = url_for("ui.object_versions", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    restore_template = url_for("ui.restore_object_version", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER", version_id="VERSION_ID_PLACEHOLDER")
+    tags_template = url_for("ui.object_tags", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    copy_template = url_for("ui.copy_object", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+    move_template = url_for("ui.move_object", bucket_name=bucket_name, object_key="KEY_PLACEHOLDER")
+
+    def generate():
+        meta_line = json.dumps({
+            "type": "meta",
+            "versioning_enabled": versioning_enabled,
+            "url_templates": {
+                "preview": preview_template,
+                "download": preview_template + "?download=1",
+                "presign": presign_template,
+                "delete": delete_template,
+                "versions": versions_template,
+                "restore": restore_template,
+                "tags": tags_template,
+                "copy": copy_template,
+                "move": move_template,
+            },
+        }) + "\n"
+        yield meta_line
+        continuation_token = None
+        total_count = None
+        batch_size = 5000
+        while True:
+            try:
+                result = storage.list_objects(
+                    bucket_name,
+                    max_keys=batch_size,
+                    continuation_token=continuation_token,
+                    prefix=prefix,
+                )
+            except StorageError as exc:
+                yield json.dumps({"type": "error", "error": str(exc)}) + "\n"
+                return
+            if total_count is None:
+                total_count = result.total_count
+                yield json.dumps({"type": "count", "total_count": total_count}) + "\n"
+            for obj in result.objects:
+                yield json.dumps({
+                    "type": "object",
+                    "key": obj.key,
+                    "size": obj.size,
+                    "last_modified": obj.last_modified.isoformat(),
+                    "last_modified_display": obj.last_modified.strftime("%b %d, %Y %H:%M"),
+                    "etag": obj.etag,
+                }) + "\n"
+            if not result.is_truncated:
+                break
+            continuation_token = result.next_continuation_token
+        yield json.dumps({"type": "done"}) + "\n"
+
+    return Response(
+        generate(),
+        mimetype="application/x-ndjson",
+        headers={
+            "Cache-Control": "no-cache",
+            "X-Accel-Buffering": "no",  # disable nginx buffering so lines flush promptly
+        },
+    )
+
+
 @ui_bp.post("/buckets/<bucket_name>/upload")
 @limiter.limit("30 per minute")
 def upload_object(bucket_name: str):
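
The stream is plain NDJSON, so it is just as easy to consume outside the browser. A sketch of a line-oriented client using requests; the host, port, and missing session auth are hypothetical, and requests gunzips transparently when the gzip middleware is active:

    import json
    import requests

    with requests.get(
        "http://localhost:5000/ui/buckets/my-bucket/objects/stream",
        stream=True,
    ) as resp:
        resp.raise_for_status()
        for raw in resp.iter_lines():
            if not raw:
                continue
            msg = json.loads(raw)
            if msg["type"] == "object":
                print(msg["key"], msg["size"])
            elif msg["type"] == "error":
                raise RuntimeError(msg["error"])
            elif msg["type"] == "done":
                break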

View File

@@ -80,6 +80,7 @@
     const objectsContainer = document.querySelector('.objects-table-container[data-bucket]');
     const bulkDeleteEndpoint = objectsContainer?.dataset.bulkDeleteEndpoint || '';
     const objectsApiUrl = objectsContainer?.dataset.objectsApi || '';
+    const objectsStreamUrl = objectsContainer?.dataset.objectsStream || '';
     const versionPanel = document.getElementById('version-panel');
     const versionList = document.getElementById('version-list');
     const refreshVersionsButton = document.getElementById('refreshVersionsButton');
@@ -112,6 +113,12 @@
     let currentPrefix = '';
     let allObjects = [];
     let urlTemplates = null;
+    let streamAbortController = null;
+    let useStreaming = !!objectsStreamUrl;
+    let streamingComplete = false;
+    const STREAM_RENDER_BATCH = 500;
+    let pendingStreamObjects = [];
+    let streamRenderScheduled = false;
 
     const buildUrlFromTemplate = (template, key) => {
         if (!template) return '';
@@ -411,7 +418,167 @@
         }
     };
 
-    const loadObjects = async (append = false) => {
+    const processStreamObject = (obj) => {
+        const key = obj.key;
+        return {
+            key: key,
+            size: obj.size,
+            lastModified: obj.last_modified,
+            lastModifiedDisplay: obj.last_modified_display,
+            etag: obj.etag,
+            previewUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.preview, key) : '',
+            downloadUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.download, key) : '',
+            presignEndpoint: urlTemplates ? buildUrlFromTemplate(urlTemplates.presign, key) : '',
+            deleteEndpoint: urlTemplates ? buildUrlFromTemplate(urlTemplates.delete, key) : '',
+            metadata: '{}',
+            versionsEndpoint: urlTemplates ? buildUrlFromTemplate(urlTemplates.versions, key) : '',
+            restoreTemplate: urlTemplates ? urlTemplates.restore.replace('KEY_PLACEHOLDER', encodeURIComponent(key).replace(/%2F/g, '/')) : '',
+            tagsUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.tags, key) : '',
+            copyUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.copy, key) : '',
+            moveUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.move, key) : ''
+        };
+    };
+
+    const flushPendingStreamObjects = () => {
+        if (pendingStreamObjects.length === 0) return;
+        const batch = pendingStreamObjects.splice(0, pendingStreamObjects.length);
+        batch.forEach(obj => {
+            loadedObjectCount++;
+            allObjects.push(obj);
+        });
+        updateObjectCountBadge();
+        if (loadMoreStatus) {
+            if (streamingComplete) {
+                loadMoreStatus.textContent = `${loadedObjectCount.toLocaleString()} objects`;
+            } else {
+                const countText = totalObjectCount > 0 ? ` of ${totalObjectCount.toLocaleString()}` : '';
+                loadMoreStatus.textContent = `${loadedObjectCount.toLocaleString()}${countText} loading...`;
+            }
+        }
+        refreshVirtualList();
+        streamRenderScheduled = false;
+    };
+
+    const scheduleStreamRender = () => {
+        if (streamRenderScheduled) return;
+        streamRenderScheduled = true;
+        requestAnimationFrame(flushPendingStreamObjects);
+    };
+
+    const loadObjectsStreaming = async () => {
+        if (isLoadingObjects) return;
+        isLoadingObjects = true;
+        streamingComplete = false;
+        if (objectsLoadingRow) objectsLoadingRow.style.display = '';
+        nextContinuationToken = null;
+        loadedObjectCount = 0;
+        totalObjectCount = 0;
+        allObjects = [];
+        pendingStreamObjects = [];
+        streamAbortController = new AbortController();
+        try {
+            const params = new URLSearchParams();
+            if (currentPrefix) params.set('prefix', currentPrefix);
+            const response = await fetch(`${objectsStreamUrl}?${params}`, {
+                signal: streamAbortController.signal
+            });
+            if (!response.ok) {
+                throw new Error(`HTTP ${response.status}`);
+            }
+            if (objectsLoadingRow) objectsLoadingRow.remove();
+            const reader = response.body.getReader();
+            const decoder = new TextDecoder();
+            let buffer = '';
+            while (true) {
+                const { done, value } = await reader.read();
+                if (done) break;
+                buffer += decoder.decode(value, { stream: true });
+                const lines = buffer.split('\n');
+                buffer = lines.pop() || '';
+                for (const line of lines) {
+                    if (!line.trim()) continue;
+                    let msg;
+                    try {
+                        msg = JSON.parse(line);
+                    } catch (parseErr) {
+                        // Only swallow parse failures; server-sent errors below
+                        // must still abort the stream.
+                        console.warn('Failed to parse stream line:', line, parseErr);
+                        continue;
+                    }
+                    switch (msg.type) {
+                        case 'meta':
+                            urlTemplates = msg.url_templates;
+                            versioningEnabled = msg.versioning_enabled;
+                            if (objectsContainer) {
+                                objectsContainer.dataset.versioning = versioningEnabled ? 'true' : 'false';
+                            }
+                            break;
+                        case 'count':
+                            totalObjectCount = msg.total_count || 0;
+                            break;
+                        case 'object':
+                            pendingStreamObjects.push(processStreamObject(msg));
+                            if (pendingStreamObjects.length >= STREAM_RENDER_BATCH) {
+                                scheduleStreamRender();
+                            }
+                            break;
+                        case 'error':
+                            throw new Error(msg.error);
+                        case 'done':
+                            streamingComplete = true;
+                            break;
+                    }
+                }
+                if (pendingStreamObjects.length > 0) {
+                    scheduleStreamRender();
+                }
+            }
+            if (buffer.trim()) {
+                try {
+                    const msg = JSON.parse(buffer);
+                    if (msg.type === 'object') {
+                        pendingStreamObjects.push(processStreamObject(msg));
+                    } else if (msg.type === 'done') {
+                        streamingComplete = true;
+                    }
+                } catch (e) {
+                    // Ignore a truncated trailing line.
+                }
+            }
+            flushPendingStreamObjects();
+            streamingComplete = true;
+            hasMoreObjects = false;
+            updateObjectCountBadge();
+            if (loadMoreStatus) {
+                loadMoreStatus.textContent = `${loadedObjectCount.toLocaleString()} objects`;
+            }
+            if (typeof updateLoadMoreButton === 'function') {
+                updateLoadMoreButton();
+            }
+            refreshVirtualList();
+            renderBreadcrumb(currentPrefix);
+        } catch (error) {
+            if (error.name === 'AbortError') return;
+            console.error('Streaming failed, falling back to paginated:', error);
+            useStreaming = false;
+            isLoadingObjects = false;
+            await loadObjectsPaginated(false);
+            return;
+        } finally {
+            isLoadingObjects = false;
+            streamAbortController = null;
+        }
+    };
+
+    const loadObjectsPaginated = async (append = false) => {
         if (isLoadingObjects) return;
         isLoadingObjects = true;
@@ -419,6 +586,7 @@
             if (objectsLoadingRow) objectsLoadingRow.style.display = '';
             nextContinuationToken = null;
             loadedObjectCount = 0;
+            totalObjectCount = 0;
             allObjects = [];
         }
@@ -458,29 +626,12 @@
             data.objects.forEach(obj => {
                 loadedObjectCount++;
-                const key = obj.key;
-                allObjects.push({
-                    key: key,
-                    size: obj.size,
-                    lastModified: obj.last_modified,
-                    lastModifiedDisplay: obj.last_modified_display,
-                    etag: obj.etag,
-                    previewUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.preview, key) : '',
-                    downloadUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.download, key) : '',
-                    presignEndpoint: urlTemplates ? buildUrlFromTemplate(urlTemplates.presign, key) : '',
-                    deleteEndpoint: urlTemplates ? buildUrlFromTemplate(urlTemplates.delete, key) : '',
-                    metadata: '{}',
-                    versionsEndpoint: urlTemplates ? buildUrlFromTemplate(urlTemplates.versions, key) : '',
-                    restoreTemplate: urlTemplates ? urlTemplates.restore.replace('KEY_PLACEHOLDER', encodeURIComponent(key).replace(/%2F/g, '/')) : '',
-                    tagsUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.tags, key) : '',
-                    copyUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.copy, key) : '',
-                    moveUrl: urlTemplates ? buildUrlFromTemplate(urlTemplates.move, key) : ''
-                });
+                allObjects.push(processStreamObject(obj));
             });
             updateObjectCountBadge();
             hasMoreObjects = data.is_truncated;
             if (loadMoreStatus) {
                 if (data.is_truncated) {
                     loadMoreStatus.textContent = `${loadedObjectCount.toLocaleString()} of ${totalObjectCount.toLocaleString()} loaded`;
@@ -488,7 +639,7 @@
                     loadMoreStatus.textContent = `${loadedObjectCount.toLocaleString()} objects`;
                 }
             }
             if (typeof updateLoadMoreButton === 'function') {
                 updateLoadMoreButton();
             }
@@ -511,6 +662,13 @@
         }
     };
 
+    const loadObjects = async (append = false) => {
+        if (useStreaming && !append) {
+            return loadObjectsStreaming();
+        }
+        return loadObjectsPaginated(append);
+    };
+
     const attachRowHandlers = () => {
         const objectRows = document.querySelectorAll('[data-object-row]');
         objectRows.forEach(row => {
@@ -3943,8 +4101,8 @@
         deleteBucketForm.addEventListener('submit', function(e) {
             e.preventDefault();
             window.UICore.submitFormAjax(deleteBucketForm, {
-                successMessage: 'Bucket deleted',
                 onSuccess: function() {
+                    sessionStorage.setItem('flashMessage', JSON.stringify({ title: 'Bucket deleted', variant: 'success' }));
                     window.location.href = window.BucketDetailConfig?.endpoints?.bucketsOverview || '/ui/buckets';
                 }
             });

View File

@@ -309,3 +309,16 @@ window.UICore.setupJsonAutoIndent = function(textarea) {
     }
     });
 };
+
+document.addEventListener('DOMContentLoaded', function() {
+    var flashMessage = sessionStorage.getItem('flashMessage');
+    if (flashMessage) {
+        sessionStorage.removeItem('flashMessage');
+        try {
+            var msg = JSON.parse(flashMessage);
+            if (window.showToast) {
+                window.showToast(msg.body || msg.title, msg.title, msg.variant || 'info');
+            }
+        } catch (e) {
+            // Ignore malformed flash payloads.
+        }
+    }
+});

View File

@@ -146,6 +146,7 @@
     data-bucket="{{ bucket_name }}"
     data-versioning="{{ 'true' if versioning_enabled else 'false' }}"
     data-objects-api="{{ objects_api_url }}"
+    data-objects-stream="{{ objects_stream_url }}"
     data-bulk-delete-endpoint="{{ url_for('ui.bulk_delete_objects', bucket_name=bucket_name) }}"
     data-bulk-download-endpoint="{{ url_for('ui.bulk_download_objects', bucket_name=bucket_name) }}"
     data-folders-url="{{ folders_url }}"