MyFSIO v0.1.7 Release #8
@@ -188,8 +188,11 @@ class EncryptedObjectStorage:
     def bucket_stats(self, bucket_name: str, cache_ttl: int = 60):
         return self.storage.bucket_stats(bucket_name, cache_ttl)

-    def list_objects(self, bucket_name: str):
-        return self.storage.list_objects(bucket_name)
+    def list_objects(self, bucket_name: str, **kwargs):
+        return self.storage.list_objects(bucket_name, **kwargs)
+
+    def list_objects_all(self, bucket_name: str):
+        return self.storage.list_objects_all(bucket_name)

     def get_object_path(self, bucket_name: str, object_key: str):
         return self.storage.get_object_path(bucket_name, object_key)
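As a quick illustration (not part of the diff): because the wrapper simply forwards keyword arguments to the underlying storage, a caller holding an already-constructed EncryptedObjectStorage instance, here called `enc`, can use the new pagination options directly, while `list_objects_all` keeps the old whole-bucket behaviour. The bucket name below is invented.

```python
# Hypothetical usage sketch; `enc` is an already-constructed EncryptedObjectStorage
# and "photos" is an invented bucket name.
page = enc.list_objects("photos", max_keys=500)   # first page, returns a ListObjectsResult
print(len(page.objects), page.is_truncated)

everything = enc.list_objects_all("photos")       # plain list of ObjectMeta, no pagination
```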
@@ -155,7 +155,7 @@ class ReplicationManager:

         try:
             # Get source objects
-            source_objects = self.storage.list_objects(bucket_name)
+            source_objects = self.storage.list_objects_all(bucket_name)
             source_keys = {obj.key: obj.size for obj in source_objects}

             # Get destination objects
@@ -219,7 +219,7 @@ class ReplicationManager:
             return

         try:
-            objects = self.storage.list_objects(bucket_name)
+            objects = self.storage.list_objects_all(bucket_name)
             logger.info(f"Starting replication of {len(objects)} existing objects from {bucket_name}")
             for obj in objects:
                 self._executor.submit(self._replicate_task, bucket_name, obj.key, rule, connection, "write")
@@ -1155,7 +1155,7 @@ def _bucket_list_versions_handler(bucket_name: str) -> Response:
     storage = _storage()

     try:
-        objects = storage.list_objects(bucket_name)
+        objects = storage.list_objects_all(bucket_name)
     except StorageError as exc:
         return _error_response("NoSuchBucket", str(exc), 404)

@@ -1651,7 +1651,7 @@ def bucket_handler(bucket_name: str) -> Response:
             return error
         return _error_response("AccessDenied", str(exc), 403)
     try:
-        objects = storage.list_objects(bucket_name)
+        objects = storage.list_objects_all(bucket_name)
     except StorageError as exc:
         return _error_response("NoSuchBucket", str(exc), 404)

@@ -99,6 +99,15 @@ class BucketMeta:
     created_at: datetime


+@dataclass
+class ListObjectsResult:
+    """Paginated result for object listing."""
+    objects: List[ObjectMeta]
+    is_truncated: bool
+    next_continuation_token: Optional[str]
+    total_count: Optional[int] = None  # Total objects in bucket (from stats cache)
+
+
 def _utcnow() -> datetime:
     return datetime.now(timezone.utc)

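For orientation (illustrative, not from the diff): a page produced by the new API carries the slice of metadata plus enough state to ask for the next slice. The values below are invented, and the import location of ListObjectsResult is assumed to be the storage module.

```python
# Illustrative only - hand-built with invented values to show the shape of one page.
# Assumes ListObjectsResult has been imported from the storage module (path not shown in this diff);
# in practice ObjectStorage.list_objects() constructs this for you.
page = ListObjectsResult(
    objects=[],                                      # would hold up to max_keys ObjectMeta entries
    is_truncated=True,                               # True means more keys remain after this page
    next_continuation_token="logs/2024-05-01.log",   # last key returned on this page
    total_count=9144,                                # size of the full (filtered) listing
)
if page.is_truncated:
    print("request the next page starting after", page.next_continuation_token)
```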
@@ -241,31 +250,105 @@ class ObjectStorage:
         self._remove_tree(self._system_bucket_root(bucket_path.name))
         self._remove_tree(self._multipart_bucket_root(bucket_path.name))

-    def list_objects(self, bucket_name: str) -> List[ObjectMeta]:
+    def list_objects(
+        self,
+        bucket_name: str,
+        *,
+        max_keys: int = 1000,
+        continuation_token: Optional[str] = None,
+        prefix: Optional[str] = None,
+    ) -> ListObjectsResult:
+        """List objects in a bucket with pagination support.
+
+        Args:
+            bucket_name: Name of the bucket
+            max_keys: Maximum number of objects to return (default 1000)
+            continuation_token: Token from previous request for pagination
+            prefix: Filter objects by key prefix
+
+        Returns:
+            ListObjectsResult with objects, truncation status, and continuation token
+        """
         bucket_path = self._bucket_path(bucket_name)
         if not bucket_path.exists():
             raise StorageError("Bucket does not exist")
         bucket_id = bucket_path.name

-        objects: List[ObjectMeta] = []
+        # Collect all matching object keys first (lightweight - just paths)
+        all_keys: List[str] = []
         for path in bucket_path.rglob("*"):
             if path.is_file():
-                stat = path.stat()
                 rel = path.relative_to(bucket_path)
                 if rel.parts and rel.parts[0] in self.INTERNAL_FOLDERS:
                     continue
-                metadata = self._read_metadata(bucket_id, rel)
+                key = str(rel.as_posix())
+                if prefix and not key.startswith(prefix):
+                    continue
+                all_keys.append(key)
+
+        all_keys.sort()
+        total_count = len(all_keys)
+
+        # Handle continuation token (the key to start after)
+        start_index = 0
+        if continuation_token:
+            try:
+                # continuation_token is the last key from previous page
+                for i, key in enumerate(all_keys):
+                    if key > continuation_token:
+                        start_index = i
+                        break
+                else:
+                    # Token is past all keys
+                    return ListObjectsResult(
+                        objects=[],
+                        is_truncated=False,
+                        next_continuation_token=None,
+                        total_count=total_count,
+                    )
+            except Exception:
+                pass  # Invalid token, start from beginning
+
+        # Get the slice we need
+        end_index = start_index + max_keys
+        keys_slice = all_keys[start_index:end_index]
+        is_truncated = end_index < total_count
+
+        # Now load full metadata only for the objects we're returning
+        objects: List[ObjectMeta] = []
+        for key in keys_slice:
+            safe_key = self._sanitize_object_key(key)
+            path = bucket_path / safe_key
+            if not path.exists():
+                continue  # Object may have been deleted
+            try:
+                stat = path.stat()
+                metadata = self._read_metadata(bucket_id, safe_key)
                 objects.append(
                     ObjectMeta(
-                        key=str(rel.as_posix()),
+                        key=key,
                         size=stat.st_size,
                         last_modified=datetime.fromtimestamp(stat.st_mtime),
                         etag=self._compute_etag(path),
                         metadata=metadata or None,
                     )
                 )
-        objects.sort(key=lambda meta: meta.key)
-        return objects
+            except OSError:
+                continue  # File may have been deleted during iteration
+
+        next_token = keys_slice[-1] if is_truncated and keys_slice else None
+
+        return ListObjectsResult(
+            objects=objects,
+            is_truncated=is_truncated,
+            next_continuation_token=next_token,
+            total_count=total_count,
+        )
+
+    def list_objects_all(self, bucket_name: str) -> List[ObjectMeta]:
+        """List all objects in a bucket (no pagination). Use with caution for large buckets."""
+        result = self.list_objects(bucket_name, max_keys=100000)
+        return result.objects

     def put_object(
         self,
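A minimal sketch of how a caller could drain a large bucket with the new API, assuming an already-constructed ObjectStorage instance named `storage` and an invented bucket name. Note that `list_objects_all` above is the one-shot shortcut: it fetches a single page of up to 100000 keys rather than looping like this.

```python
# Hypothetical sketch: walk every page using continuation tokens.
all_meta = []
token = None
while True:
    page = storage.list_objects("big-bucket", max_keys=1000, continuation_token=token)
    all_meta.extend(page.objects)
    if not page.is_truncated:
        break
    token = page.next_continuation_token

print(f"collected {len(all_meta)} of {page.total_count} objects")
```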
app/ui.py (64 changed lines)
@@ -294,7 +294,9 @@ def bucket_detail(bucket_name: str):
     storage = _storage()
     try:
         _authorize_ui(principal, bucket_name, "list")
-        objects = storage.list_objects(bucket_name)
+        # Don't load objects here - UI fetches them asynchronously via /buckets/<name>/objects
+        if not storage.bucket_exists(bucket_name):
+            raise StorageError("Bucket does not exist")
     except (StorageError, IamError) as exc:
         flash(_friendly_error_message(exc), "danger")
         return redirect(url_for("ui.buckets_overview"))
@@ -382,10 +384,13 @@ def bucket_detail(bucket_name: str):
     except IamError:
         pass

+    # Pass the objects API endpoint URL for async loading
+    objects_api_url = url_for("ui.list_bucket_objects", bucket_name=bucket_name)
+
     return render_template(
         "bucket_detail.html",
         bucket_name=bucket_name,
-        objects=objects,
+        objects_api_url=objects_api_url,
         principal=principal,
         bucket_policy_text=policy_text,
         bucket_policy=bucket_policy,
@@ -408,6 +413,61 @@ def bucket_detail(bucket_name: str):
     )


+@ui_bp.get("/buckets/<bucket_name>/objects")
+def list_bucket_objects(bucket_name: str):
+    """API endpoint for paginated object listing."""
+    principal = _current_principal()
+    storage = _storage()
+    try:
+        _authorize_ui(principal, bucket_name, "list")
+    except IamError as exc:
+        return jsonify({"error": str(exc)}), 403
+
+    max_keys = min(int(request.args.get("max_keys", 100)), 1000)
+    continuation_token = request.args.get("continuation_token") or None
+    prefix = request.args.get("prefix") or None
+
+    try:
+        result = storage.list_objects(
+            bucket_name,
+            max_keys=max_keys,
+            continuation_token=continuation_token,
+            prefix=prefix,
+        )
+    except StorageError as exc:
+        return jsonify({"error": str(exc)}), 400
+
+    try:
+        versioning_enabled = storage.is_versioning_enabled(bucket_name)
+    except StorageError:
+        versioning_enabled = False
+
+    objects_data = []
+    for obj in result.objects:
+        objects_data.append({
+            "key": obj.key,
+            "size": obj.size,
+            "last_modified": obj.last_modified.isoformat(),
+            "last_modified_display": obj.last_modified.strftime("%b %d, %Y %H:%M"),
+            "etag": obj.etag,
+            "metadata": obj.metadata or {},
+            "preview_url": url_for("ui.object_preview", bucket_name=bucket_name, object_key=obj.key),
+            "download_url": url_for("ui.object_preview", bucket_name=bucket_name, object_key=obj.key) + "?download=1",
+            "presign_endpoint": url_for("ui.object_presign", bucket_name=bucket_name, object_key=obj.key),
+            "delete_endpoint": url_for("ui.delete_object", bucket_name=bucket_name, object_key=obj.key),
+            "versions_endpoint": url_for("ui.object_versions", bucket_name=bucket_name, object_key=obj.key),
+            "restore_template": url_for("ui.restore_object_version", bucket_name=bucket_name, object_key=obj.key, version_id="VERSION_ID_PLACEHOLDER"),
+        })
+
+    return jsonify({
+        "objects": objects_data,
+        "is_truncated": result.is_truncated,
+        "next_continuation_token": result.next_continuation_token,
+        "total_count": result.total_count,
+        "versioning_enabled": versioning_enabled,
+    })
+
+
 @ui_bp.post("/buckets/<bucket_name>/upload")
 @limiter.limit("30 per minute")
 def upload_object(bucket_name: str):
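A hedged sketch of a script driving the new UI endpoint from outside the app. The host, port, bucket name, and credentials below are assumptions (the login form fields mirror what the tests in this PR use); the query parameters and response fields match the handler above.

```python
import requests

BASE = "http://localhost:5000"  # assumed host/port for a local MyFSIO instance
session = requests.Session()
# Assumed login step: the UI relies on a session cookie set by /ui/login.
session.post(f"{BASE}/ui/login", data={"access_key": "test", "secret_key": "secret"})

params = {"max_keys": 100, "prefix": "logs/"}   # "demo" and "logs/" are invented
while True:
    resp = session.get(f"{BASE}/ui/buckets/demo/objects", params=params)
    resp.raise_for_status()
    data = resp.json()
    for obj in data["objects"]:
        print(obj["key"], obj["size"], obj["last_modified_display"])
    if not data["is_truncated"]:
        break
    params["continuation_token"] = data["next_continuation_token"]
```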
@@ -1,7 +1,7 @@
 """Central location for the application version string."""
 from __future__ import annotations

-APP_VERSION = "0.1.6"
+APP_VERSION = "0.1.7"


 def get_version() -> str:
pytest.ini (new file, 3 lines)
@@ -0,0 +1,3 @@
+[pytest]
+testpaths = tests
+norecursedirs = data .git __pycache__ .venv
@@ -199,7 +199,7 @@
     })();
 </script>
 <script>
-    // Toast utility
+
     window.showToast = function(message, title = 'Notification', type = 'info') {
         const toastEl = document.getElementById('liveToast');
         const toastTitle = document.getElementById('toastTitle');
@@ -207,8 +207,7 @@

         toastTitle.textContent = title;
         toastMessage.textContent = message;

-        // Reset classes
         toastEl.classList.remove('text-bg-primary', 'text-bg-success', 'text-bg-danger', 'text-bg-warning');

         if (type === 'success') toastEl.classList.add('text-bg-success');
@@ -221,13 +220,11 @@
 </script>
 <script>
     (function () {
-        // Show flashed messages as toasts
         {% with messages = get_flashed_messages(with_categories=true) %}
         {% if messages %}
         {% for category, message in messages %}
-        // Map Flask categories to Toast types
-        // Flask: success, danger, warning, info
-        // Toast: success, error, warning, info
         var type = "{{ category }}";
         if (type === "danger") type = "error";
         window.showToast({{ message | tojson | safe }}, "Notification", type);
File diff suppressed because it is too large
@@ -131,7 +131,7 @@
     {{ super() }}
     <script>
         (function () {
-            // Search functionality
+
             const searchInput = document.getElementById('bucket-search');
             const bucketItems = document.querySelectorAll('.bucket-item');
             const noBucketsMsg = document.querySelector('.text-center.py-5'); // The "No buckets found" empty state
@@ -153,7 +153,6 @@
                 });
             }

-            // View toggle functionality
             const viewGrid = document.getElementById('view-grid');
             const viewList = document.getElementById('view-list');
             const container = document.getElementById('buckets-container');
@@ -168,8 +167,7 @@
                 });
                 cards.forEach(card => {
                     card.classList.remove('h-100');
-                    // Optional: Add flex-row to card-body content if we want a horizontal layout
-                    // For now, full-width stacked cards is a good list view
                 });
                 localStorage.setItem('bucket-view-pref', 'list');
             } else {
@@ -188,7 +186,6 @@
             viewGrid.addEventListener('change', () => setView('grid'));
             viewList.addEventListener('change', () => setView('list'));

-            // Restore preference
             const pref = localStorage.getItem('bucket-view-pref');
             if (pref === 'list') {
                 viewList.checked = true;
@@ -181,7 +181,6 @@
     </div>
 </div>

-<!-- Edit Connection Modal -->
 <div class="modal fade" id="editConnectionModal" tabindex="-1" aria-hidden="true">
     <div class="modal-dialog modal-dialog-centered">
         <div class="modal-content">
@@ -247,7 +246,6 @@
     </div>
 </div>

-<!-- Delete Connection Modal -->
 <div class="modal fade" id="deleteConnectionModal" tabindex="-1" aria-hidden="true">
     <div class="modal-dialog modal-dialog-centered">
         <div class="modal-content">
@@ -297,7 +295,6 @@
         }
     }

-    // Test Connection Logic
     async function testConnection(formId, resultId) {
         const form = document.getElementById(formId);
         const resultDiv = document.getElementById(resultId);
@@ -335,7 +332,6 @@
         testConnection('editConnectionForm', 'editTestResult');
     });

-    // Modal Event Listeners
     const editModal = document.getElementById('editConnectionModal');
     editModal.addEventListener('show.bs.modal', event => {
         const button = event.relatedTarget;
@@ -203,7 +203,6 @@
     {% endif %}
 </div>

-<!-- Create User Modal -->
 <div class="modal fade" id="createUserModal" tabindex="-1" aria-hidden="true">
     <div class="modal-dialog modal-dialog-centered">
         <div class="modal-content">
@@ -252,7 +251,6 @@
     </div>
 </div>

-<!-- Policy Editor Modal -->
 <div class="modal fade" id="policyEditorModal" tabindex="-1" aria-hidden="true">
     <div class="modal-dialog modal-lg modal-dialog-centered">
         <div class="modal-content">
@@ -303,7 +301,6 @@
     </div>
 </div>

-<!-- Edit User Modal -->
 <div class="modal fade" id="editUserModal" tabindex="-1" aria-hidden="true">
     <div class="modal-dialog modal-dialog-centered">
         <div class="modal-content">
@@ -338,7 +335,6 @@
     </div>
 </div>

-<!-- Delete User Modal -->
 <div class="modal fade" id="deleteUserModal" tabindex="-1" aria-hidden="true">
     <div class="modal-dialog modal-dialog-centered">
         <div class="modal-content">
@@ -382,7 +378,6 @@
     </div>
 </div>

-<!-- Rotate Secret Modal -->
 <div class="modal fade" id="rotateSecretModal" tabindex="-1" aria-hidden="true">
     <div class="modal-dialog modal-dialog-centered">
         <div class="modal-content">
@@ -486,8 +481,7 @@

     const iamUsersData = document.getElementById('iamUsersJson');
     const users = iamUsersData ? JSON.parse(iamUsersData.textContent || '[]') : [];

-    // Policy Editor Logic
     const policyModalEl = document.getElementById('policyEditorModal');
     const policyModal = new bootstrap.Modal(policyModalEl);
     const userLabelEl = document.getElementById('policyEditorUserLabel');
@@ -534,7 +528,6 @@
         button.addEventListener('click', () => applyTemplate(button.dataset.policyTemplate));
     });

-    // Create User modal template buttons
     const createUserPoliciesEl = document.getElementById('createUserPolicies');
     const createTemplateButtons = document.querySelectorAll('[data-create-policy-template]');

@@ -591,7 +584,6 @@
         });
     });

-    // Edit User Logic
     const editUserModal = new bootstrap.Modal(document.getElementById('editUserModal'));
     const editUserForm = document.getElementById('editUserForm');
     const editUserDisplayName = document.getElementById('editUserDisplayName');
@@ -606,7 +598,6 @@
         });
     });

-    // Delete User Logic
     const deleteUserModal = new bootstrap.Modal(document.getElementById('deleteUserModal'));
     const deleteUserForm = document.getElementById('deleteUserForm');
     const deleteUserLabel = document.getElementById('deleteUserLabel');
@@ -628,7 +619,6 @@
         });
     });

-    // Rotate Secret Logic
     const rotateSecretModal = new bootstrap.Modal(document.getElementById('rotateSecretModal'));
     const rotateUserLabel = document.getElementById('rotateUserLabel');
     const confirmRotateBtn = document.getElementById('confirmRotateBtn');
@@ -644,8 +634,7 @@
     btn.addEventListener('click', () => {
         currentRotateKey = btn.dataset.rotateUser;
         rotateUserLabel.textContent = currentRotateKey;

-        // Reset Modal State
         rotateSecretConfirm.classList.remove('d-none');
         rotateSecretResult.classList.add('d-none');
         confirmRotateBtn.classList.remove('d-none');
@@ -679,8 +668,7 @@

     const data = await response.json();
     newSecretKeyInput.value = data.secret_key;

-    // Show Result
     rotateSecretConfirm.classList.add('d-none');
     rotateSecretResult.classList.remove('d-none');
     confirmRotateBtn.classList.add('d-none');
@@ -38,7 +38,7 @@ def test_unicode_bucket_and_object_names(tmp_path: Path):
     assert storage.get_object_path("unicode-test", key).exists()

     # Verify listing
-    objects = storage.list_objects("unicode-test")
+    objects = storage.list_objects_all("unicode-test")
     assert any(o.key == key for o in objects)

 def test_special_characters_in_metadata(tmp_path: Path):
@@ -220,7 +220,7 @@ def test_bucket_config_filename_allowed(tmp_path):
     storage.create_bucket("demo")
     storage.put_object("demo", ".bucket.json", io.BytesIO(b"{}"))

-    objects = storage.list_objects("demo")
+    objects = storage.list_objects_all("demo")
     assert any(meta.key == ".bucket.json" for meta in objects)


@@ -62,7 +62,7 @@ def test_bulk_delete_json_route(tmp_path: Path):
     assert set(payload["deleted"]) == {"first.txt", "missing.txt"}
     assert payload["errors"] == []

-    listing = storage.list_objects("demo")
+    listing = storage.list_objects_all("demo")
     assert {meta.key for meta in listing} == {"second.txt"}


@@ -92,5 +92,5 @@ def test_bulk_delete_validation(tmp_path: Path):
     assert limit_response.status_code == 400
     assert limit_response.get_json()["status"] == "error"

-    still_there = storage.list_objects("demo")
+    still_there = storage.list_objects_all("demo")
     assert {meta.key for meta in still_there} == {"keep.txt"}
tests/test_ui_pagination.py (new file, 183 lines)
@@ -0,0 +1,183 @@
+"""Tests for UI pagination of bucket objects."""
+import json
+from io import BytesIO
+from pathlib import Path
+
+import pytest
+
+from app import create_app
+
+
+def _make_app(tmp_path: Path):
+    """Create an app for testing."""
+    storage_root = tmp_path / "data"
+    iam_config = tmp_path / "iam.json"
+    bucket_policies = tmp_path / "bucket_policies.json"
+    iam_payload = {
+        "users": [
+            {
+                "access_key": "test",
+                "secret_key": "secret",
+                "display_name": "Test User",
+                "policies": [{"bucket": "*", "actions": ["list", "read", "write", "delete", "policy"]}],
+            },
+        ]
+    }
+    iam_config.write_text(json.dumps(iam_payload))
+
+    flask_app = create_app(
+        {
+            "TESTING": True,
+            "WTF_CSRF_ENABLED": False,
+            "STORAGE_ROOT": storage_root,
+            "IAM_CONFIG": iam_config,
+            "BUCKET_POLICY_PATH": bucket_policies,
+        }
+    )
+    return flask_app
+
+
+class TestPaginatedObjectListing:
+    """Test paginated object listing API."""
+
+    def test_objects_api_returns_paginated_results(self, tmp_path):
+        """Objects API should return paginated results."""
+        app = _make_app(tmp_path)
+        storage = app.extensions["object_storage"]
+        storage.create_bucket("test-bucket")
+
+        # Create 10 test objects
+        for i in range(10):
+            storage.put_object("test-bucket", f"file{i:02d}.txt", BytesIO(b"content"))
+
+        with app.test_client() as client:
+            # Login first
+            client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
+
+            # Request first page of 3 objects
+            resp = client.get("/ui/buckets/test-bucket/objects?max_keys=3")
+            assert resp.status_code == 200
+
+            data = resp.get_json()
+            assert len(data["objects"]) == 3
+            assert data["is_truncated"] is True
+            assert data["next_continuation_token"] is not None
+            assert data["total_count"] == 10
+
+    def test_objects_api_pagination_continuation(self, tmp_path):
+        """Objects API should support continuation tokens."""
+        app = _make_app(tmp_path)
+        storage = app.extensions["object_storage"]
+        storage.create_bucket("test-bucket")
+
+        # Create 5 test objects
+        for i in range(5):
+            storage.put_object("test-bucket", f"file{i:02d}.txt", BytesIO(b"content"))
+
+        with app.test_client() as client:
+            client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
+
+            # Get first page
+            resp = client.get("/ui/buckets/test-bucket/objects?max_keys=2")
+            assert resp.status_code == 200
+            data = resp.get_json()
+
+            first_page_keys = [obj["key"] for obj in data["objects"]]
+            assert len(first_page_keys) == 2
+            assert data["is_truncated"] is True
+
+            # Get second page
+            token = data["next_continuation_token"]
+            resp = client.get(f"/ui/buckets/test-bucket/objects?max_keys=2&continuation_token={token}")
+            assert resp.status_code == 200
+            data = resp.get_json()
+
+            second_page_keys = [obj["key"] for obj in data["objects"]]
+            assert len(second_page_keys) == 2
+
+            # No overlap between pages
+            assert set(first_page_keys).isdisjoint(set(second_page_keys))
+
+    def test_objects_api_prefix_filter(self, tmp_path):
+        """Objects API should support prefix filtering."""
+        app = _make_app(tmp_path)
+        storage = app.extensions["object_storage"]
+        storage.create_bucket("test-bucket")
+
+        # Create objects with different prefixes
+        storage.put_object("test-bucket", "logs/access.log", BytesIO(b"log"))
+        storage.put_object("test-bucket", "logs/error.log", BytesIO(b"log"))
+        storage.put_object("test-bucket", "data/file.txt", BytesIO(b"data"))
+
+        with app.test_client() as client:
+            client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
+
+            # Filter by prefix
+            resp = client.get("/ui/buckets/test-bucket/objects?prefix=logs/")
+            assert resp.status_code == 200
+            data = resp.get_json()
+
+            keys = [obj["key"] for obj in data["objects"]]
+            assert all(k.startswith("logs/") for k in keys)
+            assert len(keys) == 2
+
+    def test_objects_api_requires_authentication(self, tmp_path):
+        """Objects API should require login."""
+        app = _make_app(tmp_path)
+        storage = app.extensions["object_storage"]
+        storage.create_bucket("test-bucket")
+
+        with app.test_client() as client:
+            # Don't login
+            resp = client.get("/ui/buckets/test-bucket/objects")
+            # Should redirect to login
+            assert resp.status_code == 302
+            assert "/ui/login" in resp.headers.get("Location", "")
+
+    def test_objects_api_returns_object_metadata(self, tmp_path):
+        """Objects API should return complete object metadata."""
+        app = _make_app(tmp_path)
+        storage = app.extensions["object_storage"]
+        storage.create_bucket("test-bucket")
+        storage.put_object("test-bucket", "test.txt", BytesIO(b"test content"))
+
+        with app.test_client() as client:
+            client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
+
+            resp = client.get("/ui/buckets/test-bucket/objects")
+            assert resp.status_code == 200
+            data = resp.get_json()
+
+            assert len(data["objects"]) == 1
+            obj = data["objects"][0]
+
+            # Check all expected fields
+            assert obj["key"] == "test.txt"
+            assert obj["size"] == 12  # len("test content")
+            assert "last_modified" in obj
+            assert "last_modified_display" in obj
+            assert "etag" in obj
+            assert "preview_url" in obj
+            assert "download_url" in obj
+            assert "delete_endpoint" in obj
+
+    def test_bucket_detail_page_loads_without_objects(self, tmp_path):
+        """Bucket detail page should load even with many objects."""
+        app = _make_app(tmp_path)
+        storage = app.extensions["object_storage"]
+        storage.create_bucket("test-bucket")
+
+        # Create many objects
+        for i in range(100):
+            storage.put_object("test-bucket", f"file{i:03d}.txt", BytesIO(b"x"))
+
+        with app.test_client() as client:
+            client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
+
+            # The page should load quickly (objects loaded via JS)
+            resp = client.get("/ui/buckets/test-bucket")
+            assert resp.status_code == 200
+
+            html = resp.data.decode("utf-8")
+            # Should have the JavaScript loading infrastructure
+            assert "loadObjects" in html or "objectsApiUrl" in html
@@ -70,8 +70,12 @@ def test_ui_bucket_policy_enforcement_toggle(tmp_path: Path, enforce: bool):
         assert b"Access denied by bucket policy" in response.data
     else:
         assert response.status_code == 200
-        assert b"vid.mp4" in response.data
         assert b"Access denied by bucket policy" not in response.data
+        # Objects are now loaded via async API - check the objects endpoint
+        objects_response = client.get("/ui/buckets/testbucket/objects")
+        assert objects_response.status_code == 200
+        data = objects_response.get_json()
+        assert any(obj["key"] == "vid.mp4" for obj in data["objects"])


 def test_ui_bucket_policy_disabled_by_default(tmp_path: Path):
@@ -109,5 +113,9 @@ def test_ui_bucket_policy_disabled_by_default(tmp_path: Path):
     client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
     response = client.get("/ui/buckets/testbucket", follow_redirects=True)
     assert response.status_code == 200
-    assert b"vid.mp4" in response.data
     assert b"Access denied by bucket policy" not in response.data
+    # Objects are now loaded via async API - check the objects endpoint
+    objects_response = client.get("/ui/buckets/testbucket/objects")
+    assert objects_response.status_code == 200
+    data = objects_response.get_json()
+    assert any(obj["key"] == "vid.mp4" for obj in data["objects"])