MyFSIO v0.2.0 Release #12
@@ -157,10 +157,7 @@ class LocalKeyEncryption(EncryptionProvider):
    def decrypt(self, ciphertext: bytes, nonce: bytes, encrypted_data_key: bytes,
                key_id: str, context: Dict[str, str] | None = None) -> bytes:
        """Decrypt data using envelope encryption."""
        # Decrypt the data key
        data_key = self._decrypt_data_key(encrypted_data_key)

        # Decrypt the data
        aesgcm = AESGCM(data_key)
        try:
            return aesgcm.decrypt(nonce, ciphertext, None)

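The hunk above only shows the unwrap-and-decrypt half of the flow. As a minimal sketch of the envelope pattern it relies on, assuming the `cryptography` package's AESGCM primitive (the `master_key`, `wrap_data_key`, and `unwrap_data_key` names are illustrative, not MyFSIO APIs):

```python
# Illustrative envelope-encryption round trip with AES-GCM; not MyFSIO's actual code.
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

master_key = AESGCM.generate_key(bit_length=256)  # stands in for the stored master key

def wrap_data_key(data_key: bytes) -> bytes:
    """Encrypt (wrap) a per-object data key under the master key."""
    nonce = os.urandom(12)
    return nonce + AESGCM(master_key).encrypt(nonce, data_key, None)

def unwrap_data_key(blob: bytes) -> bytes:
    """Recover the plaintext data key (roughly what a _decrypt_data_key helper does)."""
    nonce, ciphertext = blob[:12], blob[12:]
    return AESGCM(master_key).decrypt(nonce, ciphertext, None)

# Encrypt object data with a fresh data key; persist only the wrapped key.
data_key = AESGCM.generate_key(bit_length=256)
wrapped = wrap_data_key(data_key)
nonce = os.urandom(12)
ciphertext = AESGCM(data_key).encrypt(nonce, b"object bytes", None)
assert AESGCM(unwrap_data_key(wrapped)).decrypt(nonce, ciphertext, None) == b"object bytes"
```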
@@ -976,7 +976,6 @@ def _object_tagging_handler(bucket_name: str, object_key: str) -> Response:
    current_app.logger.info("Object tags deleted", extra={"bucket": bucket_name, "key": object_key})
    return Response(status=204)

    # PUT
    payload = request.get_data(cache=False) or b""
    try:
        tags = _parse_tagging_document(payload)
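The `_parse_tagging_document` helper is not part of this diff. A hypothetical sketch of what such a parser can look like with `xml.etree.ElementTree`, using the tag-count limit the tests further down exercise:

```python
# Hypothetical tagging-document parser; MyFSIO's real helper is not shown in this diff.
from xml.etree.ElementTree import fromstring

def parse_tagging_document(payload: bytes, max_tags: int = 10) -> dict[str, str]:
    root = fromstring(payload)  # raises ParseError on malformed XML -> MalformedXML
    tags: dict[str, str] = {}
    for tag in root.findall("./TagSet/Tag"):
        key = tag.findtext("Key")
        if not key:
            raise ValueError("Tag is missing a Key element")
        tags[key] = tag.findtext("Value") or ""
    if len(tags) > max_tags:
        raise ValueError(f"Object tags are limited to {max_tags}")
    return tags

# Example payload, matching the format used in the tagging tests below.
doc = b"<Tagging><TagSet><Tag><Key>env</Key><Value>test</Value></Tag></TagSet></Tagging>"
assert parse_tagging_document(doc) == {"env": "test"}
```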
@@ -1044,7 +1043,7 @@ def _bucket_cors_handler(bucket_name: str) -> Response:
        return _error_response("NoSuchBucket", str(exc), 404)
    current_app.logger.info("Bucket CORS deleted", extra={"bucket": bucket_name})
    return Response(status=204)
    # PUT

    payload = request.get_data(cache=False) or b""
    if not payload.strip():
    try:
@@ -1291,7 +1290,6 @@ def _bucket_lifecycle_handler(bucket_name: str) -> Response:
    current_app.logger.info("Bucket lifecycle deleted", extra={"bucket": bucket_name})
    return Response(status=204)

    # PUT
    payload = request.get_data(cache=False) or b""
    if not payload.strip():
        return _error_response("MalformedXML", "Request body is required", 400)
@@ -1455,7 +1453,6 @@ def _bucket_quota_handler(bucket_name: str) -> Response:
    current_app.logger.info("Bucket quota deleted", extra={"bucket": bucket_name})
    return Response(status=204)

    # PUT
    payload = request.get_json(silent=True)
    if not payload:
        return _error_response("MalformedRequest", "Request body must be JSON with quota limits", 400)

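Why the quota handler checks `if not payload`: with `silent=True`, Flask returns `None` instead of raising when the body is missing or not valid JSON, so the handler maps that to a 400. A small illustration of that behaviour; the quota field name below is a placeholder, not MyFSIO's documented schema:

```python
# Minimal demo of request.get_json(silent=True) behaviour; "max_bytes" is a placeholder.
from flask import Flask, request

app = Flask(__name__)

@app.route("/quota-demo", methods=["PUT"])
def quota_demo():
    payload = request.get_json(silent=True)
    if not payload:
        return {"error": "Request body must be JSON with quota limits"}, 400
    return {"accepted": payload}, 200

with app.test_client() as client:
    assert client.put("/quota-demo", data=b"not json").status_code == 400
    assert client.put("/quota-demo", json={"max_bytes": 1024}).status_code == 200
```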
@@ -969,7 +969,6 @@
            </div>
        </div>

        <!-- Warning alert for unreachable endpoint (shown by JS if endpoint is down) -->
        <div id="replication-endpoint-warning" class="alert alert-danger d-none mb-4" role="alert">
            <div class="d-flex align-items-start">
                <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" fill="currentColor" class="flex-shrink-0 me-2" viewBox="0 0 16 16">
@@ -1783,7 +1782,6 @@
|
||||
|
||||
{% block extra_scripts %}
|
||||
<script>
|
||||
// Auto-indent for JSON textareas
|
||||
function setupJsonAutoIndent(textarea) {
|
||||
if (!textarea) return;
|
||||
|
||||
@@ -1795,15 +1793,12 @@
|
||||
const end = this.selectionEnd;
|
||||
const value = this.value;
|
||||
|
||||
// Get the current line
|
||||
const lineStart = value.lastIndexOf('\n', start - 1) + 1;
|
||||
const currentLine = value.substring(lineStart, start);
|
||||
|
||||
// Calculate base indentation (leading whitespace of current line)
|
||||
const indentMatch = currentLine.match(/^(\s*)/);
|
||||
let indent = indentMatch ? indentMatch[1] : '';
|
||||
|
||||
// Check if the line ends with { or [ (should increase indent)
|
||||
const trimmedLine = currentLine.trim();
|
||||
const lastChar = trimmedLine.slice(-1);
|
||||
|
||||
@@ -1811,42 +1806,34 @@
|
||||
let insertAfter = '';
|
||||
|
||||
if (lastChar === '{' || lastChar === '[') {
|
||||
// Add extra indentation
|
||||
newIndent = indent + ' ';
|
||||
|
||||
// Check if we need to add closing bracket on new line
|
||||
const charAfterCursor = value.substring(start, start + 1).trim();
|
||||
if ((lastChar === '{' && charAfterCursor === '}') ||
|
||||
(lastChar === '[' && charAfterCursor === ']')) {
|
||||
insertAfter = '\n' + indent;
|
||||
}
|
||||
} else if (lastChar === ',' || lastChar === ':') {
|
||||
// Keep same indentation for continuation
|
||||
newIndent = indent;
|
||||
}
|
||||
|
||||
// Insert newline with proper indentation
|
||||
const insertion = '\n' + newIndent + insertAfter;
|
||||
const newValue = value.substring(0, start) + insertion + value.substring(end);
|
||||
|
||||
this.value = newValue;
|
||||
|
||||
// Set cursor position after the indentation
|
||||
const newCursorPos = start + 1 + newIndent.length;
|
||||
this.selectionStart = this.selectionEnd = newCursorPos;
|
||||
|
||||
// Trigger input event for any listeners
|
||||
this.dispatchEvent(new Event('input', { bubbles: true }));
|
||||
}
|
||||
|
||||
// Handle Tab key for indentation
|
||||
if (e.key === 'Tab') {
|
||||
e.preventDefault();
|
||||
const start = this.selectionStart;
|
||||
const end = this.selectionEnd;
|
||||
|
||||
if (e.shiftKey) {
|
||||
// Outdent: remove 2 spaces from start of line
|
||||
const lineStart = this.value.lastIndexOf('\n', start - 1) + 1;
|
||||
const lineContent = this.value.substring(lineStart, start);
|
||||
if (lineContent.startsWith(' ')) {
|
||||
@@ -1855,7 +1842,6 @@
|
||||
this.selectionStart = this.selectionEnd = Math.max(lineStart, start - 2);
|
||||
}
|
||||
} else {
|
||||
// Indent: insert 2 spaces
|
||||
this.value = this.value.substring(0, start) + ' ' + this.value.substring(end);
|
||||
this.selectionStart = this.selectionEnd = start + 2;
|
||||
}
|
||||
@@ -1865,7 +1851,6 @@
|
||||
});
|
||||
}
|
||||
|
||||
// Apply auto-indent to policy editor textarea
|
||||
setupJsonAutoIndent(document.getElementById('policyDocument'));
|
||||
|
||||
const formatBytes = (bytes) => {
|
||||
@@ -1970,24 +1955,21 @@
|
||||
let isLoadingObjects = false;
|
||||
let hasMoreObjects = false;
|
||||
let currentFilterTerm = '';
|
||||
let pageSize = 5000; // Load large batches for virtual scrolling
|
||||
let currentPrefix = ''; // Current folder prefix for navigation
|
||||
let allObjects = []; // All loaded object metadata (lightweight)
|
||||
let urlTemplates = null; // URL templates from API for constructing object URLs
|
||||
let pageSize = 5000;
|
||||
let currentPrefix = '';
|
||||
let allObjects = [];
|
||||
let urlTemplates = null;
|
||||
|
||||
// Helper to build URL from template by replacing KEY_PLACEHOLDER with encoded key
|
||||
const buildUrlFromTemplate = (template, key) => {
|
||||
if (!template) return '';
|
||||
return template.replace('KEY_PLACEHOLDER', encodeURIComponent(key).replace(/%2F/g, '/'));
|
||||
};
|
||||
|
||||
// Virtual scrolling state
|
||||
const ROW_HEIGHT = 53; // Height of each table row in pixels
|
||||
const BUFFER_ROWS = 10; // Extra rows to render above/below viewport
|
||||
let visibleItems = []; // Current items to display (filtered by folder/search)
|
||||
let renderedRange = { start: 0, end: 0 }; // Currently rendered row indices
|
||||
const ROW_HEIGHT = 53;
|
||||
const BUFFER_ROWS = 10;
|
||||
let visibleItems = [];
|
||||
let renderedRange = { start: 0, end: 0 };
|
||||
|
||||
// Create a row element from object data (for virtual scrolling)
|
||||
const createObjectRow = (obj, displayKey = null) => {
|
||||
const tr = document.createElement('tr');
|
||||
tr.dataset.objectRow = '';
|
||||
@@ -2110,16 +2092,12 @@
|
||||
}
|
||||
};
|
||||
|
||||
// ============== VIRTUAL SCROLLING SYSTEM ==============
|
||||
|
||||
// Spacer elements for virtual scroll height
|
||||
let topSpacer = null;
|
||||
let bottomSpacer = null;
|
||||
|
||||
const initVirtualScrollElements = () => {
|
||||
if (!objectsTableBody) return;
|
||||
|
||||
// Create spacer rows if they don't exist
|
||||
if (!topSpacer) {
|
||||
topSpacer = document.createElement('tr');
|
||||
topSpacer.id = 'virtual-top-spacer';
|
||||
@@ -2132,7 +2110,6 @@
|
||||
}
|
||||
};
|
||||
|
||||
// Compute which items should be visible based on current view
|
||||
const computeVisibleItems = () => {
|
||||
const items = [];
|
||||
const folders = new Set();
|
||||
@@ -2144,17 +2121,14 @@
|
||||
const slashIndex = remainder.indexOf('/');
|
||||
|
||||
if (slashIndex === -1) {
|
||||
// File in current folder - filter on the displayed filename (remainder)
|
||||
if (!currentFilterTerm || remainder.toLowerCase().includes(currentFilterTerm)) {
|
||||
items.push({ type: 'file', data: obj, displayKey: remainder });
|
||||
}
|
||||
} else {
|
||||
// Folder
|
||||
const folderName = remainder.slice(0, slashIndex);
|
||||
const folderPath = currentPrefix + folderName + '/';
|
||||
if (!folders.has(folderPath)) {
|
||||
folders.add(folderPath);
|
||||
// Filter on the displayed folder name only
|
||||
if (!currentFilterTerm || folderName.toLowerCase().includes(currentFilterTerm)) {
|
||||
items.push({ type: 'folder', path: folderPath, displayKey: folderName });
|
||||
}
|
||||
@@ -2162,7 +2136,6 @@
|
||||
}
|
||||
});
|
||||
|
||||
// Sort: folders first, then files
|
||||
items.sort((a, b) => {
|
||||
if (a.type === 'folder' && b.type === 'file') return -1;
|
||||
if (a.type === 'file' && b.type === 'folder') return 1;
|
||||
@@ -2174,31 +2147,25 @@
|
||||
return items;
|
||||
};
|
||||
|
||||
// Render only the visible rows based on scroll position
|
||||
const renderVirtualRows = () => {
|
||||
if (!objectsTableBody || !scrollContainer) return;
|
||||
|
||||
const containerHeight = scrollContainer.clientHeight;
|
||||
const scrollTop = scrollContainer.scrollTop;
|
||||
|
||||
// Calculate visible range
|
||||
const startIndex = Math.max(0, Math.floor(scrollTop / ROW_HEIGHT) - BUFFER_ROWS);
|
||||
const endIndex = Math.min(visibleItems.length, Math.ceil((scrollTop + containerHeight) / ROW_HEIGHT) + BUFFER_ROWS);
|
||||
|
||||
// Skip if range hasn't changed significantly
|
||||
if (startIndex === renderedRange.start && endIndex === renderedRange.end) return;
|
||||
|
||||
renderedRange = { start: startIndex, end: endIndex };
|
||||
|
||||
// Clear and rebuild
|
||||
objectsTableBody.innerHTML = '';
|
||||
|
||||
// Add top spacer
|
||||
initVirtualScrollElements();
|
||||
topSpacer.querySelector('td').style.height = `${startIndex * ROW_HEIGHT}px`;
|
||||
objectsTableBody.appendChild(topSpacer);
|
||||
|
||||
// Render visible rows
|
||||
for (let i = startIndex; i < endIndex; i++) {
|
||||
const item = visibleItems[i];
|
||||
if (!item) continue;
|
||||
@@ -2213,32 +2180,27 @@
|
||||
objectsTableBody.appendChild(row);
|
||||
}
|
||||
|
||||
// Add bottom spacer
|
||||
const remainingRows = visibleItems.length - endIndex;
|
||||
bottomSpacer.querySelector('td').style.height = `${remainingRows * ROW_HEIGHT}px`;
|
||||
objectsTableBody.appendChild(bottomSpacer);
|
||||
|
||||
// Re-attach handlers to new rows
|
||||
attachRowHandlers();
|
||||
};
|
||||
|
||||
// Debounced scroll handler for virtual scrolling
|
||||
let scrollTimeout = null;
|
||||
const handleVirtualScroll = () => {
|
||||
if (scrollTimeout) cancelAnimationFrame(scrollTimeout);
|
||||
scrollTimeout = requestAnimationFrame(renderVirtualRows);
|
||||
};
|
||||
|
||||
// Refresh the virtual list (after data changes or navigation)
|
||||
const refreshVirtualList = () => {
|
||||
visibleItems = computeVisibleItems();
|
||||
renderedRange = { start: -1, end: -1 }; // Force re-render
|
||||
renderedRange = { start: -1, end: -1 };
|
||||
|
||||
if (visibleItems.length === 0) {
|
||||
if (allObjects.length === 0 && !hasMoreObjects) {
|
||||
showEmptyState();
|
||||
} else {
|
||||
// Empty folder
|
||||
objectsTableBody.innerHTML = `
|
||||
<tr>
|
||||
<td colspan="4" class="py-5">
|
||||
@@ -2262,7 +2224,6 @@
|
||||
updateFolderViewStatus();
|
||||
};
|
||||
|
||||
// Update status bar
|
||||
const updateFolderViewStatus = () => {
|
||||
const folderViewStatusEl = document.getElementById('folder-view-status');
|
||||
if (!folderViewStatusEl) return;
|
||||
@@ -2277,8 +2238,6 @@
|
||||
}
|
||||
};
|
||||
|
||||
// ============== DATA LOADING ==============
|
||||
|
||||
const loadObjects = async (append = false) => {
|
||||
if (isLoadingObjects) return;
|
||||
isLoadingObjects = true;
|
||||
@@ -2290,7 +2249,6 @@
|
||||
allObjects = [];
|
||||
}
|
||||
|
||||
// Show loading spinner when loading more
|
||||
if (append && loadMoreSpinner) {
|
||||
loadMoreSpinner.classList.remove('d-none');
|
||||
}
|
||||
@@ -2359,7 +2317,6 @@
|
||||
updateLoadMoreButton();
|
||||
}
|
||||
|
||||
// Refresh virtual scroll view
|
||||
refreshVirtualList();
|
||||
renderBreadcrumb(currentPrefix);
|
||||
|
||||
@@ -2379,7 +2336,6 @@
|
||||
};
|
||||
|
||||
const attachRowHandlers = () => {
|
||||
// Attach handlers to object rows
|
||||
const objectRows = document.querySelectorAll('[data-object-row]');
|
||||
objectRows.forEach(row => {
|
||||
if (row.dataset.handlersAttached) return;
|
||||
@@ -2405,14 +2361,12 @@
|
||||
toggleRowSelection(row, selectCheckbox.checked);
|
||||
});
|
||||
|
||||
// Restore selection state
|
||||
if (selectedRows.has(row.dataset.key)) {
|
||||
selectCheckbox.checked = true;
|
||||
row.classList.add('table-active');
|
||||
}
|
||||
});
|
||||
|
||||
// Attach handlers to folder rows
|
||||
const folderRows = document.querySelectorAll('.folder-row');
|
||||
folderRows.forEach(row => {
|
||||
if (row.dataset.handlersAttached) return;
|
||||
@@ -2423,7 +2377,6 @@
|
||||
const checkbox = row.querySelector('[data-folder-select]');
|
||||
checkbox?.addEventListener('change', (e) => {
|
||||
e.stopPropagation();
|
||||
// Select all objects in this folder
|
||||
const folderObjects = allObjects.filter(obj => obj.key.startsWith(folderPath));
|
||||
folderObjects.forEach(obj => {
|
||||
if (checkbox.checked) {
|
||||
@@ -2450,31 +2403,26 @@
|
||||
updateBulkDeleteState();
|
||||
};
|
||||
|
||||
// Scroll container reference (needed for virtual scrolling)
|
||||
const scrollSentinel = document.getElementById('scroll-sentinel');
|
||||
const scrollContainer = document.querySelector('.objects-table-container');
|
||||
const loadMoreBtn = document.getElementById('load-more-btn');
|
||||
|
||||
// Virtual scroll: listen to scroll events
|
||||
if (scrollContainer) {
|
||||
scrollContainer.addEventListener('scroll', handleVirtualScroll, { passive: true });
|
||||
}
|
||||
|
||||
// Load More button click handler (fallback)
|
||||
loadMoreBtn?.addEventListener('click', () => {
|
||||
if (hasMoreObjects && !isLoadingObjects) {
|
||||
loadObjects(true);
|
||||
}
|
||||
});
|
||||
|
||||
// Show/hide Load More button based on hasMoreObjects
|
||||
function updateLoadMoreButton() {
|
||||
if (loadMoreBtn) {
|
||||
loadMoreBtn.classList.toggle('d-none', !hasMoreObjects);
|
||||
}
|
||||
}
|
||||
|
||||
// Auto-load more when near bottom (for loading all data)
|
||||
if (scrollSentinel && scrollContainer) {
|
||||
const containerObserver = new IntersectionObserver((entries) => {
|
||||
entries.forEach(entry => {
|
||||
@@ -2484,7 +2432,7 @@
|
||||
});
|
||||
}, {
|
||||
root: scrollContainer,
|
||||
rootMargin: '500px', // Load more earlier for smoother experience
|
||||
rootMargin: '500px',
|
||||
threshold: 0
|
||||
});
|
||||
containerObserver.observe(scrollSentinel);
|
||||
@@ -2503,7 +2451,6 @@
|
||||
viewportObserver.observe(scrollSentinel);
|
||||
}
|
||||
|
||||
// Page size selector (now controls batch size)
|
||||
const pageSizeSelect = document.getElementById('page-size-select');
|
||||
pageSizeSelect?.addEventListener('change', (e) => {
|
||||
pageSize = parseInt(e.target.value, 10);
|
||||
@@ -2669,14 +2616,11 @@
|
||||
return tr;
|
||||
};
|
||||
|
||||
// Instant client-side folder navigation (no server round-trip!)
|
||||
const navigateToFolder = (prefix) => {
|
||||
currentPrefix = prefix;
|
||||
|
||||
// Scroll to top when navigating
|
||||
if (scrollContainer) scrollContainer.scrollTop = 0;
|
||||
|
||||
// Instant re-render from already-loaded data
|
||||
refreshVirtualList();
|
||||
renderBreadcrumb(prefix);
|
||||
|
||||
@@ -2710,9 +2654,9 @@
|
||||
if (keyCell && currentPrefix) {
|
||||
const displayName = obj.key.slice(currentPrefix.length);
|
||||
keyCell.textContent = displayName;
|
||||
keyCell.closest('.object-key').title = obj.key; // Full path in tooltip
|
||||
keyCell.closest('.object-key').title = obj.key;
|
||||
} else if (keyCell) {
|
||||
keyCell.textContent = obj.key; // Reset to full key at root
|
||||
keyCell.textContent = obj.key;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -2887,7 +2831,6 @@
|
||||
bulkDeleteConfirm.disabled = selectedCount === 0 || bulkDeleting;
|
||||
}
|
||||
if (selectAllCheckbox) {
|
||||
// With virtual scrolling, count files in current folder from visibleItems
|
||||
const filesInView = visibleItems.filter(item => item.type === 'file');
|
||||
const total = filesInView.length;
|
||||
const visibleSelectedCount = filesInView.filter(item => selectedRows.has(item.data.key)).length;
|
||||
@@ -3524,9 +3467,6 @@
|
||||
document.getElementById('object-search')?.addEventListener('input', (event) => {
|
||||
currentFilterTerm = event.target.value.toLowerCase();
|
||||
updateFilterWarning();
|
||||
|
||||
// Use the virtual scrolling system for filtering - it properly handles
|
||||
// both folder view and flat view, and works with large object counts
|
||||
refreshVirtualList();
|
||||
});
|
||||
|
||||
@@ -3886,10 +3826,8 @@
|
||||
selectAllCheckbox?.addEventListener('change', (event) => {
|
||||
const shouldSelect = Boolean(event.target?.checked);
|
||||
|
||||
// Get all file items in the current view (works with virtual scrolling)
|
||||
const filesInView = visibleItems.filter(item => item.type === 'file');
|
||||
|
||||
// Update selectedRows directly using object keys (not DOM elements)
|
||||
filesInView.forEach(item => {
|
||||
if (shouldSelect) {
|
||||
selectedRows.set(item.data.key, item.data);
|
||||
@@ -3898,12 +3836,10 @@
|
||||
}
|
||||
});
|
||||
|
||||
// Update folder checkboxes in DOM (folders are always rendered)
|
||||
document.querySelectorAll('[data-folder-select]').forEach(cb => {
|
||||
cb.checked = shouldSelect;
|
||||
});
|
||||
|
||||
// Update any currently rendered object checkboxes
|
||||
document.querySelectorAll('[data-object-row]').forEach((row) => {
|
||||
const checkbox = row.querySelector('[data-object-select]');
|
||||
if (checkbox) {
|
||||
@@ -3917,7 +3853,6 @@
|
||||
|
||||
bulkDownloadButton?.addEventListener('click', async () => {
|
||||
if (!bulkDownloadEndpoint) return;
|
||||
// Use selectedRows which tracks all selected objects (not just rendered ones)
|
||||
const selected = Array.from(selectedRows.keys());
|
||||
if (selected.length === 0) return;
|
||||
|
||||
@@ -4085,7 +4020,6 @@
|
||||
}
|
||||
});
|
||||
|
||||
// Bucket name validation for replication setup
|
||||
const targetBucketInput = document.getElementById('target_bucket');
|
||||
const targetBucketFeedback = document.getElementById('target_bucket_feedback');
|
||||
|
||||
@@ -4120,7 +4054,6 @@
|
||||
targetBucketInput?.addEventListener('input', updateBucketNameValidation);
|
||||
targetBucketInput?.addEventListener('blur', updateBucketNameValidation);
|
||||
|
||||
// Prevent form submission if bucket name is invalid
|
||||
const replicationForm = targetBucketInput?.closest('form');
|
||||
replicationForm?.addEventListener('submit', (e) => {
|
||||
const name = targetBucketInput.value.trim();
|
||||
@@ -4133,7 +4066,6 @@
|
||||
}
|
||||
});
|
||||
|
||||
// Policy JSON validation and formatting
|
||||
const formatPolicyBtn = document.getElementById('formatPolicyBtn');
|
||||
const policyValidationStatus = document.getElementById('policyValidationStatus');
|
||||
const policyValidBadge = document.getElementById('policyValidBadge');
|
||||
@@ -4176,12 +4108,10 @@
|
||||
policyTextarea.value = JSON.stringify(parsed, null, 2);
|
||||
validatePolicyJson();
|
||||
} catch (err) {
|
||||
// Show error in validation
|
||||
validatePolicyJson();
|
||||
}
|
||||
});
|
||||
|
||||
// Initialize policy validation on page load
|
||||
if (policyTextarea && policyPreset?.value === 'custom') {
|
||||
validatePolicyJson();
|
||||
}
|
||||
|
||||
@@ -133,7 +133,7 @@
|
||||
|
||||
const searchInput = document.getElementById('bucket-search');
|
||||
const bucketItems = document.querySelectorAll('.bucket-item');
|
||||
const noBucketsMsg = document.querySelector('.text-center.py-5'); // The "No buckets found" empty state
|
||||
const noBucketsMsg = document.querySelector('.text-center.py-5');
|
||||
|
||||
if (searchInput) {
|
||||
searchInput.addEventListener('input', (e) => {
|
||||
|
||||
@@ -307,7 +307,6 @@
|
||||
|
||||
resultDiv.innerHTML = '<div class="text-info"><span class="spinner-border spinner-border-sm" role="status" aria-hidden="true"></span> Testing connection...</div>';
|
||||
|
||||
// Use AbortController to timeout client-side after 20 seconds
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), 20000);
|
||||
|
||||
@@ -394,8 +393,6 @@
|
||||
form.action = "{{ url_for('ui.delete_connection', connection_id='CONN_ID') }}".replace('CONN_ID', id);
|
||||
});
|
||||
|
||||
// Check connection health for each connection in the table
|
||||
// Uses staggered requests to avoid overwhelming the server
|
||||
async function checkConnectionHealth(connectionId, statusEl) {
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
@@ -432,13 +429,11 @@
|
||||
}
|
||||
}
|
||||
|
||||
// Stagger health checks to avoid all requests at once
|
||||
const connectionRows = document.querySelectorAll('tr[data-connection-id]');
|
||||
connectionRows.forEach((row, index) => {
|
||||
const connectionId = row.getAttribute('data-connection-id');
|
||||
const statusEl = row.querySelector('.connection-status');
|
||||
if (statusEl) {
|
||||
// Stagger requests by 200ms each
|
||||
setTimeout(() => checkConnectionHealth(connectionId, statusEl), index * 200);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -456,7 +456,6 @@
|
||||
{{ super() }}
|
||||
<script>
|
||||
(function () {
|
||||
// Auto-indent for JSON textareas
|
||||
function setupJsonAutoIndent(textarea) {
|
||||
if (!textarea) return;
|
||||
|
||||
@@ -468,61 +467,49 @@
|
||||
const end = this.selectionEnd;
|
||||
const value = this.value;
|
||||
|
||||
// Get the current line
|
||||
const lineStart = value.lastIndexOf('\n', start - 1) + 1;
|
||||
const currentLine = value.substring(lineStart, start);
|
||||
|
||||
// Calculate base indentation (leading whitespace of current line)
|
||||
const indentMatch = currentLine.match(/^(\s*)/);
|
||||
let indent = indentMatch ? indentMatch[1] : '';
|
||||
|
||||
// Check if the line ends with { or [ (should increase indent)
|
||||
const trimmedLine = currentLine.trim();
|
||||
const lastChar = trimmedLine.slice(-1);
|
||||
|
||||
// Check the character before cursor
|
||||
const charBeforeCursor = value.substring(start - 1, start).trim();
|
||||
|
||||
let newIndent = indent;
|
||||
let insertAfter = '';
|
||||
|
||||
if (lastChar === '{' || lastChar === '[') {
|
||||
// Add extra indentation
|
||||
newIndent = indent + ' ';
|
||||
|
||||
// Check if we need to add closing bracket on new line
|
||||
const charAfterCursor = value.substring(start, start + 1).trim();
|
||||
if ((lastChar === '{' && charAfterCursor === '}') ||
|
||||
(lastChar === '[' && charAfterCursor === ']')) {
|
||||
insertAfter = '\n' + indent;
|
||||
}
|
||||
} else if (lastChar === ',' || lastChar === ':') {
|
||||
// Keep same indentation for continuation
|
||||
newIndent = indent;
|
||||
}
|
||||
|
||||
// Insert newline with proper indentation
|
||||
const insertion = '\n' + newIndent + insertAfter;
|
||||
const newValue = value.substring(0, start) + insertion + value.substring(end);
|
||||
|
||||
this.value = newValue;
|
||||
|
||||
// Set cursor position after the indentation
|
||||
const newCursorPos = start + 1 + newIndent.length;
|
||||
this.selectionStart = this.selectionEnd = newCursorPos;
|
||||
|
||||
// Trigger input event for any listeners
|
||||
this.dispatchEvent(new Event('input', { bubbles: true }));
|
||||
}
|
||||
|
||||
// Handle Tab key for indentation
|
||||
if (e.key === 'Tab') {
|
||||
e.preventDefault();
|
||||
const start = this.selectionStart;
|
||||
const end = this.selectionEnd;
|
||||
|
||||
if (e.shiftKey) {
|
||||
// Outdent: remove 2 spaces from start of line
|
||||
const lineStart = this.value.lastIndexOf('\n', start - 1) + 1;
|
||||
const lineContent = this.value.substring(lineStart, start);
|
||||
if (lineContent.startsWith(' ')) {
|
||||
@@ -531,7 +518,6 @@
|
||||
this.selectionStart = this.selectionEnd = Math.max(lineStart, start - 2);
|
||||
}
|
||||
} else {
|
||||
// Indent: insert 2 spaces
|
||||
this.value = this.value.substring(0, start) + ' ' + this.value.substring(end);
|
||||
this.selectionStart = this.selectionEnd = start + 2;
|
||||
}
|
||||
@@ -541,7 +527,6 @@
|
||||
});
|
||||
}
|
||||
|
||||
// Apply auto-indent to policy editor textareas
|
||||
setupJsonAutoIndent(document.getElementById('policyEditorDocument'));
|
||||
setupJsonAutoIndent(document.getElementById('createUserPolicies'));
|
||||
|
||||
|
||||
@@ -8,8 +8,6 @@ def client(app):

@pytest.fixture
def auth_headers(app):
    # Create a test user and return headers
    # Using the user defined in conftest.py
    return {
        "X-Access-Key": "test",
        "X-Secret-Key": "secret"
@@ -76,18 +74,15 @@ def test_multipart_upload_flow(client, auth_headers):
def test_abort_multipart_upload(client, auth_headers):
    client.put("/abort-bucket", headers=auth_headers)

    # Initiate
    resp = client.post("/abort-bucket/file.txt?uploads", headers=auth_headers)
    upload_id = fromstring(resp.data).find("UploadId").text

    # Abort
    resp = client.delete(f"/abort-bucket/file.txt?uploadId={upload_id}", headers=auth_headers)
    assert resp.status_code == 204

    # Try to upload part (should fail)
    resp = client.put(
        f"/abort-bucket/file.txt?partNumber=1&uploadId={upload_id}",
        headers=auth_headers,
        data=b"data"
    )
    assert resp.status_code == 404  # NoSuchUpload
    assert resp.status_code == 404

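The same abort flow, sketched from the client side with boto3 against an S3-compatible endpoint. The endpoint URL and credentials are placeholders for a local MyFSIO instance, and this assumes the server accepts standard SigV4-signed requests:

```python
# Client-side sketch of the abort flow exercised by the test above.
import boto3
from botocore.exceptions import ClientError

s3 = boto3.client(
    "s3",
    endpoint_url="http://127.0.0.1:5000",  # placeholder
    aws_access_key_id="test",
    aws_secret_access_key="secret",
)

s3.create_bucket(Bucket="abort-bucket")
upload = s3.create_multipart_upload(Bucket="abort-bucket", Key="file.txt")
s3.abort_multipart_upload(Bucket="abort-bucket", Key="file.txt", UploadId=upload["UploadId"])

try:
    s3.upload_part(Bucket="abort-bucket", Key="file.txt", PartNumber=1,
                   UploadId=upload["UploadId"], Body=b"data")
except ClientError as exc:
    # After the abort, the upload id no longer exists (NoSuchUpload / 404 above).
    print(exc.response["Error"]["Code"])
```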
@@ -22,11 +22,10 @@ class TestLocalKeyEncryption:
        key_path = tmp_path / "keys" / "master.key"
        provider = LocalKeyEncryption(key_path)

        # Access master key to trigger creation
        key = provider.master_key

        assert key_path.exists()
        assert len(key) == 32  # 256-bit key
        assert len(key) == 32

    def test_load_existing_master_key(self, tmp_path):
        """Test loading an existing master key."""
@@ -50,7 +49,6 @@ class TestLocalKeyEncryption:
|
||||
|
||||
plaintext = b"Hello, World! This is a test message."
|
||||
|
||||
# Encrypt
|
||||
result = provider.encrypt(plaintext)
|
||||
|
||||
assert result.ciphertext != plaintext
|
||||
@@ -58,7 +56,6 @@ class TestLocalKeyEncryption:
|
||||
assert len(result.nonce) == 12
|
||||
assert len(result.encrypted_data_key) > 0
|
||||
|
||||
# Decrypt
|
||||
decrypted = provider.decrypt(
|
||||
result.ciphertext,
|
||||
result.nonce,
|
||||
@@ -80,11 +77,8 @@ class TestLocalKeyEncryption:
|
||||
result1 = provider.encrypt(plaintext)
|
||||
result2 = provider.encrypt(plaintext)
|
||||
|
||||
# Different encrypted data keys
|
||||
assert result1.encrypted_data_key != result2.encrypted_data_key
|
||||
# Different nonces
|
||||
assert result1.nonce != result2.nonce
|
||||
# Different ciphertexts
|
||||
assert result1.ciphertext != result2.ciphertext
|
||||
|
||||
def test_generate_data_key(self, tmp_path):
|
||||
@@ -97,9 +91,8 @@ class TestLocalKeyEncryption:
|
||||
plaintext_key, encrypted_key = provider.generate_data_key()
|
||||
|
||||
assert len(plaintext_key) == 32
|
||||
assert len(encrypted_key) > 32 # nonce + ciphertext + tag
|
||||
assert len(encrypted_key) > 32
|
||||
|
||||
# Verify we can decrypt the key
|
||||
decrypted_key = provider._decrypt_data_key(encrypted_key)
|
||||
assert decrypted_key == plaintext_key
|
||||
|
||||
@@ -107,18 +100,15 @@ class TestLocalKeyEncryption:
|
||||
"""Test that decryption fails with wrong master key."""
|
||||
from app.encryption import LocalKeyEncryption, EncryptionError
|
||||
|
||||
# Create two providers with different keys
|
||||
key_path1 = tmp_path / "master1.key"
|
||||
key_path2 = tmp_path / "master2.key"
|
||||
|
||||
provider1 = LocalKeyEncryption(key_path1)
|
||||
provider2 = LocalKeyEncryption(key_path2)
|
||||
|
||||
# Encrypt with provider1
|
||||
plaintext = b"Secret message"
|
||||
result = provider1.encrypt(plaintext)
|
||||
|
||||
# Try to decrypt with provider2
|
||||
with pytest.raises(EncryptionError):
|
||||
provider2.decrypt(
|
||||
result.ciphertext,
|
||||
@@ -196,18 +186,15 @@ class TestStreamingEncryptor:
        provider = LocalKeyEncryption(key_path)
        encryptor = StreamingEncryptor(provider, chunk_size=1024)

        # Create test data
        original_data = b"A" * 5000 + b"B" * 5000 + b"C" * 5000  # 15KB
        original_data = b"A" * 5000 + b"B" * 5000 + b"C" * 5000
        stream = io.BytesIO(original_data)

        # Encrypt
        encrypted_stream, metadata = encryptor.encrypt_stream(stream)
        encrypted_data = encrypted_stream.read()

        assert encrypted_data != original_data
        assert metadata.algorithm == "AES256"

        # Decrypt
        encrypted_stream = io.BytesIO(encrypted_data)
        decrypted_stream = encryptor.decrypt_stream(encrypted_stream, metadata)
        decrypted_data = decrypted_stream.read()
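`StreamingEncryptor`'s internals are not part of this diff; the generic chunked-read pattern it presumably builds on (processing a stream in fixed-size pieces) looks like this, using the same 15 KB shape as the test above:

```python
# Generic chunked-read helper; only illustrative, not StreamingEncryptor's real code.
import io
from typing import Iterator

def iter_chunks(stream: io.BufferedIOBase, chunk_size: int = 1024) -> Iterator[bytes]:
    while True:
        chunk = stream.read(chunk_size)
        if not chunk:
            break
        yield chunk

data = b"A" * 5000 + b"B" * 5000 + b"C" * 5000
chunks = list(iter_chunks(io.BytesIO(data), chunk_size=1024))
assert len(chunks) == 15 and b"".join(chunks) == data  # 14 full chunks + 1 partial
```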
@@ -319,7 +306,6 @@ class TestClientEncryptionHelper:
|
||||
assert key_info["algorithm"] == "AES-256-GCM"
|
||||
assert "created_at" in key_info
|
||||
|
||||
# Verify key is 256 bits
|
||||
key = base64.b64decode(key_info["key"])
|
||||
assert len(key) == 32
|
||||
|
||||
@@ -425,7 +411,6 @@ class TestKMSManager:
|
||||
assert key is not None
|
||||
assert key.key_id == "test-key"
|
||||
|
||||
# Non-existent key
|
||||
assert kms.get_key("non-existent") is None
|
||||
|
||||
def test_enable_disable_key(self, tmp_path):
|
||||
@@ -439,14 +424,11 @@ class TestKMSManager:
|
||||
|
||||
kms.create_key("Test key", key_id="test-key")
|
||||
|
||||
# Initially enabled
|
||||
assert kms.get_key("test-key").enabled
|
||||
|
||||
# Disable
|
||||
kms.disable_key("test-key")
|
||||
assert not kms.get_key("test-key").enabled
|
||||
|
||||
# Enable
|
||||
kms.enable_key("test-key")
|
||||
assert kms.get_key("test-key").enabled
|
||||
|
||||
@@ -503,11 +485,9 @@ class TestKMSManager:

        ciphertext = kms.encrypt("test-key", plaintext, context)

        # Decrypt with same context succeeds
        decrypted, _ = kms.decrypt(ciphertext, context)
        assert decrypted == plaintext

        # Decrypt with different context fails
        with pytest.raises(EncryptionError):
            kms.decrypt(ciphertext, {"different": "context"})

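One common way to get the behaviour this test asserts is to bind a canonicalized encryption context as AES-GCM associated data, so decrypting under a different context fails authentication. Whether `KMSManager` does exactly this is not visible in the diff; a sketch of the idea:

```python
# Binding an encryption context as AES-GCM associated data (AAD); illustrative only.
import json, os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from cryptography.exceptions import InvalidTag

key = AESGCM.generate_key(bit_length=256)

def encrypt(plaintext: bytes, context: dict[str, str]) -> bytes:
    aad = json.dumps(context, sort_keys=True).encode()  # canonical form of the context
    nonce = os.urandom(12)
    return nonce + AESGCM(key).encrypt(nonce, plaintext, aad)

def decrypt(blob: bytes, context: dict[str, str]) -> bytes:
    aad = json.dumps(context, sort_keys=True).encode()
    return AESGCM(key).decrypt(blob[:12], blob[12:], aad)

token = encrypt(b"Secret data", {"purpose": "testing"})
assert decrypt(token, {"purpose": "testing"}) == b"Secret data"
try:
    decrypt(token, {"different": "context"})
except InvalidTag:
    pass  # wrong context -> authentication failure, analogous to EncryptionError above
```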
@@ -527,7 +507,6 @@ class TestKMSManager:
|
||||
assert len(plaintext_key) == 32
|
||||
assert len(encrypted_key) > 0
|
||||
|
||||
# Decrypt the encrypted key
|
||||
decrypted_key = kms.decrypt_data_key("test-key", encrypted_key)
|
||||
|
||||
assert decrypted_key == plaintext_key
|
||||
@@ -561,13 +540,8 @@ class TestKMSManager:
|
||||
|
||||
plaintext = b"Data to re-encrypt"
|
||||
|
||||
# Encrypt with key-1
|
||||
ciphertext1 = kms.encrypt("key-1", plaintext)
|
||||
|
||||
# Re-encrypt with key-2
|
||||
ciphertext2 = kms.re_encrypt(ciphertext1, "key-2")
|
||||
|
||||
# Decrypt with key-2
|
||||
decrypted, key_id = kms.decrypt(ciphertext2)
|
||||
|
||||
assert decrypted == plaintext
|
||||
@@ -587,7 +561,7 @@ class TestKMSManager:
|
||||
|
||||
assert len(random1) == 32
|
||||
assert len(random2) == 32
|
||||
assert random1 != random2 # Very unlikely to be equal
|
||||
assert random1 != random2
|
||||
|
||||
def test_keys_persist_across_instances(self, tmp_path):
|
||||
"""Test that keys persist and can be loaded by new instances."""
|
||||
@@ -596,14 +570,12 @@ class TestKMSManager:
|
||||
keys_path = tmp_path / "kms_keys.json"
|
||||
master_key_path = tmp_path / "master.key"
|
||||
|
||||
# Create key with first instance
|
||||
kms1 = KMSManager(keys_path, master_key_path)
|
||||
kms1.create_key("Test key", key_id="test-key")
|
||||
|
||||
plaintext = b"Persistent encryption test"
|
||||
ciphertext = kms1.encrypt("test-key", plaintext)
|
||||
|
||||
# Create new instance and verify key works
|
||||
kms2 = KMSManager(keys_path, master_key_path)
|
||||
|
||||
decrypted, key_id = kms2.decrypt(ciphertext)
|
||||
@@ -665,13 +637,11 @@ class TestEncryptedStorage:
|
||||
|
||||
encrypted_storage = EncryptedObjectStorage(storage, encryption)
|
||||
|
||||
# Create bucket with encryption config
|
||||
storage.create_bucket("test-bucket")
|
||||
storage.set_bucket_encryption("test-bucket", {
|
||||
"Rules": [{"SSEAlgorithm": "AES256"}]
|
||||
})
|
||||
|
||||
# Put object
|
||||
original_data = b"This is secret data that should be encrypted"
|
||||
stream = io.BytesIO(original_data)
|
||||
|
||||
@@ -683,12 +653,10 @@ class TestEncryptedStorage:
|
||||
|
||||
assert meta is not None
|
||||
|
||||
# Verify file on disk is encrypted (not plaintext)
|
||||
file_path = storage_root / "test-bucket" / "secret.txt"
|
||||
stored_data = file_path.read_bytes()
|
||||
assert stored_data != original_data
|
||||
|
||||
# Get object - should be decrypted
|
||||
data, metadata = encrypted_storage.get_object_data("test-bucket", "secret.txt")
|
||||
|
||||
assert data == original_data
|
||||
@@ -711,14 +679,12 @@ class TestEncryptedStorage:
|
||||
encrypted_storage = EncryptedObjectStorage(storage, encryption)
|
||||
|
||||
storage.create_bucket("test-bucket")
|
||||
# No encryption config
|
||||
|
||||
original_data = b"Unencrypted data"
|
||||
stream = io.BytesIO(original_data)
|
||||
|
||||
encrypted_storage.put_object("test-bucket", "plain.txt", stream)
|
||||
|
||||
# Verify file on disk is NOT encrypted
|
||||
file_path = storage_root / "test-bucket" / "plain.txt"
|
||||
stored_data = file_path.read_bytes()
|
||||
assert stored_data == original_data
|
||||
@@ -745,7 +711,6 @@ class TestEncryptedStorage:
|
||||
original_data = b"Explicitly encrypted data"
|
||||
stream = io.BytesIO(original_data)
|
||||
|
||||
# Request encryption explicitly
|
||||
encrypted_storage.put_object(
|
||||
"test-bucket",
|
||||
"encrypted.txt",
|
||||
@@ -753,11 +718,9 @@ class TestEncryptedStorage:
|
||||
server_side_encryption="AES256",
|
||||
)
|
||||
|
||||
# Verify file is encrypted
|
||||
file_path = storage_root / "test-bucket" / "encrypted.txt"
|
||||
stored_data = file_path.read_bytes()
|
||||
assert stored_data != original_data
|
||||
|
||||
# Get object - should be decrypted
|
||||
data, _ = encrypted_storage.get_object_data("test-bucket", "encrypted.txt")
|
||||
assert data == original_data
|
||||
|
||||
@@ -24,7 +24,6 @@ def kms_client(tmp_path):
|
||||
"KMS_KEYS_PATH": str(tmp_path / "kms_keys.json"),
|
||||
})
|
||||
|
||||
# Create default IAM config with admin user
|
||||
iam_config = {
|
||||
"users": [
|
||||
{
|
||||
@@ -83,7 +82,6 @@ class TestKMSKeyManagement:
|
||||
|
||||
def test_list_keys(self, kms_client, auth_headers):
|
||||
"""Test listing KMS keys."""
|
||||
# Create some keys
|
||||
kms_client.post("/kms/keys", json={"Description": "Key 1"}, headers=auth_headers)
|
||||
kms_client.post("/kms/keys", json={"Description": "Key 2"}, headers=auth_headers)
|
||||
|
||||
@@ -97,7 +95,6 @@ class TestKMSKeyManagement:
|
||||
|
||||
def test_get_key(self, kms_client, auth_headers):
|
||||
"""Test getting a specific key."""
|
||||
# Create a key
|
||||
create_response = kms_client.post(
|
||||
"/kms/keys",
|
||||
json={"KeyId": "test-key", "Description": "Test key"},
|
||||
@@ -120,36 +117,28 @@ class TestKMSKeyManagement:
|
||||
|
||||
def test_delete_key(self, kms_client, auth_headers):
|
||||
"""Test deleting a key."""
|
||||
# Create a key
|
||||
kms_client.post("/kms/keys", json={"KeyId": "test-key"}, headers=auth_headers)
|
||||
|
||||
# Delete it
|
||||
response = kms_client.delete("/kms/keys/test-key", headers=auth_headers)
|
||||
|
||||
assert response.status_code == 204
|
||||
|
||||
# Verify it's gone
|
||||
get_response = kms_client.get("/kms/keys/test-key", headers=auth_headers)
|
||||
assert get_response.status_code == 404
|
||||
|
||||
def test_enable_disable_key(self, kms_client, auth_headers):
|
||||
"""Test enabling and disabling a key."""
|
||||
# Create a key
|
||||
kms_client.post("/kms/keys", json={"KeyId": "test-key"}, headers=auth_headers)
|
||||
|
||||
# Disable
|
||||
response = kms_client.post("/kms/keys/test-key/disable", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Verify disabled
|
||||
get_response = kms_client.get("/kms/keys/test-key", headers=auth_headers)
|
||||
assert get_response.get_json()["KeyMetadata"]["Enabled"] is False
|
||||
|
||||
# Enable
|
||||
response = kms_client.post("/kms/keys/test-key/enable", headers=auth_headers)
|
||||
assert response.status_code == 200
|
||||
|
||||
# Verify enabled
|
||||
get_response = kms_client.get("/kms/keys/test-key", headers=auth_headers)
|
||||
assert get_response.get_json()["KeyMetadata"]["Enabled"] is True
|
||||
|
||||
@@ -159,13 +148,11 @@ class TestKMSEncryption:
|
||||
|
||||
def test_encrypt_decrypt(self, kms_client, auth_headers):
|
||||
"""Test encrypting and decrypting data."""
|
||||
# Create a key
|
||||
kms_client.post("/kms/keys", json={"KeyId": "test-key"}, headers=auth_headers)
|
||||
|
||||
plaintext = b"Hello, World!"
|
||||
plaintext_b64 = base64.b64encode(plaintext).decode()
|
||||
|
||||
# Encrypt
|
||||
encrypt_response = kms_client.post(
|
||||
"/kms/encrypt",
|
||||
json={"KeyId": "test-key", "Plaintext": plaintext_b64},
|
||||
@@ -178,7 +165,6 @@ class TestKMSEncryption:
|
||||
assert "CiphertextBlob" in encrypt_data
|
||||
assert encrypt_data["KeyId"] == "test-key"
|
||||
|
||||
# Decrypt
|
||||
decrypt_response = kms_client.post(
|
||||
"/kms/decrypt",
|
||||
json={"CiphertextBlob": encrypt_data["CiphertextBlob"]},
|
||||
@@ -199,7 +185,6 @@ class TestKMSEncryption:
|
||||
plaintext_b64 = base64.b64encode(plaintext).decode()
|
||||
context = {"purpose": "testing", "bucket": "my-bucket"}
|
||||
|
||||
# Encrypt with context
|
||||
encrypt_response = kms_client.post(
|
||||
"/kms/encrypt",
|
||||
json={
|
||||
@@ -213,7 +198,6 @@ class TestKMSEncryption:
|
||||
assert encrypt_response.status_code == 200
|
||||
ciphertext = encrypt_response.get_json()["CiphertextBlob"]
|
||||
|
||||
# Decrypt with same context succeeds
|
||||
decrypt_response = kms_client.post(
|
||||
"/kms/decrypt",
|
||||
json={
|
||||
@@ -225,7 +209,6 @@ class TestKMSEncryption:
|
||||
|
||||
assert decrypt_response.status_code == 200
|
||||
|
||||
# Decrypt with wrong context fails
|
||||
wrong_context_response = kms_client.post(
|
||||
"/kms/decrypt",
|
||||
json={
|
||||
@@ -325,11 +308,9 @@ class TestKMSReEncrypt:
|
||||
|
||||
def test_re_encrypt(self, kms_client, auth_headers):
|
||||
"""Test re-encrypting data with a different key."""
|
||||
# Create two keys
|
||||
kms_client.post("/kms/keys", json={"KeyId": "key-1"}, headers=auth_headers)
|
||||
kms_client.post("/kms/keys", json={"KeyId": "key-2"}, headers=auth_headers)
|
||||
|
||||
# Encrypt with key-1
|
||||
plaintext = b"Data to re-encrypt"
|
||||
encrypt_response = kms_client.post(
|
||||
"/kms/encrypt",
|
||||
@@ -342,7 +323,6 @@ class TestKMSReEncrypt:
|
||||
|
||||
ciphertext = encrypt_response.get_json()["CiphertextBlob"]
|
||||
|
||||
# Re-encrypt with key-2
|
||||
re_encrypt_response = kms_client.post(
|
||||
"/kms/re-encrypt",
|
||||
json={
|
||||
@@ -358,7 +338,6 @@ class TestKMSReEncrypt:
|
||||
assert data["SourceKeyId"] == "key-1"
|
||||
assert data["KeyId"] == "key-2"
|
||||
|
||||
# Verify new ciphertext can be decrypted
|
||||
decrypt_response = kms_client.post(
|
||||
"/kms/decrypt",
|
||||
json={"CiphertextBlob": data["CiphertextBlob"]},
|
||||
@@ -398,7 +377,7 @@ class TestKMSRandom:
|
||||
data = response.get_json()
|
||||
|
||||
random_bytes = base64.b64decode(data["Plaintext"])
|
||||
assert len(random_bytes) == 32 # Default is 32 bytes
|
||||
assert len(random_bytes) == 32
|
||||
|
||||
|
||||
class TestClientSideEncryption:
|
||||
@@ -422,11 +401,9 @@ class TestClientSideEncryption:
|
||||
|
||||
def test_client_encrypt_decrypt(self, kms_client, auth_headers):
|
||||
"""Test client-side encryption and decryption."""
|
||||
# Generate a key
|
||||
key_response = kms_client.post("/kms/client/generate-key", headers=auth_headers)
|
||||
key = key_response.get_json()["key"]
|
||||
|
||||
# Encrypt
|
||||
plaintext = b"Client-side encrypted data"
|
||||
encrypt_response = kms_client.post(
|
||||
"/kms/client/encrypt",
|
||||
@@ -440,7 +417,6 @@ class TestClientSideEncryption:
|
||||
assert encrypt_response.status_code == 200
|
||||
encrypted = encrypt_response.get_json()
|
||||
|
||||
# Decrypt
|
||||
decrypt_response = kms_client.post(
|
||||
"/kms/client/decrypt",
|
||||
json={
|
||||
@@ -461,7 +437,6 @@ class TestEncryptionMaterials:
|
||||
|
||||
def test_get_encryption_materials(self, kms_client, auth_headers):
|
||||
"""Test getting encryption materials for client-side S3 encryption."""
|
||||
# Create a key
|
||||
kms_client.post("/kms/keys", json={"KeyId": "s3-key"}, headers=auth_headers)
|
||||
|
||||
response = kms_client.post(
|
||||
@@ -478,7 +453,6 @@ class TestEncryptionMaterials:
|
||||
assert data["KeyId"] == "s3-key"
|
||||
assert data["Algorithm"] == "AES-256-GCM"
|
||||
|
||||
# Verify key is 256 bits
|
||||
key = base64.b64decode(data["PlaintextKey"])
|
||||
assert len(key) == 32
|
||||
|
||||
@@ -490,7 +464,6 @@ class TestKMSAuthentication:
|
||||
"""Test that unauthenticated requests are rejected."""
|
||||
response = kms_client.get("/kms/keys")
|
||||
|
||||
# Should fail with 403 (no credentials)
|
||||
assert response.status_code == 403
|
||||
|
||||
def test_invalid_credentials_fail(self, kms_client):
|
||||
|
||||
@@ -4,7 +4,6 @@ import pytest
|
||||
from xml.etree.ElementTree import fromstring
|
||||
|
||||
|
||||
# Helper to create file-like stream
|
||||
def _stream(data: bytes):
|
||||
return io.BytesIO(data)
|
||||
|
||||
@@ -19,13 +18,11 @@ class TestListObjectsV2:
|
||||
"""Tests for ListObjectsV2 endpoint."""
|
||||
|
||||
def test_list_objects_v2_basic(self, client, signer, storage):
|
||||
# Create bucket and objects
|
||||
storage.create_bucket("v2-test")
|
||||
storage.put_object("v2-test", "file1.txt", _stream(b"hello"))
|
||||
storage.put_object("v2-test", "file2.txt", _stream(b"world"))
|
||||
storage.put_object("v2-test", "folder/file3.txt", _stream(b"nested"))
|
||||
|
||||
# ListObjectsV2 request
|
||||
headers = signer("GET", "/v2-test?list-type=2")
|
||||
resp = client.get("/v2-test", query_string={"list-type": "2"}, headers=headers)
|
||||
assert resp.status_code == 200
|
||||
@@ -46,7 +43,6 @@ class TestListObjectsV2:
|
||||
storage.put_object("prefix-test", "photos/2024/mar.jpg", _stream(b"mar"))
|
||||
storage.put_object("prefix-test", "docs/readme.md", _stream(b"readme"))
|
||||
|
||||
# List with prefix and delimiter
|
||||
headers = signer("GET", "/prefix-test?list-type=2&prefix=photos/&delimiter=/")
|
||||
resp = client.get(
|
||||
"/prefix-test",
|
||||
@@ -56,11 +52,10 @@ class TestListObjectsV2:
|
||||
assert resp.status_code == 200
|
||||
|
||||
root = fromstring(resp.data)
|
||||
# Should show common prefixes for 2023/ and 2024/
|
||||
prefixes = [el.find("Prefix").text for el in root.findall("CommonPrefixes")]
|
||||
assert "photos/2023/" in prefixes
|
||||
assert "photos/2024/" in prefixes
|
||||
assert len(root.findall("Contents")) == 0 # No direct files under photos/
|
||||
assert len(root.findall("Contents")) == 0
|
||||
|
||||
|
||||
class TestPutBucketVersioning:
|
||||
@@ -78,7 +73,6 @@ class TestPutBucketVersioning:
|
||||
resp = client.put("/version-test", query_string={"versioning": ""}, data=payload, headers=headers)
|
||||
assert resp.status_code == 200
|
||||
|
||||
# Verify via GET
|
||||
headers = signer("GET", "/version-test?versioning")
|
||||
resp = client.get("/version-test", query_string={"versioning": ""}, headers=headers)
|
||||
root = fromstring(resp.data)
|
||||
@@ -110,15 +104,13 @@ class TestDeleteBucketTagging:
|
||||
storage.create_bucket("tag-delete-test")
|
||||
storage.set_bucket_tags("tag-delete-test", [{"Key": "env", "Value": "test"}])
|
||||
|
||||
# Delete tags
|
||||
headers = signer("DELETE", "/tag-delete-test?tagging")
|
||||
resp = client.delete("/tag-delete-test", query_string={"tagging": ""}, headers=headers)
|
||||
assert resp.status_code == 204
|
||||
|
||||
# Verify tags are gone
|
||||
headers = signer("GET", "/tag-delete-test?tagging")
|
||||
resp = client.get("/tag-delete-test", query_string={"tagging": ""}, headers=headers)
|
||||
assert resp.status_code == 404 # NoSuchTagSet
|
||||
assert resp.status_code == 404
|
||||
|
||||
|
||||
class TestDeleteBucketCors:
|
||||
@@ -130,15 +122,13 @@ class TestDeleteBucketCors:
|
||||
{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}
|
||||
])
|
||||
|
||||
# Delete CORS
|
||||
headers = signer("DELETE", "/cors-delete-test?cors")
|
||||
resp = client.delete("/cors-delete-test", query_string={"cors": ""}, headers=headers)
|
||||
assert resp.status_code == 204
|
||||
|
||||
# Verify CORS is gone
|
||||
headers = signer("GET", "/cors-delete-test?cors")
|
||||
resp = client.get("/cors-delete-test", query_string={"cors": ""}, headers=headers)
|
||||
assert resp.status_code == 404 # NoSuchCORSConfiguration
|
||||
assert resp.status_code == 404
|
||||
|
||||
|
||||
class TestGetBucketLocation:
|
||||
@@ -173,7 +163,6 @@ class TestBucketAcl:
|
||||
def test_put_bucket_acl(self, client, signer, storage):
|
||||
storage.create_bucket("acl-put-test")
|
||||
|
||||
# PUT with canned ACL header
|
||||
headers = signer("PUT", "/acl-put-test?acl")
|
||||
headers["x-amz-acl"] = "public-read"
|
||||
resp = client.put("/acl-put-test", query_string={"acl": ""}, headers=headers)
|
||||
@@ -188,7 +177,6 @@ class TestCopyObject:
|
||||
storage.create_bucket("copy-dst")
|
||||
storage.put_object("copy-src", "original.txt", _stream(b"original content"))
|
||||
|
||||
# Copy object
|
||||
headers = signer("PUT", "/copy-dst/copied.txt")
|
||||
headers["x-amz-copy-source"] = "/copy-src/original.txt"
|
||||
resp = client.put("/copy-dst/copied.txt", headers=headers)
|
||||
@@ -199,7 +187,6 @@ class TestCopyObject:
|
||||
assert root.find("ETag") is not None
|
||||
assert root.find("LastModified") is not None
|
||||
|
||||
# Verify copy exists
|
||||
path = storage.get_object_path("copy-dst", "copied.txt")
|
||||
assert path.read_bytes() == b"original content"
|
||||
|
||||
@@ -208,7 +195,6 @@ class TestCopyObject:
|
||||
storage.create_bucket("meta-dst")
|
||||
storage.put_object("meta-src", "source.txt", _stream(b"data"), metadata={"old": "value"})
|
||||
|
||||
# Copy with REPLACE directive
|
||||
headers = signer("PUT", "/meta-dst/target.txt")
|
||||
headers["x-amz-copy-source"] = "/meta-src/source.txt"
|
||||
headers["x-amz-metadata-directive"] = "REPLACE"
|
||||
@@ -216,7 +202,6 @@ class TestCopyObject:
|
||||
resp = client.put("/meta-dst/target.txt", headers=headers)
|
||||
assert resp.status_code == 200
|
||||
|
||||
# Verify new metadata (note: header keys are Title-Cased)
|
||||
meta = storage.get_object_metadata("meta-dst", "target.txt")
|
||||
assert "New" in meta or "new" in meta
|
||||
assert "old" not in meta and "Old" not in meta
|
||||
@@ -229,7 +214,6 @@ class TestObjectTagging:
|
||||
storage.create_bucket("obj-tag-test")
|
||||
storage.put_object("obj-tag-test", "tagged.txt", _stream(b"content"))
|
||||
|
||||
# PUT tags
|
||||
payload = b"""<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Tagging>
|
||||
<TagSet>
|
||||
@@ -247,7 +231,6 @@ class TestObjectTagging:
|
||||
)
|
||||
assert resp.status_code == 204
|
||||
|
||||
# GET tags
|
||||
headers = signer("GET", "/obj-tag-test/tagged.txt?tagging")
|
||||
resp = client.get("/obj-tag-test/tagged.txt", query_string={"tagging": ""}, headers=headers)
|
||||
assert resp.status_code == 200
|
||||
@@ -257,12 +240,10 @@ class TestObjectTagging:
|
||||
assert tags["project"] == "demo"
|
||||
assert tags["env"] == "test"
|
||||
|
||||
# DELETE tags
|
||||
headers = signer("DELETE", "/obj-tag-test/tagged.txt?tagging")
|
||||
resp = client.delete("/obj-tag-test/tagged.txt", query_string={"tagging": ""}, headers=headers)
|
||||
assert resp.status_code == 204
|
||||
|
||||
# Verify empty
|
||||
headers = signer("GET", "/obj-tag-test/tagged.txt?tagging")
|
||||
resp = client.get("/obj-tag-test/tagged.txt", query_string={"tagging": ""}, headers=headers)
|
||||
root = fromstring(resp.data)
|
||||
@@ -272,7 +253,6 @@ class TestObjectTagging:
|
||||
storage.create_bucket("tag-limit")
|
||||
storage.put_object("tag-limit", "file.txt", _stream(b"x"))
|
||||
|
||||
# Try to set 11 tags (limit is 10)
|
||||
tags = "".join(f"<Tag><Key>key{i}</Key><Value>val{i}</Value></Tag>" for i in range(11))
|
||||
payload = f"<Tagging><TagSet>{tags}</TagSet></Tagging>".encode()
|
||||
|
||||
|
||||
@@ -67,7 +67,6 @@ class TestUIBucketEncryption:
|
||||
app = _make_encryption_app(tmp_path)
|
||||
client = app.test_client()
|
||||
|
||||
# Login first
|
||||
client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
|
||||
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
@@ -82,14 +81,11 @@ class TestUIBucketEncryption:
|
||||
app = _make_encryption_app(tmp_path)
|
||||
client = app.test_client()
|
||||
|
||||
# Login
|
||||
client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
|
||||
|
||||
# Get CSRF token
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
csrf_token = get_csrf_token(response)
|
||||
|
||||
# Enable AES-256 encryption
|
||||
response = client.post(
|
||||
"/ui/buckets/test-bucket/encryption",
|
||||
data={
|
||||
@@ -102,7 +98,6 @@ class TestUIBucketEncryption:
|
||||
|
||||
assert response.status_code == 200
|
||||
html = response.data.decode("utf-8")
|
||||
# Should see success message or enabled state
|
||||
assert "AES-256" in html or "encryption enabled" in html.lower()
|
||||
|
||||
def test_enable_kms_encryption(self, tmp_path):
|
||||
@@ -110,7 +105,6 @@ class TestUIBucketEncryption:
|
||||
app = _make_encryption_app(tmp_path, kms_enabled=True)
|
||||
client = app.test_client()
|
||||
|
||||
# Create a KMS key first
|
||||
with app.app_context():
|
||||
kms = app.extensions.get("kms")
|
||||
if kms:
|
||||
@@ -119,14 +113,11 @@ class TestUIBucketEncryption:
|
||||
else:
|
||||
pytest.skip("KMS not available")
|
||||
|
||||
# Login
|
||||
client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
|
||||
|
||||
# Get CSRF token
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
csrf_token = get_csrf_token(response)
|
||||
|
||||
# Enable KMS encryption
|
||||
response = client.post(
|
||||
"/ui/buckets/test-bucket/encryption",
|
||||
data={
|
||||
@@ -147,10 +138,8 @@ class TestUIBucketEncryption:
|
||||
app = _make_encryption_app(tmp_path)
|
||||
client = app.test_client()
|
||||
|
||||
# Login
|
||||
client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
|
||||
|
||||
# First enable encryption
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
csrf_token = get_csrf_token(response)
|
||||
|
||||
@@ -163,7 +152,6 @@ class TestUIBucketEncryption:
|
||||
},
|
||||
)
|
||||
|
||||
# Now disable it
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
csrf_token = get_csrf_token(response)
|
||||
|
||||
@@ -185,7 +173,6 @@ class TestUIBucketEncryption:
|
||||
app = _make_encryption_app(tmp_path)
|
||||
client = app.test_client()
|
||||
|
||||
# Login
|
||||
client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
|
||||
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
@@ -210,10 +197,8 @@ class TestUIBucketEncryption:
|
||||
app = _make_encryption_app(tmp_path)
|
||||
client = app.test_client()
|
||||
|
||||
# Login
|
||||
client.post("/ui/login", data={"access_key": "test", "secret_key": "secret"}, follow_redirects=True)
|
||||
|
||||
# Enable encryption
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
csrf_token = get_csrf_token(response)
|
||||
|
||||
@@ -226,7 +211,6 @@ class TestUIBucketEncryption:
|
||||
},
|
||||
)
|
||||
|
||||
# Verify it's stored
|
||||
with app.app_context():
|
||||
storage = app.extensions["object_storage"]
|
||||
config = storage.get_bucket_encryption("test-bucket")
|
||||
@@ -244,10 +228,8 @@ class TestUIEncryptionWithoutPermission:
|
||||
app = _make_encryption_app(tmp_path)
|
||||
client = app.test_client()
|
||||
|
||||
# Login as readonly user
|
||||
client.post("/ui/login", data={"access_key": "readonly", "secret_key": "secret"}, follow_redirects=True)
|
||||
|
||||
# This should fail or be rejected
|
||||
response = client.get("/ui/buckets/test-bucket?tab=properties")
|
||||
csrf_token = get_csrf_token(response)
|
||||
|
||||
@@ -261,8 +243,6 @@ class TestUIEncryptionWithoutPermission:
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
# Should either redirect with error or show permission denied
|
||||
assert response.status_code == 200
|
||||
html = response.data.decode("utf-8")
|
||||
# Should contain error about permission denied
|
||||
assert "Access denied" in html or "permission" in html.lower() or "not authorized" in html.lower()
|
||||
|
||||