Update static website to include proper error handling; add missing features

This commit is contained in:
2026-04-21 20:54:00 +08:00
parent 501d563df2
commit c77c592832
14 changed files with 662 additions and 116 deletions

View File

@@ -97,50 +97,26 @@ impl ServerConfig {
}
};
let encryption_enabled = std::env::var("ENCRYPTION_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let encryption_enabled = parse_bool_env("ENCRYPTION_ENABLED", false);
let kms_enabled = std::env::var("KMS_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let kms_enabled = parse_bool_env("KMS_ENABLED", false);
let gc_enabled = std::env::var("GC_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let gc_enabled = parse_bool_env("GC_ENABLED", false);
let integrity_enabled = std::env::var("INTEGRITY_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let integrity_enabled = parse_bool_env("INTEGRITY_ENABLED", false);
let metrics_enabled = std::env::var("OPERATION_METRICS_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let metrics_enabled = parse_bool_env("OPERATION_METRICS_ENABLED", false);
let metrics_history_enabled = std::env::var("METRICS_HISTORY_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let metrics_history_enabled = parse_bool_env("METRICS_HISTORY_ENABLED", false);
let metrics_interval_minutes = parse_u64_env("OPERATION_METRICS_INTERVAL_MINUTES", 5);
let metrics_retention_hours = parse_u64_env("OPERATION_METRICS_RETENTION_HOURS", 24);
let metrics_history_interval_minutes = parse_u64_env("METRICS_HISTORY_INTERVAL_MINUTES", 5);
let metrics_history_retention_hours = parse_u64_env("METRICS_HISTORY_RETENTION_HOURS", 24);
let lifecycle_enabled = std::env::var("LIFECYCLE_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let lifecycle_enabled = parse_bool_env("LIFECYCLE_ENABLED", false);
let website_hosting_enabled = std::env::var("WEBSITE_HOSTING_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let website_hosting_enabled = parse_bool_env("WEBSITE_HOSTING_ENABLED", false);
let replication_connect_timeout_secs =
parse_u64_env("REPLICATION_CONNECT_TIMEOUT_SECONDS", 5);
@@ -151,10 +127,7 @@ impl ServerConfig {
let replication_max_failures_per_bucket =
parse_u64_env("REPLICATION_MAX_FAILURES_PER_BUCKET", 50) as usize;
let site_sync_enabled = std::env::var("SITE_SYNC_ENABLED")
.unwrap_or_else(|_| "false".to_string())
.to_lowercase()
== "true";
let site_sync_enabled = parse_bool_env("SITE_SYNC_ENABLED", false);
let site_sync_interval_secs = parse_u64_env("SITE_SYNC_INTERVAL_SECONDS", 60);
let site_sync_batch_size = parse_u64_env("SITE_SYNC_BATCH_SIZE", 100) as usize;
let site_sync_connect_timeout_secs = parse_u64_env("SITE_SYNC_CONNECT_TIMEOUT_SECONDS", 10);
@@ -166,10 +139,7 @@ impl ServerConfig {
.and_then(|s| s.parse().ok())
.unwrap_or(1.0);
let ui_enabled = std::env::var("UI_ENABLED")
.unwrap_or_else(|_| "true".to_string())
.to_lowercase()
== "true";
let ui_enabled = parse_bool_env("UI_ENABLED", true);
let templates_dir = std::env::var("TEMPLATES_DIR")
.map(PathBuf::from)
.unwrap_or_else(|_| default_templates_dir());
@@ -243,3 +213,15 @@ fn parse_u64_env(key: &str, default: u64) -> u64 {
.and_then(|s| s.parse().ok())
.unwrap_or(default)
}
/// Reads the environment variable `key` and interprets it as a boolean flag.
///
/// The values `"1"`, `"true"`, `"yes"` and `"on"` — compared case-insensitively
/// after trimming surrounding whitespace — count as `true`; any other present
/// value counts as `false`. When the variable is unset (or not valid UTF-8),
/// `default` is returned.
fn parse_bool_env(key: &str, default: bool) -> bool {
    match std::env::var(key) {
        Ok(raw) => {
            let normalized = raw.trim().to_ascii_lowercase();
            matches!(normalized.as_str(), "1" | "true" | "yes" | "on")
        }
        Err(_) => default,
    }
}

View File

@@ -46,6 +46,17 @@ fn require_admin(principal: &Principal) -> Option<Response> {
None
}
/// Checks that `principal` is authorized to perform the IAM `action`.
///
/// Returns `None` when access is granted, or `Some(Response)` carrying an
/// `AccessDenied` JSON error (HTTP 403) that the caller should return as-is.
fn require_iam_action(state: &AppState, principal: &Principal, action: &str) -> Option<Response> {
    let allowed = state.iam.authorize(principal, None, action, None);
    if allowed {
        return None;
    }
    let message = format!("Requires {} permission", action);
    Some(json_error("AccessDenied", &message, StatusCode::FORBIDDEN))
}
async fn read_json_body(body: Body) -> Option<serde_json::Value> {
let bytes = http_body_util::BodyExt::collect(body)
.await
@@ -926,7 +937,7 @@ pub async fn iam_list_users(
State(state): State<AppState>,
Extension(principal): Extension<Principal>,
) -> Response {
if let Some(err) = require_admin(&principal) {
if let Some(err) = require_iam_action(&state, &principal, "iam:list_users") {
return err;
}
let users = state.iam.list_users().await;
@@ -938,7 +949,7 @@ pub async fn iam_get_user(
Extension(principal): Extension<Principal>,
Path(identifier): Path<String>,
) -> Response {
if let Some(err) = require_admin(&principal) {
if let Some(err) = require_iam_action(&state, &principal, "iam:get_user") {
return err;
}
match state.iam.get_user(&identifier).await {
@@ -956,7 +967,7 @@ pub async fn iam_get_user_policies(
Extension(principal): Extension<Principal>,
Path(identifier): Path<String>,
) -> Response {
if let Some(err) = require_admin(&principal) {
if let Some(err) = require_iam_action(&state, &principal, "iam:get_policy") {
return err;
}
match state.iam.get_user_policies(&identifier) {
@@ -974,7 +985,7 @@ pub async fn iam_create_access_key(
Extension(principal): Extension<Principal>,
Path(identifier): Path<String>,
) -> Response {
if let Some(err) = require_admin(&principal) {
if let Some(err) = require_iam_action(&state, &principal, "iam:create_key") {
return err;
}
match state.iam.create_access_key(&identifier) {
@@ -988,7 +999,7 @@ pub async fn iam_delete_access_key(
Extension(principal): Extension<Principal>,
Path((_identifier, access_key)): Path<(String, String)>,
) -> Response {
if let Some(err) = require_admin(&principal) {
if let Some(err) = require_iam_action(&state, &principal, "iam:delete_key") {
return err;
}
match state.iam.delete_access_key(&access_key) {
@@ -1002,7 +1013,7 @@ pub async fn iam_disable_user(
Extension(principal): Extension<Principal>,
Path(identifier): Path<String>,
) -> Response {
if let Some(err) = require_admin(&principal) {
if let Some(err) = require_iam_action(&state, &principal, "iam:disable_user") {
return err;
}
match state.iam.set_user_enabled(&identifier, false).await {
@@ -1016,7 +1027,7 @@ pub async fn iam_enable_user(
Extension(principal): Extension<Principal>,
Path(identifier): Path<String>,
) -> Response {
if let Some(err) = require_admin(&principal) {
if let Some(err) = require_iam_action(&state, &principal, "iam:disable_user") {
return err;
}
match state.iam.set_user_enabled(&identifier, true).await {

View File

@@ -1156,7 +1156,11 @@ pub async fn put_object_acl(
.unwrap_or_else(|| "myfsio".to_string());
let acl = create_canned_acl(canned_acl, &owner);
store_object_acl(&mut metadata, &acl);
match state.storage.put_object_metadata(bucket, key, &metadata).await {
match state
.storage
.put_object_metadata(bucket, key, &metadata)
.await
{
Ok(()) => StatusCode::OK.into_response(),
Err(err) => storage_err(err),
}
@@ -1281,7 +1285,11 @@ pub async fn put_object_retention(
) {
return custom_xml_error(StatusCode::BAD_REQUEST, "InvalidArgument", &message);
}
match state.storage.put_object_metadata(bucket, key, &metadata).await {
match state
.storage
.put_object_metadata(bucket, key, &metadata)
.await
{
Ok(()) => StatusCode::OK.into_response(),
Err(err) => storage_err(err),
}
@@ -1294,7 +1302,11 @@ pub async fn get_object_legal_hold(state: &AppState, bucket: &str, key: &str) ->
Ok(metadata) => metadata,
Err(err) => return storage_err(err),
};
let status = if get_legal_hold(&metadata) { "ON" } else { "OFF" };
let status = if get_legal_hold(&metadata) {
"ON"
} else {
"OFF"
};
let xml = format!(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<LegalHold xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
@@ -1356,7 +1368,11 @@ pub async fn put_object_legal_hold(
Err(err) => return storage_err(err),
};
set_legal_hold(&mut metadata, enabled);
match state.storage.put_object_metadata(bucket, key, &metadata).await {
match state
.storage
.put_object_metadata(bucket, key, &metadata)
.await
{
Ok(()) => StatusCode::OK.into_response(),
Err(err) => storage_err(err),
}

View File

@@ -47,6 +47,16 @@ fn storage_err_response(err: myfsio_storage::error::StorageError) -> Response {
s3_error_response(S3Error::from(err))
}
/// Fires off a background replication task for `bucket`/`key`.
///
/// The work is detached onto the tokio runtime so the calling request
/// handler never waits on replication; `action` is the operation label
/// (e.g. "write" or "delete") handed to the replication manager.
fn trigger_replication(state: &AppState, bucket: &str, key: &str, action: &str) {
    let manager = state.replication.clone();
    // Owned copies so the spawned task is 'static.
    let (bucket, key, action) = (bucket.to_owned(), key.to_owned(), action.to_owned());
    tokio::spawn(async move {
        manager.trigger(bucket, key, action).await;
    });
}
async fn ensure_object_lock_allows_write(
state: &AppState,
bucket: &str,
@@ -632,7 +642,8 @@ pub async fn put_object(
return copy_object_handler(&state, copy_source, &bucket, &key, &headers).await;
}
if let Err(response) = ensure_object_lock_allows_write(&state, &bucket, &key, Some(&headers)).await
if let Err(response) =
ensure_object_lock_allows_write(&state, &bucket, &key, Some(&headers)).await
{
return response;
}
@@ -729,6 +740,7 @@ pub async fn put_object(
"",
"Put",
);
trigger_replication(&state, &bucket, &key, "write");
return (StatusCode::OK, resp_headers).into_response();
}
Err(e) => {
@@ -757,6 +769,7 @@ pub async fn put_object(
"",
"Put",
);
trigger_replication(&state, &bucket, &key, "write");
(StatusCode::OK, resp_headers).into_response()
}
Err(e) => storage_err_response(e),
@@ -965,7 +978,8 @@ pub async fn delete_object(
return abort_multipart_handler(&state, &bucket, upload_id).await;
}
if let Err(response) = ensure_object_lock_allows_write(&state, &bucket, &key, Some(&headers)).await
if let Err(response) =
ensure_object_lock_allows_write(&state, &bucket, &key, Some(&headers)).await
{
return response;
}
@@ -973,6 +987,7 @@ pub async fn delete_object(
match state.storage.delete_object(&bucket, &key).await {
Ok(()) => {
notifications::emit_object_removed(&state, &bucket, &key, "", "", "", "Delete");
trigger_replication(&state, &bucket, &key, "delete");
StatusCode::NO_CONTENT.into_response()
}
Err(e) => storage_err_response(e),
@@ -1196,6 +1211,7 @@ async fn complete_multipart_handler(
etag,
&format!("/{}/{}", bucket, key),
);
trigger_replication(state, bucket, key, "write");
(StatusCode::OK, [("content-type", "application/xml")], xml).into_response()
}
Err(e) => storage_err_response(e),
@@ -1289,7 +1305,9 @@ async fn copy_object_handler(
dst_key: &str,
headers: &HeaderMap,
) -> Response {
if let Err(response) = ensure_object_lock_allows_write(state, dst_bucket, dst_key, Some(headers)).await {
if let Err(response) =
ensure_object_lock_allows_write(state, dst_bucket, dst_key, Some(headers)).await
{
return response;
}
@@ -1321,6 +1339,7 @@ async fn copy_object_handler(
let etag = meta.etag.as_deref().unwrap_or("");
let last_modified = myfsio_xml::response::format_s3_datetime(&meta.last_modified);
let xml = myfsio_xml::response::copy_object_result_xml(etag, &last_modified);
trigger_replication(state, dst_bucket, dst_key, "write");
(StatusCode::OK, [("content-type", "application/xml")], xml).into_response()
}
Err(e) => storage_err_response(e),
@@ -1371,6 +1390,7 @@ async fn delete_objects_handler(state: &AppState, bucket: &str, body: Body) -> R
match state.storage.delete_object(bucket, &obj.key).await {
Ok(()) => {
notifications::emit_object_removed(state, bucket, &obj.key, "", "", "", "Delete");
trigger_replication(state, bucket, &obj.key, "delete");
deleted.push((obj.key.clone(), obj.version_id.clone()))
}
Err(e) => {
@@ -2172,8 +2192,15 @@ mod tests {
.unwrap();
let mut config = state.storage.get_bucket_config("public").await.unwrap();
config.acl = Some(Value::String(acl_to_xml(&create_canned_acl("public-read", "myfsio"))));
state.storage.set_bucket_config("public", &config).await.unwrap();
config.acl = Some(Value::String(acl_to_xml(&create_canned_acl(
"public-read",
"myfsio",
))));
state
.storage
.set_bucket_config("public", &config)
.await
.unwrap();
let app = crate::create_router(state);
let response = app

View File

@@ -1994,12 +1994,15 @@ pub async fn complete_multipart_upload(
.complete_multipart(&bucket_name, &upload_id, &parts)
.await
{
Ok(meta) => json_ok(json!({
Ok(meta) => {
super::trigger_replication(&state, &bucket_name, &meta.key, "write");
json_ok(json!({
"key": meta.key,
"size": meta.size,
"etag": meta.etag.unwrap_or_default(),
"last_modified": meta.last_modified.to_rfc3339(),
})),
}))
}
Err(err) => storage_json_error(err),
}
}
@@ -2462,13 +2465,16 @@ async fn copy_object_json(state: &AppState, bucket: &str, key: &str, body: Body)
.copy_object(bucket, key, dest_bucket, dest_key)
.await
{
Ok(_) => Json(json!({
Ok(_) => {
super::trigger_replication(state, dest_bucket, dest_key, "write");
Json(json!({
"status": "ok",
"message": format!("Copied to {}/{}", dest_bucket, dest_key),
"dest_bucket": dest_bucket,
"dest_key": dest_key,
}))
.into_response(),
.into_response()
}
Err(err) => storage_json_error(err),
}
}
@@ -2495,13 +2501,17 @@ async fn move_object_json(state: &AppState, bucket: &str, key: &str, body: Body)
match state.storage.copy_object(bucket, key, dest_bucket, dest_key).await {
Ok(_) => match state.storage.delete_object(bucket, key).await {
Ok(()) => Json(json!({
Ok(()) => {
super::trigger_replication(state, dest_bucket, dest_key, "write");
super::trigger_replication(state, bucket, key, "delete");
Json(json!({
"status": "ok",
"message": format!("Moved to {}/{}", dest_bucket, dest_key),
"dest_bucket": dest_bucket,
"dest_key": dest_key,
}))
.into_response(),
.into_response()
}
Err(_) => Json(json!({
"status": "partial",
"message": format!("Copied to {}/{} but failed to delete source", dest_bucket, dest_key),
@@ -2554,6 +2564,7 @@ async fn delete_object_json(
if let Err(err) = state.storage.delete_object(bucket, key).await {
return storage_json_error(err);
}
super::trigger_replication(state, bucket, key, "delete");
if let Err(err) = purge_object_versions_for_key(state, bucket, key).await {
return json_error(StatusCode::BAD_REQUEST, err);
}
@@ -2565,11 +2576,14 @@ async fn delete_object_json(
}
match state.storage.delete_object(bucket, key).await {
Ok(()) => Json(json!({
Ok(()) => {
super::trigger_replication(state, bucket, key, "delete");
Json(json!({
"status": "ok",
"message": format!("Deleted '{}'", key),
}))
.into_response(),
.into_response()
}
Err(err) => storage_json_error(err),
}
}
@@ -2630,6 +2644,7 @@ async fn restore_object_version_json(
{
return storage_json_error(err);
}
super::trigger_replication(state, bucket, key, "write");
let mut message = format!("Restored '{}'", key);
if live_exists && versioning_enabled {
@@ -2679,6 +2694,14 @@ fn parse_object_post_action(rest: &str) -> Option<(String, ObjectPostAction)> {
ObjectPostAction::Restore(version_id.to_string()),
));
}
if let Some(key_with_version) = rest.strip_suffix("/restore") {
if let Some((key, version_id)) = key_with_version.rsplit_once("/versions/") {
return Some((
key.to_string(),
ObjectPostAction::Restore(version_id.to_string()),
));
}
}
for (suffix, action) in [
("/delete", ObjectPostAction::Delete),
("/presign", ObjectPostAction::Presign),
@@ -2824,6 +2847,7 @@ pub async fn bulk_delete_objects(
for key in keys {
match state.storage.delete_object(&bucket_name, &key).await {
Ok(()) => {
super::trigger_replication(&state, &bucket_name, &key, "delete");
if payload.purge_versions {
if let Err(err) =
purge_object_versions_for_key(&state, &bucket_name, &key).await
@@ -3038,6 +3062,7 @@ pub async fn archived_post_dispatch(
match purge_object_versions_for_key(&state, &bucket_name, key).await {
Ok(()) => {
let _ = state.storage.delete_object(&bucket_name, key).await;
super::trigger_replication(&state, &bucket_name, key, "delete");
Json(json!({
"status": "ok",
"message": format!("Removed archived versions for '{}'", key),
@@ -3267,14 +3292,32 @@ pub async fn retry_replication_failure(
Path(bucket_name): Path<String>,
Query(q): Query<ReplicationObjectKeyQuery>,
) -> Response {
let object_key = q.object_key.trim();
retry_replication_failure_key(&state, &bucket_name, q.object_key.trim()).await
}
/// Path-style variant of the replication-failure retry endpoint: the object
/// key arrives as the tail of the URL path and must end in `/retry`.
pub async fn retry_replication_failure_path(
    State(state): State<AppState>,
    Extension(_session): Extension<SessionHandle>,
    Path((bucket_name, rest)): Path<(String, String)>,
) -> Response {
    match rest.strip_suffix("/retry") {
        Some(object_key) => {
            retry_replication_failure_key(&state, &bucket_name, object_key.trim()).await
        }
        // Anything not ending in "/retry" is not a recognized action.
        None => json_error(StatusCode::NOT_FOUND, "Unknown replication failure action"),
    }
}
async fn retry_replication_failure_key(
state: &AppState,
bucket_name: &str,
object_key: &str,
) -> Response {
if object_key.is_empty() {
return json_error(StatusCode::BAD_REQUEST, "object_key is required");
}
if state
.replication
.retry_failed(&bucket_name, object_key)
.retry_failed(bucket_name, object_key)
.await
{
json_ok(json!({
@@ -3305,12 +3348,27 @@ pub async fn dismiss_replication_failure(
Path(bucket_name): Path<String>,
Query(q): Query<ReplicationObjectKeyQuery>,
) -> Response {
let object_key = q.object_key.trim();
dismiss_replication_failure_key(&state, &bucket_name, q.object_key.trim())
}
/// Path-style variant of the replication-failure dismiss endpoint: the
/// object key arrives as the tail of the URL path.
pub async fn dismiss_replication_failure_path(
    State(state): State<AppState>,
    Extension(_session): Extension<SessionHandle>,
    Path((bucket_name, object_key)): Path<(String, String)>,
) -> Response {
    let trimmed_key = object_key.trim();
    dismiss_replication_failure_key(&state, &bucket_name, trimmed_key)
}
fn dismiss_replication_failure_key(
state: &AppState,
bucket_name: &str,
object_key: &str,
) -> Response {
if object_key.is_empty() {
return json_error(StatusCode::BAD_REQUEST, "object_key is required");
}
if state.replication.dismiss_failure(&bucket_name, object_key) {
if state.replication.dismiss_failure(bucket_name, object_key) {
json_ok(json!({
"status": "dismissed",
"object_key": object_key,

View File

@@ -21,7 +21,10 @@ pub fn create_ui_router(state: state::AppState) -> Router {
.route("/", get(ui::root_redirect))
.route("/ui", get(ui::root_redirect))
.route("/ui/", get(ui::root_redirect))
.route("/ui/buckets", get(ui_pages::buckets_overview))
.route(
"/ui/buckets",
get(ui_pages::buckets_overview).post(ui_pages::create_bucket),
)
.route("/ui/buckets/create", post(ui_pages::create_bucket))
.route("/ui/buckets/{bucket_name}", get(ui_pages::bucket_detail))
.route(
@@ -64,6 +67,10 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/buckets/{bucket_name}/multipart/{upload_id}/part",
put(ui_api::upload_multipart_part),
)
.route(
"/ui/buckets/{bucket_name}/multipart/{upload_id}/parts",
put(ui_api::upload_multipart_part),
)
.route(
"/ui/buckets/{bucket_name}/multipart/{upload_id}/complete",
post(ui_api::complete_multipart_upload),
@@ -72,6 +79,10 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/buckets/{bucket_name}/multipart/{upload_id}/abort",
delete(ui_api::abort_multipart_upload),
)
.route(
"/ui/buckets/{bucket_name}/multipart/{upload_id}",
delete(ui_api::abort_multipart_upload),
)
.route(
"/ui/buckets/{bucket_name}/objects",
get(ui_api::list_bucket_objects),
@@ -88,6 +99,18 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/buckets/{bucket_name}/copy-targets",
get(ui_api::list_copy_targets),
)
.route(
"/ui/buckets/{bucket_name}/list-for-copy",
get(ui_api::list_copy_targets),
)
.route(
"/ui/buckets/{bucket_name}/objects/bulk-delete",
post(ui_api::bulk_delete_objects),
)
.route(
"/ui/buckets/{bucket_name}/objects/bulk-download",
post(ui_api::bulk_download_objects),
)
.route(
"/ui/buckets/{bucket_name}/objects/{*rest}",
get(ui_api::object_get_dispatch).post(ui_api::object_post_dispatch),
@@ -132,6 +155,11 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/buckets/{bucket_name}/replication/failures/clear",
delete(ui_api::clear_replication_failures),
)
.route(
"/ui/buckets/{bucket_name}/replication/failures/{*rest}",
post(ui_api::retry_replication_failure_path)
.delete(ui_api::dismiss_replication_failure_path),
)
.route(
"/ui/buckets/{bucket_name}/bulk-delete",
post(ui_api::bulk_delete_objects),
@@ -155,6 +183,10 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/iam/users/{user_id}/delete",
post(ui_pages::delete_iam_user),
)
.route(
"/ui/iam/users/{user_id}/update",
post(ui_pages::update_iam_user),
)
.route(
"/ui/iam/users/{user_id}/policies",
post(ui_pages::update_iam_policies),
@@ -167,12 +199,20 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/iam/users/{user_id}/rotate-secret",
post(ui_pages::rotate_iam_secret),
)
.route(
"/ui/iam/users/{user_id}/rotate",
post(ui_pages::rotate_iam_secret),
)
.route("/ui/connections/create", post(ui_pages::create_connection))
.route("/ui/connections/test", post(ui_api::test_connection))
.route(
"/ui/connections/{connection_id}",
post(ui_pages::update_connection),
)
.route(
"/ui/connections/{connection_id}/update",
post(ui_pages::update_connection),
)
.route(
"/ui/connections/{connection_id}/delete",
post(ui_pages::delete_connection),
@@ -201,7 +241,10 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/sites/peers/{site_id}/bidirectional-status",
get(ui_api::peer_bidirectional_status),
)
.route("/ui/connections", get(ui_pages::connections_dashboard))
.route(
"/ui/connections",
get(ui_pages::connections_dashboard).post(ui_pages::create_connection),
)
.route("/ui/metrics", get(ui_pages::metrics_dashboard))
.route(
"/ui/metrics/settings",
@@ -239,6 +282,10 @@ pub fn create_ui_router(state: state::AppState) -> Router {
"/ui/website-domains/{domain}",
post(ui_pages::update_website_domain),
)
.route(
"/ui/website-domains/{domain}/update",
post(ui_pages::update_website_domain),
)
.route(
"/ui/website-domains/{domain}/delete",
post(ui_pages::delete_website_domain),
@@ -445,10 +492,18 @@ pub fn create_router(state: state::AppState) -> Router {
"/admin/iam/users/{identifier}/access-keys",
axum::routing::post(handlers::admin::iam_create_access_key),
)
.route(
"/admin/iam/users/{identifier}/keys",
axum::routing::post(handlers::admin::iam_create_access_key),
)
.route(
"/admin/iam/users/{identifier}/access-keys/{access_key}",
axum::routing::delete(handlers::admin::iam_delete_access_key),
)
.route(
"/admin/iam/users/{identifier}/keys/{access_key}",
axum::routing::delete(handlers::admin::iam_delete_access_key),
)
.route(
"/admin/iam/users/{identifier}/disable",
axum::routing::post(handlers::admin::iam_disable_user),

View File

@@ -19,21 +19,59 @@ fn website_error_response(
status: StatusCode,
body: Option<Vec<u8>>,
content_type: &str,
include_body: bool,
) -> Response {
let (body, content_type) = match body {
Some(body) => (body, content_type),
None => (
default_website_error_body(status).into_bytes(),
"text/html; charset=utf-8",
),
};
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, content_type.parse().unwrap());
headers.insert(header::ACCEPT_RANGES, "bytes".parse().unwrap());
if let Some(ref body) = body {
headers.insert(
header::CONTENT_LENGTH,
body.len().to_string().parse().unwrap(),
);
if include_body {
(status, headers, body.clone()).into_response()
} else {
(status, headers).into_response()
}
}
/// Renders the built-in fallback HTML error page for static-website
/// responses when a bucket has no custom error document.
///
/// The page shows the numeric status code and its canonical reason phrase
/// (falling back to "Error" for codes without one). Note the explanatory
/// copy is not-found-flavored ("The requested page could not be found")
/// regardless of which status is passed in.
fn default_website_error_body(status: StatusCode) -> String {
    let code = status.as_u16();
    // canonical_reason() is None for unregistered status codes.
    let reason = status.canonical_reason().unwrap_or("Error");
    format!(
        "<!doctype html>\
        <html lang=\"en\">\
        <head>\
        <meta charset=\"utf-8\">\
        <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\
        <title>{code} {reason}</title>\
        <style>\
        html{{font-family:Arial,Helvetica,sans-serif;background:#f8fafc;color:#172033}}\
        body{{margin:0;min-height:100vh;display:grid;place-items:center}}\
        main{{max-width:42rem;padding:3rem 2rem}}\
        p.code{{font-size:.78rem;text-transform:uppercase;letter-spacing:.12em;color:#64748b;margin:0 0 .75rem}}\
        h1{{font-size:2rem;line-height:1.15;margin:0 0 1rem}}\
        p{{font-size:1rem;line-height:1.6;margin:0;color:#334155}}\
        </style>\
        </head>\
        <body>\
        <main>\
        <p class=\"code\">HTTP {code}</p>\
        <h1>{code} {reason}</h1>\
        <p>The requested page could not be found. Check the URL, or return to the site root.</p>\
        </main>\
        </body>\
        </html>"
    )
}
fn parse_range_header(range_header: &str, total_size: u64) -> Option<(u64, u64)> {
let range_spec = range_header.strip_prefix("bytes=")?;
if let Some(suffix) = range_spec.strip_prefix('-') {
@@ -192,6 +230,7 @@ async fn maybe_serve_website(
return None;
}
let request_path = uri_path.trim_start_matches('/').to_string();
let include_error_body = method != axum::http::Method::HEAD;
let store = state.website_domains.as_ref()?;
let bucket = store.get_bucket(&host)?;
if !matches!(state.storage.bucket_exists(&bucket).await, Ok(true)) {
@@ -199,6 +238,7 @@ async fn maybe_serve_website(
StatusCode::NOT_FOUND,
None,
"text/plain; charset=utf-8",
include_error_body,
));
}
@@ -208,6 +248,7 @@ async fn maybe_serve_website(
StatusCode::NOT_FOUND,
None,
"text/plain; charset=utf-8",
include_error_body,
));
};
let Some((index_document, error_document)) = parse_website_config(website_config) else {
@@ -215,6 +256,7 @@ async fn maybe_serve_website(
StatusCode::NOT_FOUND,
None,
"text/plain; charset=utf-8",
include_error_body,
));
};
@@ -252,6 +294,7 @@ async fn maybe_serve_website(
StatusCode::NOT_FOUND,
None,
"text/plain; charset=utf-8",
include_error_body,
))
});
} else {
@@ -259,6 +302,7 @@ async fn maybe_serve_website(
StatusCode::NOT_FOUND,
None,
"text/plain; charset=utf-8",
include_error_body,
));
}
} else if !exists {
@@ -277,6 +321,7 @@ async fn maybe_serve_website(
StatusCode::NOT_FOUND,
None,
"text/plain; charset=utf-8",
include_error_body,
))
});
}
@@ -284,6 +329,7 @@ async fn maybe_serve_website(
StatusCode::NOT_FOUND,
None,
"text/plain; charset=utf-8",
include_error_body,
));
}

View File

@@ -268,11 +268,9 @@ mod tests {
let parsed = acl_from_bucket_config(&Value::String(acl_to_xml(&acl))).unwrap();
assert_eq!(parsed.owner, "owner");
assert_eq!(parsed.grants.len(), 2);
assert!(
parsed
assert!(parsed
.grants
.iter()
.any(|grant| grant.grantee == GRANTEE_AUTHENTICATED_USERS)
);
.any(|grant| grant.grantee == GRANTEE_AUTHENTICATED_USERS));
}
}

View File

@@ -302,7 +302,10 @@ impl LifecycleService {
Ok(uploads) => {
for upload in &uploads {
if upload.initiated < cutoff {
if let Err(err) = self.storage.abort_multipart(bucket, &upload.upload_id).await
if let Err(err) = self
.storage
.abort_multipart(bucket, &upload.upload_id)
.await
{
result
.errors
@@ -314,7 +317,10 @@ impl LifecycleService {
}
None
}
Err(err) => Some(format!("Failed to list multipart uploads for {}: {}", bucket, err)),
Err(err) => Some(format!(
"Failed to list multipart uploads for {}: {}",
bucket, err
)),
}
}
@@ -438,11 +444,8 @@ fn parse_lifecycle_rules_from_string(raw: &str) -> Vec<ParsedLifecycleRule> {
.find(|node| {
node.is_element()
&& node.tag_name().name() == "Filter"
&& node
.children()
.any(|child| {
child.is_element()
&& child.tag_name().name() == "Prefix"
&& node.children().any(|child| {
child.is_element() && child.tag_name().name() == "Prefix"
})
})
.and_then(|filter| child_text(&filter, "Prefix"))
@@ -469,8 +472,7 @@ fn parse_lifecycle_rules_from_string(raw: &str) -> Vec<ParsedLifecycleRule> {
abort_incomplete_multipart_days: rule
.descendants()
.find(|node| {
node.is_element()
&& node.tag_name().name() == "AbortIncompleteMultipartUpload"
node.is_element() && node.tag_name().name() == "AbortIncompleteMultipartUpload"
})
.and_then(|node| child_text(&node, "DaysAfterInitiation"))
.and_then(|value| value.parse::<u64>().ok()),
@@ -592,7 +594,9 @@ mod tests {
.await
.unwrap();
let versions_root = version_root_for_bucket(tmp.path(), "docs").join("logs").join("file.txt");
let versions_root = version_root_for_bucket(tmp.path(), "docs")
.join("logs")
.join("file.txt");
let manifest = std::fs::read_dir(&versions_root)
.unwrap()
.flatten()
@@ -621,7 +625,8 @@ mod tests {
config.lifecycle = Some(Value::String(lifecycle_xml.to_string()));
storage.set_bucket_config("docs", &config).await.unwrap();
let service = LifecycleService::new(storage.clone(), tmp.path(), LifecycleConfig::default());
let service =
LifecycleService::new(storage.clone(), tmp.path(), LifecycleConfig::default());
let result = service.run_cycle().await.unwrap();
assert_eq!(result["versions_deleted"], 1);

View File

@@ -1,5 +1,5 @@
pub mod acl;
pub mod access_logging;
pub mod acl;
pub mod gc;
pub mod integrity;
pub mod lifecycle;

View File

@@ -61,7 +61,9 @@ impl NotificationConfiguration {
}
}
pub fn parse_notification_configurations(xml: &str) -> Result<Vec<NotificationConfiguration>, String> {
pub fn parse_notification_configurations(
xml: &str,
) -> Result<Vec<NotificationConfiguration>, String> {
let doc = roxmltree::Document::parse(xml).map_err(|err| err.to_string())?;
let mut configs = Vec::new();

View File

@@ -569,7 +569,7 @@ sudo journalctl -u myfsio -f # View logs</code></pre>
<h3 class="h6 text-uppercase text-muted">Policies &amp; versioning</h3>
<ul>
<li>Toggle versioning (requires write access). Archived-only keys are flagged so you can restore them quickly.</li>
<li>The policy editor saves drafts, ships with presets, and hot-reloads <code>data/.myfsio.sys/config/bucket_policies.json</code>.</li>
<li>The policy editor saves each bucket policy in that bucket's <code>.bucket.json</code>; legacy <code>data/.myfsio.sys/config/bucket_policies.json</code> entries are still read as a fallback.</li>
</ul>
</div>
</div>
@@ -2058,7 +2058,7 @@ curl "{{ api_base | replace(from="/api", to="/ui") }}/metrics/operations/history
<tr>
<td>UI shows stale policy/object data</td>
<td>Browser cached prior state</td>
<td>Refresh; the server hot-reloads <code>data/.myfsio.sys/config/bucket_policies.json</code> and storage metadata.</td>
<td>Refresh; the server hot-reloads bucket <code>.bucket.json</code> policy data and legacy <code>data/.myfsio.sys/config/bucket_policies.json</code> fallback entries.</td>
</tr>
<tr>
<td>Presign dialog returns 403</td>

View File

@@ -1,8 +1,9 @@
use axum::body::Body;
use axum::http::{Method, Request, StatusCode};
use http_body_util::BodyExt;
use myfsio_storage::traits::StorageEngine;
use myfsio_storage::traits::{AsyncReadStream, StorageEngine};
use serde_json::Value;
use std::collections::HashMap;
use tower::ServiceExt;
const TEST_ACCESS_KEY: &str = "AKIAIOSFODNN7EXAMPLE";
@@ -236,6 +237,150 @@ fn signed_request(method: Method, uri: &str, body: Body) -> Request<Body> {
.unwrap()
}
/// Builds an `AppState` for website-hosting tests, backed by a fresh
/// temporary storage root.
///
/// Writes a minimal IAM config with a single enabled admin-style user
/// (wildcard bucket/action/prefix policy) using the test access/secret
/// keys, then constructs a `ServerConfig` with every optional subsystem
/// (encryption, KMS, GC, metrics, lifecycle, site sync, UI) disabled and
/// only `website_hosting_enabled` set to `true`.
///
/// Returns the state together with the `TempDir` guard; the caller must
/// keep the guard alive so the storage root is not deleted mid-test.
fn test_website_state() -> (myfsio_server::state::AppState, tempfile::TempDir) {
    let tmp = tempfile::TempDir::new().unwrap();
    // IAM config lives under <root>/.myfsio.sys/config/iam.json.
    let iam_path = tmp.path().join(".myfsio.sys").join("config");
    std::fs::create_dir_all(&iam_path).unwrap();
    std::fs::write(
        iam_path.join("iam.json"),
        serde_json::json!({
            "version": 2,
            "users": [{
                "user_id": "u-test1234",
                "display_name": "admin",
                "enabled": true,
                "access_keys": [{
                    "access_key": TEST_ACCESS_KEY,
                    "secret_key": TEST_SECRET_KEY,
                    "status": "active"
                }],
                // Wildcard policy: full access to every bucket and prefix.
                "policies": [{
                    "bucket": "*",
                    "actions": ["*"],
                    "prefix": "*"
                }]
            }]
        })
        .to_string(),
    )
    .unwrap();
    let config = myfsio_server::config::ServerConfig {
        // Port 0 — the test router is driven in-process, never bound.
        bind_addr: "127.0.0.1:0".parse().unwrap(),
        ui_bind_addr: "127.0.0.1:0".parse().unwrap(),
        storage_root: tmp.path().to_path_buf(),
        region: "us-east-1".to_string(),
        iam_config_path: iam_path.join("iam.json"),
        sigv4_timestamp_tolerance_secs: 900,
        presigned_url_min_expiry: 1,
        presigned_url_max_expiry: 604800,
        secret_key: None,
        encryption_enabled: false,
        kms_enabled: false,
        gc_enabled: false,
        integrity_enabled: false,
        metrics_enabled: false,
        metrics_history_enabled: false,
        metrics_interval_minutes: 5,
        metrics_retention_hours: 24,
        metrics_history_interval_minutes: 5,
        metrics_history_retention_hours: 24,
        lifecycle_enabled: false,
        // The one subsystem under test.
        website_hosting_enabled: true,
        replication_connect_timeout_secs: 5,
        replication_read_timeout_secs: 30,
        replication_max_retries: 2,
        replication_streaming_threshold_bytes: 10_485_760,
        replication_max_failures_per_bucket: 50,
        site_sync_enabled: false,
        site_sync_interval_secs: 60,
        site_sync_batch_size: 100,
        site_sync_connect_timeout_secs: 10,
        site_sync_read_timeout_secs: 120,
        site_sync_max_retries: 2,
        site_sync_clock_skew_tolerance: 1.0,
        ui_enabled: false,
        templates_dir: std::path::PathBuf::from("templates"),
        static_dir: std::path::PathBuf::from("static"),
    };
    (myfsio_server::state::AppState::new(config), tmp)
}
/// Store `body` under `bucket`/`key`, tagging it with `content_type` via the
/// reserved `__content_type__` metadata key the storage layer recognizes.
async fn put_website_object(
    state: &myfsio_server::state::AppState,
    bucket: &str,
    key: &str,
    body: &str,
    content_type: &str,
) {
    let metadata = HashMap::from([(
        "__content_type__".to_string(),
        content_type.to_string(),
    )]);
    let payload: AsyncReadStream = Box::pin(std::io::Cursor::new(body.as_bytes().to_vec()));
    state
        .storage
        .put_object(bucket, key, payload, Some(metadata))
        .await
        .unwrap();
}
/// Build a router whose website bucket serves an index page and, when
/// `error_document` is given, a custom error page under that key.
async fn test_website_app(error_document: Option<&str>) -> (axum::Router, tempfile::TempDir) {
    let (state, tmp) = test_website_state();
    let bucket = "site-bucket";
    state.storage.create_bucket(bucket).await.unwrap();

    // Home page served at the website root.
    put_website_object(
        &state,
        bucket,
        "index.html",
        "<!doctype html><h1>Home</h1>",
        "text/html",
    )
    .await;

    // Start from the mandatory index entry and add the error page only when
    // the caller requested one.
    let mut website = serde_json::json!({ "index_document": "index.html" });
    if let Some(error_key) = error_document {
        put_website_object(
            &state,
            bucket,
            error_key,
            "<!doctype html><h1>Bucket Not Found Page</h1>",
            "text/html",
        )
        .await;
        website["error_document"] = serde_json::json!(error_key);
    }

    let mut config = state.storage.get_bucket_config(bucket).await.unwrap();
    config.website = Some(website);
    state
        .storage
        .set_bucket_config(bucket, &config)
        .await
        .unwrap();

    // Map the virtual-host name onto the bucket for Host-header dispatch.
    state
        .website_domains
        .as_ref()
        .unwrap()
        .set_mapping("site.example.com", bucket);
    (myfsio_server::create_router(state), tmp)
}
/// Build a bodiless request aimed at the virtual host configured by
/// `test_website_app`.
fn website_request(method: Method, uri: &str) -> Request<Body> {
    let builder = Request::builder()
        .method(method)
        .uri(uri)
        .header("Host", "site.example.com");
    builder.body(Body::empty()).unwrap()
}
fn parse_select_events(body: &[u8]) -> Vec<(String, Vec<u8>)> {
let mut out = Vec::new();
let mut idx: usize = 0;
@@ -3113,6 +3258,98 @@ async fn test_select_object_content_rejects_non_xml_content_type() {
assert!(body.contains("Content-Type must be application/xml or text/xml"));
}
#[tokio::test]
async fn test_static_website_serves_configured_error_document() {
    // A bucket configured with a 404.html error document should serve that
    // page, with an HTML content type, for unknown keys.
    let (app, _tmp) = test_website_app(Some("404.html")).await;
    let response = app
        .oneshot(website_request(Method::GET, "/missing.html"))
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NOT_FOUND);

    let content_type = response
        .headers()
        .get("content-type")
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();
    assert!(content_type.starts_with("text/html"));

    let bytes = response.into_body().collect().await.unwrap().to_bytes();
    let body = String::from_utf8(bytes.to_vec()).unwrap();
    assert!(body.contains("Bucket Not Found Page"));
}
#[tokio::test]
async fn test_static_website_default_404_returns_html_body() {
    // With no error_document configured, the server's built-in HTML 404 page
    // is returned; HEAD must advertise the same Content-Length with no body.
    let (app, _tmp) = test_website_app(None).await;

    let get_resp = app
        .clone()
        .oneshot(website_request(Method::GET, "/missing.html"))
        .await
        .unwrap();
    assert_eq!(get_resp.status(), StatusCode::NOT_FOUND);
    let content_type = get_resp
        .headers()
        .get("content-type")
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();
    assert!(content_type.starts_with("text/html"));
    let advertised_len: usize = get_resp
        .headers()
        .get("content-length")
        .unwrap()
        .to_str()
        .unwrap()
        .parse()
        .unwrap();

    let bytes = get_resp.into_body().collect().await.unwrap().to_bytes();
    let body = String::from_utf8(bytes.to_vec()).unwrap();
    // Content-Length must describe the actual payload.
    assert_eq!(body.len(), advertised_len);
    assert!(body.contains("<h1>404 Not Found</h1>"));
    // The default page is expected to exceed 512 bytes — presumably padding
    // to avoid browsers' "friendly" error-page substitution; confirm intent.
    assert!(body.len() > 512);

    let head_resp = app
        .oneshot(website_request(Method::HEAD, "/missing.html"))
        .await
        .unwrap();
    assert_eq!(head_resp.status(), StatusCode::NOT_FOUND);
    let head_len: usize = head_resp
        .headers()
        .get("content-length")
        .unwrap()
        .to_str()
        .unwrap()
        .parse()
        .unwrap();
    let head_body = head_resp.into_body().collect().await.unwrap().to_bytes().to_vec();
    assert_eq!(head_len, advertised_len);
    assert!(head_body.is_empty());
}
#[tokio::test]
async fn test_non_admin_authorization_enforced() {
let iam_json = serde_json::json!({

View File

@@ -147,6 +147,12 @@ impl FsStorageBackend {
.join(BUCKET_CONFIG_FILE)
}
fn legacy_bucket_policies_path(&self) -> PathBuf {
    // Location of the single global policy file used by older releases,
    // kept so those policies can still be read and migrated.
    let config_dir = self.system_root_path().join("config");
    config_dir.join("bucket_policies.json")
}
fn version_dir(&self, bucket_name: &str, key: &str) -> PathBuf {
    // Per-object version directory: <bucket versions root>/<key>.
    let versions_root = self.bucket_versions_root(bucket_name);
    versions_root.join(key)
}
@@ -383,7 +389,7 @@ impl FsStorageBackend {
}
let config_path = self.bucket_config_path(bucket_name);
let config = if config_path.exists() {
let mut config = if config_path.exists() {
std::fs::read_to_string(&config_path)
.ok()
.and_then(|s| serde_json::from_str::<BucketConfig>(&s).ok())
@@ -391,12 +397,55 @@ impl FsStorageBackend {
} else {
BucketConfig::default()
};
if config.policy.is_none() {
config.policy = self.read_legacy_bucket_policy_sync(bucket_name);
}
self.bucket_config_cache
.insert(bucket_name.to_string(), (config.clone(), Instant::now()));
config
}
fn read_legacy_bucket_policy_sync(&self, bucket_name: &str) -> Option<Value> {
    // Best effort: a missing or unparseable legacy file yields None.
    let text = std::fs::read_to_string(self.legacy_bucket_policies_path()).ok()?;
    let doc: Value = serde_json::from_str(&text).ok()?;
    // The bucket's policy may be nested under a "policies" key or keyed at
    // the document's top level; prefer the nested form.
    if let Some(policy) = doc.get("policies").and_then(|p| p.get(bucket_name)) {
        return Some(policy.clone());
    }
    doc.get(bucket_name).cloned()
}
fn remove_legacy_bucket_policy_sync(&self, bucket_name: &str) -> std::io::Result<()> {
    // Scrub any entry for `bucket_name` from the legacy global policy file,
    // rewriting the file only when something was actually removed.
    let path = self.legacy_bucket_policies_path();
    if !path.exists() {
        return Ok(());
    }
    let text = std::fs::read_to_string(&path)?;
    // An unparseable or non-object file is left untouched (best effort).
    let Ok(mut doc) = serde_json::from_str::<Value>(&text) else {
        return Ok(());
    };
    let Some(root) = doc.as_object_mut() else {
        return Ok(());
    };
    // The entry may live under "policies" and/or at the top level; drop both.
    let mut removed = false;
    if let Some(nested) = root.get_mut("policies").and_then(Value::as_object_mut) {
        removed |= nested.remove(bucket_name).is_some();
    }
    removed |= root.remove(bucket_name).is_some();
    if removed {
        Self::atomic_write_json_sync(&path, &doc, true)
    } else {
        Ok(())
    }
}
fn write_bucket_config_sync(
&self,
bucket_name: &str,
@@ -406,6 +455,9 @@ impl FsStorageBackend {
let json_val = serde_json::to_value(config)
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
Self::atomic_write_json_sync(&config_path, &json_val, true)?;
if config.policy.is_none() {
self.remove_legacy_bucket_policy_sync(bucket_name)?;
}
self.bucket_config_cache
.insert(bucket_name.to_string(), (config.clone(), Instant::now()));
Ok(())
@@ -1881,6 +1933,63 @@ mod tests {
assert!(backend.bucket_exists("test-bucket").await.unwrap());
}
#[tokio::test]
async fn test_bucket_config_reads_legacy_global_policy() {
    let (dir, backend) = create_test_backend();
    backend.create_bucket("legacy-policy").await.unwrap();

    // Seed a pre-BucketConfig global policy file containing an entry for
    // the bucket under the "policies" key.
    let config_dir = dir.path().join(".myfsio.sys").join("config");
    std::fs::create_dir_all(&config_dir).unwrap();
    let policy_path = config_dir.join("bucket_policies.json");
    let legacy_doc = serde_json::json!({
        "policies": {
            "legacy-policy": {
                "Version": "2012-10-17",
                "Statement": [{
                    "Effect": "Allow",
                    "Principal": "*",
                    "Action": "s3:GetObject",
                    "Resource": "arn:aws:s3:::legacy-policy/*"
                }]
            }
        }
    });
    std::fs::write(&policy_path, legacy_doc.to_string()).unwrap();

    // The legacy policy must surface through get_bucket_config.
    let mut config = backend.get_bucket_config("legacy-policy").await.unwrap();
    let version = config
        .policy
        .as_ref()
        .and_then(|p| p.get("Version"))
        .and_then(Value::as_str);
    assert_eq!(version, Some("2012-10-17"));

    // Clearing the policy must also scrub the legacy file entry.
    config.policy = None;
    backend
        .set_bucket_config("legacy-policy", &config)
        .await
        .unwrap();

    let remaining: Value =
        serde_json::from_str(&std::fs::read_to_string(policy_path).unwrap()).unwrap();
    assert!(remaining
        .get("policies")
        .and_then(|policies| policies.get("legacy-policy"))
        .is_none());
    assert!(backend
        .get_bucket_config("legacy-policy")
        .await
        .unwrap()
        .policy
        .is_none());
}
#[tokio::test]
async fn test_delete_bucket() {
let (_dir, backend) = create_test_backend();