1621 lines
55 KiB
Rust
1621 lines
55 KiB
Rust
use axum::body::Body;
|
|
use axum::http::{HeaderMap, StatusCode};
|
|
use axum::response::{IntoResponse, Response};
|
|
use chrono::{DateTime, Utc};
|
|
|
|
use myfsio_common::error::{S3Error, S3ErrorCode};
|
|
use myfsio_storage::traits::StorageEngine;
|
|
|
|
use crate::services::acl::{
|
|
acl_from_object_metadata, acl_to_xml, create_canned_acl, store_object_acl,
|
|
};
|
|
use crate::services::notifications::parse_notification_configurations;
|
|
use crate::services::object_lock::{
|
|
ensure_retention_mutable, get_legal_hold, get_object_retention as retention_from_metadata,
|
|
set_legal_hold, set_object_retention as store_retention, ObjectLockRetention, RetentionMode,
|
|
};
|
|
use crate::state::AppState;
|
|
|
|
/// Wraps an XML payload in an HTTP response with an `application/xml` content type.
fn xml_response(status: StatusCode, xml: String) -> Response {
    let headers = [("content-type", "application/xml")];
    (status, headers, xml).into_response()
}
|
|
|
|
fn storage_err(err: myfsio_storage::error::StorageError) -> Response {
|
|
let s3err = S3Error::from(err);
|
|
let status =
|
|
StatusCode::from_u16(s3err.http_status()).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
|
|
(
|
|
status,
|
|
[("content-type", "application/xml")],
|
|
s3err.to_xml(),
|
|
)
|
|
.into_response()
|
|
}
|
|
|
|
/// Serializes a JSON value into an HTTP response with an `application/json` content type.
fn json_response(status: StatusCode, value: serde_json::Value) -> Response {
    let body = value.to_string();
    (status, [("content-type", "application/json")], body).into_response()
}
|
|
|
|
fn custom_xml_error(status: StatusCode, code: &str, message: &str) -> Response {
|
|
let xml = format!(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<Error><Code>{}</Code><Message>{}</Message><Resource></Resource><RequestId></RequestId></Error>",
|
|
xml_escape(code),
|
|
xml_escape(message),
|
|
);
|
|
xml_response(status, xml)
|
|
}
|
|
|
|
pub async fn get_versioning(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.is_versioning_enabled(bucket).await {
|
|
Ok(enabled) => {
|
|
let status_str = if enabled { "Enabled" } else { "Suspended" };
|
|
let xml = format!(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<VersioningConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
|
|
<Status>{}</Status>\
|
|
</VersioningConfiguration>",
|
|
status_str
|
|
);
|
|
xml_response(StatusCode::OK, xml)
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_versioning(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::from_code(S3ErrorCode::MalformedXML).to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
let xml_str = String::from_utf8_lossy(&body_bytes);
|
|
let enabled = xml_str.contains("<Status>Enabled</Status>");
|
|
|
|
match state.storage.set_versioning(bucket, enabled).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_tagging(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
let mut xml = String::from(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<Tagging xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><TagSet>",
|
|
);
|
|
for tag in &config.tags {
|
|
xml.push_str(&format!(
|
|
"<Tag><Key>{}</Key><Value>{}</Value></Tag>",
|
|
tag.key, tag.value
|
|
));
|
|
}
|
|
xml.push_str("</TagSet></Tagging>");
|
|
xml_response(StatusCode::OK, xml)
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_tagging(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::from_code(S3ErrorCode::MalformedXML).to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
let xml_str = String::from_utf8_lossy(&body_bytes);
|
|
let tags = parse_tagging_xml(&xml_str);
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.tags = tags;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_tagging(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.tags.clear();
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_cors(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(cors) = &config.cors {
|
|
xml_response(StatusCode::OK, cors.to_string())
|
|
} else {
|
|
xml_response(
|
|
StatusCode::NOT_FOUND,
|
|
S3Error::new(
|
|
S3ErrorCode::NoSuchKey,
|
|
"The CORS configuration does not exist",
|
|
)
|
|
.to_xml(),
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_cors(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
|
};
|
|
|
|
let body_str = String::from_utf8_lossy(&body_bytes);
|
|
let value = serde_json::Value::String(body_str.to_string());
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.cors = Some(value);
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_cors(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.cors = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_location(state: &AppState, _bucket: &str) -> Response {
|
|
let xml = format!(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<LocationConstraint xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">{}</LocationConstraint>",
|
|
state.config.region
|
|
);
|
|
xml_response(StatusCode::OK, xml)
|
|
}
|
|
|
|
pub async fn get_encryption(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(enc) = &config.encryption {
|
|
xml_response(StatusCode::OK, enc.to_string())
|
|
} else {
|
|
xml_response(
|
|
StatusCode::NOT_FOUND,
|
|
S3Error::from_code(S3ErrorCode::ServerSideEncryptionConfigurationNotFoundError)
|
|
.to_xml(),
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_encryption(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
|
};
|
|
let value = serde_json::Value::String(String::from_utf8_lossy(&body_bytes).to_string());
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.encryption = Some(value);
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_encryption(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.encryption = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_lifecycle(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(lc) = &config.lifecycle {
|
|
xml_response(StatusCode::OK, lc.to_string())
|
|
} else {
|
|
xml_response(
|
|
StatusCode::NOT_FOUND,
|
|
S3Error::from_code(S3ErrorCode::NoSuchLifecycleConfiguration).to_xml(),
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_lifecycle(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
|
};
|
|
let value = serde_json::Value::String(String::from_utf8_lossy(&body_bytes).to_string());
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.lifecycle = Some(value);
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_lifecycle(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.lifecycle = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_quota(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(quota) = &config.quota {
|
|
let usage = match state.storage.bucket_stats(bucket).await {
|
|
Ok(s) => s,
|
|
Err(e) => return storage_err(e),
|
|
};
|
|
json_response(
|
|
StatusCode::OK,
|
|
serde_json::json!({
|
|
"quota": {
|
|
"max_size_bytes": quota.max_bytes,
|
|
"max_objects": quota.max_objects,
|
|
},
|
|
"usage": {
|
|
"bytes": usage.bytes,
|
|
"objects": usage.objects,
|
|
}
|
|
}),
|
|
)
|
|
} else {
|
|
xml_response(
|
|
StatusCode::NOT_FOUND,
|
|
S3Error::new(S3ErrorCode::NoSuchKey, "No quota configuration found").to_xml(),
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_quota(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::InvalidArgument, "Invalid quota payload").to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
let payload: serde_json::Value = match serde_json::from_slice(&body_bytes) {
|
|
Ok(v) => v,
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(
|
|
S3ErrorCode::InvalidArgument,
|
|
"Request body must be valid JSON",
|
|
)
|
|
.to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
let max_size = payload.get("max_size_bytes").and_then(|v| v.as_u64());
|
|
let max_objects = payload.get("max_objects").and_then(|v| v.as_u64());
|
|
|
|
if max_size.is_none() && max_objects.is_none() {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(
|
|
S3ErrorCode::InvalidArgument,
|
|
"At least one of max_size_bytes or max_objects is required",
|
|
)
|
|
.to_xml(),
|
|
);
|
|
}
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.quota = Some(myfsio_common::types::QuotaConfig {
|
|
max_bytes: max_size,
|
|
max_objects,
|
|
});
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_quota(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.quota = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_policy(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(policy) = &config.policy {
|
|
json_response(StatusCode::OK, policy.clone())
|
|
} else {
|
|
xml_response(
|
|
StatusCode::NOT_FOUND,
|
|
S3Error::from_code(S3ErrorCode::NoSuchBucketPolicy).to_xml(),
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_policy(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::MalformedXML, "Failed to read policy body").to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
let policy: serde_json::Value = match serde_json::from_slice(&body_bytes) {
|
|
Ok(v) => v,
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::InvalidArgument, "Policy document must be JSON").to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.policy = Some(policy);
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_policy(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.policy = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_policy_status(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
let is_public = config
|
|
.policy
|
|
.as_ref()
|
|
.map(policy_is_public)
|
|
.unwrap_or(false);
|
|
let xml = format!(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?><PolicyStatus><IsPublic>{}</IsPublic></PolicyStatus>",
|
|
if is_public { "TRUE" } else { "FALSE" }
|
|
);
|
|
xml_response(StatusCode::OK, xml)
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_replication(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(replication) = &config.replication {
|
|
match replication {
|
|
serde_json::Value::String(s) => xml_response(StatusCode::OK, s.clone()),
|
|
other => xml_response(StatusCode::OK, other.to_string()),
|
|
}
|
|
} else {
|
|
xml_response(
|
|
StatusCode::NOT_FOUND,
|
|
S3Error::new(
|
|
S3ErrorCode::NoSuchKey,
|
|
"Replication configuration not found",
|
|
)
|
|
.to_xml(),
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_replication(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::MalformedXML, "Failed to read replication body").to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
if body_bytes.is_empty() {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::MalformedXML, "Request body is required").to_xml(),
|
|
);
|
|
}
|
|
|
|
let body_str = String::from_utf8_lossy(&body_bytes).to_string();
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.replication = Some(serde_json::Value::String(body_str));
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_replication(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.replication = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
fn policy_is_public(policy: &serde_json::Value) -> bool {
|
|
let statements = match policy.get("Statement") {
|
|
Some(serde_json::Value::Array(items)) => items,
|
|
Some(item) => {
|
|
return is_allow_public_statement(item);
|
|
}
|
|
None => return false,
|
|
};
|
|
|
|
statements.iter().any(is_allow_public_statement)
|
|
}
|
|
|
|
/// Returns true when `statement` is an Allow statement whose Principal
/// grants access to everyone ("*").
///
/// Fix: IAM principals may also be arrays (`{"AWS": ["*", ...]}`); the
/// previous implementation only matched string values inside the
/// Principal object, under-reporting public buckets.
fn is_allow_public_statement(statement: &serde_json::Value) -> bool {
    let effect_allow = statement
        .get("Effect")
        .and_then(|v| v.as_str())
        .map(|s| s.eq_ignore_ascii_case("allow"))
        .unwrap_or(false);
    if !effect_allow {
        return false;
    }

    // Principal may be "*", {"AWS": "*"}, or {"AWS": ["*", ...]}.
    match statement.get("Principal") {
        Some(serde_json::Value::String(s)) => s == "*",
        Some(serde_json::Value::Object(obj)) => obj.values().any(principal_entry_is_wildcard),
        _ => false,
    }
}

/// True when a Principal entry is the string "*" or an array containing "*".
fn principal_entry_is_wildcard(value: &serde_json::Value) -> bool {
    match value {
        serde_json::Value::String(s) => s == "*",
        serde_json::Value::Array(items) => items.iter().any(|v| v == "*"),
        _ => false,
    }
}
|
|
|
|
pub async fn get_acl(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(acl) = &config.acl {
|
|
xml_response(StatusCode::OK, acl.to_string())
|
|
} else {
|
|
let xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<AccessControlPolicy xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
|
|
<Owner><ID>myfsio</ID><DisplayName>myfsio</DisplayName></Owner>\
|
|
<AccessControlList>\
|
|
<Grant><Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\">\
|
|
<ID>myfsio</ID><DisplayName>myfsio</DisplayName></Grantee>\
|
|
<Permission>FULL_CONTROL</Permission></Grant>\
|
|
</AccessControlList></AccessControlPolicy>";
|
|
xml_response(StatusCode::OK, xml.to_string())
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_acl(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
|
};
|
|
let value = serde_json::Value::String(String::from_utf8_lossy(&body_bytes).to_string());
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.acl = Some(value);
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_website(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(ws) = &config.website {
|
|
xml_response(StatusCode::OK, ws.to_string())
|
|
} else {
|
|
xml_response(
|
|
StatusCode::NOT_FOUND,
|
|
S3Error::new(
|
|
S3ErrorCode::NoSuchKey,
|
|
"The website configuration does not exist",
|
|
)
|
|
.to_xml(),
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_website(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
|
};
|
|
let value = serde_json::Value::String(String::from_utf8_lossy(&body_bytes).to_string());
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.website = Some(value);
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_website(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.website = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_object_lock(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(ol) = &config.object_lock {
|
|
xml_response(StatusCode::OK, ol.to_string())
|
|
} else {
|
|
let xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<ObjectLockConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
|
|
<ObjectLockEnabled>Disabled</ObjectLockEnabled>\
|
|
</ObjectLockConfiguration>";
|
|
xml_response(StatusCode::OK, xml.to_string())
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_notification(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(config) => {
|
|
if let Some(n) = &config.notification {
|
|
xml_response(StatusCode::OK, n.to_string())
|
|
} else {
|
|
let xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<NotificationConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
|
|
</NotificationConfiguration>";
|
|
xml_response(StatusCode::OK, xml.to_string())
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
/// GET ?logging — reports the bucket's access-logging status as a
/// BucketLoggingStatus document.
///
/// Lookup order: the dedicated access-logging store first, then the
/// legacy `logging` field of the bucket config. A legacy config found
/// there is opportunistically migrated into the dedicated store
/// (failures are logged, not fatal). Buckets with no enabled config
/// get an empty BucketLoggingStatus document.
pub async fn get_logging(state: &AppState, bucket: &str) -> Response {
    // Unlike the other handlers in this file, this one checks bucket
    // existence explicitly because the logging store is consulted first.
    match state.storage.bucket_exists(bucket).await {
        Ok(true) => {}
        Ok(false) => {
            return storage_err(myfsio_storage::error::StorageError::BucketNotFound(
                bucket.to_string(),
            ))
        }
        Err(e) => return storage_err(e),
    }

    let logging_config = if let Some(cfg) = state.access_logging.get(bucket) {
        Some(cfg)
    } else {
        // Fall back to the legacy location and migrate on read.
        match state.storage.get_bucket_config(bucket).await {
            Ok(config) => {
                let legacy = legacy_logging_config(&config);
                if let Some(cfg) = legacy.as_ref() {
                    // Best-effort migration; a failure only loses the cache,
                    // the legacy copy is still served below.
                    if let Err(err) = state.access_logging.set(bucket, cfg.clone()) {
                        tracing::warn!(
                            "Failed to migrate legacy bucket logging config for {}: {}",
                            bucket,
                            err
                        );
                    }
                }
                legacy
            }
            Err(e) => return storage_err(e),
        }
    };

    let body = match logging_config {
        // Only an *enabled* config produces a LoggingEnabled element.
        Some(cfg) if cfg.enabled => format!(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
            <BucketLoggingStatus xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
            <LoggingEnabled><TargetBucket>{}</TargetBucket><TargetPrefix>{}</TargetPrefix></LoggingEnabled>\
            </BucketLoggingStatus>",
            xml_escape(&cfg.target_bucket),
            xml_escape(&cfg.target_prefix),
        ),
        _ => "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
            <BucketLoggingStatus xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"></BucketLoggingStatus>"
            .to_string(),
    };
    xml_response(StatusCode::OK, body)
}
|
|
|
|
/// Escapes the five XML special characters so `s` can be safely embedded
/// in element content or attribute values.
///
/// Fix: the replacement strings had degenerated into identity no-ops
/// (e.g. replacing `&` with `&`), so the function escaped nothing.
/// `&` must be escaped first so already-produced entities are not
/// double-escaped.
fn xml_escape(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&apos;")
}
|
|
|
|
fn legacy_logging_config(
|
|
config: &myfsio_common::types::BucketConfig,
|
|
) -> Option<crate::services::access_logging::LoggingConfiguration> {
|
|
let value = config.logging.as_ref()?;
|
|
match value {
|
|
serde_json::Value::String(xml) => parse_logging_config_xml(xml),
|
|
serde_json::Value::Object(_) => parse_logging_config_value(value.clone()),
|
|
_ => None,
|
|
}
|
|
}
|
|
|
|
fn parse_logging_config_value(
|
|
value: serde_json::Value,
|
|
) -> Option<crate::services::access_logging::LoggingConfiguration> {
|
|
let logging_enabled = value.get("LoggingEnabled")?;
|
|
let target_bucket = logging_enabled
|
|
.get("TargetBucket")
|
|
.and_then(|value| value.as_str())
|
|
.map(str::trim)
|
|
.filter(|value| !value.is_empty())?
|
|
.to_string();
|
|
let target_prefix = logging_enabled
|
|
.get("TargetPrefix")
|
|
.and_then(|value| value.as_str())
|
|
.unwrap_or_default()
|
|
.to_string();
|
|
Some(crate::services::access_logging::LoggingConfiguration {
|
|
target_bucket,
|
|
target_prefix,
|
|
enabled: true,
|
|
})
|
|
}
|
|
|
|
fn parse_logging_config_xml(
|
|
xml: &str,
|
|
) -> Option<crate::services::access_logging::LoggingConfiguration> {
|
|
let doc = roxmltree::Document::parse(xml).ok()?;
|
|
let root = doc.root_element();
|
|
let logging_enabled = root
|
|
.children()
|
|
.find(|n| n.is_element() && n.tag_name().name() == "LoggingEnabled")?;
|
|
let target_bucket = logging_enabled
|
|
.children()
|
|
.find(|n| n.is_element() && n.tag_name().name() == "TargetBucket")
|
|
.and_then(|n| n.text())
|
|
.map(str::trim)
|
|
.filter(|value| !value.is_empty())?
|
|
.to_string();
|
|
let target_prefix = logging_enabled
|
|
.children()
|
|
.find(|n| n.is_element() && n.tag_name().name() == "TargetPrefix")
|
|
.and_then(|n| n.text())
|
|
.unwrap_or_default()
|
|
.to_string();
|
|
Some(crate::services::access_logging::LoggingConfiguration {
|
|
target_bucket,
|
|
target_prefix,
|
|
enabled: true,
|
|
})
|
|
}
|
|
|
|
pub async fn put_object_lock(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => return StatusCode::BAD_REQUEST.into_response(),
|
|
};
|
|
let value = serde_json::Value::String(String::from_utf8_lossy(&body_bytes).to_string());
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.object_lock = Some(value);
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_object_lock(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.object_lock = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_notification(state: &AppState, bucket: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"MalformedXML",
|
|
"Unable to parse XML document",
|
|
)
|
|
}
|
|
};
|
|
let raw = String::from_utf8_lossy(&body_bytes).to_string();
|
|
let notification = if raw.trim().is_empty() {
|
|
None
|
|
} else {
|
|
match parse_notification_configurations(&raw) {
|
|
Ok(_) => Some(serde_json::Value::String(raw)),
|
|
Err(message) => {
|
|
let code = if message.contains("Destination URL is required") {
|
|
"InvalidArgument"
|
|
} else {
|
|
"MalformedXML"
|
|
};
|
|
return custom_xml_error(StatusCode::BAD_REQUEST, code, &message);
|
|
}
|
|
}
|
|
};
|
|
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.notification = notification;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_notification(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.get_bucket_config(bucket).await {
|
|
Ok(mut config) => {
|
|
config.notification = None;
|
|
match state.storage.set_bucket_config(bucket, &config).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
/// PUT ?logging — configures access logging for the bucket.
///
/// Behavior:
/// - empty/whitespace body, or XML without <LoggingEnabled>, disables logging;
/// - otherwise the XML must contain a non-empty <TargetBucket> that names
///   an existing bucket;
/// - the resulting configuration is persisted in the access-logging store.
pub async fn put_logging(state: &AppState, bucket: &str, body: Body) -> Response {
    // The logging store is separate from the bucket config, so bucket
    // existence is checked explicitly up front.
    match state.storage.bucket_exists(bucket).await {
        Ok(true) => {}
        Ok(false) => {
            return storage_err(myfsio_storage::error::StorageError::BucketNotFound(
                bucket.to_string(),
            ))
        }
        Err(e) => return storage_err(e),
    }

    let body_bytes = match http_body_util::BodyExt::collect(body).await {
        Ok(collected) => collected.to_bytes(),
        Err(_) => return StatusCode::BAD_REQUEST.into_response(),
    };

    // An empty (all-whitespace) body disables logging.
    if body_bytes.iter().all(u8::is_ascii_whitespace) {
        state.access_logging.delete(bucket);
        return StatusCode::OK.into_response();
    }

    // Non-UTF-8 and unparseable bodies are both reported as MalformedXML.
    let xml = match std::str::from_utf8(&body_bytes) {
        Ok(s) => s,
        Err(_) => {
            return s3_error_response(
                S3ErrorCode::MalformedXML,
                "Unable to parse XML document",
                StatusCode::BAD_REQUEST,
            )
        }
    };

    let doc = match roxmltree::Document::parse(xml) {
        Ok(d) => d,
        Err(_) => {
            return s3_error_response(
                S3ErrorCode::MalformedXML,
                "Unable to parse XML document",
                StatusCode::BAD_REQUEST,
            )
        }
    };

    let root = doc.root_element();
    let logging_enabled = root
        .children()
        .find(|n| n.is_element() && n.tag_name().name() == "LoggingEnabled");

    // Absent <LoggingEnabled> means "turn logging off" (matches S3).
    let Some(le) = logging_enabled else {
        state.access_logging.delete(bucket);
        return StatusCode::OK.into_response();
    };

    let target_bucket = le
        .children()
        .find(|n| n.is_element() && n.tag_name().name() == "TargetBucket")
        .and_then(|n| n.text())
        .map(str::trim)
        .unwrap_or_default();

    if target_bucket.is_empty() {
        return s3_error_response(
            S3ErrorCode::InvalidArgument,
            "TargetBucket is required",
            StatusCode::BAD_REQUEST,
        );
    }

    // TargetPrefix is optional and defaults to the empty string.
    let cfg = crate::services::access_logging::LoggingConfiguration {
        target_bucket: target_bucket.to_string(),
        target_prefix: le
            .children()
            .find(|n| n.is_element() && n.tag_name().name() == "TargetPrefix")
            .and_then(|n| n.text())
            .unwrap_or_default()
            .to_string(),
        enabled: true,
    };

    // The log-delivery target must itself exist.
    match state.storage.bucket_exists(&cfg.target_bucket).await {
        Ok(true) => {}
        Ok(false) => {
            return s3_error_response(
                S3ErrorCode::InvalidArgument,
                "Target bucket does not exist",
                StatusCode::BAD_REQUEST,
            )
        }
        Err(e) => return storage_err(e),
    }

    if let Err(e) = state.access_logging.set(bucket, cfg) {
        tracing::error!(
            "Failed to persist bucket logging config for {}: {}",
            bucket,
            e
        );
        return StatusCode::INTERNAL_SERVER_ERROR.into_response();
    }

    StatusCode::OK.into_response()
}
|
|
|
|
pub async fn delete_logging(state: &AppState, bucket: &str) -> Response {
|
|
match state.storage.bucket_exists(bucket).await {
|
|
Ok(true) => {}
|
|
Ok(false) => {
|
|
return storage_err(myfsio_storage::error::StorageError::BucketNotFound(
|
|
bucket.to_string(),
|
|
))
|
|
}
|
|
Err(e) => return storage_err(e),
|
|
}
|
|
state.access_logging.delete(bucket);
|
|
StatusCode::NO_CONTENT.into_response()
|
|
}
|
|
|
|
fn s3_error_response(code: S3ErrorCode, message: &str, status: StatusCode) -> Response {
|
|
let err = S3Error::new(code, message.to_string());
|
|
(status, [("content-type", "application/xml")], err.to_xml()).into_response()
|
|
}
|
|
|
|
pub async fn list_object_versions(
|
|
state: &AppState,
|
|
bucket: &str,
|
|
prefix: Option<&str>,
|
|
max_keys: usize,
|
|
) -> Response {
|
|
match state.storage.list_buckets().await {
|
|
Ok(buckets) => {
|
|
if !buckets.iter().any(|b| b.name == bucket) {
|
|
return storage_err(myfsio_storage::error::StorageError::BucketNotFound(
|
|
bucket.to_string(),
|
|
));
|
|
}
|
|
}
|
|
Err(e) => return storage_err(e),
|
|
}
|
|
|
|
let fetch_limit = max_keys.saturating_add(1).max(1);
|
|
let params = myfsio_common::types::ListParams {
|
|
max_keys: fetch_limit,
|
|
prefix: prefix.map(ToOwned::to_owned),
|
|
..Default::default()
|
|
};
|
|
|
|
let object_result = match state.storage.list_objects(bucket, ¶ms).await {
|
|
Ok(result) => result,
|
|
Err(e) => return storage_err(e),
|
|
};
|
|
let objects = object_result.objects;
|
|
let archived_versions = match state
|
|
.storage
|
|
.list_bucket_object_versions(bucket, prefix)
|
|
.await
|
|
{
|
|
Ok(versions) => versions,
|
|
Err(e) => return storage_err(e),
|
|
};
|
|
|
|
let mut xml = String::from(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<ListVersionsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">",
|
|
);
|
|
xml.push_str(&format!("<Name>{}</Name>", xml_escape(bucket)));
|
|
xml.push_str(&format!(
|
|
"<Prefix>{}</Prefix>",
|
|
xml_escape(prefix.unwrap_or(""))
|
|
));
|
|
xml.push_str(&format!("<MaxKeys>{}</MaxKeys>", max_keys));
|
|
|
|
let current_count = objects.len().min(max_keys);
|
|
let remaining = max_keys.saturating_sub(current_count);
|
|
let archived_count = archived_versions.len().min(remaining);
|
|
let is_truncated = object_result.is_truncated
|
|
|| objects.len() > current_count
|
|
|| archived_versions.len() > archived_count;
|
|
xml.push_str(&format!("<IsTruncated>{}</IsTruncated>", is_truncated));
|
|
|
|
let current_keys: std::collections::HashSet<String> = objects
|
|
.iter()
|
|
.take(current_count)
|
|
.map(|o| o.key.clone())
|
|
.collect();
|
|
let mut latest_archived_per_key: std::collections::HashMap<String, String> =
|
|
std::collections::HashMap::new();
|
|
for v in archived_versions.iter().take(archived_count) {
|
|
if current_keys.contains(&v.key) {
|
|
continue;
|
|
}
|
|
let existing = latest_archived_per_key.get(&v.key).cloned();
|
|
match existing {
|
|
None => {
|
|
latest_archived_per_key.insert(v.key.clone(), v.version_id.clone());
|
|
}
|
|
Some(existing_id) => {
|
|
let existing_ts = archived_versions
|
|
.iter()
|
|
.find(|x| x.key == v.key && x.version_id == existing_id)
|
|
.map(|x| x.last_modified)
|
|
.unwrap_or(v.last_modified);
|
|
if v.last_modified > existing_ts {
|
|
latest_archived_per_key.insert(v.key.clone(), v.version_id.clone());
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
for obj in objects.iter().take(current_count) {
|
|
xml.push_str("<Version>");
|
|
xml.push_str(&format!("<Key>{}</Key>", xml_escape(&obj.key)));
|
|
xml.push_str("<VersionId>null</VersionId>");
|
|
xml.push_str("<IsLatest>true</IsLatest>");
|
|
xml.push_str(&format!(
|
|
"<LastModified>{}</LastModified>",
|
|
myfsio_xml::response::format_s3_datetime(&obj.last_modified)
|
|
));
|
|
if let Some(ref etag) = obj.etag {
|
|
xml.push_str(&format!("<ETag>\"{}\"</ETag>", xml_escape(etag)));
|
|
}
|
|
xml.push_str(&format!("<Size>{}</Size>", obj.size));
|
|
xml.push_str(&format!(
|
|
"<StorageClass>{}</StorageClass>",
|
|
xml_escape(obj.storage_class.as_deref().unwrap_or("STANDARD"))
|
|
));
|
|
xml.push_str("</Version>");
|
|
}
|
|
|
|
for version in archived_versions.iter().take(archived_count) {
|
|
let is_latest = latest_archived_per_key
|
|
.get(&version.key)
|
|
.map(|id| id == &version.version_id)
|
|
.unwrap_or(false);
|
|
let tag = if version.is_delete_marker {
|
|
"DeleteMarker"
|
|
} else {
|
|
"Version"
|
|
};
|
|
xml.push_str(&format!("<{}>", tag));
|
|
xml.push_str(&format!("<Key>{}</Key>", xml_escape(&version.key)));
|
|
xml.push_str(&format!(
|
|
"<VersionId>{}</VersionId>",
|
|
xml_escape(&version.version_id)
|
|
));
|
|
xml.push_str(&format!("<IsLatest>{}</IsLatest>", is_latest));
|
|
xml.push_str(&format!(
|
|
"<LastModified>{}</LastModified>",
|
|
myfsio_xml::response::format_s3_datetime(&version.last_modified)
|
|
));
|
|
if !version.is_delete_marker {
|
|
if let Some(ref etag) = version.etag {
|
|
xml.push_str(&format!("<ETag>\"{}\"</ETag>", xml_escape(etag)));
|
|
}
|
|
xml.push_str(&format!("<Size>{}</Size>", version.size));
|
|
xml.push_str("<StorageClass>STANDARD</StorageClass>");
|
|
}
|
|
xml.push_str(&format!("</{}>", tag));
|
|
}
|
|
|
|
xml.push_str("</ListVersionsResult>");
|
|
xml_response(StatusCode::OK, xml)
|
|
}
|
|
|
|
pub async fn get_object_tagging(state: &AppState, bucket: &str, key: &str) -> Response {
|
|
match state.storage.get_object_tags(bucket, key).await {
|
|
Ok(tags) => {
|
|
let mut xml = String::from(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<Tagging xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><TagSet>",
|
|
);
|
|
for tag in &tags {
|
|
xml.push_str(&format!(
|
|
"<Tag><Key>{}</Key><Value>{}</Value></Tag>",
|
|
tag.key, tag.value
|
|
));
|
|
}
|
|
xml.push_str("</TagSet></Tagging>");
|
|
xml_response(StatusCode::OK, xml)
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_object_tagging(state: &AppState, bucket: &str, key: &str, body: Body) -> Response {
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::from_code(S3ErrorCode::MalformedXML).to_xml(),
|
|
);
|
|
}
|
|
};
|
|
|
|
let xml_str = String::from_utf8_lossy(&body_bytes);
|
|
let tags = parse_tagging_xml(&xml_str);
|
|
if tags.len() > state.config.object_tag_limit {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(
|
|
S3ErrorCode::InvalidTag,
|
|
format!("Maximum {} tags allowed", state.config.object_tag_limit),
|
|
)
|
|
.to_xml(),
|
|
);
|
|
}
|
|
for tag in &tags {
|
|
if tag.key.is_empty() || tag.key.len() > 128 {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::InvalidTag, "Tag key length must be 1-128").to_xml(),
|
|
);
|
|
}
|
|
if tag.value.len() > 256 {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::InvalidTag, "Tag value length must be 0-256").to_xml(),
|
|
);
|
|
}
|
|
if tag.key.contains('=') {
|
|
return xml_response(
|
|
StatusCode::BAD_REQUEST,
|
|
S3Error::new(S3ErrorCode::InvalidTag, "Tag keys must not contain '='").to_xml(),
|
|
);
|
|
}
|
|
}
|
|
|
|
match state.storage.set_object_tags(bucket, key, &tags).await {
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn delete_object_tagging(state: &AppState, bucket: &str, key: &str) -> Response {
|
|
match state.storage.delete_object_tags(bucket, key).await {
|
|
Ok(()) => StatusCode::NO_CONTENT.into_response(),
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_object_acl(
|
|
state: &AppState,
|
|
bucket: &str,
|
|
key: &str,
|
|
headers: &HeaderMap,
|
|
_body: Body,
|
|
) -> Response {
|
|
match state.storage.head_object(bucket, key).await {
|
|
Ok(_) => {
|
|
let canned_acl = headers
|
|
.get("x-amz-acl")
|
|
.and_then(|value| value.to_str().ok())
|
|
.unwrap_or("private");
|
|
let mut metadata = match state.storage.get_object_metadata(bucket, key).await {
|
|
Ok(metadata) => metadata,
|
|
Err(err) => return storage_err(err),
|
|
};
|
|
let owner = acl_from_object_metadata(&metadata)
|
|
.map(|acl| acl.owner)
|
|
.unwrap_or_else(|| "myfsio".to_string());
|
|
let acl = create_canned_acl(canned_acl, &owner);
|
|
store_object_acl(&mut metadata, &acl);
|
|
match state
|
|
.storage
|
|
.put_object_metadata(bucket, key, &metadata)
|
|
.await
|
|
{
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(err) => storage_err(err),
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn get_object_retention(state: &AppState, bucket: &str, key: &str) -> Response {
|
|
match state.storage.head_object(bucket, key).await {
|
|
Ok(_) => {
|
|
let metadata = match state.storage.get_object_metadata(bucket, key).await {
|
|
Ok(metadata) => metadata,
|
|
Err(err) => return storage_err(err),
|
|
};
|
|
if let Some(retention) = retention_from_metadata(&metadata) {
|
|
let xml = format!(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<Retention xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
|
|
<Mode>{}</Mode><RetainUntilDate>{}</RetainUntilDate></Retention>",
|
|
match retention.mode {
|
|
RetentionMode::GOVERNANCE => "GOVERNANCE",
|
|
RetentionMode::COMPLIANCE => "COMPLIANCE",
|
|
},
|
|
retention.retain_until_date.format("%Y-%m-%dT%H:%M:%S.000Z"),
|
|
);
|
|
xml_response(StatusCode::OK, xml)
|
|
} else {
|
|
custom_xml_error(
|
|
StatusCode::NOT_FOUND,
|
|
"NoSuchObjectLockConfiguration",
|
|
"No retention policy",
|
|
)
|
|
}
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_object_retention(
|
|
state: &AppState,
|
|
bucket: &str,
|
|
key: &str,
|
|
headers: &HeaderMap,
|
|
body: Body,
|
|
) -> Response {
|
|
match state.storage.head_object(bucket, key).await {
|
|
Ok(_) => {}
|
|
Err(e) => return storage_err(e),
|
|
}
|
|
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"MalformedXML",
|
|
"Unable to parse XML document",
|
|
)
|
|
}
|
|
};
|
|
let body_str = String::from_utf8_lossy(&body_bytes);
|
|
let doc = match roxmltree::Document::parse(&body_str) {
|
|
Ok(doc) => doc,
|
|
Err(_) => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"MalformedXML",
|
|
"Unable to parse XML document",
|
|
)
|
|
}
|
|
};
|
|
let mode = find_xml_text(&doc, "Mode").unwrap_or_default();
|
|
let retain_until = find_xml_text(&doc, "RetainUntilDate").unwrap_or_default();
|
|
if mode.is_empty() || retain_until.is_empty() {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"InvalidArgument",
|
|
"Mode and RetainUntilDate are required",
|
|
);
|
|
}
|
|
let mode = match mode.as_str() {
|
|
"GOVERNANCE" => RetentionMode::GOVERNANCE,
|
|
"COMPLIANCE" => RetentionMode::COMPLIANCE,
|
|
other => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"InvalidArgument",
|
|
&format!("Invalid retention mode: {}", other),
|
|
)
|
|
}
|
|
};
|
|
let retain_until_date = match DateTime::parse_from_rfc3339(&retain_until) {
|
|
Ok(value) => value.with_timezone(&Utc),
|
|
Err(_) => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"InvalidArgument",
|
|
&format!("Invalid date format: {}", retain_until),
|
|
)
|
|
}
|
|
};
|
|
|
|
let bypass_governance = headers
|
|
.get("x-amz-bypass-governance-retention")
|
|
.and_then(|value| value.to_str().ok())
|
|
.map(|value| value.eq_ignore_ascii_case("true"))
|
|
.unwrap_or(false);
|
|
let mut metadata = match state.storage.get_object_metadata(bucket, key).await {
|
|
Ok(metadata) => metadata,
|
|
Err(err) => return storage_err(err),
|
|
};
|
|
if let Err(message) = ensure_retention_mutable(&metadata, bypass_governance) {
|
|
return custom_xml_error(StatusCode::FORBIDDEN, "AccessDenied", &message);
|
|
}
|
|
if let Err(message) = store_retention(
|
|
&mut metadata,
|
|
&ObjectLockRetention {
|
|
mode,
|
|
retain_until_date,
|
|
},
|
|
) {
|
|
return custom_xml_error(StatusCode::BAD_REQUEST, "InvalidArgument", &message);
|
|
}
|
|
match state
|
|
.storage
|
|
.put_object_metadata(bucket, key, &metadata)
|
|
.await
|
|
{
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(err) => storage_err(err),
|
|
}
|
|
}
|
|
|
|
pub async fn get_object_legal_hold(state: &AppState, bucket: &str, key: &str) -> Response {
|
|
match state.storage.head_object(bucket, key).await {
|
|
Ok(_) => {
|
|
let metadata = match state.storage.get_object_metadata(bucket, key).await {
|
|
Ok(metadata) => metadata,
|
|
Err(err) => return storage_err(err),
|
|
};
|
|
let status = if get_legal_hold(&metadata) {
|
|
"ON"
|
|
} else {
|
|
"OFF"
|
|
};
|
|
let xml = format!(
|
|
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
|
<LegalHold xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
|
|
<Status>{}</Status></LegalHold>",
|
|
status
|
|
);
|
|
xml_response(StatusCode::OK, xml)
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
pub async fn put_object_legal_hold(
|
|
state: &AppState,
|
|
bucket: &str,
|
|
key: &str,
|
|
body: Body,
|
|
) -> Response {
|
|
match state.storage.head_object(bucket, key).await {
|
|
Ok(_) => {}
|
|
Err(e) => return storage_err(e),
|
|
}
|
|
|
|
let body_bytes = match http_body_util::BodyExt::collect(body).await {
|
|
Ok(collected) => collected.to_bytes(),
|
|
Err(_) => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"MalformedXML",
|
|
"Unable to parse XML document",
|
|
)
|
|
}
|
|
};
|
|
let body_str = String::from_utf8_lossy(&body_bytes);
|
|
let doc = match roxmltree::Document::parse(&body_str) {
|
|
Ok(doc) => doc,
|
|
Err(_) => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"MalformedXML",
|
|
"Unable to parse XML document",
|
|
)
|
|
}
|
|
};
|
|
let status = find_xml_text(&doc, "Status").unwrap_or_default();
|
|
let enabled = match status.as_str() {
|
|
"ON" => true,
|
|
"OFF" => false,
|
|
_ => {
|
|
return custom_xml_error(
|
|
StatusCode::BAD_REQUEST,
|
|
"InvalidArgument",
|
|
"Status must be ON or OFF",
|
|
)
|
|
}
|
|
};
|
|
let mut metadata = match state.storage.get_object_metadata(bucket, key).await {
|
|
Ok(metadata) => metadata,
|
|
Err(err) => return storage_err(err),
|
|
};
|
|
set_legal_hold(&mut metadata, enabled);
|
|
match state
|
|
.storage
|
|
.put_object_metadata(bucket, key, &metadata)
|
|
.await
|
|
{
|
|
Ok(()) => StatusCode::OK.into_response(),
|
|
Err(err) => storage_err(err),
|
|
}
|
|
}
|
|
|
|
pub async fn get_object_acl(state: &AppState, bucket: &str, key: &str) -> Response {
|
|
match state.storage.head_object(bucket, key).await {
|
|
Ok(_) => {
|
|
let metadata = match state.storage.get_object_metadata(bucket, key).await {
|
|
Ok(metadata) => metadata,
|
|
Err(err) => return storage_err(err),
|
|
};
|
|
let acl = acl_from_object_metadata(&metadata)
|
|
.unwrap_or_else(|| create_canned_acl("private", "myfsio"));
|
|
xml_response(StatusCode::OK, acl_to_xml(&acl))
|
|
}
|
|
Err(e) => storage_err(e),
|
|
}
|
|
}
|
|
|
|
fn find_xml_text(doc: &roxmltree::Document<'_>, name: &str) -> Option<String> {
|
|
doc.descendants()
|
|
.find(|node| node.is_element() && node.tag_name().name() == name)
|
|
.and_then(|node| node.text())
|
|
.map(|text| text.trim().to_string())
|
|
.filter(|text| !text.is_empty())
|
|
}
|
|
|
|
#[cfg(test)]
mod tests {
    // `legacy_logging_config` / `parse_logging_config_xml` are defined
    // earlier in this module (outside this chunk); these tests cover the
    // migration path from the old `BucketConfig.logging` field.
    use super::{legacy_logging_config, parse_logging_config_xml};
    use myfsio_common::types::BucketConfig;

    // A legacy config may store the raw BucketLoggingStatus XML as a JSON
    // string; it should round-trip into a LoggingConfiguration.
    #[test]
    fn parses_legacy_logging_xml_string() {
        let mut config = BucketConfig::default();
        config.logging = Some(serde_json::Value::String(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
            <BucketLoggingStatus xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
            <LoggingEnabled><TargetBucket>logs</TargetBucket><TargetPrefix>audit/</TargetPrefix></LoggingEnabled>\
            </BucketLoggingStatus>"
                .to_string(),
        ));

        let parsed = legacy_logging_config(&config).expect("expected legacy logging config");
        assert_eq!(parsed.target_bucket, "logs");
        assert_eq!(parsed.target_prefix, "audit/");
        assert!(parsed.enabled);
    }

    // A legacy config may also store the configuration as a structured JSON
    // object mirroring the XML layout.
    #[test]
    fn parses_legacy_logging_json_object() {
        let mut config = BucketConfig::default();
        config.logging = Some(serde_json::json!({
            "LoggingEnabled": {
                "TargetBucket": "logs",
                "TargetPrefix": "archive/"
            }
        }));

        let parsed = legacy_logging_config(&config).expect("expected legacy logging config");
        assert_eq!(parsed.target_bucket, "logs");
        assert_eq!(parsed.target_prefix, "archive/");
        assert!(parsed.enabled);
    }

    // A BucketLoggingStatus without a LoggingEnabled block means logging is
    // disabled, so parsing should yield None rather than an empty config.
    #[test]
    fn ignores_logging_xml_without_enabled_block() {
        let parsed = parse_logging_config_xml(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
            <BucketLoggingStatus xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
            </BucketLoggingStatus>",
        );

        assert!(parsed.is_none());
    }
}
|
|
|
|
fn parse_tagging_xml(xml: &str) -> Vec<myfsio_common::types::Tag> {
|
|
let mut tags = Vec::new();
|
|
let mut in_tag = false;
|
|
let mut current_key = String::new();
|
|
let mut current_value = String::new();
|
|
let mut current_element = String::new();
|
|
|
|
let mut reader = quick_xml::Reader::from_str(xml);
|
|
let mut buf = Vec::new();
|
|
|
|
loop {
|
|
match reader.read_event_into(&mut buf) {
|
|
Ok(quick_xml::events::Event::Start(ref e)) => {
|
|
let name = String::from_utf8_lossy(e.name().as_ref()).to_string();
|
|
current_element = name.clone();
|
|
if name == "Tag" {
|
|
in_tag = true;
|
|
current_key.clear();
|
|
current_value.clear();
|
|
}
|
|
}
|
|
Ok(quick_xml::events::Event::Text(ref e)) => {
|
|
if in_tag {
|
|
let text = e.unescape().unwrap_or_default().to_string();
|
|
match current_element.as_str() {
|
|
"Key" => current_key = text,
|
|
"Value" => current_value = text,
|
|
_ => {}
|
|
}
|
|
}
|
|
}
|
|
Ok(quick_xml::events::Event::End(ref e)) => {
|
|
let name = String::from_utf8_lossy(e.name().as_ref()).to_string();
|
|
if name == "Tag" && in_tag {
|
|
if !current_key.is_empty() {
|
|
tags.push(myfsio_common::types::Tag {
|
|
key: current_key.clone(),
|
|
value: current_value.clone(),
|
|
});
|
|
}
|
|
in_tag = false;
|
|
}
|
|
}
|
|
Ok(quick_xml::events::Event::Eof) => break,
|
|
Err(_) => break,
|
|
_ => {}
|
|
}
|
|
buf.clear();
|
|
}
|
|
|
|
tags
|
|
}
|