forked from Deuxfleurs/garage
Improved XML serialization
- Use quick_xml and serde for all XML response returned by the S3 API. - Include tests for all structs used to generate XML - Remove old manual XML escaping function which was unsafe
This commit is contained in:
parent
e4b9e4e24d
commit
6ccffc3162
10 changed files with 727 additions and 280 deletions
|
@ -1,13 +1,5 @@
|
|||
//! Module containing various helpers for encoding
|
||||
|
||||
/// Escape &str for xml inclusion
|
||||
pub fn xml_escape(s: &str) -> String {
|
||||
s.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\"", """)
|
||||
}
|
||||
|
||||
/// Encode &str for use in a URI
|
||||
pub fn uri_encode(string: &str, encode_slash: bool) -> String {
|
||||
let mut result = String::with_capacity(string.len() * 2);
|
||||
|
@ -28,12 +20,3 @@ pub fn uri_encode(string: &str, encode_slash: bool) -> String {
|
|||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// Encode &str either as an uri, or a valid string for xml inclusion
|
||||
pub fn xml_encode_key(k: &str, urlencode: bool) -> String {
|
||||
if urlencode {
|
||||
uri_encode(k, true)
|
||||
} else {
|
||||
xml_escape(k)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
use std::fmt::Write;
|
||||
|
||||
use err_derive::Error;
|
||||
use hyper::StatusCode;
|
||||
|
||||
use garage_util::error::Error as GarageError;
|
||||
|
||||
use crate::encoding::*;
|
||||
use crate::s3_xml;
|
||||
|
||||
/// Errors of this crate
|
||||
#[derive(Debug, Error)]
|
||||
|
@ -104,15 +102,22 @@ impl Error {
|
|||
}
|
||||
|
||||
pub fn aws_xml(&self, garage_region: &str, path: &str) -> String {
|
||||
let mut xml = String::new();
|
||||
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
||||
writeln!(&mut xml, "<Error>").unwrap();
|
||||
writeln!(&mut xml, "\t<Code>{}</Code>", self.aws_code()).unwrap();
|
||||
writeln!(&mut xml, "\t<Message>{}</Message>", self).unwrap();
|
||||
writeln!(&mut xml, "\t<Resource>{}</Resource>", xml_escape(path)).unwrap();
|
||||
writeln!(&mut xml, "\t<Region>{}</Region>", garage_region).unwrap();
|
||||
writeln!(&mut xml, "</Error>").unwrap();
|
||||
xml
|
||||
let error = s3_xml::Error {
|
||||
code: s3_xml::Value(self.aws_code().to_string()),
|
||||
message: s3_xml::Value(format!("{}", self)),
|
||||
resource: Some(s3_xml::Value(path.to_string())),
|
||||
region: Some(s3_xml::Value(garage_region.to_string())),
|
||||
};
|
||||
s3_xml::to_xml_with_header(&error).unwrap_or_else(|_| {
|
||||
r#"
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Error>
|
||||
<Code>InternalError</Code>
|
||||
<Message>XML encoding of error failed</Message>
|
||||
</Error>
|
||||
"#
|
||||
.into()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -18,3 +18,4 @@ mod s3_delete;
|
|||
pub mod s3_get;
|
||||
mod s3_list;
|
||||
mod s3_put;
|
||||
mod s3_xml;
|
||||
|
|
|
@ -1,73 +1,20 @@
|
|||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hyper::{Body, Response};
|
||||
use quick_xml::se::to_string;
|
||||
use serde::Serialize;
|
||||
|
||||
use garage_model::garage::Garage;
|
||||
use garage_model::key_table::Key;
|
||||
use garage_util::time::*;
|
||||
|
||||
use crate::error::*;
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct CreationDate {
|
||||
#[serde(rename = "$value")]
|
||||
pub body: String,
|
||||
}
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct Name {
|
||||
#[serde(rename = "$value")]
|
||||
pub body: String,
|
||||
}
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct Bucket {
|
||||
#[serde(rename = "CreationDate")]
|
||||
pub creation_date: CreationDate,
|
||||
#[serde(rename = "Name")]
|
||||
pub name: Name,
|
||||
}
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct DisplayName {
|
||||
#[serde(rename = "$value")]
|
||||
pub body: String,
|
||||
}
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct Id {
|
||||
#[serde(rename = "$value")]
|
||||
pub body: String,
|
||||
}
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct Owner {
|
||||
#[serde(rename = "DisplayName")]
|
||||
display_name: DisplayName,
|
||||
#[serde(rename = "ID")]
|
||||
id: Id,
|
||||
}
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct BucketList {
|
||||
#[serde(rename = "Bucket")]
|
||||
pub entries: Vec<Bucket>,
|
||||
}
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
struct ListAllMyBucketsResult {
|
||||
#[serde(rename = "Buckets")]
|
||||
buckets: BucketList,
|
||||
#[serde(rename = "Owner")]
|
||||
owner: Owner,
|
||||
}
|
||||
use crate::s3_xml;
|
||||
|
||||
pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>, Error> {
|
||||
let mut xml = String::new();
|
||||
|
||||
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
r#"<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">{}</LocationConstraint>"#,
|
||||
garage.config.s3_api.s3_region
|
||||
)
|
||||
.unwrap();
|
||||
let loc = s3_xml::LocationConstraint {
|
||||
xmlns: (),
|
||||
region: garage.config.s3_api.s3_region.to_string(),
|
||||
};
|
||||
let xml = s3_xml::to_xml_with_header(&loc)?;
|
||||
|
||||
Ok(Response::builder()
|
||||
.header("Content-Type", "application/xml")
|
||||
|
@ -75,34 +22,25 @@ pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>,
|
|||
}
|
||||
|
||||
pub fn handle_list_buckets(api_key: &Key) -> Result<Response<Body>, Error> {
|
||||
let list_buckets = ListAllMyBucketsResult {
|
||||
owner: Owner {
|
||||
display_name: DisplayName {
|
||||
body: api_key.name.get().to_string(),
|
||||
},
|
||||
id: Id {
|
||||
body: api_key.key_id.to_string(),
|
||||
},
|
||||
let list_buckets = s3_xml::ListAllMyBucketsResult {
|
||||
owner: s3_xml::Owner {
|
||||
display_name: s3_xml::Value(api_key.name.get().to_string()),
|
||||
id: s3_xml::Value(api_key.key_id.to_string()),
|
||||
},
|
||||
buckets: BucketList {
|
||||
buckets: s3_xml::BucketList {
|
||||
entries: api_key
|
||||
.authorized_buckets
|
||||
.items()
|
||||
.iter()
|
||||
.map(|(name, ts, _)| Bucket {
|
||||
creation_date: CreationDate {
|
||||
body: msec_to_rfc3339(*ts),
|
||||
},
|
||||
name: Name {
|
||||
body: name.to_string(),
|
||||
},
|
||||
.map(|(name, ts, _)| s3_xml::Bucket {
|
||||
creation_date: s3_xml::Value(msec_to_rfc3339(*ts)),
|
||||
name: s3_xml::Value(name.to_string()),
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
};
|
||||
|
||||
let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string();
|
||||
xml.push_str(&to_string(&list_buckets)?);
|
||||
let xml = s3_xml::to_xml_with_header(&list_buckets)?;
|
||||
trace!("xml: {}", xml);
|
||||
|
||||
Ok(Response::builder()
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hyper::{Body, Request, Response};
|
||||
|
@ -14,6 +13,7 @@ use garage_model::version_table::*;
|
|||
|
||||
use crate::error::*;
|
||||
use crate::s3_put::get_headers;
|
||||
use crate::s3_xml;
|
||||
|
||||
pub async fn handle_copy(
|
||||
garage: Arc<Garage>,
|
||||
|
@ -61,6 +61,8 @@ pub async fn handle_copy(
|
|||
_ => old_meta.clone(),
|
||||
};
|
||||
|
||||
let etag = new_meta.etag.to_string();
|
||||
|
||||
// Save object copy
|
||||
match source_last_state {
|
||||
ObjectVersionData::DeleteMarker => unreachable!(),
|
||||
|
@ -158,13 +160,13 @@ pub async fn handle_copy(
|
|||
}
|
||||
|
||||
let last_modified = msec_to_rfc3339(new_timestamp);
|
||||
let mut xml = String::new();
|
||||
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
||||
writeln!(&mut xml, r#"<CopyObjectResult>"#).unwrap();
|
||||
writeln!(&mut xml, "\t<LastModified>{}</LastModified>", last_modified).unwrap();
|
||||
writeln!(&mut xml, "</CopyObjectResult>").unwrap();
|
||||
let result = s3_xml::CopyObjectResult {
|
||||
last_modified: s3_xml::Value(last_modified),
|
||||
etag: s3_xml::Value(etag),
|
||||
};
|
||||
let xml = s3_xml::to_xml_with_header(&result)?;
|
||||
|
||||
Ok(Response::builder()
|
||||
.header("Content-Type", "application/xml")
|
||||
.body(Body::from(xml.into_bytes()))?)
|
||||
.body(Body::from(xml))?)
|
||||
}
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hyper::{Body, Request, Response};
|
||||
|
@ -9,8 +8,8 @@ use garage_util::time::*;
|
|||
use garage_model::garage::Garage;
|
||||
use garage_model::object_table::*;
|
||||
|
||||
use crate::encoding::*;
|
||||
use crate::error::*;
|
||||
use crate::s3_xml;
|
||||
use crate::signature::verify_signed_content;
|
||||
|
||||
async fn handle_delete_internal(
|
||||
|
@ -85,13 +84,8 @@ pub async fn handle_delete_objects(
|
|||
let cmd_xml = roxmltree::Document::parse(&std::str::from_utf8(&body)?)?;
|
||||
let cmd = parse_delete_objects_xml(&cmd_xml).ok_or_bad_request("Invalid delete XML query")?;
|
||||
|
||||
let mut retxml = String::new();
|
||||
writeln!(&mut retxml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
||||
writeln!(
|
||||
&mut retxml,
|
||||
r#"<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
|
||||
)
|
||||
.unwrap();
|
||||
let mut ret_deleted = Vec::new();
|
||||
let mut ret_errors = Vec::new();
|
||||
|
||||
for obj in cmd.objects.iter() {
|
||||
match handle_delete_internal(&garage, bucket, &obj.key).await {
|
||||
|
@ -99,42 +93,32 @@ pub async fn handle_delete_objects(
|
|||
if cmd.quiet {
|
||||
continue;
|
||||
}
|
||||
writeln!(&mut retxml, "\t<Deleted>").unwrap();
|
||||
writeln!(&mut retxml, "\t\t<Key>{}</Key>", xml_escape(&obj.key)).unwrap();
|
||||
writeln!(
|
||||
&mut retxml,
|
||||
"\t\t<VersionId>{}</VersionId>",
|
||||
hex::encode(deleted_version)
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
&mut retxml,
|
||||
"\t\t<DeleteMarkerVersionId>{}</DeleteMarkerVersionId>",
|
||||
hex::encode(delete_marker_version)
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(&mut retxml, "\t</Deleted>").unwrap();
|
||||
ret_deleted.push(s3_xml::Deleted {
|
||||
key: s3_xml::Value(obj.key.clone()),
|
||||
version_id: s3_xml::Value(hex::encode(deleted_version)),
|
||||
delete_marker_version_id: s3_xml::Value(hex::encode(delete_marker_version)),
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
writeln!(&mut retxml, "\t<Error>").unwrap();
|
||||
writeln!(&mut retxml, "\t\t<Code>{}</Code>", e.http_status_code()).unwrap();
|
||||
writeln!(&mut retxml, "\t\t<Key>{}</Key>", xml_escape(&obj.key)).unwrap();
|
||||
writeln!(
|
||||
&mut retxml,
|
||||
"\t\t<Message>{}</Message>",
|
||||
xml_escape(&format!("{}", e))
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(&mut retxml, "\t</Error>").unwrap();
|
||||
ret_errors.push(s3_xml::DeleteError {
|
||||
code: s3_xml::Value(e.aws_code().to_string()),
|
||||
key: Some(s3_xml::Value(obj.key.clone())),
|
||||
message: s3_xml::Value(format!("{}", e)),
|
||||
version_id: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(&mut retxml, "</DeleteResult>").unwrap();
|
||||
let xml = s3_xml::to_xml_with_header(&s3_xml::DeleteResult {
|
||||
xmlns: (),
|
||||
deleted: ret_deleted,
|
||||
errors: ret_errors,
|
||||
})?;
|
||||
|
||||
Ok(Response::builder()
|
||||
.header("Content-Type", "application/xml")
|
||||
.body(Body::from(retxml.into_bytes()))?)
|
||||
.body(Body::from(xml))?)
|
||||
}
|
||||
|
||||
struct DeleteRequest {
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
use std::collections::{BTreeMap, BTreeSet, HashMap};
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hyper::{Body, Response};
|
||||
|
@ -14,6 +13,7 @@ use garage_table::DeletedFilter;
|
|||
|
||||
use crate::encoding::*;
|
||||
use crate::error::*;
|
||||
use crate::s3_xml;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ListObjectsQuery {
|
||||
|
@ -163,126 +163,81 @@ pub async fn handle_list(
|
|||
}
|
||||
}
|
||||
|
||||
let mut xml = String::new();
|
||||
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
r#"<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
|
||||
)
|
||||
.unwrap();
|
||||
let mut result = s3_xml::ListBucketResult {
|
||||
xmlns: (),
|
||||
name: s3_xml::Value(query.bucket.to_string()),
|
||||
prefix: uriencode_maybe(&query.prefix, query.urlencode_resp),
|
||||
marker: None,
|
||||
next_marker: None,
|
||||
start_after: None,
|
||||
continuation_token: None,
|
||||
next_continuation_token: None,
|
||||
max_keys: s3_xml::IntValue(query.max_keys as i64),
|
||||
delimiter: query
|
||||
.delimiter
|
||||
.as_ref()
|
||||
.map(|x| uriencode_maybe(x, query.urlencode_resp)),
|
||||
encoding_type: match query.urlencode_resp {
|
||||
true => Some(s3_xml::Value("url".to_string())),
|
||||
false => None,
|
||||
},
|
||||
|
||||
writeln!(&mut xml, "\t<Name>{}</Name>", query.bucket).unwrap();
|
||||
|
||||
// TODO: in V1, is this supposed to be urlencoded when encoding-type is URL??
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<Prefix>{}</Prefix>",
|
||||
xml_encode_key(&query.prefix, query.urlencode_resp),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
if let Some(delim) = &query.delimiter {
|
||||
// TODO: in V1, is this supposed to be urlencoded when encoding-type is URL??
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<Delimiter>{}</Delimiter>",
|
||||
xml_encode_key(delim, query.urlencode_resp),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
writeln!(&mut xml, "\t<MaxKeys>{}</MaxKeys>", query.max_keys).unwrap();
|
||||
if query.urlencode_resp {
|
||||
writeln!(&mut xml, "\t<EncodingType>url</EncodingType>").unwrap();
|
||||
}
|
||||
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<KeyCount>{}</KeyCount>",
|
||||
result_keys.len() + result_common_prefixes.len()
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<IsTruncated>{}</IsTruncated>",
|
||||
truncated.is_some()
|
||||
)
|
||||
.unwrap();
|
||||
key_count: Some(s3_xml::IntValue(
|
||||
result_keys.len() as i64 + result_common_prefixes.len() as i64,
|
||||
)),
|
||||
is_truncated: s3_xml::Value(format!("{}", truncated.is_some())),
|
||||
contents: vec![],
|
||||
common_prefixes: vec![],
|
||||
};
|
||||
|
||||
if query.is_v2 {
|
||||
if let Some(ct) = &query.continuation_token {
|
||||
writeln!(&mut xml, "\t<ContinuationToken>{}</ContinuationToken>", ct).unwrap();
|
||||
result.continuation_token = Some(s3_xml::Value(ct.to_string()));
|
||||
}
|
||||
if let Some(sa) = &query.start_after {
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<StartAfter>{}</StartAfter>",
|
||||
xml_encode_key(sa, query.urlencode_resp)
|
||||
)
|
||||
.unwrap();
|
||||
result.start_after = Some(uriencode_maybe(sa, query.urlencode_resp));
|
||||
}
|
||||
if let Some(nct) = truncated {
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<NextContinuationToken>{}</NextContinuationToken>",
|
||||
base64::encode(nct.as_bytes())
|
||||
)
|
||||
.unwrap();
|
||||
result.next_continuation_token = Some(s3_xml::Value(base64::encode(nct.as_bytes())));
|
||||
}
|
||||
} else {
|
||||
// TODO: are these supposed to be urlencoded when encoding-type is URL??
|
||||
if let Some(mkr) = &query.marker {
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<Marker>{}</Marker>",
|
||||
xml_encode_key(mkr, query.urlencode_resp)
|
||||
)
|
||||
.unwrap();
|
||||
result.marker = Some(uriencode_maybe(mkr, query.urlencode_resp));
|
||||
}
|
||||
if let Some(next_marker) = truncated {
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<NextMarker>{}</NextMarker>",
|
||||
xml_encode_key(&next_marker, query.urlencode_resp)
|
||||
)
|
||||
.unwrap();
|
||||
result.next_marker = Some(uriencode_maybe(&next_marker, query.urlencode_resp));
|
||||
}
|
||||
}
|
||||
|
||||
for (key, info) in result_keys.iter() {
|
||||
let last_modif = msec_to_rfc3339(info.last_modified);
|
||||
writeln!(&mut xml, "\t<Contents>").unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t\t<Key>{}</Key>",
|
||||
xml_encode_key(key, query.urlencode_resp),
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(&mut xml, "\t\t<LastModified>{}</LastModified>", last_modif).unwrap();
|
||||
writeln!(&mut xml, "\t\t<Size>{}</Size>", info.size).unwrap();
|
||||
if !info.etag.is_empty() {
|
||||
writeln!(&mut xml, "\t\t<ETag>\"{}\"</ETag>", info.etag).unwrap();
|
||||
}
|
||||
writeln!(&mut xml, "\t\t<StorageClass>STANDARD</StorageClass>").unwrap();
|
||||
writeln!(&mut xml, "\t</Contents>").unwrap();
|
||||
result.contents.push(s3_xml::ListBucketItem {
|
||||
key: uriencode_maybe(key, query.urlencode_resp),
|
||||
last_modified: s3_xml::Value(msec_to_rfc3339(info.last_modified)),
|
||||
size: s3_xml::IntValue(info.size as i64),
|
||||
etag: s3_xml::Value(info.etag.to_string()),
|
||||
storage_class: s3_xml::Value("STANDARD".to_string()),
|
||||
});
|
||||
}
|
||||
|
||||
for pfx in result_common_prefixes.iter() {
|
||||
writeln!(&mut xml, "\t<CommonPrefixes>").unwrap();
|
||||
//TODO: in V1, are these urlencoded when urlencode_resp is true ?? (proably)
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t\t<Prefix>{}</Prefix>",
|
||||
xml_encode_key(pfx, query.urlencode_resp),
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(&mut xml, "\t</CommonPrefixes>").unwrap();
|
||||
result.common_prefixes.push(s3_xml::CommonPrefix {
|
||||
prefix: uriencode_maybe(pfx, query.urlencode_resp),
|
||||
});
|
||||
}
|
||||
|
||||
writeln!(&mut xml, "</ListBucketResult>").unwrap();
|
||||
debug!("{}", xml);
|
||||
let xml = s3_xml::to_xml_with_header(&result)?;
|
||||
|
||||
Ok(Response::builder()
|
||||
.header("Content-Type", "application/xml")
|
||||
.body(Body::from(xml.into_bytes()))?)
|
||||
}
|
||||
|
||||
fn uriencode_maybe(s: &str, yes: bool) -> s3_xml::Value {
|
||||
if yes {
|
||||
s3_xml::Value(uri_encode(s, true))
|
||||
} else {
|
||||
s3_xml::Value(s.to_string())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
use std::collections::{BTreeMap, VecDeque};
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use futures::stream::*;
|
||||
|
@ -18,8 +17,8 @@ use garage_model::garage::Garage;
|
|||
use garage_model::object_table::*;
|
||||
use garage_model::version_table::*;
|
||||
|
||||
use crate::encoding::*;
|
||||
use crate::error::*;
|
||||
use crate::s3_xml;
|
||||
use crate::signature::verify_signed_content;
|
||||
|
||||
pub async fn handle_put(
|
||||
|
@ -339,22 +338,13 @@ pub async fn handle_create_multipart_upload(
|
|||
garage.version_table.insert(&version).await?;
|
||||
|
||||
// Send success response
|
||||
let mut xml = String::new();
|
||||
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
r#"<InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(&mut xml, "\t<Bucket>{}</Bucket>", bucket).unwrap();
|
||||
writeln!(&mut xml, "\t<Key>{}</Key>", xml_escape(key)).unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<UploadId>{}</UploadId>",
|
||||
hex::encode(version_uuid)
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(&mut xml, "</InitiateMultipartUploadResult>").unwrap();
|
||||
let result = s3_xml::InitiateMultipartUploadResult {
|
||||
xmlns: (),
|
||||
bucket: s3_xml::Value(bucket.to_string()),
|
||||
key: s3_xml::Value(key.to_string()),
|
||||
upload_id: s3_xml::Value(hex::encode(version_uuid)),
|
||||
};
|
||||
let xml = s3_xml::to_xml_with_header(&result)?;
|
||||
|
||||
Ok(Response::new(Body::from(xml.into_bytes())))
|
||||
}
|
||||
|
@ -520,7 +510,7 @@ pub async fn handle_complete_multipart_upload(
|
|||
ObjectVersionMeta {
|
||||
headers,
|
||||
size: total_size,
|
||||
etag,
|
||||
etag: etag.clone(),
|
||||
},
|
||||
version.blocks.items()[0].1.hash,
|
||||
));
|
||||
|
@ -529,22 +519,14 @@ pub async fn handle_complete_multipart_upload(
|
|||
garage.object_table.insert(&final_object).await?;
|
||||
|
||||
// Send response saying ok we're done
|
||||
let mut xml = String::new();
|
||||
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
r#"<CompleteMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
&mut xml,
|
||||
"\t<Location>{}</Location>",
|
||||
garage.config.s3_api.s3_region
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(&mut xml, "\t<Bucket>{}</Bucket>", bucket).unwrap();
|
||||
writeln!(&mut xml, "\t<Key>{}</Key>", xml_escape(&key)).unwrap();
|
||||
writeln!(&mut xml, "</CompleteMultipartUploadResult>").unwrap();
|
||||
let result = s3_xml::CompleteMultipartUploadResult {
|
||||
xmlns: (),
|
||||
location: None,
|
||||
bucket: s3_xml::Value(bucket),
|
||||
key: s3_xml::Value(key),
|
||||
etag: s3_xml::Value(etag),
|
||||
};
|
||||
let xml = s3_xml::to_xml_with_header(&result)?;
|
||||
|
||||
Ok(Response::new(Body::from(xml.into_bytes())))
|
||||
}
|
||||
|
|
597
src/api/s3_xml.rs
Normal file
597
src/api/s3_xml.rs
Normal file
|
@ -0,0 +1,597 @@
|
|||
use quick_xml::se::to_string;
|
||||
use serde::{Serialize, Serializer};
|
||||
|
||||
use crate::Error as ApiError;
|
||||
|
||||
pub fn to_xml_with_header<T: Serialize>(x: &T) -> Result<String, ApiError> {
|
||||
let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string();
|
||||
xml.push_str(&to_string(x)?);
|
||||
Ok(xml)
|
||||
}
|
||||
|
||||
fn xmlns_tag<S: Serializer>(_v: &(), s: S) -> Result<S::Ok, S::Error> {
|
||||
s.serialize_str("http://s3.amazonaws.com/doc/2006-03-01/")
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct Value(#[serde(rename = "$value")] pub String);
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct IntValue(#[serde(rename = "$value")] pub i64);
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct Bucket {
|
||||
#[serde(rename = "CreationDate")]
|
||||
pub creation_date: Value,
|
||||
#[serde(rename = "Name")]
|
||||
pub name: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct DisplayName(#[serde(rename = "$value")] pub String);
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct Id(#[serde(rename = "$value")] pub String);
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct Owner {
|
||||
#[serde(rename = "DisplayName")]
|
||||
pub display_name: Value,
|
||||
#[serde(rename = "ID")]
|
||||
pub id: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct BucketList {
|
||||
#[serde(rename = "Bucket")]
|
||||
pub entries: Vec<Bucket>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct ListAllMyBucketsResult {
|
||||
#[serde(rename = "Buckets")]
|
||||
pub buckets: BucketList,
|
||||
#[serde(rename = "Owner")]
|
||||
pub owner: Owner,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct LocationConstraint {
|
||||
#[serde(serialize_with = "xmlns_tag")]
|
||||
pub xmlns: (),
|
||||
#[serde(rename = "$value")]
|
||||
pub region: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct Deleted {
|
||||
#[serde(rename = "Key")]
|
||||
pub key: Value,
|
||||
#[serde(rename = "VersionId")]
|
||||
pub version_id: Value,
|
||||
#[serde(rename = "DeleteMarkerVersionId")]
|
||||
pub delete_marker_version_id: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct Error {
|
||||
#[serde(rename = "Code")]
|
||||
pub code: Value,
|
||||
#[serde(rename = "Message")]
|
||||
pub message: Value,
|
||||
#[serde(rename = "Resource")]
|
||||
pub resource: Option<Value>,
|
||||
#[serde(rename = "Region")]
|
||||
pub region: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct DeleteError {
|
||||
#[serde(rename = "Code")]
|
||||
pub code: Value,
|
||||
#[serde(rename = "Key")]
|
||||
pub key: Option<Value>,
|
||||
#[serde(rename = "Message")]
|
||||
pub message: Value,
|
||||
#[serde(rename = "VersionId")]
|
||||
pub version_id: Option<Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct DeleteResult {
|
||||
#[serde(serialize_with = "xmlns_tag")]
|
||||
pub xmlns: (),
|
||||
#[serde(rename = "Deleted")]
|
||||
pub deleted: Vec<Deleted>,
|
||||
#[serde(rename = "Error")]
|
||||
pub errors: Vec<DeleteError>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct CopyObjectResult {
|
||||
#[serde(rename = "LastModified")]
|
||||
pub last_modified: Value,
|
||||
#[serde(rename = "ETag")]
|
||||
pub etag: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct InitiateMultipartUploadResult {
|
||||
#[serde(serialize_with = "xmlns_tag")]
|
||||
pub xmlns: (),
|
||||
#[serde(rename = "Bucket")]
|
||||
pub bucket: Value,
|
||||
#[serde(rename = "Key")]
|
||||
pub key: Value,
|
||||
#[serde(rename = "UploadId")]
|
||||
pub upload_id: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct CompleteMultipartUploadResult {
|
||||
#[serde(serialize_with = "xmlns_tag")]
|
||||
pub xmlns: (),
|
||||
#[serde(rename = "Location")]
|
||||
pub location: Option<Value>,
|
||||
#[serde(rename = "Bucket")]
|
||||
pub bucket: Value,
|
||||
#[serde(rename = "Key")]
|
||||
pub key: Value,
|
||||
#[serde(rename = "ETag")]
|
||||
pub etag: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct ListBucketItem {
|
||||
#[serde(rename = "Key")]
|
||||
pub key: Value,
|
||||
#[serde(rename = "LastModified")]
|
||||
pub last_modified: Value,
|
||||
#[serde(rename = "ETag")]
|
||||
pub etag: Value,
|
||||
#[serde(rename = "Size")]
|
||||
pub size: IntValue,
|
||||
#[serde(rename = "StorageClass")]
|
||||
pub storage_class: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct CommonPrefix {
|
||||
#[serde(rename = "Prefix")]
|
||||
pub prefix: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, PartialEq)]
|
||||
pub struct ListBucketResult {
|
||||
#[serde(serialize_with = "xmlns_tag")]
|
||||
pub xmlns: (),
|
||||
#[serde(rename = "Name")]
|
||||
pub name: Value,
|
||||
#[serde(rename = "Prefix")]
|
||||
pub prefix: Value,
|
||||
#[serde(rename = "Marker")]
|
||||
pub marker: Option<Value>,
|
||||
#[serde(rename = "NextMarker")]
|
||||
pub next_marker: Option<Value>,
|
||||
#[serde(rename = "StartAfter")]
|
||||
pub start_after: Option<Value>,
|
||||
#[serde(rename = "ContinuationToken")]
|
||||
pub continuation_token: Option<Value>,
|
||||
#[serde(rename = "NextContinuationToken")]
|
||||
pub next_continuation_token: Option<Value>,
|
||||
#[serde(rename = "KeyCount")]
|
||||
pub key_count: Option<IntValue>,
|
||||
#[serde(rename = "MaxKeys")]
|
||||
pub max_keys: IntValue,
|
||||
#[serde(rename = "Delimiter")]
|
||||
pub delimiter: Option<Value>,
|
||||
#[serde(rename = "EncodingType")]
|
||||
pub encoding_type: Option<Value>,
|
||||
#[serde(rename = "IsTruncated")]
|
||||
pub is_truncated: Value,
|
||||
#[serde(rename = "Contents")]
|
||||
pub contents: Vec<ListBucketItem>,
|
||||
#[serde(rename = "CommonPrefixes")]
|
||||
pub common_prefixes: Vec<CommonPrefix>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use garage_util::time::*;
|
||||
|
||||
#[test]
|
||||
fn error_message() -> Result<(), ApiError> {
|
||||
let error = Error {
|
||||
code: Value("TestError".to_string()),
|
||||
message: Value("A dummy error message".to_string()),
|
||||
resource: Some(Value("/bucket/a/plop".to_string())),
|
||||
region: Some(Value("garage".to_string())),
|
||||
};
|
||||
assert_eq!(
|
||||
to_xml_with_header(&error)?,
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
||||
<Error>\
|
||||
<Code>TestError</Code>\
|
||||
<Message>A dummy error message</Message>\
|
||||
<Resource>/bucket/a/plop</Resource>\
|
||||
<Region>garage</Region>\
|
||||
</Error>"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_all_my_buckets_result() -> Result<(), ApiError> {
|
||||
let list_buckets = ListAllMyBucketsResult {
|
||||
owner: Owner {
|
||||
display_name: Value("owner_name".to_string()),
|
||||
id: Value("qsdfjklm".to_string()),
|
||||
},
|
||||
buckets: BucketList {
|
||||
entries: vec![
|
||||
Bucket {
|
||||
creation_date: Value(msec_to_rfc3339(0)),
|
||||
name: Value("bucket_A".to_string()),
|
||||
},
|
||||
Bucket {
|
||||
creation_date: Value(msec_to_rfc3339(3600 * 24 * 1000)),
|
||||
name: Value("bucket_B".to_string()),
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
assert_eq!(
|
||||
to_xml_with_header(&list_buckets)?,
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
||||
<ListAllMyBucketsResult>\
|
||||
<Buckets>\
|
||||
<Bucket>\
|
||||
<CreationDate>1970-01-01T00:00:00.000Z</CreationDate>\
|
||||
<Name>bucket_A</Name>\
|
||||
</Bucket>\
|
||||
<Bucket>\
|
||||
<CreationDate>1970-01-02T00:00:00.000Z</CreationDate>\
|
||||
<Name>bucket_B</Name>\
|
||||
</Bucket>\
|
||||
</Buckets>\
|
||||
<Owner>\
|
||||
<DisplayName>owner_name</DisplayName>\
|
||||
<ID>qsdfjklm</ID>\
|
||||
</Owner>\
|
||||
</ListAllMyBucketsResult>"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_bucket_location_result() -> Result<(), ApiError> {
|
||||
let get_bucket_location = LocationConstraint {
|
||||
xmlns: (),
|
||||
region: "garage".to_string(),
|
||||
};
|
||||
assert_eq!(
|
||||
to_xml_with_header(&get_bucket_location)?,
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
||||
<LocationConstraint xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">garage</LocationConstraint>"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn delete_result() -> Result<(), ApiError> {
|
||||
let delete_result = DeleteResult {
|
||||
xmlns: (),
|
||||
deleted: vec![
|
||||
Deleted {
|
||||
key: Value("a/plop".to_string()),
|
||||
version_id: Value("qsdfjklm".to_string()),
|
||||
delete_marker_version_id: Value("wxcvbn".to_string()),
|
||||
},
|
||||
Deleted {
|
||||
key: Value("b/plip".to_string()),
|
||||
version_id: Value("1234".to_string()),
|
||||
delete_marker_version_id: Value("4321".to_string()),
|
||||
},
|
||||
],
|
||||
errors: vec![
|
||||
DeleteError {
|
||||
code: Value("NotFound".to_string()),
|
||||
key: Some(Value("c/plap".to_string())),
|
||||
message: Value("Object c/plap not found".to_string()),
|
||||
version_id: None,
|
||||
},
|
||||
DeleteError {
|
||||
code: Value("Forbidden".to_string()),
|
||||
key: Some(Value("d/plep".to_string())),
|
||||
message: Value("Not authorized".to_string()),
|
||||
version_id: Some(Value("789".to_string())),
|
||||
},
|
||||
],
|
||||
};
|
||||
assert_eq!(
|
||||
to_xml_with_header(&delete_result)?,
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
||||
<DeleteResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\
|
||||
<Deleted>\
|
||||
<Key>a/plop</Key>\
|
||||
<VersionId>qsdfjklm</VersionId>\
|
||||
<DeleteMarkerVersionId>wxcvbn</DeleteMarkerVersionId>\
|
||||
</Deleted>\
|
||||
<Deleted>\
|
||||
<Key>b/plip</Key>\
|
||||
<VersionId>1234</VersionId>\
|
||||
<DeleteMarkerVersionId>4321</DeleteMarkerVersionId>\
|
||||
</Deleted>\
|
||||
<Error>\
|
||||
<Code>NotFound</Code>\
|
||||
<Key>c/plap</Key>\
|
||||
<Message>Object c/plap not found</Message>\
|
||||
</Error>\
|
||||
<Error>\
|
||||
<Code>Forbidden</Code>\
|
||||
<Key>d/plep</Key>\
|
||||
<Message>Not authorized</Message>\
|
||||
<VersionId>789</VersionId>\
|
||||
</Error>\
|
||||
</DeleteResult>"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn copy_object_result() -> Result<(), ApiError> {
|
||||
let copy_result = CopyObjectResult {
|
||||
last_modified: Value(msec_to_rfc3339(0)),
|
||||
etag: Value("9b2cf535f27731c974343645a3985328".to_string()),
|
||||
};
|
||||
assert_eq!(
|
||||
to_xml_with_header(©_result)?,
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
|
||||
<CopyObjectResult>\
|
||||
<LastModified>1970-01-01T00:00:00.000Z</LastModified>\
|
||||
<ETag>9b2cf535f27731c974343645a3985328</ETag>\
|
||||
</CopyObjectResult>\
|
||||
"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
fn initiate_multipart_upload_result() -> Result<(), ApiError> {
	// An InitiateMultipartUploadResult must carry the S3 namespace and
	// serialize bucket, key and upload id in that order.
	let upload = InitiateMultipartUploadResult {
		xmlns: (),
		bucket: Value("mybucket".to_string()),
		key: Value("a/plop".to_string()),
		upload_id: Value("azerty".to_string()),
	};
	let expected = concat!(
		"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
		"<InitiateMultipartUploadResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">",
		"<Bucket>mybucket</Bucket>",
		"<Key>a/plop</Key>",
		"<UploadId>azerty</UploadId>",
		"</InitiateMultipartUploadResult>",
	);
	assert_eq!(to_xml_with_header(&upload)?, expected);
	Ok(())
}
|
||||
|
||||
#[test]
fn complete_multipart_upload_result() -> Result<(), ApiError> {
	// CompleteMultipartUploadResult with its optional Location set;
	// the multipart-style ETag ("<md5>-<parts>") must pass through unchanged.
	let completed = CompleteMultipartUploadResult {
		xmlns: (),
		location: Some(Value("https://garage.tld/mybucket/a/plop".to_string())),
		bucket: Value("mybucket".to_string()),
		key: Value("a/plop".to_string()),
		etag: Value("3858f62230ac3c915f300c664312c11f-9".to_string()),
	};
	let expected = concat!(
		"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
		"<CompleteMultipartUploadResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">",
		"<Location>https://garage.tld/mybucket/a/plop</Location>",
		"<Bucket>mybucket</Bucket>",
		"<Key>a/plop</Key>",
		"<ETag>3858f62230ac3c915f300c664312c11f-9</ETag>",
		"</CompleteMultipartUploadResult>",
	);
	assert_eq!(to_xml_with_header(&completed)?, expected);
	Ok(())
}
|
||||
|
||||
#[test]
fn list_objects_v1_1() -> Result<(), ApiError> {
	// ListObjects (v1) response: one object plus one common prefix.
	// All v2-only / unset optional fields are None and must be absent
	// from the serialized output.
	let listing = ListBucketResult {
		xmlns: (),
		name: Value("example-bucket".to_string()),
		prefix: Value("".to_string()),
		marker: Some(Value("".to_string())),
		next_marker: None,
		start_after: None,
		continuation_token: None,
		next_continuation_token: None,
		key_count: None,
		max_keys: IntValue(1000),
		encoding_type: None,
		delimiter: Some(Value("/".to_string())),
		is_truncated: Value("false".to_string()),
		contents: vec![ListBucketItem {
			key: Value("sample.jpg".to_string()),
			last_modified: Value(msec_to_rfc3339(0)),
			etag: Value("bf1d737a4d46a19f3bced6905cc8b902".to_string()),
			size: IntValue(142863),
			storage_class: Value("STANDARD".to_string()),
		}],
		common_prefixes: vec![CommonPrefix {
			prefix: Value("photos/".to_string()),
		}],
	};
	let expected = concat!(
		"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
		"<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">",
		"<Name>example-bucket</Name>",
		"<Prefix></Prefix>",
		"<Marker></Marker>",
		"<MaxKeys>1000</MaxKeys>",
		"<Delimiter>/</Delimiter>",
		"<IsTruncated>false</IsTruncated>",
		"<Contents>",
		"<Key>sample.jpg</Key>",
		"<LastModified>1970-01-01T00:00:00.000Z</LastModified>",
		"<ETag>bf1d737a4d46a19f3bced6905cc8b902</ETag>",
		"<Size>142863</Size>",
		"<StorageClass>STANDARD</StorageClass>",
		"</Contents>",
		"<CommonPrefixes>",
		"<Prefix>photos/</Prefix>",
		"</CommonPrefixes>",
		"</ListBucketResult>",
	);
	assert_eq!(to_xml_with_header(&listing)?, expected);
	Ok(())
}
|
||||
|
||||
#[test]
fn list_objects_v1_2() -> Result<(), ApiError> {
	// ListObjects (v1) response with no objects and two common prefixes:
	// an empty `contents` vec must produce no <Contents> element at all,
	// while each common prefix gets its own <CommonPrefixes> wrapper.
	let listing = ListBucketResult {
		xmlns: (),
		name: Value("example-bucket".to_string()),
		prefix: Value("photos/2006/".to_string()),
		marker: Some(Value("".to_string())),
		next_marker: None,
		start_after: None,
		continuation_token: None,
		next_continuation_token: None,
		key_count: None,
		max_keys: IntValue(1000),
		delimiter: Some(Value("/".to_string())),
		encoding_type: None,
		is_truncated: Value("false".to_string()),
		contents: vec![],
		common_prefixes: vec![
			CommonPrefix {
				prefix: Value("photos/2006/February/".to_string()),
			},
			CommonPrefix {
				prefix: Value("photos/2006/January/".to_string()),
			},
		],
	};
	let expected = concat!(
		"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
		"<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">",
		"<Name>example-bucket</Name>",
		"<Prefix>photos/2006/</Prefix>",
		"<Marker></Marker>",
		"<MaxKeys>1000</MaxKeys>",
		"<Delimiter>/</Delimiter>",
		"<IsTruncated>false</IsTruncated>",
		"<CommonPrefixes>",
		"<Prefix>photos/2006/February/</Prefix>",
		"</CommonPrefixes>",
		"<CommonPrefixes>",
		"<Prefix>photos/2006/January/</Prefix>",
		"</CommonPrefixes>",
		"</ListBucketResult>",
	);
	assert_eq!(to_xml_with_header(&listing)?, expected);
	Ok(())
}
|
||||
|
||||
#[test]
fn list_objects_v2_1() -> Result<(), ApiError> {
	// ListObjectsV2 response using StartAfter; v1-only fields (Marker)
	// are None and must be omitted, and an empty common_prefixes vec
	// must produce no <CommonPrefixes> element.
	let listing = ListBucketResult {
		xmlns: (),
		name: Value("quotes".to_string()),
		prefix: Value("E".to_string()),
		marker: None,
		next_marker: None,
		start_after: Some(Value("ExampleGuide.pdf".to_string())),
		continuation_token: None,
		next_continuation_token: None,
		key_count: None,
		max_keys: IntValue(3),
		delimiter: None,
		encoding_type: None,
		is_truncated: Value("false".to_string()),
		contents: vec![ListBucketItem {
			key: Value("ExampleObject.txt".to_string()),
			last_modified: Value(msec_to_rfc3339(0)),
			etag: Value("599bab3ed2c697f1d26842727561fd94".to_string()),
			size: IntValue(857),
			storage_class: Value("REDUCED_REDUNDANCY".to_string()),
		}],
		common_prefixes: vec![],
	};
	let expected = concat!(
		"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
		"<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">",
		"<Name>quotes</Name>",
		"<Prefix>E</Prefix>",
		"<StartAfter>ExampleGuide.pdf</StartAfter>",
		"<MaxKeys>3</MaxKeys>",
		"<IsTruncated>false</IsTruncated>",
		"<Contents>",
		"<Key>ExampleObject.txt</Key>",
		"<LastModified>1970-01-01T00:00:00.000Z</LastModified>",
		"<ETag>599bab3ed2c697f1d26842727561fd94</ETag>",
		"<Size>857</Size>",
		"<StorageClass>REDUCED_REDUNDANCY</StorageClass>",
		"</Contents>",
		"</ListBucketResult>",
	);
	assert_eq!(to_xml_with_header(&listing)?, expected);
	Ok(())
}
|
||||
|
||||
#[test]
fn list_objects_v2_2() -> Result<(), ApiError> {
	// ListObjectsV2 response exercising the continuation-token fields
	// and KeyCount; the base64-like token must be emitted verbatim
	// (no URL-encoding, no XML mangling).
	let listing = ListBucketResult {
		xmlns: (),
		name: Value("bucket".to_string()),
		prefix: Value("".to_string()),
		marker: None,
		next_marker: None,
		start_after: None,
		continuation_token: Some(Value(
			"1ueGcxLPRx1Tr/XYExHnhbYLgveDs2J/wm36Hy4vbOwM=".to_string(),
		)),
		next_continuation_token: Some(Value("qsdfjklm".to_string())),
		key_count: Some(IntValue(112)),
		max_keys: IntValue(1000),
		delimiter: None,
		encoding_type: None,
		is_truncated: Value("false".to_string()),
		contents: vec![ListBucketItem {
			key: Value("happyfacex.jpg".to_string()),
			last_modified: Value(msec_to_rfc3339(0)),
			etag: Value("70ee1738b6b21e2c8a43f3a5ab0eee71".to_string()),
			size: IntValue(1111),
			storage_class: Value("STANDARD".to_string()),
		}],
		common_prefixes: vec![],
	};
	let expected = concat!(
		"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
		"<ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">",
		"<Name>bucket</Name>",
		"<Prefix></Prefix>",
		"<ContinuationToken>1ueGcxLPRx1Tr/XYExHnhbYLgveDs2J/wm36Hy4vbOwM=</ContinuationToken>",
		"<NextContinuationToken>qsdfjklm</NextContinuationToken>",
		"<KeyCount>112</KeyCount>",
		"<MaxKeys>1000</MaxKeys>",
		"<IsTruncated>false</IsTruncated>",
		"<Contents>",
		"<Key>happyfacex.jpg</Key>",
		"<LastModified>1970-01-01T00:00:00.000Z</LastModified>",
		"<ETag>70ee1738b6b21e2c8a43f3a5ab0eee71</ETag>",
		"<Size>1111</Size>",
		"<StorageClass>STANDARD</StorageClass>",
		"</Contents>",
		"</ListBucketResult>",
	);
	assert_eq!(to_xml_with_header(&listing)?, expected);
	Ok(())
}
|
||||
}
|
|
@ -16,5 +16,5 @@ pub fn msec_to_rfc3339(msecs: u64) -> String {
|
|||
let secs = msecs as i64 / 1000;
|
||||
let nanos = (msecs as i64 % 1000) as u32 * 1_000_000;
|
||||
let timestamp = Utc.timestamp(secs, nanos);
|
||||
timestamp.to_rfc3339_opts(SecondsFormat::Secs, true)
|
||||
timestamp.to_rfc3339_opts(SecondsFormat::Millis, true)
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue