diff --git a/src/api/encoding.rs b/src/api/encoding.rs
index b3fbbe3..e286a78 100644
--- a/src/api/encoding.rs
+++ b/src/api/encoding.rs
@@ -1,13 +1,5 @@
//! Module containing various helpers for encoding
-/// Escape &str for xml inclusion
-pub fn xml_escape(s: &str) -> String {
- s.replace("&", "&amp;")
- .replace("<", "&lt;")
- .replace(">", "&gt;")
- .replace("\"", "&quot;")
-}
-
/// Encode &str for use in a URI
pub fn uri_encode(string: &str, encode_slash: bool) -> String {
let mut result = String::with_capacity(string.len() * 2);
@@ -28,12 +20,3 @@ pub fn uri_encode(string: &str, encode_slash: bool) -> String {
}
result
}
-
-/// Encode &str either as an uri, or a valid string for xml inclusion
-pub fn xml_encode_key(k: &str, urlencode: bool) -> String {
- if urlencode {
- uri_encode(k, true)
- } else {
- xml_escape(k)
- }
-}
diff --git a/src/api/error.rs b/src/api/error.rs
index a49ba21..7d97366 100644
--- a/src/api/error.rs
+++ b/src/api/error.rs
@@ -1,11 +1,9 @@
-use std::fmt::Write;
-
use err_derive::Error;
use hyper::StatusCode;
use garage_util::error::Error as GarageError;
-use crate::encoding::*;
+use crate::s3_xml;
/// Errors of this crate
#[derive(Debug, Error)]
@@ -104,15 +102,22 @@ impl Error {
}
pub fn aws_xml(&self, garage_region: &str, path: &str) -> String {
- let mut xml = String::new();
- writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
- writeln!(&mut xml, "<Error>").unwrap();
- writeln!(&mut xml, "\t<Code>{}</Code>", self.aws_code()).unwrap();
- writeln!(&mut xml, "\t<Message>{}</Message>", self).unwrap();
- writeln!(&mut xml, "\t<Resource>{}</Resource>", xml_escape(path)).unwrap();
- writeln!(&mut xml, "\t<Region>{}</Region>", garage_region).unwrap();
- writeln!(&mut xml, "</Error>").unwrap();
- xml
+ let error = s3_xml::Error {
+ code: s3_xml::Value(self.aws_code().to_string()),
+ message: s3_xml::Value(format!("{}", self)),
+ resource: Some(s3_xml::Value(path.to_string())),
+ region: Some(s3_xml::Value(garage_region.to_string())),
+ };
+ s3_xml::to_xml_with_header(&error).unwrap_or_else(|_| {
+ r#"
+<?xml version="1.0" encoding="UTF-8"?>
+<Error>
+	<Code>InternalError</Code>
+	<Message>XML encoding of error failed</Message>
+</Error>
+	"#
+ .into()
+ })
}
}
diff --git a/src/api/lib.rs b/src/api/lib.rs
index 6c6447d..9a23c23 100644
--- a/src/api/lib.rs
+++ b/src/api/lib.rs
@@ -18,3 +18,4 @@ mod s3_delete;
pub mod s3_get;
mod s3_list;
mod s3_put;
+mod s3_xml;
diff --git a/src/api/s3_bucket.rs b/src/api/s3_bucket.rs
index e7d8969..88d13ca 100644
--- a/src/api/s3_bucket.rs
+++ b/src/api/s3_bucket.rs
@@ -1,73 +1,20 @@
-use std::fmt::Write;
use std::sync::Arc;
use hyper::{Body, Response};
-use quick_xml::se::to_string;
-use serde::Serialize;
use garage_model::garage::Garage;
use garage_model::key_table::Key;
use garage_util::time::*;
use crate::error::*;
-
-#[derive(Debug, Serialize, PartialEq)]
-struct CreationDate {
- #[serde(rename = "$value")]
- pub body: String,
-}
-#[derive(Debug, Serialize, PartialEq)]
-struct Name {
- #[serde(rename = "$value")]
- pub body: String,
-}
-#[derive(Debug, Serialize, PartialEq)]
-struct Bucket {
- #[serde(rename = "CreationDate")]
- pub creation_date: CreationDate,
- #[serde(rename = "Name")]
- pub name: Name,
-}
-#[derive(Debug, Serialize, PartialEq)]
-struct DisplayName {
- #[serde(rename = "$value")]
- pub body: String,
-}
-#[derive(Debug, Serialize, PartialEq)]
-struct Id {
- #[serde(rename = "$value")]
- pub body: String,
-}
-#[derive(Debug, Serialize, PartialEq)]
-struct Owner {
- #[serde(rename = "DisplayName")]
- display_name: DisplayName,
- #[serde(rename = "ID")]
- id: Id,
-}
-#[derive(Debug, Serialize, PartialEq)]
-struct BucketList {
- #[serde(rename = "Bucket")]
- pub entries: Vec<Bucket>,
-}
-#[derive(Debug, Serialize, PartialEq)]
-struct ListAllMyBucketsResult {
- #[serde(rename = "Buckets")]
- buckets: BucketList,
- #[serde(rename = "Owner")]
- owner: Owner,
-}
+use crate::s3_xml;
pub fn handle_get_bucket_location(garage: Arc<Garage>) -> Result<Response<Body>, Error> {
- let mut xml = String::new();
-
- writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
- writeln!(
- &mut xml,
- r#"<LocationConstraint xmlns="http://s3.amazonaws.com/doc/2006-03-01/">{}</LocationConstraint>"#,
- garage.config.s3_api.s3_region
- )
- .unwrap();
+ let loc = s3_xml::LocationConstraint {
+ xmlns: (),
+ region: garage.config.s3_api.s3_region.to_string(),
+ };
+ let xml = s3_xml::to_xml_with_header(&loc)?;
Ok(Response::builder()
.header("Content-Type", "application/xml")
@@ -75,34 +22,25 @@ pub fn handle_get_bucket_location(garage: Arc) -> Result,
}
pub fn handle_list_buckets(api_key: &Key) -> Result<Response<Body>, Error> {
- let list_buckets = ListAllMyBucketsResult {
- owner: Owner {
- display_name: DisplayName {
- body: api_key.name.get().to_string(),
- },
- id: Id {
- body: api_key.key_id.to_string(),
- },
+ let list_buckets = s3_xml::ListAllMyBucketsResult {
+ owner: s3_xml::Owner {
+ display_name: s3_xml::Value(api_key.name.get().to_string()),
+ id: s3_xml::Value(api_key.key_id.to_string()),
},
- buckets: BucketList {
+ buckets: s3_xml::BucketList {
entries: api_key
.authorized_buckets
.items()
.iter()
- .map(|(name, ts, _)| Bucket {
- creation_date: CreationDate {
- body: msec_to_rfc3339(*ts),
- },
- name: Name {
- body: name.to_string(),
- },
+ .map(|(name, ts, _)| s3_xml::Bucket {
+ creation_date: s3_xml::Value(msec_to_rfc3339(*ts)),
+ name: s3_xml::Value(name.to_string()),
})
.collect(),
},
};
- let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string();
- xml.push_str(&to_string(&list_buckets)?);
+ let xml = s3_xml::to_xml_with_header(&list_buckets)?;
trace!("xml: {}", xml);
Ok(Response::builder()
diff --git a/src/api/s3_copy.rs b/src/api/s3_copy.rs
index 7069489..9ade698 100644
--- a/src/api/s3_copy.rs
+++ b/src/api/s3_copy.rs
@@ -1,4 +1,3 @@
-use std::fmt::Write;
use std::sync::Arc;
use hyper::{Body, Request, Response};
@@ -14,6 +13,7 @@ use garage_model::version_table::*;
use crate::error::*;
use crate::s3_put::get_headers;
+use crate::s3_xml;
pub async fn handle_copy(
garage: Arc<Garage>,
@@ -61,6 +61,8 @@ pub async fn handle_copy(
_ => old_meta.clone(),
};
+ let etag = new_meta.etag.to_string();
+
// Save object copy
match source_last_state {
ObjectVersionData::DeleteMarker => unreachable!(),
@@ -158,13 +160,13 @@ pub async fn handle_copy(
}
let last_modified = msec_to_rfc3339(new_timestamp);
- let mut xml = String::new();
- writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
- writeln!(&mut xml, r#"<CopyObjectResult>"#).unwrap();
- writeln!(&mut xml, "\t<LastModified>{}</LastModified>", last_modified).unwrap();
- writeln!(&mut xml, "</CopyObjectResult>").unwrap();
+ let result = s3_xml::CopyObjectResult {
+ last_modified: s3_xml::Value(last_modified),
+ etag: s3_xml::Value(etag),
+ };
+ let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::builder()
.header("Content-Type", "application/xml")
- .body(Body::from(xml.into_bytes()))?)
+ .body(Body::from(xml))?)
}
diff --git a/src/api/s3_delete.rs b/src/api/s3_delete.rs
index 9d2a67f..a391a33 100644
--- a/src/api/s3_delete.rs
+++ b/src/api/s3_delete.rs
@@ -1,4 +1,3 @@
-use std::fmt::Write;
use std::sync::Arc;
use hyper::{Body, Request, Response};
@@ -9,8 +8,8 @@ use garage_util::time::*;
use garage_model::garage::Garage;
use garage_model::object_table::*;
-use crate::encoding::*;
use crate::error::*;
+use crate::s3_xml;
use crate::signature::verify_signed_content;
async fn handle_delete_internal(
@@ -85,13 +84,8 @@ pub async fn handle_delete_objects(
let cmd_xml = roxmltree::Document::parse(&std::str::from_utf8(&body)?)?;
let cmd = parse_delete_objects_xml(&cmd_xml).ok_or_bad_request("Invalid delete XML query")?;
- let mut retxml = String::new();
- writeln!(&mut retxml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
- writeln!(
- &mut retxml,
- r#"<DeleteResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
- )
- .unwrap();
+ let mut ret_deleted = Vec::new();
+ let mut ret_errors = Vec::new();
for obj in cmd.objects.iter() {
match handle_delete_internal(&garage, bucket, &obj.key).await {
@@ -99,42 +93,32 @@ pub async fn handle_delete_objects(
if cmd.quiet {
continue;
}
- writeln!(&mut retxml, "\t<Deleted>").unwrap();
- writeln!(&mut retxml, "\t\t<Key>{}</Key>", xml_escape(&obj.key)).unwrap();
- writeln!(
- &mut retxml,
- "\t\t<VersionId>{}</VersionId>",
- hex::encode(deleted_version)
- )
- .unwrap();
- writeln!(
- &mut retxml,
- "\t\t<DeleteMarkerVersionId>{}</DeleteMarkerVersionId>",
- hex::encode(delete_marker_version)
- )
- .unwrap();
- writeln!(&mut retxml, "\t</Deleted>").unwrap();
+ ret_deleted.push(s3_xml::Deleted {
+ key: s3_xml::Value(obj.key.clone()),
+ version_id: s3_xml::Value(hex::encode(deleted_version)),
+ delete_marker_version_id: s3_xml::Value(hex::encode(delete_marker_version)),
+ });
}
Err(e) => {
- writeln!(&mut retxml, "\t<Error>").unwrap();
- writeln!(&mut retxml, "\t\t<Code>{}</Code>", e.http_status_code()).unwrap();
- writeln!(&mut retxml, "\t\t<Key>{}</Key>", xml_escape(&obj.key)).unwrap();
- writeln!(
- &mut retxml,
- "\t\t<Message>{}</Message>",
- xml_escape(&format!("{}", e))
- )
- .unwrap();
- writeln!(&mut retxml, "\t</Error>").unwrap();
+ ret_errors.push(s3_xml::DeleteError {
+ code: s3_xml::Value(e.aws_code().to_string()),
+ key: Some(s3_xml::Value(obj.key.clone())),
+ message: s3_xml::Value(format!("{}", e)),
+ version_id: None,
+ });
}
}
}
- writeln!(&mut retxml, "</DeleteResult>").unwrap();
+ let xml = s3_xml::to_xml_with_header(&s3_xml::DeleteResult {
+ xmlns: (),
+ deleted: ret_deleted,
+ errors: ret_errors,
+ })?;
Ok(Response::builder()
.header("Content-Type", "application/xml")
- .body(Body::from(retxml.into_bytes()))?)
+ .body(Body::from(xml))?)
}
struct DeleteRequest {
diff --git a/src/api/s3_list.rs b/src/api/s3_list.rs
index 80fefd5..384346e 100644
--- a/src/api/s3_list.rs
+++ b/src/api/s3_list.rs
@@ -1,5 +1,4 @@
use std::collections::{BTreeMap, BTreeSet, HashMap};
-use std::fmt::Write;
use std::sync::Arc;
use hyper::{Body, Response};
@@ -14,6 +13,7 @@ use garage_table::DeletedFilter;
use crate::encoding::*;
use crate::error::*;
+use crate::s3_xml;
#[derive(Debug)]
pub struct ListObjectsQuery {
@@ -163,126 +163,81 @@ pub async fn handle_list(
}
}
- let mut xml = String::new();
- writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
- writeln!(
- &mut xml,
- r#"<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
- )
- .unwrap();
+ let mut result = s3_xml::ListBucketResult {
+ xmlns: (),
+ name: s3_xml::Value(query.bucket.to_string()),
+ prefix: uriencode_maybe(&query.prefix, query.urlencode_resp),
+ marker: None,
+ next_marker: None,
+ start_after: None,
+ continuation_token: None,
+ next_continuation_token: None,
+ max_keys: s3_xml::IntValue(query.max_keys as i64),
+ delimiter: query
+ .delimiter
+ .as_ref()
+ .map(|x| uriencode_maybe(x, query.urlencode_resp)),
+ encoding_type: match query.urlencode_resp {
+ true => Some(s3_xml::Value("url".to_string())),
+ false => None,
+ },
- writeln!(&mut xml, "\t{}", query.bucket).unwrap();
-
- // TODO: in V1, is this supposed to be urlencoded when encoding-type is URL??
- writeln!(
- &mut xml,
- "\t{}",
- xml_encode_key(&query.prefix, query.urlencode_resp),
- )
- .unwrap();
-
- if let Some(delim) = &query.delimiter {
- // TODO: in V1, is this supposed to be urlencoded when encoding-type is URL??
- writeln!(
- &mut xml,
- "\t{}",
- xml_encode_key(delim, query.urlencode_resp),
- )
- .unwrap();
- }
-
- writeln!(&mut xml, "\t{}", query.max_keys).unwrap();
- if query.urlencode_resp {
- writeln!(&mut xml, "\turl").unwrap();
- }
-
- writeln!(
- &mut xml,
- "\t{}",
- result_keys.len() + result_common_prefixes.len()
- )
- .unwrap();
- writeln!(
- &mut xml,
- "\t{}",
- truncated.is_some()
- )
- .unwrap();
+ key_count: Some(s3_xml::IntValue(
+ result_keys.len() as i64 + result_common_prefixes.len() as i64,
+ )),
+ is_truncated: s3_xml::Value(format!("{}", truncated.is_some())),
+ contents: vec![],
+ common_prefixes: vec![],
+ };
if query.is_v2 {
if let Some(ct) = &query.continuation_token {
- writeln!(&mut xml, "\t{}", ct).unwrap();
+ result.continuation_token = Some(s3_xml::Value(ct.to_string()));
}
if let Some(sa) = &query.start_after {
- writeln!(
- &mut xml,
- "\t{}",
- xml_encode_key(sa, query.urlencode_resp)
- )
- .unwrap();
+ result.start_after = Some(uriencode_maybe(sa, query.urlencode_resp));
}
if let Some(nct) = truncated {
- writeln!(
- &mut xml,
- "\t{}",
- base64::encode(nct.as_bytes())
- )
- .unwrap();
+ result.next_continuation_token = Some(s3_xml::Value(base64::encode(nct.as_bytes())));
}
} else {
// TODO: are these supposed to be urlencoded when encoding-type is URL??
if let Some(mkr) = &query.marker {
- writeln!(
- &mut xml,
- "\t{}",
- xml_encode_key(mkr, query.urlencode_resp)
- )
- .unwrap();
+ result.marker = Some(uriencode_maybe(mkr, query.urlencode_resp));
}
if let Some(next_marker) = truncated {
- writeln!(
- &mut xml,
- "\t{}",
- xml_encode_key(&next_marker, query.urlencode_resp)
- )
- .unwrap();
+ result.next_marker = Some(uriencode_maybe(&next_marker, query.urlencode_resp));
}
}
for (key, info) in result_keys.iter() {
- let last_modif = msec_to_rfc3339(info.last_modified);
- writeln!(&mut xml, "\t").unwrap();
- writeln!(
- &mut xml,
- "\t\t{}",
- xml_encode_key(key, query.urlencode_resp),
- )
- .unwrap();
- writeln!(&mut xml, "\t\t{}", last_modif).unwrap();
- writeln!(&mut xml, "\t\t{}", info.size).unwrap();
- if !info.etag.is_empty() {
- writeln!(&mut xml, "\t\t\"{}\"", info.etag).unwrap();
- }
- writeln!(&mut xml, "\t\tSTANDARD").unwrap();
- writeln!(&mut xml, "\t").unwrap();
+ result.contents.push(s3_xml::ListBucketItem {
+ key: uriencode_maybe(key, query.urlencode_resp),
+ last_modified: s3_xml::Value(msec_to_rfc3339(info.last_modified)),
+ size: s3_xml::IntValue(info.size as i64),
+ etag: s3_xml::Value(info.etag.to_string()),
+ storage_class: s3_xml::Value("STANDARD".to_string()),
+ });
}
for pfx in result_common_prefixes.iter() {
- writeln!(&mut xml, "\t").unwrap();
//TODO: in V1, are these urlencoded when urlencode_resp is true ?? (proably)
- writeln!(
- &mut xml,
- "\t\t{}",
- xml_encode_key(pfx, query.urlencode_resp),
- )
- .unwrap();
- writeln!(&mut xml, "\t").unwrap();
+ result.common_prefixes.push(s3_xml::CommonPrefix {
+ prefix: uriencode_maybe(pfx, query.urlencode_resp),
+ });
}
- writeln!(&mut xml, "</ListBucketResult>").unwrap();
- debug!("{}", xml);
+ let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::builder()
.header("Content-Type", "application/xml")
.body(Body::from(xml.into_bytes()))?)
}
+
+fn uriencode_maybe(s: &str, yes: bool) -> s3_xml::Value {
+ if yes {
+ s3_xml::Value(uri_encode(s, true))
+ } else {
+ s3_xml::Value(s.to_string())
+ }
+}
diff --git a/src/api/s3_put.rs b/src/api/s3_put.rs
index bb6cf57..aa28523 100644
--- a/src/api/s3_put.rs
+++ b/src/api/s3_put.rs
@@ -1,5 +1,4 @@
use std::collections::{BTreeMap, VecDeque};
-use std::fmt::Write;
use std::sync::Arc;
use futures::stream::*;
@@ -18,8 +17,8 @@ use garage_model::garage::Garage;
use garage_model::object_table::*;
use garage_model::version_table::*;
-use crate::encoding::*;
use crate::error::*;
+use crate::s3_xml;
use crate::signature::verify_signed_content;
pub async fn handle_put(
@@ -339,22 +338,13 @@ pub async fn handle_create_multipart_upload(
garage.version_table.insert(&version).await?;
// Send success response
- let mut xml = String::new();
- writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
- writeln!(
- &mut xml,
- r#"<InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
- )
- .unwrap();
- writeln!(&mut xml, "\t<Bucket>{}</Bucket>", bucket).unwrap();
- writeln!(&mut xml, "\t<Key>{}</Key>", xml_escape(key)).unwrap();
- writeln!(
- &mut xml,
- "\t<UploadId>{}</UploadId>",
- hex::encode(version_uuid)
- )
- .unwrap();
- writeln!(&mut xml, "</InitiateMultipartUploadResult>").unwrap();
+ let result = s3_xml::InitiateMultipartUploadResult {
+ xmlns: (),
+ bucket: s3_xml::Value(bucket.to_string()),
+ key: s3_xml::Value(key.to_string()),
+ upload_id: s3_xml::Value(hex::encode(version_uuid)),
+ };
+ let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::new(Body::from(xml.into_bytes())))
}
@@ -520,7 +510,7 @@ pub async fn handle_complete_multipart_upload(
ObjectVersionMeta {
headers,
size: total_size,
- etag,
+ etag: etag.clone(),
},
version.blocks.items()[0].1.hash,
));
@@ -529,22 +519,14 @@ pub async fn handle_complete_multipart_upload(
garage.object_table.insert(&final_object).await?;
// Send response saying ok we're done
- let mut xml = String::new();
- writeln!(&mut xml, r#""#).unwrap();
- writeln!(
- &mut xml,
- r#""#
- )
- .unwrap();
- writeln!(
- &mut xml,
- "\t{}",
- garage.config.s3_api.s3_region
- )
- .unwrap();
- writeln!(&mut xml, "\t{}", bucket).unwrap();
- writeln!(&mut xml, "\t{}", xml_escape(&key)).unwrap();
- writeln!(&mut xml, "").unwrap();
+ let result = s3_xml::CompleteMultipartUploadResult {
+ xmlns: (),
+ location: None,
+ bucket: s3_xml::Value(bucket),
+ key: s3_xml::Value(key),
+ etag: s3_xml::Value(etag),
+ };
+ let xml = s3_xml::to_xml_with_header(&result)?;
Ok(Response::new(Body::from(xml.into_bytes())))
}
diff --git a/src/api/s3_xml.rs b/src/api/s3_xml.rs
new file mode 100644
index 0000000..ba9b908
--- /dev/null
+++ b/src/api/s3_xml.rs
@@ -0,0 +1,597 @@
+use quick_xml::se::to_string;
+use serde::{Serialize, Serializer};
+
+use crate::Error as ApiError;
+
+pub fn to_xml_with_header<T: Serialize>(x: &T) -> Result<String, ApiError> {
+ let mut xml = r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string();
+ xml.push_str(&to_string(x)?);
+ Ok(xml)
+}
+
+fn xmlns_tag<S: Serializer>(_v: &(), s: S) -> Result<S::Ok, S::Error> {
+ s.serialize_str("http://s3.amazonaws.com/doc/2006-03-01/")
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct Value(#[serde(rename = "$value")] pub String);
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct IntValue(#[serde(rename = "$value")] pub i64);
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct Bucket {
+ #[serde(rename = "CreationDate")]
+ pub creation_date: Value,
+ #[serde(rename = "Name")]
+ pub name: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct DisplayName(#[serde(rename = "$value")] pub String);
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct Id(#[serde(rename = "$value")] pub String);
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct Owner {
+ #[serde(rename = "DisplayName")]
+ pub display_name: Value,
+ #[serde(rename = "ID")]
+ pub id: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct BucketList {
+ #[serde(rename = "Bucket")]
+ pub entries: Vec<Bucket>,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct ListAllMyBucketsResult {
+ #[serde(rename = "Buckets")]
+ pub buckets: BucketList,
+ #[serde(rename = "Owner")]
+ pub owner: Owner,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct LocationConstraint {
+ #[serde(serialize_with = "xmlns_tag")]
+ pub xmlns: (),
+ #[serde(rename = "$value")]
+ pub region: String,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct Deleted {
+ #[serde(rename = "Key")]
+ pub key: Value,
+ #[serde(rename = "VersionId")]
+ pub version_id: Value,
+ #[serde(rename = "DeleteMarkerVersionId")]
+ pub delete_marker_version_id: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct Error {
+ #[serde(rename = "Code")]
+ pub code: Value,
+ #[serde(rename = "Message")]
+ pub message: Value,
+ #[serde(rename = "Resource")]
+ pub resource: Option<Value>,
+ #[serde(rename = "Region")]
+ pub region: Option<Value>,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct DeleteError {
+ #[serde(rename = "Code")]
+ pub code: Value,
+ #[serde(rename = "Key")]
+ pub key: Option<Value>,
+ #[serde(rename = "Message")]
+ pub message: Value,
+ #[serde(rename = "VersionId")]
+ pub version_id: Option<Value>,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct DeleteResult {
+ #[serde(serialize_with = "xmlns_tag")]
+ pub xmlns: (),
+ #[serde(rename = "Deleted")]
+ pub deleted: Vec<Deleted>,
+ #[serde(rename = "Error")]
+ pub errors: Vec<DeleteError>,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct CopyObjectResult {
+ #[serde(rename = "LastModified")]
+ pub last_modified: Value,
+ #[serde(rename = "ETag")]
+ pub etag: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct InitiateMultipartUploadResult {
+ #[serde(serialize_with = "xmlns_tag")]
+ pub xmlns: (),
+ #[serde(rename = "Bucket")]
+ pub bucket: Value,
+ #[serde(rename = "Key")]
+ pub key: Value,
+ #[serde(rename = "UploadId")]
+ pub upload_id: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct CompleteMultipartUploadResult {
+ #[serde(serialize_with = "xmlns_tag")]
+ pub xmlns: (),
+ #[serde(rename = "Location")]
+ pub location: Option<Value>,
+ #[serde(rename = "Bucket")]
+ pub bucket: Value,
+ #[serde(rename = "Key")]
+ pub key: Value,
+ #[serde(rename = "ETag")]
+ pub etag: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct ListBucketItem {
+ #[serde(rename = "Key")]
+ pub key: Value,
+ #[serde(rename = "LastModified")]
+ pub last_modified: Value,
+ #[serde(rename = "ETag")]
+ pub etag: Value,
+ #[serde(rename = "Size")]
+ pub size: IntValue,
+ #[serde(rename = "StorageClass")]
+ pub storage_class: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct CommonPrefix {
+ #[serde(rename = "Prefix")]
+ pub prefix: Value,
+}
+
+#[derive(Debug, Serialize, PartialEq)]
+pub struct ListBucketResult {
+ #[serde(serialize_with = "xmlns_tag")]
+ pub xmlns: (),
+ #[serde(rename = "Name")]
+ pub name: Value,
+ #[serde(rename = "Prefix")]
+ pub prefix: Value,
+ #[serde(rename = "Marker")]
+ pub marker: Option<Value>,
+ #[serde(rename = "NextMarker")]
+ pub next_marker: Option<Value>,
+ #[serde(rename = "StartAfter")]
+ pub start_after: Option<Value>,
+ #[serde(rename = "ContinuationToken")]
+ pub continuation_token: Option<Value>,
+ #[serde(rename = "NextContinuationToken")]
+ pub next_continuation_token: Option<Value>,
+ #[serde(rename = "KeyCount")]
+ pub key_count: Option<IntValue>,
+ #[serde(rename = "MaxKeys")]
+ pub max_keys: IntValue,
+ #[serde(rename = "Delimiter")]
+ pub delimiter: Option<Value>,
+ #[serde(rename = "EncodingType")]
+ pub encoding_type: Option<Value>,
+ #[serde(rename = "IsTruncated")]
+ pub is_truncated: Value,
+ #[serde(rename = "Contents")]
+ pub contents: Vec<ListBucketItem>,
+ #[serde(rename = "CommonPrefixes")]
+ pub common_prefixes: Vec<CommonPrefix>,
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use garage_util::time::*;
+
+ #[test]
+ fn error_message() -> Result<(), ApiError> {
+ let error = Error {
+ code: Value("TestError".to_string()),
+ message: Value("A dummy error message".to_string()),
+ resource: Some(Value("/bucket/a/plop".to_string())),
+ region: Some(Value("garage".to_string())),
+ };
+ assert_eq!(
+ to_xml_with_header(&error)?,
+ "\
+\
+ TestError
\
+ A dummy error message\
+ /bucket/a/plop\
+ garage\
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn list_all_my_buckets_result() -> Result<(), ApiError> {
+ let list_buckets = ListAllMyBucketsResult {
+ owner: Owner {
+ display_name: Value("owner_name".to_string()),
+ id: Value("qsdfjklm".to_string()),
+ },
+ buckets: BucketList {
+ entries: vec![
+ Bucket {
+ creation_date: Value(msec_to_rfc3339(0)),
+ name: Value("bucket_A".to_string()),
+ },
+ Bucket {
+ creation_date: Value(msec_to_rfc3339(3600 * 24 * 1000)),
+ name: Value("bucket_B".to_string()),
+ },
+ ],
+ },
+ };
+ assert_eq!(
+ to_xml_with_header(&list_buckets)?,
+ "\
+\
+ \
+ \
+ 1970-01-01T00:00:00.000Z\
+ bucket_A\
+ \
+ \
+ 1970-01-02T00:00:00.000Z\
+ bucket_B\
+ \
+ \
+ \
+ owner_name\
+ qsdfjklm\
+ \
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn get_bucket_location_result() -> Result<(), ApiError> {
+ let get_bucket_location = LocationConstraint {
+ xmlns: (),
+ region: "garage".to_string(),
+ };
+ assert_eq!(
+ to_xml_with_header(&get_bucket_location)?,
+ "\
+garage"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn delete_result() -> Result<(), ApiError> {
+ let delete_result = DeleteResult {
+ xmlns: (),
+ deleted: vec![
+ Deleted {
+ key: Value("a/plop".to_string()),
+ version_id: Value("qsdfjklm".to_string()),
+ delete_marker_version_id: Value("wxcvbn".to_string()),
+ },
+ Deleted {
+ key: Value("b/plip".to_string()),
+ version_id: Value("1234".to_string()),
+ delete_marker_version_id: Value("4321".to_string()),
+ },
+ ],
+ errors: vec![
+ DeleteError {
+ code: Value("NotFound".to_string()),
+ key: Some(Value("c/plap".to_string())),
+ message: Value("Object c/plap not found".to_string()),
+ version_id: None,
+ },
+ DeleteError {
+ code: Value("Forbidden".to_string()),
+ key: Some(Value("d/plep".to_string())),
+ message: Value("Not authorized".to_string()),
+ version_id: Some(Value("789".to_string())),
+ },
+ ],
+ };
+ assert_eq!(
+ to_xml_with_header(&delete_result)?,
+ "\
+\
+ \
+ a/plop\
+ qsdfjklm\
+ wxcvbn\
+ \
+ \
+ b/plip\
+ 1234\
+ 4321\
+ \
+ \
+ NotFound
\
+ c/plap\
+ Object c/plap not found\
+ \
+ \
+ Forbidden
\
+ d/plep\
+ Not authorized\
+ 789\
+ \
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn copy_object_result() -> Result<(), ApiError> {
+ let copy_result = CopyObjectResult {
+ last_modified: Value(msec_to_rfc3339(0)),
+ etag: Value("9b2cf535f27731c974343645a3985328".to_string()),
+ };
+ assert_eq!(
+ to_xml_with_header(©_result)?,
+ "\
+\
+ 1970-01-01T00:00:00.000Z\
+ 9b2cf535f27731c974343645a3985328\
+\
+ "
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn initiate_multipart_upload_result() -> Result<(), ApiError> {
+ let result = InitiateMultipartUploadResult {
+ xmlns: (),
+ bucket: Value("mybucket".to_string()),
+ key: Value("a/plop".to_string()),
+ upload_id: Value("azerty".to_string()),
+ };
+ assert_eq!(
+ to_xml_with_header(&result)?,
+ "\
+\
+ mybucket\
+ a/plop\
+ azerty\
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn complete_multipart_upload_result() -> Result<(), ApiError> {
+ let result = CompleteMultipartUploadResult {
+ xmlns: (),
+ location: Some(Value("https://garage.tld/mybucket/a/plop".to_string())),
+ bucket: Value("mybucket".to_string()),
+ key: Value("a/plop".to_string()),
+ etag: Value("3858f62230ac3c915f300c664312c11f-9".to_string()),
+ };
+ assert_eq!(
+ to_xml_with_header(&result)?,
+ "\
+\
+ https://garage.tld/mybucket/a/plop\
+ mybucket\
+ a/plop\
+ 3858f62230ac3c915f300c664312c11f-9\
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn list_objects_v1_1() -> Result<(), ApiError> {
+ let result = ListBucketResult {
+ xmlns: (),
+ name: Value("example-bucket".to_string()),
+ prefix: Value("".to_string()),
+ marker: Some(Value("".to_string())),
+ next_marker: None,
+ start_after: None,
+ continuation_token: None,
+ next_continuation_token: None,
+ key_count: None,
+ max_keys: IntValue(1000),
+ encoding_type: None,
+ delimiter: Some(Value("/".to_string())),
+ is_truncated: Value("false".to_string()),
+ contents: vec![ListBucketItem {
+ key: Value("sample.jpg".to_string()),
+ last_modified: Value(msec_to_rfc3339(0)),
+ etag: Value("bf1d737a4d46a19f3bced6905cc8b902".to_string()),
+ size: IntValue(142863),
+ storage_class: Value("STANDARD".to_string()),
+ }],
+ common_prefixes: vec![CommonPrefix {
+ prefix: Value("photos/".to_string()),
+ }],
+ };
+ assert_eq!(
+ to_xml_with_header(&result)?,
+ "\
+\
+ example-bucket\
+ \
+ \
+ 1000\
+ /\
+ false\
+ \
+ sample.jpg\
+ 1970-01-01T00:00:00.000Z\
+ bf1d737a4d46a19f3bced6905cc8b902\
+ 142863\
+ STANDARD\
+ \
+ \
+ photos/\
+ \
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn list_objects_v1_2() -> Result<(), ApiError> {
+ let result = ListBucketResult {
+ xmlns: (),
+ name: Value("example-bucket".to_string()),
+ prefix: Value("photos/2006/".to_string()),
+ marker: Some(Value("".to_string())),
+ next_marker: None,
+ start_after: None,
+ continuation_token: None,
+ next_continuation_token: None,
+ key_count: None,
+ max_keys: IntValue(1000),
+ delimiter: Some(Value("/".to_string())),
+ encoding_type: None,
+ is_truncated: Value("false".to_string()),
+ contents: vec![],
+ common_prefixes: vec![
+ CommonPrefix {
+ prefix: Value("photos/2006/February/".to_string()),
+ },
+ CommonPrefix {
+ prefix: Value("photos/2006/January/".to_string()),
+ },
+ ],
+ };
+ assert_eq!(
+ to_xml_with_header(&result)?,
+ "\
+\
+ example-bucket\
+ photos/2006/\
+ \
+ 1000\
+ /\
+ false\
+ \
+ photos/2006/February/\
+ \
+ \
+ photos/2006/January/\
+ \
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn list_objects_v2_1() -> Result<(), ApiError> {
+ let result = ListBucketResult {
+ xmlns: (),
+ name: Value("quotes".to_string()),
+ prefix: Value("E".to_string()),
+ marker: None,
+ next_marker: None,
+ start_after: Some(Value("ExampleGuide.pdf".to_string())),
+ continuation_token: None,
+ next_continuation_token: None,
+ key_count: None,
+ max_keys: IntValue(3),
+ delimiter: None,
+ encoding_type: None,
+ is_truncated: Value("false".to_string()),
+ contents: vec![ListBucketItem {
+ key: Value("ExampleObject.txt".to_string()),
+ last_modified: Value(msec_to_rfc3339(0)),
+ etag: Value("599bab3ed2c697f1d26842727561fd94".to_string()),
+ size: IntValue(857),
+ storage_class: Value("REDUCED_REDUNDANCY".to_string()),
+ }],
+ common_prefixes: vec![],
+ };
+ assert_eq!(
+ to_xml_with_header(&result)?,
+ "\
+\
+ quotes\
+ E\
+ ExampleGuide.pdf\
+ 3\
+ false\
+ \
+ ExampleObject.txt\
+ 1970-01-01T00:00:00.000Z\
+ 599bab3ed2c697f1d26842727561fd94\
+ 857\
+ REDUCED_REDUNDANCY\
+ \
+"
+ );
+ Ok(())
+ }
+
+ #[test]
+ fn list_objects_v2_2() -> Result<(), ApiError> {
+ let result = ListBucketResult {
+ xmlns: (),
+ name: Value("bucket".to_string()),
+ prefix: Value("".to_string()),
+ marker: None,
+ next_marker: None,
+ start_after: None,
+ continuation_token: Some(Value(
+ "1ueGcxLPRx1Tr/XYExHnhbYLgveDs2J/wm36Hy4vbOwM=".to_string(),
+ )),
+ next_continuation_token: Some(Value("qsdfjklm".to_string())),
+ key_count: Some(IntValue(112)),
+ max_keys: IntValue(1000),
+ delimiter: None,
+ encoding_type: None,
+ is_truncated: Value("false".to_string()),
+ contents: vec![ListBucketItem {
+ key: Value("happyfacex.jpg".to_string()),
+ last_modified: Value(msec_to_rfc3339(0)),
+ etag: Value("70ee1738b6b21e2c8a43f3a5ab0eee71".to_string()),
+ size: IntValue(1111),
+ storage_class: Value("STANDARD".to_string()),
+ }],
+ common_prefixes: vec![],
+ };
+ assert_eq!(
+ to_xml_with_header(&result)?,
+ "\
+\
+ bucket\
+ \
+ 1ueGcxLPRx1Tr/XYExHnhbYLgveDs2J/wm36Hy4vbOwM=\
+ qsdfjklm\
+ 112\
+ 1000\
+ false\
+ \
+ happyfacex.jpg\
+ 1970-01-01T00:00:00.000Z\
+ 70ee1738b6b21e2c8a43f3a5ab0eee71\
+ 1111\
+ STANDARD\
+ \
+"
+ );
+ Ok(())
+ }
+}
diff --git a/src/util/time.rs b/src/util/time.rs
index dfedcb2..238db2c 100644
--- a/src/util/time.rs
+++ b/src/util/time.rs
@@ -16,5 +16,5 @@ pub fn msec_to_rfc3339(msecs: u64) -> String {
let secs = msecs as i64 / 1000;
let nanos = (msecs as i64 % 1000) as u32 * 1_000_000;
let timestamp = Utc.timestamp(secs, nanos);
- timestamp.to_rfc3339_opts(SecondsFormat::Secs, true)
+ timestamp.to_rfc3339_opts(SecondsFormat::Millis, true)
}