diff --git a/Cargo.lock b/Cargo.lock
index aa3fb4cf..f7c86808 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -374,6 +374,7 @@ dependencies = [
"httpdate 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"sha2 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)",
"url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
diff --git a/src/api/Cargo.toml b/src/api/Cargo.toml
index 0da02338..11a42be9 100644
--- a/src/api/Cargo.toml
+++ b/src/api/Cargo.toml
@@ -30,5 +30,5 @@ http = "0.2"
hyper = "0.13"
url = "2.1"
httpdate = "0.3"
-
+percent-encoding = "2.1.0"
diff --git a/src/api/api_server.rs b/src/api/api_server.rs
index af331a39..642697da 100644
--- a/src/api/api_server.rs
+++ b/src/api/api_server.rs
@@ -73,7 +73,11 @@ async fn handler_inner(
req: Request<Body>,
) -> Result<Response<BodyType>, Error> {
let path = req.uri().path().to_string();
- let (bucket, key) = parse_bucket_key(path.as_str())?;
+ let path = percent_encoding::percent_decode_str(&path)
+ .decode_utf8()
+ .map_err(|e| Error::BadRequest(format!("Invalid utf8 key ({})", e)))?;
+
+ let (bucket, key) = parse_bucket_key(&path)?;
if bucket.len() == 0 {
return Err(Error::Forbidden(format!(
"Operations on buckets not allowed"
@@ -120,7 +124,12 @@ async fn handler_inner(
} else if req.headers().contains_key("x-amz-copy-source") {
// CopyObject query
let copy_source = req.headers().get("x-amz-copy-source").unwrap().to_str()?;
- let (source_bucket, source_key) = parse_bucket_key(copy_source)?;
+ let copy_source = percent_encoding::percent_decode_str(&copy_source)
+ .decode_utf8()
+ .map_err(|e| {
+ Error::BadRequest(format!("Invalid utf8 copy_source ({})", e))
+ })?;
+ let (source_bucket, source_key) = parse_bucket_key(&copy_source)?;
if !api_key.allow_read(&source_bucket) {
return Err(Error::Forbidden(format!(
"Reading from bucket {} not allowed for this key",
@@ -145,8 +154,7 @@ async fn handler_inner(
} else {
// DeleteObject query
let version_uuid = handle_delete(garage, &bucket, &key).await?;
- let response = format!("{}\n", hex::encode(version_uuid));
- Ok(Response::new(Box::new(BytesBody::from(response))))
+ Ok(put_response(version_uuid))
}
}
&Method::POST => {
@@ -170,9 +178,14 @@ async fn handler_inner(
}
} else {
match req.method() {
- &Method::PUT | &Method::HEAD => {
- // If PUT: CreateBucket, if HEAD: HeadBucket
+ &Method::PUT => {
+ // CreateBucket
// If we're here, the bucket already exists, so just answer ok
+ println!(
+ "Body: {}",
+ std::str::from_utf8(&hyper::body::to_bytes(req.into_body()).await?)
+ .unwrap_or("")
+ );
let empty_body: BodyType = Box::new(BytesBody::from(vec![]));
let response = Response::builder()
.header("Location", format!("/{}", bucket))
@@ -180,6 +193,12 @@ async fn handler_inner(
.unwrap();
Ok(response)
}
+ &Method::HEAD => {
+ // HeadBucket
+ let empty_body: BodyType = Box::new(BytesBody::from(vec![]));
+ let response = Response::builder().body(empty_body).unwrap();
+ Ok(response)
+ }
&Method::DELETE => {
// DeleteBucket query
Err(Error::Forbidden(
@@ -187,38 +206,32 @@ async fn handler_inner(
))
}
&Method::GET => {
- if params.contains_key(&"prefix".to_string()) {
- // ListObjects query
- let delimiter = params.get("delimiter").map(|x| x.as_str()).unwrap_or(&"");
- let max_keys = params
- .get("max-keys")
- .map(|x| {
- x.parse::<usize>().map_err(|e| {
- Error::BadRequest(format!("Invalid value for max-keys: {}", e))
- })
+ // ListObjects query
+ let delimiter = params.get("delimiter").map(|x| x.as_str()).unwrap_or(&"");
+ let max_keys = params
+ .get("max-keys")
+ .map(|x| {
+ x.parse::<usize>().map_err(|e| {
+ Error::BadRequest(format!("Invalid value for max-keys: {}", e))
})
- .unwrap_or(Ok(1000))?;
- let prefix = params.get("prefix").unwrap();
- let urlencode_resp = params
- .get("encoding-type")
- .map(|x| x == "url")
- .unwrap_or(false);
- let marker = params.get("marker").map(String::as_str);
- Ok(handle_list(
- garage,
- bucket,
- delimiter,
- max_keys,
- prefix,
- marker,
- urlencode_resp,
- )
- .await?)
- } else {
- Err(Error::BadRequest(format!(
- "Not a list call, so what is it?"
- )))
- }
+ })
+ .unwrap_or(Ok(1000))?;
+ let prefix = params.get("prefix").map(|x| x.as_str()).unwrap_or(&"");
+ let urlencode_resp = params
+ .get("encoding-type")
+ .map(|x| x == "url")
+ .unwrap_or(false);
+ let marker = params.get("marker").map(String::as_str);
+ Ok(handle_list(
+ garage,
+ bucket,
+ delimiter,
+ max_keys,
+ prefix,
+ marker,
+ urlencode_resp,
+ )
+ .await?)
}
_ => Err(Error::BadRequest(format!("Invalid method"))),
}
@@ -229,7 +242,14 @@ fn parse_bucket_key(path: &str) -> Result<(&str, Option<&str>), Error> {
let path = path.trim_start_matches('/');
match path.find('/') {
- Some(i) => Ok((&path[..i], Some(&path[i + 1..]))),
+ Some(i) => {
+ let key = &path[i + 1..];
+ if key.len() > 0 {
+ Ok((&path[..i], Some(key)))
+ } else {
+ Ok((&path[..i], None))
+ }
+ }
None => Ok((path, None)),
}
}
diff --git a/src/api/http_util.rs b/src/api/http_util.rs
index 029b7020..8a8cf9d8 100644
--- a/src/api/http_util.rs
+++ b/src/api/http_util.rs
@@ -82,3 +82,7 @@ impl From<Vec<u8>> for BytesBody {
Self::new(Bytes::from(x))
}
}
+
+pub fn empty_body() -> BodyType {
+ Box::new(BytesBody::from(vec![]))
+}
diff --git a/src/api/s3_list.rs b/src/api/s3_list.rs
index ffde609f..df792da2 100644
--- a/src/api/s3_list.rs
+++ b/src/api/s3_list.rs
@@ -29,6 +29,7 @@ pub async fn handle_list(
) -> Result<Response<BodyType>, Error> {
let mut result_keys = BTreeMap::<String, ListResultInfo>::new();
let mut result_common_prefixes = BTreeSet::<String>::new();
+
let mut truncated = true;
let mut next_chunk_start = marker.unwrap_or(prefix).to_string();
@@ -44,12 +45,19 @@ pub async fn handle_list(
max_keys,
)
.await?;
+ debug!(
+ "List: get range {} (max {}), results: {}",
+ next_chunk_start,
+ max_keys,
+ objects.len()
+ );
+
for object in objects.iter() {
+ if !object.key.starts_with(prefix) {
+ truncated = false;
+ break;
+ }
if let Some(version) = object.versions().iter().find(|x| x.is_data()) {
- if !object.key.starts_with(prefix) {
- truncated = false;
- break;
- }
let common_prefix = if delimiter.len() > 0 {
let relative_key = &object.key[prefix.len()..];
match relative_key.find(delimiter) {
@@ -117,7 +125,7 @@ pub async fn handle_list(
for pfx in result_common_prefixes.iter() {
writeln!(
&mut xml,
- "\t{}",
+ "\t\t{}",
xml_escape(pfx),
//xml_encode_key(pfx, urlencode_resp)
)
@@ -126,6 +134,7 @@ pub async fn handle_list(
writeln!(&mut xml, "\t").unwrap();
}
writeln!(&mut xml, "").unwrap();
+ println!("{}", xml);
Ok(Response::new(Box::new(BytesBody::from(xml.into_bytes()))))
}
diff --git a/src/api/s3_put.rs b/src/api/s3_put.rs
index e6df5bc0..e1e4c02d 100644
--- a/src/api/s3_put.rs
+++ b/src/api/s3_put.rs
@@ -195,9 +195,11 @@ impl BodyChunker {
}
}
-fn put_response(version_uuid: UUID) -> Response<BodyType> {
- let resp_bytes = format!("{}\n", hex::encode(version_uuid));
- Response::new(Box::new(BytesBody::from(resp_bytes)))
+pub fn put_response(version_uuid: UUID) -> Response<BodyType> {
+ Response::builder()
+ .header("x-amz-version-id", hex::encode(version_uuid))
+ .body(empty_body())
+ .unwrap()
}
pub async fn handle_create_multipart_upload(