2020-04-26 16:22:33 +00:00
|
|
|
use std::collections::{BTreeMap, BTreeSet};
|
2020-04-24 18:47:11 +00:00
|
|
|
use std::fmt::Write;
|
|
|
|
use std::sync::Arc;
|
|
|
|
|
2020-04-24 18:56:00 +00:00
|
|
|
use chrono::{DateTime, NaiveDateTime, SecondsFormat, Utc};
|
2020-07-07 15:15:53 +00:00
|
|
|
use hyper::{Body, Response};
|
2020-04-24 18:47:11 +00:00
|
|
|
|
|
|
|
use garage_util::error::Error;
|
|
|
|
|
2020-07-07 11:59:22 +00:00
|
|
|
use garage_model::garage::Garage;
|
2020-07-08 15:33:24 +00:00
|
|
|
use garage_model::object_table::*;
|
2020-04-24 18:47:11 +00:00
|
|
|
|
2020-04-28 10:18:14 +00:00
|
|
|
use crate::encoding::*;
|
2020-04-24 18:47:11 +00:00
|
|
|
|
|
|
|
/// Per-key metadata accumulated while scanning a bucket for a List request,
/// used later to render one `<Contents>` entry in the XML response.
#[derive(Debug)]
struct ListResultInfo {
	// Timestamp of the data-bearing version (`version.timestamp`);
	// assumed to be milliseconds since epoch, since it is divided by 1000
	// before being formatted as an RFC 3339 date — TODO confirm unit.
	last_modified: u64,
	// Object size in bytes, taken from the version's metadata (`meta.size`).
	size: u64,
}
|
|
|
|
|
|
|
|
pub async fn handle_list(
|
|
|
|
garage: Arc<Garage>,
|
|
|
|
bucket: &str,
|
|
|
|
delimiter: &str,
|
|
|
|
max_keys: usize,
|
|
|
|
prefix: &str,
|
2020-04-28 10:18:14 +00:00
|
|
|
marker: Option<&str>,
|
|
|
|
urlencode_resp: bool,
|
2020-07-07 15:15:53 +00:00
|
|
|
) -> Result<Response<Body>, Error> {
|
2020-04-26 16:22:33 +00:00
|
|
|
let mut result_keys = BTreeMap::<String, ListResultInfo>::new();
|
|
|
|
let mut result_common_prefixes = BTreeSet::<String>::new();
|
2020-05-01 14:30:50 +00:00
|
|
|
|
2020-04-28 10:18:14 +00:00
|
|
|
let mut next_chunk_start = marker.unwrap_or(prefix).to_string();
|
2020-04-24 18:47:11 +00:00
|
|
|
|
2020-04-24 19:27:27 +00:00
|
|
|
debug!("List request: `{}` {} `{}`", delimiter, max_keys, prefix);
|
2020-04-24 18:47:11 +00:00
|
|
|
|
2020-05-01 15:52:35 +00:00
|
|
|
let truncated;
|
|
|
|
'query_loop: loop {
|
2020-04-24 18:47:11 +00:00
|
|
|
let objects = garage
|
|
|
|
.object_table
|
|
|
|
.get_range(
|
|
|
|
&bucket.to_string(),
|
|
|
|
Some(next_chunk_start.clone()),
|
|
|
|
Some(()),
|
2020-05-01 15:52:35 +00:00
|
|
|
max_keys + 1,
|
2020-04-24 18:47:11 +00:00
|
|
|
)
|
|
|
|
.await?;
|
2020-05-01 14:30:50 +00:00
|
|
|
debug!(
|
|
|
|
"List: get range {} (max {}), results: {}",
|
|
|
|
next_chunk_start,
|
2020-05-01 15:52:35 +00:00
|
|
|
max_keys + 1,
|
2020-05-01 14:30:50 +00:00
|
|
|
objects.len()
|
|
|
|
);
|
|
|
|
|
2020-04-24 18:47:11 +00:00
|
|
|
for object in objects.iter() {
|
2020-05-01 14:30:50 +00:00
|
|
|
if !object.key.starts_with(prefix) {
|
|
|
|
truncated = false;
|
2020-05-01 15:52:35 +00:00
|
|
|
break 'query_loop;
|
2020-05-01 14:30:50 +00:00
|
|
|
}
|
2020-04-26 18:55:13 +00:00
|
|
|
if let Some(version) = object.versions().iter().find(|x| x.is_data()) {
|
2020-05-04 13:09:23 +00:00
|
|
|
if result_keys.len() + result_common_prefixes.len() >= max_keys {
|
|
|
|
truncated = true;
|
|
|
|
break 'query_loop;
|
|
|
|
}
|
2020-04-26 16:22:33 +00:00
|
|
|
let common_prefix = if delimiter.len() > 0 {
|
|
|
|
let relative_key = &object.key[prefix.len()..];
|
2020-04-24 20:28:15 +00:00
|
|
|
match relative_key.find(delimiter) {
|
2020-04-26 18:55:13 +00:00
|
|
|
Some(i) => Some(&object.key[..prefix.len() + i + delimiter.len()]),
|
2020-04-26 16:22:33 +00:00
|
|
|
None => None,
|
2020-04-24 20:28:15 +00:00
|
|
|
}
|
|
|
|
} else {
|
2020-04-26 16:22:33 +00:00
|
|
|
None
|
2020-04-24 18:47:11 +00:00
|
|
|
};
|
2020-04-26 16:22:33 +00:00
|
|
|
if let Some(pfx) = common_prefix {
|
|
|
|
result_common_prefixes.insert(pfx.to_string());
|
|
|
|
} else {
|
2020-07-08 15:33:24 +00:00
|
|
|
let size = match &version.state {
|
|
|
|
ObjectVersionState::Complete(ObjectVersionData::Inline(meta, _)) => meta.size,
|
|
|
|
ObjectVersionState::Complete(ObjectVersionData::FirstBlock(meta, _)) => meta.size,
|
|
|
|
_ => unreachable!(),
|
|
|
|
};
|
2020-04-26 16:22:33 +00:00
|
|
|
let info = match result_keys.get(&object.key) {
|
|
|
|
None => ListResultInfo {
|
|
|
|
last_modified: version.timestamp,
|
2020-07-08 15:33:24 +00:00
|
|
|
size,
|
2020-04-26 16:22:33 +00:00
|
|
|
},
|
2020-04-26 18:55:13 +00:00
|
|
|
Some(_lri) => {
|
|
|
|
return Err(Error::Message(format!("Duplicate key?? {}", object.key)))
|
|
|
|
}
|
2020-04-26 16:22:33 +00:00
|
|
|
};
|
|
|
|
result_keys.insert(object.key.clone(), info);
|
2020-04-24 18:47:11 +00:00
|
|
|
};
|
|
|
|
}
|
|
|
|
}
|
2020-05-01 15:52:35 +00:00
|
|
|
if objects.len() < max_keys + 1 {
|
2020-05-04 13:09:23 +00:00
|
|
|
truncated = false;
|
|
|
|
break 'query_loop;
|
2020-04-24 18:47:11 +00:00
|
|
|
}
|
|
|
|
if objects.len() > 0 {
|
|
|
|
next_chunk_start = objects[objects.len() - 1].key.clone();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut xml = String::new();
|
|
|
|
writeln!(&mut xml, r#"<?xml version="1.0" encoding="UTF-8"?>"#).unwrap();
|
|
|
|
writeln!(
|
|
|
|
&mut xml,
|
|
|
|
r#"<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">"#
|
|
|
|
)
|
|
|
|
.unwrap();
|
|
|
|
writeln!(&mut xml, "\t<Bucket>{}</Bucket>", bucket).unwrap();
|
|
|
|
writeln!(&mut xml, "\t<Prefix>{}</Prefix>", prefix).unwrap();
|
2020-04-26 16:22:33 +00:00
|
|
|
writeln!(&mut xml, "\t<KeyCount>{}</KeyCount>", result_keys.len()).unwrap();
|
2020-04-24 18:47:11 +00:00
|
|
|
writeln!(&mut xml, "\t<MaxKeys>{}</MaxKeys>", max_keys).unwrap();
|
|
|
|
writeln!(&mut xml, "\t<IsTruncated>{}</IsTruncated>", truncated).unwrap();
|
2020-04-26 16:22:33 +00:00
|
|
|
for (key, info) in result_keys.iter() {
|
2020-04-24 18:47:11 +00:00
|
|
|
let last_modif = NaiveDateTime::from_timestamp(info.last_modified as i64 / 1000, 0);
|
|
|
|
let last_modif = DateTime::<Utc>::from_utc(last_modif, Utc);
|
|
|
|
let last_modif = last_modif.to_rfc3339_opts(SecondsFormat::Millis, true);
|
|
|
|
writeln!(&mut xml, "\t<Contents>").unwrap();
|
2020-04-28 10:18:14 +00:00
|
|
|
writeln!(
|
|
|
|
&mut xml,
|
|
|
|
"\t\t<Key>{}</Key>",
|
2020-04-28 10:35:04 +00:00
|
|
|
xml_escape(key),
|
|
|
|
//xml_encode_key(key, urlencode_resp) // doesn't work with nextcloud, wtf
|
2020-04-28 10:18:14 +00:00
|
|
|
)
|
|
|
|
.unwrap();
|
2020-04-24 18:47:11 +00:00
|
|
|
writeln!(&mut xml, "\t\t<LastModified>{}</LastModified>", last_modif).unwrap();
|
|
|
|
writeln!(&mut xml, "\t\t<Size>{}</Size>", info.size).unwrap();
|
|
|
|
writeln!(&mut xml, "\t\t<StorageClass>STANDARD</StorageClass>").unwrap();
|
|
|
|
writeln!(&mut xml, "\t</Contents>").unwrap();
|
|
|
|
}
|
2020-04-26 16:22:33 +00:00
|
|
|
if result_common_prefixes.len() > 0 {
|
|
|
|
writeln!(&mut xml, "\t<CommonPrefixes>").unwrap();
|
|
|
|
for pfx in result_common_prefixes.iter() {
|
2020-04-28 10:18:14 +00:00
|
|
|
writeln!(
|
|
|
|
&mut xml,
|
2020-05-01 14:30:50 +00:00
|
|
|
"\t\t<Prefix>{}</Prefix>",
|
2020-04-28 10:35:04 +00:00
|
|
|
xml_escape(pfx),
|
|
|
|
//xml_encode_key(pfx, urlencode_resp)
|
2020-04-28 10:18:14 +00:00
|
|
|
)
|
|
|
|
.unwrap();
|
2020-04-26 16:22:33 +00:00
|
|
|
}
|
|
|
|
writeln!(&mut xml, "\t</CommonPrefixes>").unwrap();
|
|
|
|
}
|
2020-04-24 18:47:11 +00:00
|
|
|
writeln!(&mut xml, "</ListBucketResult>").unwrap();
|
2020-05-01 14:30:50 +00:00
|
|
|
println!("{}", xml);
|
2020-04-24 18:47:11 +00:00
|
|
|
|
2020-07-07 15:15:53 +00:00
|
|
|
Ok(Response::new(Body::from(xml.into_bytes())))
|
2020-04-26 18:55:13 +00:00
|
|
|
}
|