format + WIP calendar-query

Quentin 2024-05-16 17:38:34 +02:00
parent 6b9542088c
commit 32dfd25f57
Signed by: quentin
GPG key ID: E9602264D639FF68
46 changed files with 2453 additions and 1446 deletions

Cargo.lock (generated)

@ -90,6 +90,7 @@ dependencies = [
"http-body-util", "http-body-util",
"hyper 1.2.0", "hyper 1.2.0",
"hyper-util", "hyper-util",
"icalendar",
"imap-codec", "imap-codec",
"imap-flow", "imap-flow",
"quick-xml", "quick-xml",


@ -4,12 +4,12 @@ use anyhow::{anyhow, bail, Result};
use tokio::sync::RwLock; use tokio::sync::RwLock;
use aero_bayou::Bayou; use aero_bayou::Bayou;
use aero_user::login::Credentials;
use aero_user::cryptoblob::{self, gen_key, Key}; use aero_user::cryptoblob::{self, gen_key, Key};
use aero_user::login::Credentials;
use aero_user::storage::{self, BlobRef, BlobVal, Store}; use aero_user::storage::{self, BlobRef, BlobVal, Store};
use crate::davdag::{BlobId, DavDag, IndexEntry, SyncChange, Token};
use crate::unique_ident::*; use crate::unique_ident::*;
use crate::davdag::{DavDag, IndexEntry, Token, BlobId, SyncChange};
pub struct Calendar { pub struct Calendar {
pub(super) id: UniqueIdent, pub(super) id: UniqueIdent,
@ -17,10 +17,7 @@ pub struct Calendar {
} }
impl Calendar { impl Calendar {
pub(crate) async fn open( pub(crate) async fn open(creds: &Credentials, id: UniqueIdent) -> Result<Self> {
creds: &Credentials,
id: UniqueIdent,
) -> Result<Self> {
let bayou_path = format!("calendar/dag/{}", id); let bayou_path = format!("calendar/dag/{}", id);
let cal_path = format!("calendar/events/{}", id); let cal_path = format!("calendar/events/{}", id);
@ -126,7 +123,7 @@ impl CalendarInternal {
async fn put<'a>(&mut self, name: &str, evt: &'a [u8]) -> Result<(Token, IndexEntry)> { async fn put<'a>(&mut self, name: &str, evt: &'a [u8]) -> Result<(Token, IndexEntry)> {
let message_key = gen_key(); let message_key = gen_key();
let blob_id = gen_ident(); let blob_id = gen_ident();
let encrypted_msg_key = cryptoblob::seal(&message_key.as_ref(), &self.encryption_key)?; let encrypted_msg_key = cryptoblob::seal(&message_key.as_ref(), &self.encryption_key)?;
let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_msg_key); let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_msg_key);
@ -138,9 +135,7 @@ impl CalendarInternal {
) )
.with_meta(MESSAGE_KEY.to_string(), key_header); .with_meta(MESSAGE_KEY.to_string(), key_header);
let etag = self.storage let etag = self.storage.blob_insert(blob_val).await?;
.blob_insert(blob_val)
.await?;
// Add entry to Bayou // Add entry to Bayou
let entry: IndexEntry = (blob_id, name.to_string(), etag); let entry: IndexEntry = (blob_id, name.to_string(), etag);
@ -181,7 +176,7 @@ impl CalendarInternal {
let heads = davstate.heads_vec(); let heads = davstate.heads_vec();
let token = match heads.as_slice() { let token = match heads.as_slice() {
[ token ] => *token, [token] => *token,
_ => { _ => {
let op_mg = davstate.op_merge(); let op_mg = davstate.op_merge();
let token = op_mg.token(); let token = op_mg.token();
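A note on the `put` path above: it generates a fresh per-message key, seals it with the calendar's encryption key via `cryptoblob::seal`, and attaches the sealed bytes to the blob as a base64 text header (`MESSAGE_KEY`). A minimal sketch of just that header encoding, using the same base64 engine API; the sealed bytes here are a stand-in, since the real ones come from `cryptoblob::seal`:

```rust
// Sketch only: `sealed` stands in for the output of cryptoblob::seal(...).
use base64::Engine;

fn key_header(sealed: &[u8]) -> String {
    base64::engine::general_purpose::STANDARD.encode(sealed)
}

fn parse_key_header(header: &str) -> Result<Vec<u8>, base64::DecodeError> {
    base64::engine::general_purpose::STANDARD.decode(header)
}

fn main() {
    let sealed = b"not-a-real-sealed-key";
    let header = key_header(sealed);
    assert_eq!(parse_key_header(&header).unwrap(), sealed.to_vec());
    println!("MESSAGE_KEY header = {header}");
}
```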


@ -1,16 +1,16 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use std::collections::{HashMap, BTreeMap}; use std::collections::{BTreeMap, HashMap};
use std::sync::{Weak, Arc}; use std::sync::{Arc, Weak};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use aero_bayou::timestamp::now_msec; use aero_bayou::timestamp::now_msec;
use aero_user::storage;
use aero_user::cryptoblob::{open_deserialize, seal_serialize}; use aero_user::cryptoblob::{open_deserialize, seal_serialize};
use aero_user::storage;
use super::Calendar;
use crate::unique_ident::{gen_ident, UniqueIdent}; use crate::unique_ident::{gen_ident, UniqueIdent};
use crate::user::User; use crate::user::User;
use super::Calendar;
pub(crate) const CAL_LIST_PK: &str = "calendars"; pub(crate) const CAL_LIST_PK: &str = "calendars";
pub(crate) const CAL_LIST_SK: &str = "list"; pub(crate) const CAL_LIST_SK: &str = "list";
@ -46,7 +46,7 @@ impl CalendarNs {
} }
let cal = Arc::new(Calendar::open(&user.creds, id).await?); let cal = Arc::new(Calendar::open(&user.creds, id).await?);
let mut cache = self.0.lock().unwrap(); let mut cache = self.0.lock().unwrap();
if let Some(concurrent_cal) = cache.get(&id).and_then(Weak::upgrade) { if let Some(concurrent_cal) = cache.get(&id).and_then(Weak::upgrade) {
drop(cal); // we worked for nothing but at least we didn't starve someone else drop(cal); // we worked for nothing but at least we didn't starve someone else
@ -117,13 +117,15 @@ impl CalendarNs {
CalendarExists::Created(_) => (), CalendarExists::Created(_) => (),
} }
list.save(user, ct).await?; list.save(user, ct).await?;
Ok(()) Ok(())
} }
/// Has calendar /// Has calendar
pub async fn has(&self, user: &Arc<User>, name: &str) -> Result<bool> { pub async fn has(&self, user: &Arc<User>, name: &str) -> Result<bool> {
CalendarList::load(user).await.map(|(list, _)| list.has(name)) CalendarList::load(user)
.await
.map(|(list, _)| list.has(name))
} }
} }
@ -161,7 +163,8 @@ impl CalendarList {
for v in row_vals { for v in row_vals {
if let storage::Alternative::Value(vbytes) = v { if let storage::Alternative::Value(vbytes) = v {
let list2 = open_deserialize::<CalendarList>(&vbytes, &user.creds.keys.master)?; let list2 =
open_deserialize::<CalendarList>(&vbytes, &user.creds.keys.master)?;
list.merge(list2); list.merge(list2);
} }
} }
@ -200,7 +203,7 @@ impl CalendarList {
/// (Don't forget to save if it returns CalendarExists::Created) /// (Don't forget to save if it returns CalendarExists::Created)
fn create(&mut self, name: &str) -> CalendarExists { fn create(&mut self, name: &str) -> CalendarExists {
if let Some(CalendarListEntry { if let Some(CalendarListEntry {
id_lww: (_, Some(id)) id_lww: (_, Some(id)),
}) = self.0.get(name) }) = self.0.get(name)
{ {
return CalendarExists::Existed(*id); return CalendarExists::Existed(*id);
@ -222,9 +225,10 @@ impl CalendarList {
/// For a given calendar name, get its Unique Identifier /// For a given calendar name, get its Unique Identifier
fn get(&self, name: &str) -> Option<UniqueIdent> { fn get(&self, name: &str) -> Option<UniqueIdent> {
self.0.get(name).map(|CalendarListEntry { self.0
id_lww: (_, ident), .get(name)
}| *ident).flatten() .map(|CalendarListEntry { id_lww: (_, ident) }| *ident)
.flatten()
} }
/// Check if a given calendar name exists /// Check if a given calendar name exists
@ -271,9 +275,7 @@ impl CalendarList {
(now_msec(), id) (now_msec(), id)
} }
} }
Some(CalendarListEntry { Some(CalendarListEntry { id_lww }) => {
id_lww,
}) => {
if id_lww.1 == id { if id_lww.1 == id {
// Entry is already equals to the requested id (Option<UniqueIdent) // Entry is already equals to the requested id (Option<UniqueIdent)
// Nothing to do // Nothing to do
@ -281,20 +283,15 @@ impl CalendarList {
} else { } else {
// Entry does not equal to what we know internally // Entry does not equal to what we know internally
// We update the Last Write Win CRDT here with the new id value // We update the Last Write Win CRDT here with the new id value
( (std::cmp::max(id_lww.0 + 1, now_msec()), id)
std::cmp::max(id_lww.0 + 1, now_msec()),
id,
)
} }
} }
}; };
// If we did not return here, that's because we have to update // If we did not return here, that's because we have to update
// something in our internal index. // something in our internal index.
self.0.insert( self.0
name.into(), .insert(name.into(), CalendarListEntry { id_lww: (ts, id) });
CalendarListEntry { id_lww: (ts, id) },
);
Some(()) Some(())
} }
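The `CalendarListEntry` values touched above behave as last-write-wins registers: each entry carries a millisecond timestamp, a merge keeps the value with the larger timestamp, and a local update bumps its clock to `max(stored + 1, now)` so the new value is guaranteed to win. A self-contained sketch of that rule; the type and helper names are illustrative, not the crate's:

```rust
// Minimal LWW register sketch; `now_msec()` stands in for
// aero_bayou::timestamp::now_msec and the value is a generic payload.
use std::time::{SystemTime, UNIX_EPOCH};

fn now_msec() -> u64 {
    SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64
}

#[derive(Debug, Clone, PartialEq)]
struct Lww<T> {
    ts: u64,
    val: T,
}

impl<T: Clone + PartialEq> Lww<T> {
    fn new(val: T) -> Self {
        Self { ts: now_msec(), val }
    }

    /// Local update: advance the clock past the stored timestamp so the
    /// new value wins any subsequent merge against the old one.
    fn set(&mut self, val: T) {
        self.ts = std::cmp::max(self.ts + 1, now_msec());
        self.val = val;
    }

    /// Merge with a concurrently written replica: highest timestamp wins.
    fn merge(&mut self, other: &Self) {
        if other.ts > self.ts {
            self.ts = other.ts;
            self.val = other.val.clone();
        }
    }
}

fn main() {
    let mut a = Lww::new(Some("cal-1"));
    let mut b = a.clone();
    b.set(Some("cal-2"));
    a.merge(&b);
    assert_eq!(a.val, Some("cal-2"));
}
```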


@ -1,6 +1,6 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use im::{ordset, OrdMap, OrdSet};
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer};
use im::{OrdMap, OrdSet, ordset};
use aero_bayou::*; use aero_bayou::*;
@ -26,7 +26,6 @@ pub struct DavDag {
pub idx_by_filename: OrdMap<FileName, BlobId>, pub idx_by_filename: OrdMap<FileName, BlobId>,
// ------------ Below this line, data is ephemeral, ie. not checkpointed // ------------ Below this line, data is ephemeral, ie. not checkpointed
/// Partial synchronization graph /// Partial synchronization graph
pub ancestors: OrdMap<Token, OrdSet<Token>>, pub ancestors: OrdMap<Token, OrdSet<Token>>,
@ -84,7 +83,7 @@ impl DavDag {
// HELPER functions // HELPER functions
pub fn heads_vec(&self) -> Vec<Token> { pub fn heads_vec(&self) -> Vec<Token> {
self.heads.clone().into_iter().collect() self.heads.clone().into_iter().collect()
} }
/// A sync descriptor /// A sync descriptor
@ -99,7 +98,7 @@ impl DavDag {
// We can't capture all missing events if we are not connected // We can't capture all missing events if we are not connected
// to all sinks of the graph, // to all sinks of the graph,
// ie. if we don't already know all the sinks, // ie. if we don't already know all the sinks,
// ie. if we are missing so much history that // ie. if we are missing so much history that
// the event log has been transformed into a checkpoint // the event log has been transformed into a checkpoint
if !self.origins.is_subset(already_known.clone()) { if !self.origins.is_subset(already_known.clone()) {
bail!("Not enough history to produce a correct diff, a full resync is needed"); bail!("Not enough history to produce a correct diff, a full resync is needed");
@ -124,7 +123,7 @@ impl DavDag {
if all_known.insert(cursor).is_some() { if all_known.insert(cursor).is_some() {
// Item already processed // Item already processed
continue continue;
} }
// Collect parents // Collect parents
@ -167,7 +166,8 @@ impl DavDag {
self.idx_by_filename.remove(filename); self.idx_by_filename.remove(filename);
// Record the change in the ephemeral synchronization map // Record the change in the ephemeral synchronization map
self.change.insert(sync_token, SyncChange::NotFound(filename.to_string())); self.change
.insert(sync_token, SyncChange::NotFound(filename.to_string()));
// Finally clear item from the source of trust // Finally clear item from the source of trust
self.table.remove(blob_id); self.table.remove(blob_id);
@ -179,10 +179,13 @@ impl DavDag {
// --- Update ANCESTORS // --- Update ANCESTORS
// We register ancestors as it is required for the sync algorithm // We register ancestors as it is required for the sync algorithm
self.ancestors.insert(*child, parents.iter().fold(ordset![], |mut acc, p| { self.ancestors.insert(
acc.insert(*p); *child,
acc parents.iter().fold(ordset![], |mut acc, p| {
})); acc.insert(*p);
acc
}),
);
// --- Update ORIGINS // --- Update ORIGINS
// If this event has no parents, it's an origin // If this event has no parents, it's an origin
@ -192,11 +195,13 @@ impl DavDag {
// --- Update HEADS // --- Update HEADS
// Remove from HEADS this event's parents // Remove from HEADS this event's parents
parents.iter().for_each(|par| { self.heads.remove(par); }); parents.iter().for_each(|par| {
self.heads.remove(par);
});
// This event becomes a new HEAD in turn // This event becomes a new HEAD in turn
self.heads.insert(*child); self.heads.insert(*child);
// --- Update ALL NODES // --- Update ALL NODES
self.all_nodes.insert(*child); self.all_nodes.insert(*child);
} }
@ -217,16 +222,16 @@ impl BayouState for DavDag {
fn apply(&self, op: &Self::Op) -> Self { fn apply(&self, op: &Self::Op) -> Self {
let mut new = self.clone(); let mut new = self.clone();
match op { match op {
DavDagOp::Put(sync_desc, entry) => { DavDagOp::Put(sync_desc, entry) => {
new.sync_dag(sync_desc); new.sync_dag(sync_desc);
new.register(Some(sync_desc.1), entry.clone()); new.register(Some(sync_desc.1), entry.clone());
}, }
DavDagOp::Delete(sync_desc, blob_id) => { DavDagOp::Delete(sync_desc, blob_id) => {
new.sync_dag(sync_desc); new.sync_dag(sync_desc);
new.unregister(sync_desc.1, blob_id); new.unregister(sync_desc.1, blob_id);
}, }
DavDagOp::Merge(sync_desc) => { DavDagOp::Merge(sync_desc) => {
new.sync_dag(sync_desc); new.sync_dag(sync_desc);
} }
@ -252,7 +257,9 @@ impl<'de> Deserialize<'de> for DavDag {
let mut davdag = DavDag::default(); let mut davdag = DavDag::default();
// Build the table + index // Build the table + index
val.items.into_iter().for_each(|entry| davdag.register(None, entry)); val.items
.into_iter()
.for_each(|entry| davdag.register(None, entry));
// Initialize the synchronization DAG with its roots // Initialize the synchronization DAG with its roots
val.heads.into_iter().for_each(|ident| { val.heads.into_iter().for_each(|ident| {
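The `register`/`sync_dag` bookkeeping above maintains three views of the synchronization DAG: `ancestors` (child to parent set), `origins` (nodes with no parents), and `heads` (nodes that are nobody's parent yet). A rough sketch of those updates using std collections instead of the `im` crate; `Token` is simplified to an integer for illustration:

```rust
// Illustrative DAG bookkeeping, mirroring the ancestors/origins/heads updates
// in the diff; Token is simplified to a u64 instead of a UniqueIdent.
use std::collections::{BTreeMap, BTreeSet};

type Token = u64;

#[derive(Default)]
struct SyncGraph {
    ancestors: BTreeMap<Token, BTreeSet<Token>>,
    origins: BTreeSet<Token>,
    heads: BTreeSet<Token>,
    all_nodes: BTreeSet<Token>,
}

impl SyncGraph {
    fn record(&mut self, parents: &[Token], child: Token) {
        // Child -> parents edge set, needed to walk the graph during a diff.
        self.ancestors.insert(child, parents.iter().copied().collect());

        // A node without parents is an origin of the graph.
        if parents.is_empty() {
            self.origins.insert(child);
        }

        // Parents stop being heads; the new event becomes a head in turn.
        for p in parents {
            self.heads.remove(p);
        }
        self.heads.insert(child);

        self.all_nodes.insert(child);
    }
}

fn main() {
    let mut g = SyncGraph::default();
    g.record(&[], 1);
    g.record(&[1], 2);
    g.record(&[1], 3);
    assert_eq!(g.origins, BTreeSet::from([1]));
    assert_eq!(g.heads, BTreeSet::from([2, 3]));
}
```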


@ -1,5 +1,5 @@
pub mod unique_ident;
pub mod davdag;
pub mod user;
pub mod mail;
pub mod calendar; pub mod calendar;
pub mod davdag;
pub mod mail;
pub mod unique_ident;
pub mod user;


@ -8,16 +8,16 @@ use futures::{future::BoxFuture, FutureExt};
use tokio::sync::watch; use tokio::sync::watch;
use tracing::{debug, error, info, warn}; use tracing::{debug, error, info, warn};
use aero_bayou::timestamp::now_msec;
use aero_user::cryptoblob; use aero_user::cryptoblob;
use aero_user::login::{Credentials, PublicCredentials}; use aero_user::login::{Credentials, PublicCredentials};
use aero_user::storage; use aero_user::storage;
use aero_bayou::timestamp::now_msec;
use crate::mail::mailbox::Mailbox; use crate::mail::mailbox::Mailbox;
use crate::mail::uidindex::ImapUidvalidity; use crate::mail::uidindex::ImapUidvalidity;
use crate::mail::IMF;
use crate::unique_ident::*; use crate::unique_ident::*;
use crate::user::User; use crate::user::User;
use crate::mail::IMF;
const INCOMING_PK: &str = "incoming"; const INCOMING_PK: &str = "incoming";
const INCOMING_LOCK_SK: &str = "lock"; const INCOMING_LOCK_SK: &str = "lock";


@ -2,15 +2,15 @@ use anyhow::{anyhow, bail, Result};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::sync::RwLock; use tokio::sync::RwLock;
use aero_bayou::timestamp::now_msec;
use aero_bayou::Bayou;
use aero_user::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key}; use aero_user::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key};
use aero_user::login::Credentials; use aero_user::login::Credentials;
use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store}; use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store};
use aero_bayou::Bayou;
use aero_bayou::timestamp::now_msec;
use crate::unique_ident::*;
use crate::mail::uidindex::*; use crate::mail::uidindex::*;
use crate::mail::IMF; use crate::mail::IMF;
use crate::unique_ident::*;
pub struct Mailbox { pub struct Mailbox {
pub(super) id: UniqueIdent, pub(super) id: UniqueIdent,


@ -1,9 +1,9 @@
pub mod incoming; pub mod incoming;
pub mod mailbox; pub mod mailbox;
pub mod namespace;
pub mod query; pub mod query;
pub mod snapshot; pub mod snapshot;
pub mod uidindex; pub mod uidindex;
pub mod namespace;
// Internet Message Format // Internet Message Format
// aka RFC 822 - RFC 2822 - RFC 5322 // aka RFC 822 - RFC 2822 - RFC 5322


@ -104,7 +104,11 @@ impl MailboxList {
/// Ensures mailbox `name` maps to id `id`. /// Ensures mailbox `name` maps to id `id`.
/// If it already mapped to that, returns None. /// If it already mapped to that, returns None.
/// If a change had to be done, returns Some(new uidvalidity in mailbox). /// If a change had to be done, returns Some(new uidvalidity in mailbox).
pub(crate) fn set_mailbox(&mut self, name: &str, id: Option<UniqueIdent>) -> Option<ImapUidvalidity> { pub(crate) fn set_mailbox(
&mut self,
name: &str,
id: Option<UniqueIdent>,
) -> Option<ImapUidvalidity> {
let (ts, id, uidvalidity) = match self.0.get_mut(name) { let (ts, id, uidvalidity) = match self.0.get_mut(name) {
None => { None => {
if id.is_none() { if id.is_none() {


@ -2,10 +2,10 @@ use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use crate::unique_ident::UniqueIdent;
use super::mailbox::Mailbox; use super::mailbox::Mailbox;
use super::query::{Query, QueryScope}; use super::query::{Query, QueryScope};
use super::uidindex::UidIndex; use super::uidindex::UidIndex;
use crate::unique_ident::UniqueIdent;
/// A Frozen Mailbox has a snapshot of the current mailbox /// A Frozen Mailbox has a snapshot of the current mailbox
/// state that is desynchronized with the real mailbox state. /// state that is desynchronized with the real mailbox state.


@ -3,8 +3,8 @@ use std::num::{NonZeroU32, NonZeroU64};
use im::{HashMap, OrdMap, OrdSet}; use im::{HashMap, OrdMap, OrdSet};
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer};
use aero_bayou::*;
use crate::unique_ident::UniqueIdent; use crate::unique_ident::UniqueIdent;
use aero_bayou::*;
pub type ModSeq = NonZeroU64; pub type ModSeq = NonZeroU64;
pub type ImapUid = NonZeroU32; pub type ImapUid = NonZeroU32;


@ -9,12 +9,15 @@ use aero_user::cryptoblob::{open_deserialize, seal_serialize};
use aero_user::login::Credentials; use aero_user::login::Credentials;
use aero_user::storage; use aero_user::storage;
use crate::calendar::namespace::CalendarNs;
use crate::mail::incoming::incoming_mail_watch_process; use crate::mail::incoming::incoming_mail_watch_process;
use crate::mail::mailbox::Mailbox; use crate::mail::mailbox::Mailbox;
use crate::mail::namespace::{
CreatedMailbox, MailboxList, ARCHIVE, DRAFTS, INBOX, MAILBOX_HIERARCHY_DELIMITER,
MAILBOX_LIST_PK, MAILBOX_LIST_SK, SENT, TRASH,
};
use crate::mail::uidindex::ImapUidvalidity; use crate::mail::uidindex::ImapUidvalidity;
use crate::unique_ident::UniqueIdent; use crate::unique_ident::UniqueIdent;
use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox};
use crate::calendar::namespace::CalendarNs;
//@FIXME User should be totally rewriten //@FIXME User should be totally rewriten
// to extract the local mailbox list // to extract the local mailbox list


@ -1,79 +1,79 @@
#![no_main] #![no_main]
use libfuzzer_sys::fuzz_target;
use libfuzzer_sys::arbitrary; use libfuzzer_sys::arbitrary;
use libfuzzer_sys::arbitrary::Arbitrary; use libfuzzer_sys::arbitrary::Arbitrary;
use libfuzzer_sys::fuzz_target;
use aero_dav::{types, realization, xml}; use aero_dav::{realization, types, xml};
use quick_xml::reader::NsReader; use quick_xml::reader::NsReader;
use tokio::runtime::Runtime;
use tokio::io::AsyncWriteExt; use tokio::io::AsyncWriteExt;
use tokio::runtime::Runtime;
// Split this file // Split this file
const tokens: [&str; 63] = [ const tokens: [&str; 63] = [
"0", "0",
"1", "1",
"activelock", "activelock",
"allprop", "allprop",
"encoding", "encoding",
"utf-8", "utf-8",
"http://ns.example.com/boxschema/", "http://ns.example.com/boxschema/",
"HTTP/1.1 200 OK", "HTTP/1.1 200 OK",
"1997-12-01T18:27:21-08:00", "1997-12-01T18:27:21-08:00",
"Mon, 12 Jan 1998 09:25:56 GMT", "Mon, 12 Jan 1998 09:25:56 GMT",
"\"abcdef\"", "\"abcdef\"",
"cannot-modify-protected-property", "cannot-modify-protected-property",
"collection", "collection",
"creationdate", "creationdate",
"DAV:", "DAV:",
"D", "D",
"C", "C",
"xmlns:D", "xmlns:D",
"depth", "depth",
"displayname", "displayname",
"error", "error",
"exclusive", "exclusive",
"getcontentlanguage", "getcontentlanguage",
"getcontentlength", "getcontentlength",
"getcontenttype", "getcontenttype",
"getetag", "getetag",
"getlastmodified", "getlastmodified",
"href", "href",
"include", "include",
"Infinite", "Infinite",
"infinity", "infinity",
"location", "location",
"lockdiscovery", "lockdiscovery",
"lockentry", "lockentry",
"lockinfo", "lockinfo",
"lockroot", "lockroot",
"lockscope", "lockscope",
"locktoken", "locktoken",
"lock-token-matches-request-uri", "lock-token-matches-request-uri",
"lock-token-submitted", "lock-token-submitted",
"locktype", "locktype",
"multistatus", "multistatus",
"no-conflicting-lock", "no-conflicting-lock",
"no-external-entities", "no-external-entities",
"owner", "owner",
"preserved-live-properties", "preserved-live-properties",
"prop", "prop",
"propertyupdate", "propertyupdate",
"propfind", "propfind",
"propfind-finite-depth", "propfind-finite-depth",
"propname", "propname",
"propstat", "propstat",
"remove", "remove",
"resourcetype", "resourcetype",
"response", "response",
"responsedescription", "responsedescription",
"set", "set",
"shared", "shared",
"status", "status",
"supportedlock", "supportedlock",
"text/html", "text/html",
"timeout", "timeout",
"write", "write",
]; ];
#[derive(Arbitrary)] #[derive(Arbitrary)]
@ -106,7 +106,7 @@ impl Tag {
acc.push_str("D:"); acc.push_str("D:");
acc.push_str(self.name.serialize().as_str()); acc.push_str(self.name.serialize().as_str());
if let Some((k,v)) = &self.attr { if let Some((k, v)) = &self.attr {
acc.push_str(" "); acc.push_str(" ");
acc.push_str(k.serialize().as_str()); acc.push_str(k.serialize().as_str());
acc.push_str("=\""); acc.push_str("=\"");
@ -123,7 +123,6 @@ impl Tag {
} }
} }
#[derive(Arbitrary)] #[derive(Arbitrary)]
enum XmlNode { enum XmlNode {
//@FIXME: build RFC3339 and RFC822 Dates with chrono based on timestamps //@FIXME: build RFC3339 and RFC822 Dates with chrono based on timestamps
@ -145,9 +144,14 @@ impl XmlNode {
let stag = tag.start(); let stag = tag.start();
match children.is_empty() { match children.is_empty() {
true => format!("<{}/>", stag), true => format!("<{}/>", stag),
false => format!("<{}>{}</{}>", stag, children.iter().map(|v| v.serialize()).collect::<String>(), tag.end()), false => format!(
"<{}>{}</{}>",
stag,
children.iter().map(|v| v.serialize()).collect::<String>(),
tag.end()
),
} }
}, }
Self::Number(v) => format!("{}", v), Self::Number(v) => format!("{}", v),
Self::Text(v) => v.serialize(), Self::Text(v) => v.serialize(),
} }
@ -158,19 +162,22 @@ async fn serialize(elem: &impl xml::QWrite) -> Vec<u8> {
let mut buffer = Vec::new(); let mut buffer = Vec::new();
let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; let ns_to_apply = vec![("xmlns:D".into(), "DAV:".into())];
let mut writer = xml::Writer { q, ns_to_apply }; let mut writer = xml::Writer { q, ns_to_apply };
elem.qwrite(&mut writer).await.expect("xml serialization"); elem.qwrite(&mut writer).await.expect("xml serialization");
tokio_buffer.flush().await.expect("tokio buffer flush"); tokio_buffer.flush().await.expect("tokio buffer flush");
return buffer return buffer;
} }
type Object = types::Multistatus<realization::Core, types::PropValue<realization::Core>>; type Object = types::Multistatus<realization::Core, types::PropValue<realization::Core>>;
fuzz_target!(|nodes: XmlNode| { fuzz_target!(|nodes: XmlNode| {
let gen = format!("<D:multistatus xmlns:D=\"DAV:\">{}<D:/multistatus>", nodes.serialize()); let gen = format!(
"<D:multistatus xmlns:D=\"DAV:\">{}<D:/multistatus>",
nodes.serialize()
);
//println!("--------\n{}", gen); //println!("--------\n{}", gen);
let data = gen.as_bytes(); let data = gen.as_bytes();
@ -191,7 +198,9 @@ fuzz_target!(|nodes: XmlNode| {
let my_serialization = serialize(&reference).await; let my_serialization = serialize(&reference).await;
// 3. De-serialize my serialization // 3. De-serialize my serialization
let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice())).await.expect("XML Reader init"); let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice()))
.await
.expect("XML Reader init");
let comparison = rdr2.find::<Object>().await.expect("Deserialize again"); let comparison = rdr2.find::<Object>().await.expect("Deserialize again");
// 4. Both the first decoding and last decoding must be identical // 4. Both the first decoding and last decoding must be identical


@ -1,23 +1,31 @@
use super::acltypes::*; use super::acltypes::*;
use super::types as dav;
use super::xml::{QRead, Reader, IRead, DAV_URN};
use super::error::ParsingError; use super::error::ParsingError;
use super::types as dav;
use super::xml::{IRead, QRead, Reader, DAV_URN};
impl QRead<Property> for Property { impl QRead<Property> for Property {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open_start(DAV_URN, "owner").await?.is_some() { if xml.maybe_open_start(DAV_URN, "owner").await?.is_some() {
let href = xml.find().await?; let href = xml.find().await?;
xml.close().await?; xml.close().await?;
return Ok(Self::Owner(href)) return Ok(Self::Owner(href));
} }
if xml.maybe_open_start(DAV_URN, "current-user-principal").await?.is_some() { if xml
.maybe_open_start(DAV_URN, "current-user-principal")
.await?
.is_some()
{
let user = xml.find().await?; let user = xml.find().await?;
xml.close().await?; xml.close().await?;
return Ok(Self::CurrentUserPrincipal(user)) return Ok(Self::CurrentUserPrincipal(user));
} }
if xml.maybe_open_start(DAV_URN, "current-user-privilege-set").await?.is_some() { if xml
.maybe_open_start(DAV_URN, "current-user-privilege-set")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
return Ok(Self::CurrentUserPrivilegeSet(vec![])) return Ok(Self::CurrentUserPrivilegeSet(vec![]));
} }
Err(ParsingError::Recoverable) Err(ParsingError::Recoverable)
@ -28,17 +36,25 @@ impl QRead<PropertyRequest> for PropertyRequest {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "owner").await?.is_some() { if xml.maybe_open(DAV_URN, "owner").await?.is_some() {
xml.close().await?; xml.close().await?;
return Ok(Self::Owner) return Ok(Self::Owner);
} }
if xml.maybe_open(DAV_URN, "current-user-principal").await?.is_some() { if xml
.maybe_open(DAV_URN, "current-user-principal")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
return Ok(Self::CurrentUserPrincipal) return Ok(Self::CurrentUserPrincipal);
} }
if xml.maybe_open(DAV_URN, "current-user-privilege-set").await?.is_some() { if xml
.maybe_open(DAV_URN, "current-user-privilege-set")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
return Ok(Self::CurrentUserPrivilegeSet) return Ok(Self::CurrentUserPrivilegeSet);
} }
Err(ParsingError::Recoverable) Err(ParsingError::Recoverable)
@ -49,7 +65,7 @@ impl QRead<ResourceType> for ResourceType {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "principal").await?.is_some() { if xml.maybe_open(DAV_URN, "principal").await?.is_some() {
xml.close().await?; xml.close().await?;
return Ok(Self::Principal) return Ok(Self::Principal);
} }
Err(ParsingError::Recoverable) Err(ParsingError::Recoverable)
} }
@ -60,7 +76,7 @@ impl QRead<User> for User {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "unauthenticated").await?.is_some() { if xml.maybe_open(DAV_URN, "unauthenticated").await?.is_some() {
xml.close().await?; xml.close().await?;
return Ok(Self::Unauthenticated) return Ok(Self::Unauthenticated);
} }
dav::Href::qread(xml).await.map(Self::Authenticated) dav::Href::qread(xml).await.map(Self::Authenticated)


@ -1,9 +1,9 @@
use quick_xml::Error as QError;
use quick_xml::events::Event; use quick_xml::events::Event;
use quick_xml::Error as QError;
use super::acltypes::*; use super::acltypes::*;
use super::xml::{QWrite, Writer, IWrite};
use super::error::ParsingError; use super::error::ParsingError;
use super::xml::{IWrite, QWrite, Writer};
impl QWrite for Property { impl QWrite for Property {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
@ -14,18 +14,18 @@ impl QWrite for Property {
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
href.qwrite(xml).await?; href.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::CurrentUserPrincipal(user) => { Self::CurrentUserPrincipal(user) => {
let start = xml.create_dav_element("current-user-principal"); let start = xml.create_dav_element("current-user-principal");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
user.qwrite(xml).await?; user.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::CurrentUserPrivilegeSet(_) => { Self::CurrentUserPrivilegeSet(_) => {
let empty_tag = xml.create_dav_element("current-user-privilege-set"); let empty_tag = xml.create_dav_element("current-user-privilege-set");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
} }
} }
} }
@ -64,7 +64,7 @@ impl QWrite for User {
Self::Unauthenticated => { Self::Unauthenticated => {
let tag = xml.create_dav_element("unauthenticated"); let tag = xml.create_dav_element("unauthenticated");
xml.q.write_event_async(Event::Empty(tag)).await xml.q.write_event_async(Event::Empty(tag)).await
}, }
Self::Authenticated(href) => href.qwrite(xml).await, Self::Authenticated(href) => href.qwrite(xml).await,
} }
} }


@ -2,14 +2,12 @@ use super::types as dav;
//RFC covered: RFC3744 (ACL core) + RFC5397 (ACL Current Principal Extension) //RFC covered: RFC3744 (ACL core) + RFC5397 (ACL Current Principal Extension)
//@FIXME required for a full CalDAV implementation //@FIXME required for a full CalDAV implementation
// See section 6. of the CalDAV RFC // See section 6. of the CalDAV RFC
// It seems mainly required for free-busy that I will not implement now. // It seems mainly required for free-busy that I will not implement now.
// It can also be used for discovering main calendar, not sure it is used. // It can also be used for discovering main calendar, not sure it is used.
// Note: it is used by Thunderbird // Note: it is used by Thunderbird
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum PropertyRequest { pub enum PropertyRequest {
Owner, Owner,

File diff suppressed because it is too large.


@ -1,10 +1,9 @@
use quick_xml::events::{BytesText, Event};
use quick_xml::Error as QError; use quick_xml::Error as QError;
use quick_xml::events::{Event, BytesText};
use super::caltypes::*; use super::caltypes::*;
use super::xml::{Node, QWrite, IWrite, Writer};
use super::types::Extension; use super::types::Extension;
use super::xml::{IWrite, Node, QWrite, Writer};
// ==================== Calendar Types Serialization ========================= // ==================== Calendar Types Serialization =========================
@ -54,7 +53,7 @@ impl<E: Extension> QWrite for CalendarQuery<E> {
selector.qwrite(xml).await?; selector.qwrite(xml).await?;
} }
self.filter.qwrite(xml).await?; self.filter.qwrite(xml).await?;
if let Some(tz) = &self.timezone { if let Some(tz) = &self.timezone {
tz.qwrite(xml).await?; tz.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
@ -106,8 +105,8 @@ impl QWrite for PropertyRequest {
Self::MinDateTime => atom("min-date-time").await, Self::MinDateTime => atom("min-date-time").await,
Self::MaxDateTime => atom("max-date-time").await, Self::MaxDateTime => atom("max-date-time").await,
Self::MaxInstances => atom("max-instances").await, Self::MaxInstances => atom("max-instances").await,
Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await, Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await,
Self::SupportedCollationSet => atom("supported-collation-set").await, Self::SupportedCollationSet => atom("supported-collation-set").await,
Self::CalendarData(req) => req.qwrite(xml).await, Self::CalendarData(req) => req.qwrite(xml).await,
} }
} }
@ -130,17 +129,21 @@ impl QWrite for Property {
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(text))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(text)))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::CalendarTimezone(payload) => { Self::CalendarTimezone(payload) => {
let start = xml.create_cal_element("calendar-timezone"); let start = xml.create_cal_element("calendar-timezone");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(payload))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(payload)))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::SupportedCalendarComponentSet(many_comp) => { Self::SupportedCalendarComponentSet(many_comp) => {
let start = xml.create_cal_element("supported-calendar-component-set"); let start = xml.create_cal_element("supported-calendar-component-set");
let end = start.to_end(); let end = start.to_end();
@ -150,7 +153,7 @@ impl QWrite for Property {
comp.qwrite(xml).await?; comp.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::SupportedCalendarData(many_mime) => { Self::SupportedCalendarData(many_mime) => {
let start = xml.create_cal_element("supported-calendar-data"); let start = xml.create_cal_element("supported-calendar-data");
let end = start.to_end(); let end = start.to_end();
@ -160,49 +163,59 @@ impl QWrite for Property {
mime.qwrite(xml).await?; mime.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::MaxResourceSize(bytes) => { Self::MaxResourceSize(bytes) => {
let start = xml.create_cal_element("max-resource-size"); let start = xml.create_cal_element("max-resource-size");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::MinDateTime(dt) => { Self::MinDateTime(dt) => {
let start = xml.create_cal_element("min-date-time"); let start = xml.create_cal_element("min-date-time");
let end = start.to_end(); let end = start.to_end();
let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT));
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(dtstr.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::MaxDateTime(dt) => { Self::MaxDateTime(dt) => {
let start = xml.create_cal_element("max-date-time"); let start = xml.create_cal_element("max-date-time");
let end = start.to_end(); let end = start.to_end();
let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT)); let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT));
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(dtstr.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::MaxInstances(count) => { Self::MaxInstances(count) => {
let start = xml.create_cal_element("max-instances"); let start = xml.create_cal_element("max-instances");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(count.to_string().as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::MaxAttendeesPerInstance(count) => { Self::MaxAttendeesPerInstance(count) => {
let start = xml.create_cal_element("max-attendees-per-instance"); let start = xml.create_cal_element("max-attendees-per-instance");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(count.to_string().as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::SupportedCollationSet(many_collations) => { Self::SupportedCollationSet(many_collations) => {
let start = xml.create_cal_element("supported-collation-set"); let start = xml.create_cal_element("supported-collation-set");
let end = start.to_end(); let end = start.to_end();
@ -211,8 +224,8 @@ impl QWrite for Property {
for collation in many_collations.iter() { for collation in many_collations.iter() {
collation.qwrite(xml).await?; collation.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::CalendarData(inner) => inner.qwrite(xml).await, Self::CalendarData(inner) => inner.qwrite(xml).await,
} }
} }
@ -225,7 +238,7 @@ impl QWrite for ResourceType {
Self::Calendar => { Self::Calendar => {
let empty_tag = xml.create_cal_element("calendar"); let empty_tag = xml.create_cal_element("calendar");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
} }
} }
} }
@ -245,7 +258,7 @@ impl QWrite for Violation {
Self::NeedPrivileges => { Self::NeedPrivileges => {
let empty_tag = xml.create_dav_element("need-privileges"); let empty_tag = xml.create_dav_element("need-privileges");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
// Regular CalDAV errors // Regular CalDAV errors
Self::ResourceMustBeNull => atom("resource-must-be-null").await, Self::ResourceMustBeNull => atom("resource-must-be-null").await,
@ -262,7 +275,7 @@ impl QWrite for Violation {
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
href.qwrite(xml).await?; href.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::MaxResourceSize => atom("max-resource-size").await, Self::MaxResourceSize => atom("max-resource-size").await,
Self::MinDateTime => atom("min-date-time").await, Self::MinDateTime => atom("min-date-time").await,
Self::MaxDateTime => atom("max-date-time").await, Self::MaxDateTime => atom("max-date-time").await,
@ -284,13 +297,12 @@ impl QWrite for Violation {
param_item.qwrite(xml).await?; param_item.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await, Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await,
} }
} }
} }
// ---------------------------- Inner XML ------------------------------------ // ---------------------------- Inner XML ------------------------------------
impl QWrite for SupportedCollation { impl QWrite for SupportedCollation {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
@ -300,19 +312,20 @@ impl QWrite for SupportedCollation {
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
self.0.qwrite(xml).await?; self.0.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
impl QWrite for Collation { impl QWrite for Collation {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let col = match self { let col = match self {
Self::AsciiCaseMap => "i;ascii-casemap", Self::AsciiCaseMap => "i;ascii-casemap",
Self::Octet => "i;octet", Self::Octet => "i;octet",
Self::Unknown(v) => v.as_str(), Self::Unknown(v) => v.as_str(),
}; };
xml.q.write_event_async(Event::Text(BytesText::new(col))).await xml.q
.write_event_async(Event::Text(BytesText::new(col)))
.await
} }
} }
@ -332,7 +345,9 @@ impl QWrite for CalendarDataPayload {
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(self.payload.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
@ -347,7 +362,7 @@ impl QWrite for CalendarDataRequest {
// Empty tag // Empty tag
if self.comp.is_none() && self.recurrence.is_none() && self.limit_freebusy_set.is_none() { if self.comp.is_none() && self.recurrence.is_none() && self.limit_freebusy_set.is_none() {
return xml.q.write_event_async(Event::Empty(start.clone())).await return xml.q.write_event_async(Event::Empty(start.clone())).await;
} }
let end = start.to_end(); let end = start.to_end();
@ -392,7 +407,7 @@ impl QWrite for Comp {
comp_kind.qwrite(xml).await?; comp_kind.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
} }
} }
} }
@ -411,7 +426,7 @@ impl QWrite for CompKind {
Self::AllComp => { Self::AllComp => {
let empty_tag = xml.create_cal_element("allcomp"); let empty_tag = xml.create_cal_element("allcomp");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
Self::Comp(many_comp) => { Self::Comp(many_comp) => {
for comp in many_comp.iter() { for comp in many_comp.iter() {
// Required: recursion in an async fn requires boxing // Required: recursion in an async fn requires boxing
@ -420,7 +435,10 @@ impl QWrite for CompKind {
// For more information about this error, try `rustc --explain E0391`. // For more information about this error, try `rustc --explain E0391`.
// https://github.com/rust-lang/rust/issues/78649 // https://github.com/rust-lang/rust/issues/78649
#[inline(always)] #[inline(always)]
fn recurse<'a>(comp: &'a Comp, xml: &'a mut Writer<impl IWrite>) -> futures::future::BoxFuture<'a, Result<(), QError>> { fn recurse<'a>(
comp: &'a Comp,
xml: &'a mut Writer<impl IWrite>,
) -> futures::future::BoxFuture<'a, Result<(), QError>> {
Box::pin(comp.qwrite(xml)) Box::pin(comp.qwrite(xml))
} }
recurse(comp, xml).await?; recurse(comp, xml).await?;
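The `recurse` helper introduced here exists because an async fn cannot call itself directly: the compiler cannot compute a finite size for the resulting future. Boxing the recursive call behind a `BoxFuture` breaks the cycle. A standalone sketch of the same pattern; the `Node` tree type is made up for illustration and is not part of the project:

```rust
// Same boxing trick as the `recurse` helpers in this diff; `Node` is illustrative.
use futures::future::BoxFuture;

struct Node {
    value: u32,
    children: Vec<Node>,
}

fn sum(node: &Node) -> BoxFuture<'_, u32> {
    Box::pin(async move {
        let mut total = node.value;
        for child in &node.children {
            // The recursive call goes through a boxed, type-erased future,
            // which keeps the outer future's size finite.
            total += sum(child).await;
        }
        total
    })
}

fn main() {
    let tree = Node {
        value: 1,
        children: vec![
            Node { value: 2, children: vec![] },
            Node { value: 3, children: vec![] },
        ],
    };
    assert_eq!(futures::executor::block_on(sum(&tree)), 6);
}
```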
@ -437,7 +455,7 @@ impl QWrite for PropKind {
Self::AllProp => { Self::AllProp => {
let empty_tag = xml.create_cal_element("allprop"); let empty_tag = xml.create_cal_element("allprop");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
Self::Prop(many_prop) => { Self::Prop(many_prop) => {
for prop in many_prop.iter() { for prop in many_prop.iter() {
prop.qwrite(xml).await?; prop.qwrite(xml).await?;
@ -473,8 +491,14 @@ impl QWrite for RecurrenceModifier {
impl QWrite for Expand { impl QWrite for Expand {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let mut empty = xml.create_cal_element("expand"); let mut empty = xml.create_cal_element("expand");
empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); empty.push_attribute((
empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); "start",
format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(),
));
empty.push_attribute((
"end",
format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(),
));
xml.q.write_event_async(Event::Empty(empty)).await xml.q.write_event_async(Event::Empty(empty)).await
} }
} }
@ -482,8 +506,14 @@ impl QWrite for Expand {
impl QWrite for LimitRecurrenceSet { impl QWrite for LimitRecurrenceSet {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let mut empty = xml.create_cal_element("limit-recurrence-set"); let mut empty = xml.create_cal_element("limit-recurrence-set");
empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); empty.push_attribute((
empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); "start",
format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(),
));
empty.push_attribute((
"end",
format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(),
));
xml.q.write_event_async(Event::Empty(empty)).await xml.q.write_event_async(Event::Empty(empty)).await
} }
} }
@ -491,8 +521,14 @@ impl QWrite for LimitRecurrenceSet {
impl QWrite for LimitFreebusySet { impl QWrite for LimitFreebusySet {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let mut empty = xml.create_cal_element("limit-freebusy-set"); let mut empty = xml.create_cal_element("limit-freebusy-set");
empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str())); empty.push_attribute((
empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str())); "start",
format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(),
));
empty.push_attribute((
"end",
format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(),
));
xml.q.write_event_async(Event::Empty(empty)).await xml.q.write_event_async(Event::Empty(empty)).await
} }
} }
@ -503,11 +539,11 @@ impl<E: Extension> QWrite for CalendarSelector<E> {
Self::AllProp => { Self::AllProp => {
let empty_tag = xml.create_dav_element("allprop"); let empty_tag = xml.create_dav_element("allprop");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
Self::PropName => { Self::PropName => {
let empty_tag = xml.create_dav_element("propname"); let empty_tag = xml.create_dav_element("propname");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
Self::Prop(prop) => prop.qwrite(xml).await, Self::Prop(prop) => prop.qwrite(xml).await,
} }
} }
@ -534,10 +570,10 @@ impl QWrite for CompFilter {
impl QWrite for CompFilterRules { impl QWrite for CompFilterRules {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
match self { match self {
Self::IsNotDefined => { Self::IsNotDefined => {
let empty_tag = xml.create_dav_element("is-not-defined"); let empty_tag = xml.create_dav_element("is-not-defined");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
Self::Matches(cfm) => cfm.qwrite(xml).await, Self::Matches(cfm) => cfm.qwrite(xml).await,
} }
} }
@ -559,7 +595,10 @@ impl QWrite for CompFilterMatch {
// For more information about this error, try `rustc --explain E0391`. // For more information about this error, try `rustc --explain E0391`.
// https://github.com/rust-lang/rust/issues/78649 // https://github.com/rust-lang/rust/issues/78649
#[inline(always)] #[inline(always)]
fn recurse<'a>(comp: &'a CompFilter, xml: &'a mut Writer<impl IWrite>) -> futures::future::BoxFuture<'a, Result<(), QError>> { fn recurse<'a>(
comp: &'a CompFilter,
xml: &'a mut Writer<impl IWrite>,
) -> futures::future::BoxFuture<'a, Result<(), QError>> {
Box::pin(comp.qwrite(xml)) Box::pin(comp.qwrite(xml))
} }
recurse(comp_item, xml).await?; recurse(comp_item, xml).await?;
@ -591,7 +630,7 @@ impl QWrite for PropFilterRules {
Self::IsNotDefined => { Self::IsNotDefined => {
let empty_tag = xml.create_dav_element("is-not-defined"); let empty_tag = xml.create_dav_element("is-not-defined");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
Self::Match(prop_match) => prop_match.qwrite(xml).await, Self::Match(prop_match) => prop_match.qwrite(xml).await,
} }
} }
@ -635,7 +674,9 @@ impl QWrite for TextMatch {
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(self.text.as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(self.text.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
@ -663,7 +704,7 @@ impl QWrite for ParamFilterMatch {
Self::IsNotDefined => { Self::IsNotDefined => {
let empty_tag = xml.create_dav_element("is-not-defined"); let empty_tag = xml.create_dav_element("is-not-defined");
xml.q.write_event_async(Event::Empty(empty_tag)).await xml.q.write_event_async(Event::Empty(empty_tag)).await
}, }
Self::Match(tm) => tm.qwrite(xml).await, Self::Match(tm) => tm.qwrite(xml).await,
} }
} }
@ -675,7 +716,9 @@ impl QWrite for TimeZone {
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(self.0.as_str()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(self.0.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
@ -695,11 +738,20 @@ impl QWrite for TimeRange {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let mut empty = xml.create_cal_element("time-range"); let mut empty = xml.create_cal_element("time-range");
match self { match self {
Self::OnlyStart(start) => empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())), Self::OnlyStart(start) => empty.push_attribute((
Self::OnlyEnd(end) => empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())), "start",
format!("{}", start.format(ICAL_DATETIME_FMT)).as_str(),
)),
Self::OnlyEnd(end) => {
empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str()))
}
Self::FullRange(start, end) => { Self::FullRange(start, end) => {
empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str())); empty.push_attribute((
empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str())); "start",
format!("{}", start.format(ICAL_DATETIME_FMT)).as_str(),
));
empty
.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str()));
} }
} }
xml.q.write_event_async(Event::Empty(empty)).await xml.q.write_event_async(Event::Empty(empty)).await
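`time-range`, `expand`, `limit-recurrence-set` and the min/max-date-time properties all stringify chrono `DateTime<Utc>` values through `ICAL_DATETIME_FMT`, a constant defined elsewhere in the crate. Assuming it is the usual iCalendar UTC pattern (an assumption, not shown in this diff), the attribute formatting reduces to:

```rust
// Hedged sketch: ICAL_DATETIME_FMT is assumed to be the iCalendar UTC pattern;
// the actual constant lives elsewhere in aero_dav and may differ.
use chrono::{TimeZone, Utc};

const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ";

fn main() {
    let start = Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap();
    let attr = format!("{}", start.format(ICAL_DATETIME_FMT));
    assert_eq!(attr, "20060104T000000Z");
}
```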
@ -709,16 +761,16 @@ impl QWrite for TimeRange {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::types as dav;
use crate::realization::Calendar; use crate::realization::Calendar;
use crate::types as dav;
use chrono::{TimeZone, Utc};
use tokio::io::AsyncWriteExt; use tokio::io::AsyncWriteExt;
use chrono::{Utc,TimeZone};
async fn serialize(elem: &impl QWrite) -> String { async fn serialize(elem: &impl QWrite) -> String {
let mut buffer = Vec::new(); let mut buffer = Vec::new();
let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
let ns_to_apply = vec![ let ns_to_apply = vec![
("xmlns:D".into(), "DAV:".into()), ("xmlns:D".into(), "DAV:".into()),
("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()), ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()),
]; ];
@ -728,91 +780,120 @@ mod tests {
tokio_buffer.flush().await.expect("tokio buffer flush"); tokio_buffer.flush().await.expect("tokio buffer flush");
let got = std::str::from_utf8(buffer.as_slice()).unwrap(); let got = std::str::from_utf8(buffer.as_slice()).unwrap();
return got.into() return got.into();
} }
#[tokio::test] #[tokio::test]
async fn basic_violation() { async fn basic_violation() {
let got = serialize( let got = serialize(&dav::Error::<Calendar>(vec![dav::Violation::Extension(
&dav::Error::<Calendar>(vec![ Violation::ResourceMustBeNull,
dav::Violation::Extension(Violation::ResourceMustBeNull), )]))
]) .await;
).await;
let expected = r#"<D:error xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav"> let expected = r#"<D:error xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav">
<C:resource-must-be-null/> <C:resource-must-be-null/>
</D:error>"#; </D:error>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
} }
#[tokio::test] #[tokio::test]
async fn rfc_calendar_query1_req() { async fn rfc_calendar_query1_req() {
let got = serialize( let got = serialize(&CalendarQuery::<Calendar> {
&CalendarQuery::<Calendar> { selector: Some(CalendarSelector::Prop(dav::PropName(vec![
selector: Some(CalendarSelector::Prop(dav::PropName(vec![ dav::PropertyRequest::GetEtag,
dav::PropertyRequest::GetEtag, dav::PropertyRequest::Extension(PropertyRequest::CalendarData(
dav::PropertyRequest::Extension(PropertyRequest::CalendarData(CalendarDataRequest { CalendarDataRequest {
mime: None, mime: None,
comp: Some(Comp { comp: Some(Comp {
name: Component::VCalendar, name: Component::VCalendar,
prop_kind: Some(PropKind::Prop(vec![ prop_kind: Some(PropKind::Prop(vec![CalProp {
CalProp { name: ComponentProperty("VERSION".into()),
name: ComponentProperty("VERSION".into()), novalue: None,
novalue: None, }])),
}
])),
comp_kind: Some(CompKind::Comp(vec![ comp_kind: Some(CompKind::Comp(vec![
Comp { Comp {
name: Component::VEvent, name: Component::VEvent,
prop_kind: Some(PropKind::Prop(vec![ prop_kind: Some(PropKind::Prop(vec![
CalProp { name: ComponentProperty("SUMMARY".into()), novalue: None }, CalProp {
CalProp { name: ComponentProperty("UID".into()), novalue: None }, name: ComponentProperty("SUMMARY".into()),
CalProp { name: ComponentProperty("DTSTART".into()), novalue: None }, novalue: None,
CalProp { name: ComponentProperty("DTEND".into()), novalue: None }, },
CalProp { name: ComponentProperty("DURATION".into()), novalue: None }, CalProp {
CalProp { name: ComponentProperty("RRULE".into()), novalue: None }, name: ComponentProperty("UID".into()),
CalProp { name: ComponentProperty("RDATE".into()), novalue: None }, novalue: None,
CalProp { name: ComponentProperty("EXRULE".into()), novalue: None }, },
CalProp { name: ComponentProperty("EXDATE".into()), novalue: None }, CalProp {
CalProp { name: ComponentProperty("RECURRENCE-ID".into()), novalue: None }, name: ComponentProperty("DTSTART".into()),
])), novalue: None,
comp_kind: None, },
}, CalProp {
Comp { name: ComponentProperty("DTEND".into()),
name: Component::VTimeZone, novalue: None,
prop_kind: None, },
comp_kind: None, CalProp {
} name: ComponentProperty("DURATION".into()),
])), novalue: None,
}), },
CalProp {
name: ComponentProperty("RRULE".into()),
novalue: None,
},
CalProp {
name: ComponentProperty("RDATE".into()),
novalue: None,
},
CalProp {
name: ComponentProperty("EXRULE".into()),
novalue: None,
},
CalProp {
name: ComponentProperty("EXDATE".into()),
novalue: None,
},
CalProp {
name: ComponentProperty("RECURRENCE-ID".into()),
novalue: None,
},
])),
comp_kind: None,
},
Comp {
name: Component::VTimeZone,
prop_kind: None,
comp_kind: None,
},
])),
}),
recurrence: None, recurrence: None,
limit_freebusy_set: None, limit_freebusy_set: None,
})), },
]))), )),
filter: Filter(CompFilter { ]))),
name: Component::VCalendar, filter: Filter(CompFilter {
additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { name: Component::VCalendar,
time_range: None, additional_rules: Some(CompFilterRules::Matches(CompFilterMatch {
prop_filter: vec![], time_range: None,
comp_filter: vec![ prop_filter: vec![],
CompFilter { comp_filter: vec![CompFilter {
name: Component::VEvent, name: Component::VEvent,
additional_rules: Some(CompFilterRules::Matches(CompFilterMatch { additional_rules: Some(CompFilterRules::Matches(CompFilterMatch {
time_range: Some(TimeRange::FullRange( time_range: Some(TimeRange::FullRange(
Utc.with_ymd_and_hms(2006,1,4,0,0,0).unwrap(), Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap(),
Utc.with_ymd_and_hms(2006,1,5,0,0,0).unwrap(), Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap(),
)), )),
prop_filter: vec![], prop_filter: vec![],
comp_filter: vec![], comp_filter: vec![],
})), })),
}, }],
], })),
})), }),
}), timezone: None,
timezone: None, })
} .await;
).await;
let expected = r#"<C:calendar-query xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav"> let expected = r#"<C:calendar-query xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav">
<D:prop> <D:prop>
@ -844,59 +925,69 @@ mod tests {
</C:comp-filter> </C:comp-filter>
</C:filter> </C:filter>
</C:calendar-query>"#; </C:calendar-query>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
} }
#[tokio::test] #[tokio::test]
async fn rfc_calendar_query1_res() { async fn rfc_calendar_query1_res() {
let got = serialize( let got = serialize(&dav::Multistatus::<Calendar> {
&dav::Multistatus::<Calendar> { responses: vec![
responses: vec![ dav::Response {
dav::Response { status_or_propstat: dav::StatusOrPropstat::PropStat(
status_or_propstat: dav::StatusOrPropstat::PropStat( dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()),
dav::Href("http://cal.example.com/bernard/work/abcd2.ics".into()), vec![dav::PropStat {
vec![dav::PropStat {
prop: dav::AnyProp(vec![ prop: dav::AnyProp(vec![
dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd2\"".into())), dav::AnyProperty::Value(dav::Property::GetEtag(
dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload { "\"fffff-abcd2\"".into(),
mime: None, )),
payload: "PLACEHOLDER".into() dav::AnyProperty::Value(dav::Property::Extension(
}))), Property::CalendarData(CalendarDataPayload {
mime: None,
payload: "PLACEHOLDER".into(),
}),
)),
]), ]),
status: dav::Status(http::status::StatusCode::OK), status: dav::Status(http::status::StatusCode::OK),
error: None, error: None,
responsedescription: None, responsedescription: None,
}] }],
), ),
location: None, location: None,
error: None, error: None,
responsedescription: None, responsedescription: None,
}, },
dav::Response { dav::Response {
status_or_propstat: dav::StatusOrPropstat::PropStat( status_or_propstat: dav::StatusOrPropstat::PropStat(
dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()), dav::Href("http://cal.example.com/bernard/work/abcd3.ics".into()),
vec![dav::PropStat { vec![dav::PropStat {
prop: dav::AnyProp(vec![ prop: dav::AnyProp(vec![
dav::AnyProperty::Value(dav::Property::GetEtag("\"fffff-abcd3\"".into())), dav::AnyProperty::Value(dav::Property::GetEtag(
dav::AnyProperty::Value(dav::Property::Extension(Property::CalendarData(CalendarDataPayload{ "\"fffff-abcd3\"".into(),
mime: None, )),
payload: "PLACEHOLDER".into(), dav::AnyProperty::Value(dav::Property::Extension(
}))), Property::CalendarData(CalendarDataPayload {
mime: None,
payload: "PLACEHOLDER".into(),
}),
)),
]), ]),
status: dav::Status(http::status::StatusCode::OK), status: dav::Status(http::status::StatusCode::OK),
error: None, error: None,
responsedescription: None, responsedescription: None,
}] }],
), ),
location: None, location: None,
error: None, error: None,
responsedescription: None, responsedescription: None,
}, },
], ],
responsedescription: None, responsedescription: None,
}, })
).await; .await;
let expected = r#"<D:multistatus xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav"> let expected = r#"<D:multistatus xmlns:D="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav">
<D:response> <D:response>
@ -921,7 +1012,9 @@ mod tests {
</D:response> </D:response>
</D:multistatus>"#; </D:multistatus>"#;
assert_eq!(
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); &got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
} }
} }
View file
@ -1,7 +1,7 @@
#![allow(dead_code)] #![allow(dead_code)]
use chrono::{DateTime,Utc};
use super::types as dav; use super::types as dav;
use chrono::{DateTime, Utc};
pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ"; pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ";
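For reference, `ICAL_DATETIME_FMT` is the UTC date-time pattern used in iCalendar time-range attributes (e.g. `20060104T000000Z`). A minimal sketch of producing such a timestamp with `chrono` — the constant matches the one above, the rest of the snippet is illustrative only:

```rust
use chrono::{TimeZone, Utc};

const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ";

fn main() {
    // Format a UTC timestamp the way CalDAV time-range attributes expect it.
    let start = Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap();
    assert_eq!(start.format(ICAL_DATETIME_FMT).to_string(), "20060104T000000Z");
}
```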
@ -13,7 +13,6 @@ pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ";
// For reference, non-official extensions documented by SabreDAV: // For reference, non-official extensions documented by SabreDAV:
// https://github.com/apple/ccs-calendarserver/tree/master/doc/Extensions // https://github.com/apple/ccs-calendarserver/tree/master/doc/Extensions
// ----- Root elements ----- // ----- Root elements -----
// --- (MKCALENDAR PART) --- // --- (MKCALENDAR PART) ---
@ -33,17 +32,16 @@ pub const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ";
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct MkCalendar<E: dav::Extension>(pub dav::Set<E>); pub struct MkCalendar<E: dav::Extension>(pub dav::Set<E>);
/// If a response body for a successful request is included, it MUST /// If a response body for a successful request is included, it MUST
/// be a CALDAV:mkcalendar-response XML element. /// be a CALDAV:mkcalendar-response XML element.
/// ///
/// <!ELEMENT mkcalendar-response ANY> /// <!ELEMENT mkcalendar-response ANY>
/// ///
/// ---- /// ----
/// ///
/// ANY is not satisfying, so we look at RFC 5689 ///
/// https://www.rfc-editor.org/rfc/rfc5689.html#section-5.2 /// https://www.rfc-editor.org/rfc/rfc5689.html#section-5.2
/// ///
/// Definition: /// Definition:
/// ///
/// <!ELEMENT mkcol-response (propstat+)> /// <!ELEMENT mkcol-response (propstat+)>
@ -63,9 +61,9 @@ pub enum Report<E: dav::Extension> {
/// Namespace: urn:ietf:params:xml:ns:caldav /// Namespace: urn:ietf:params:xml:ns:caldav
/// ///
/// Purpose: Defines a report for querying calendar object resources. /// Purpose: Defines a report for querying calendar object resources.
/// ///
/// Description: See Section 7.8. /// Description: See Section 7.8.
/// ///
/// Definition: /// Definition:
/// ///
/// <!ELEMENT calendar-query ((DAV:allprop | /// <!ELEMENT calendar-query ((DAV:allprop |
@ -131,7 +129,7 @@ pub enum PropertyRequest {
MaxDateTime, MaxDateTime,
MaxInstances, MaxInstances,
MaxAttendeesPerInstance, MaxAttendeesPerInstance,
SupportedCollationSet, SupportedCollationSet,
CalendarData(CalendarDataRequest), CalendarData(CalendarDataRequest),
} }
@ -163,7 +161,7 @@ pub enum Property {
CalendarHomeSet(dav::Href), CalendarHomeSet(dav::Href),
/// Name: calendar-description /// Name: calendar-description
/// ///
/// Namespace: urn:ietf:params:xml:ns:caldav /// Namespace: urn:ietf:params:xml:ns:caldav
/// ///
/// Purpose: Provides a human-readable description of the calendar /// Purpose: Provides a human-readable description of the calendar
@ -192,10 +190,7 @@ pub enum Property {
/// <C:calendar-description xml:lang="fr-CA" /// <C:calendar-description xml:lang="fr-CA"
/// xmlns:C="urn:ietf:params:xml:ns:caldav" /// xmlns:C="urn:ietf:params:xml:ns:caldav"
/// >Calendrier de Mathilde Desruisseaux</C:calendar-description> /// >Calendrier de Mathilde Desruisseaux</C:calendar-description>
CalendarDescription { CalendarDescription { lang: Option<String>, text: String },
lang: Option<String>,
text: String,
},
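As a usage aid, a minimal sketch constructing this variant so it mirrors the `xml:lang` example quoted in the doc comment above (the enum is the one defined in this file; the helper function name is illustrative only):

```rust
// Sketch only: `Property` is the CalDAV property enum defined in this file.
fn example_description() -> Property {
    Property::CalendarDescription {
        lang: Some("fr-CA".to_string()),
        text: "Calendrier de Mathilde Desruisseaux".to_string(),
    }
}
```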
/// 5.2.2. CALDAV:calendar-timezone Property /// 5.2.2. CALDAV:calendar-timezone Property
/// ///
@ -232,7 +227,7 @@ pub enum Property {
/// sequence "]]>", which is the end delimiter for the CDATA section. /// sequence "]]>", which is the end delimiter for the CDATA section.
/// ///
/// Definition: /// Definition:
/// ///
/// ```xmlschema /// ```xmlschema
/// <!ELEMENT calendar-timezone (#PCDATA)> /// <!ELEMENT calendar-timezone (#PCDATA)>
/// PCDATA value: an iCalendar object with exactly one VTIMEZONE component. /// PCDATA value: an iCalendar object with exactly one VTIMEZONE component.
@ -630,7 +625,7 @@ pub enum Property {
/// WebDAV property. However, the CALDAV:calendar-data XML element is /// WebDAV property. However, the CALDAV:calendar-data XML element is
/// not a WebDAV property and, as such, is not returned in PROPFIND /// not a WebDAV property and, as such, is not returned in PROPFIND
/// responses, nor used in PROPPATCH requests. /// responses, nor used in PROPPATCH requests.
/// ///
/// Note: The iCalendar data embedded within the CALDAV:calendar-data /// Note: The iCalendar data embedded within the CALDAV:calendar-data
/// XML element MUST follow the standard XML character data encoding /// XML element MUST follow the standard XML character data encoding
/// rules, including use of &lt;, &gt;, &amp; etc. entity encoding or /// rules, including use of &lt;, &gt;, &amp; etc. entity encoding or
@ -649,7 +644,7 @@ pub enum Violation {
/// (CALDAV:calendar-collection-location-ok): The Request-URI MUST /// (CALDAV:calendar-collection-location-ok): The Request-URI MUST
/// identify a location where a calendar collection can be created; /// identify a location where a calendar collection can be created;
CalendarCollectionLocationOk, CalendarCollectionLocationOk,
/// (CALDAV:valid-calendar-data): The time zone specified in CALDAV: /// (CALDAV:valid-calendar-data): The time zone specified in CALDAV:
/// calendar-timezone property MUST be a valid iCalendar object /// calendar-timezone property MUST be a valid iCalendar object
/// containing a single valid VTIMEZONE component. /// containing a single valid VTIMEZONE component.
@ -712,7 +707,7 @@ pub enum Violation {
/// date-time property value (Section 5.2.6) on the calendar /// date-time property value (Section 5.2.6) on the calendar
/// collection where the resource will be stored; /// collection where the resource will be stored;
MinDateTime, MinDateTime,
/// (CALDAV:max-date-time): The resource submitted in the PUT request, /// (CALDAV:max-date-time): The resource submitted in the PUT request,
/// or targeted by a COPY or MOVE request, MUST have all of its /// or targeted by a COPY or MOVE request, MUST have all of its
/// iCalendar DATE or DATE-TIME property values (for each recurring /// iCalendar DATE or DATE-TIME property values (for each recurring
@ -784,15 +779,15 @@ pub enum Violation {
/// To deal with this, this specification makes use of the IANA Collation /// To deal with this, this specification makes use of the IANA Collation
/// Registry defined in [RFC4790] to specify collations that may be used /// Registry defined in [RFC4790] to specify collations that may be used
/// to carry out the text comparison operations with a well-defined rule. /// to carry out the text comparison operations with a well-defined rule.
/// ///
/// The comparisons used in CalDAV are all "substring" matches, as per /// The comparisons used in CalDAV are all "substring" matches, as per
/// [RFC4790], Section 4.2. Collations supported by the server MUST /// [RFC4790], Section 4.2. Collations supported by the server MUST
/// support "substring" match operations. /// support "substring" match operations.
/// ///
/// CalDAV servers are REQUIRED to support the "i;ascii-casemap" and /// CalDAV servers are REQUIRED to support the "i;ascii-casemap" and
/// "i;octet" collations, as described in [RFC4790], and MAY support /// "i;octet" collations, as described in [RFC4790], and MAY support
/// other collations. /// other collations.
/// ///
/// Servers MUST advertise the set of collations that they support via /// Servers MUST advertise the set of collations that they support via
/// the CALDAV:supported-collation-set property defined on any resource /// the CALDAV:supported-collation-set property defined on any resource
/// that supports reports that use collations. /// that supports reports that use collations.
@ -807,7 +802,7 @@ pub enum Violation {
/// ///
/// Wildcards (as defined in [RFC4790], Section 3.2) MUST NOT be used in /// Wildcards (as defined in [RFC4790], Section 3.2) MUST NOT be used in
/// the collation identifier. /// the collation identifier.
/// ///
/// If the client chooses a collation not supported by the server, the /// If the client chooses a collation not supported by the server, the
/// server MUST respond with a CALDAV:supported-collation precondition /// server MUST respond with a CALDAV:supported-collation precondition
/// error response. /// error response.
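To make the collation rules concrete, here is a hypothetical helper (not part of this crate) showing what the two mandatory collations boil down to for the "substring" match used by CalDAV, per RFC 4790: "i;ascii-casemap" maps the ASCII letters a-z to A-Z before comparing, while "i;octet" compares raw octets:

```rust
/// "i;ascii-casemap" substring match (sketch): case-fold ASCII, then search.
fn ascii_casemap_contains(haystack: &str, needle: &str) -> bool {
    haystack.to_ascii_uppercase().contains(&needle.to_ascii_uppercase())
}

/// "i;octet" substring match (sketch): compare raw octets, no case mapping.
fn octet_contains(haystack: &[u8], needle: &[u8]) -> bool {
    if needle.is_empty() {
        return true;
    }
    haystack.windows(needle.len()).any(|w| w == needle)
}

fn main() {
    assert!(ascii_casemap_contains("Team Meeting", "meeting"));
    assert!(!octet_contains(b"Team Meeting", b"meeting"));
}
```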
@ -915,7 +910,7 @@ pub struct CompSupport(pub Component);
/// Description: The CALDAV:allcomp XML element can be used when the /// Description: The CALDAV:allcomp XML element can be used when the
/// client wants all types of components returned by a calendaring /// client wants all types of components returned by a calendaring
/// REPORT request. /// REPORT request.
/// ///
/// Definition: /// Definition:
/// ///
/// <!ELEMENT allcomp EMPTY> /// <!ELEMENT allcomp EMPTY>
@ -997,7 +992,7 @@ pub enum RecurrenceModifier {
/// recurrence set into calendar components that define exactly one /// recurrence set into calendar components that define exactly one
/// recurrence instance, and MUST return only those whose scheduled /// recurrence instance, and MUST return only those whose scheduled
/// time intersect a specified time range. /// time intersect a specified time range.
/// ///
/// The "start" attribute specifies the inclusive start of the time /// The "start" attribute specifies the inclusive start of the time
/// range, and the "end" attribute specifies the non-inclusive end of /// range, and the "end" attribute specifies the non-inclusive end of
/// the time range. Both attributes are specified as date with UTC /// the time range. Both attributes are specified as date with UTC
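In other words, with an inclusive start and a non-inclusive end, an instance occupying [DTSTART, DTEND) overlaps the requested range exactly when it starts before the range end and ends after the range start. A small sketch of that check for the plain VEVENT-with-DTEND case (helper name is illustrative, not from this crate):

```rust
use chrono::{DateTime, TimeZone, Utc};

/// Does [dtstart, dtend) intersect the requested [start, end) range?
fn overlaps(
    dtstart: DateTime<Utc>,
    dtend: DateTime<Utc>,
    start: DateTime<Utc>,
    end: DateTime<Utc>,
) -> bool {
    dtstart < end && dtend > start
}

fn main() {
    let start = Utc.with_ymd_and_hms(2006, 1, 4, 0, 0, 0).unwrap();
    let end = Utc.with_ymd_and_hms(2006, 1, 5, 0, 0, 0).unwrap();
    // An event from 02:00 to 03:00 on Jan 4 intersects the range...
    assert!(overlaps(
        Utc.with_ymd_and_hms(2006, 1, 4, 2, 0, 0).unwrap(),
        Utc.with_ymd_and_hms(2006, 1, 4, 3, 0, 0).unwrap(),
        start,
        end
    ));
    // ...while one starting exactly at the non-inclusive end does not.
    assert!(!overlaps(
        end,
        Utc.with_ymd_and_hms(2006, 1, 5, 1, 0, 0).unwrap(),
        start,
        end
    ));
}
```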
@ -1189,7 +1184,7 @@ pub struct CompFilterMatch {
/// Name: prop-filter /// Name: prop-filter
/// ///
/// Namespace: urn:ietf:params:xml:ns:caldav /// Namespace: urn:ietf:params:xml:ns:caldav
/// ///
/// Purpose: Specifies search criteria on calendar properties. /// Purpose: Specifies search criteria on calendar properties.
/// ///
/// Description: The CALDAV:prop-filter XML element specifies a query /// Description: The CALDAV:prop-filter XML element specifies a query
@ -1352,8 +1347,6 @@ pub enum ParamFilterMatch {
/// <!ELEMENT is-not-defined EMPTY> /// <!ELEMENT is-not-defined EMPTY>
/* CURRENTLY INLINED */ /* CURRENTLY INLINED */
/// Name: timezone /// Name: timezone
/// ///
/// Namespace: urn:ietf:params:xml:ns:caldav /// Namespace: urn:ietf:params:xml:ns:caldav
@ -1475,7 +1468,7 @@ impl PropertyParameter {
} }
} }
#[derive(Default,Debug,PartialEq,Clone)] #[derive(Default, Debug, PartialEq, Clone)]
pub enum Collation { pub enum Collation {
#[default] #[default]
AsciiCaseMap, AsciiCaseMap,
@ -1492,9 +1485,9 @@ impl Collation {
} }
pub fn new(v: String) -> Self { pub fn new(v: String) -> Self {
match v.as_str() { match v.as_str() {
"i;ascii-casemap" => Self::AsciiCaseMap, "i;ascii-casemap" => Self::AsciiCaseMap,
"i;octet" => Self::Octet, "i;octet" => Self::Octet,
_ => Self::Unknown(v), _ => Self::Unknown(v),
} }
} }
} }
View file
@ -1,9 +1,9 @@
use quick_xml::events::Event;
use chrono::DateTime; use chrono::DateTime;
use quick_xml::events::Event;
use super::types::*;
use super::error::ParsingError; use super::error::ParsingError;
use super::xml::{Node, QRead, Reader, IRead, DAV_URN}; use super::types::*;
use super::xml::{IRead, Node, QRead, Reader, DAV_URN};
//@TODO (1) Rewrite all objects as Href, //@TODO (1) Rewrite all objects as Href,
// where we return Ok(None) instead of trying to find the object at any cost. // where we return Ok(None) instead of trying to find the object at any cost.
@ -25,20 +25,21 @@ impl<E: Extension> QRead<PropFind<E>> for PropFind<E> {
if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? { if let Some(_) = xml.maybe_open(DAV_URN, "allprop").await? {
xml.close().await?; xml.close().await?;
let includ = xml.maybe_find::<Include<E>>().await?; let includ = xml.maybe_find::<Include<E>>().await?;
break PropFind::AllProp(includ) break PropFind::AllProp(includ);
} }
// propname // propname
if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? { if let Some(_) = xml.maybe_open(DAV_URN, "propname").await? {
xml.close().await?; xml.close().await?;
break PropFind::PropName break PropFind::PropName;
} }
// prop // prop
let (mut maybe_prop, mut dirty) = (None, false); let (mut maybe_prop, mut dirty) = (None, false);
xml.maybe_read::<PropName<E>>(&mut maybe_prop, &mut dirty).await?; xml.maybe_read::<PropName<E>>(&mut maybe_prop, &mut dirty)
.await?;
if let Some(prop) = maybe_prop { if let Some(prop) = maybe_prop {
break PropFind::Prop(prop) break PropFind::Prop(prop);
} }
// not found, skipping // not found, skipping
@ -80,7 +81,10 @@ impl<E: Extension> QRead<Multistatus<E>> for Multistatus<E> {
} }
xml.close().await?; xml.close().await?;
Ok(Multistatus { responses, responsedescription }) Ok(Multistatus {
responses,
responsedescription,
})
} }
} }
@ -91,7 +95,8 @@ impl QRead<LockInfo> for LockInfo {
let (mut m_scope, mut m_type, mut owner) = (None, None, None); let (mut m_scope, mut m_type, mut owner) = (None, None, None);
loop { loop {
let mut dirty = false; let mut dirty = false;
xml.maybe_read::<LockScope>(&mut m_scope, &mut dirty).await?; xml.maybe_read::<LockScope>(&mut m_scope, &mut dirty)
.await?;
xml.maybe_read::<LockType>(&mut m_type, &mut dirty).await?; xml.maybe_read::<LockType>(&mut m_type, &mut dirty).await?;
xml.maybe_read::<Owner>(&mut owner, &mut dirty).await?; xml.maybe_read::<Owner>(&mut owner, &mut dirty).await?;
@ -104,7 +109,11 @@ impl QRead<LockInfo> for LockInfo {
} }
xml.close().await?; xml.close().await?;
match (m_scope, m_type) { match (m_scope, m_type) {
(Some(lockscope), Some(locktype)) => Ok(LockInfo { lockscope, locktype, owner }), (Some(lockscope), Some(locktype)) => Ok(LockInfo {
lockscope,
locktype,
owner,
}),
_ => Err(ParsingError::MissingChild), _ => Err(ParsingError::MissingChild),
} }
} }
@ -121,7 +130,6 @@ impl<E: Extension> QRead<PropValue<E>> for PropValue<E> {
} }
} }
/// Error response /// Error response
impl<E: Extension> QRead<Error<E>> for Error<E> { impl<E: Extension> QRead<Error<E>> for Error<E> {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
@ -132,13 +140,12 @@ impl<E: Extension> QRead<Error<E>> for Error<E> {
} }
} }
// ---- INNER XML // ---- INNER XML
impl<E: Extension> QRead<Response<E>> for Response<E> { impl<E: Extension> QRead<Response<E>> for Response<E> {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
xml.open(DAV_URN, "response").await?; xml.open(DAV_URN, "response").await?;
let (mut status, mut error, mut responsedescription, mut location) = (None, None, None, None); let (mut status, mut error, mut responsedescription, mut location) =
(None, None, None, None);
let mut href = Vec::new(); let mut href = Vec::new();
let mut propstat = Vec::new(); let mut propstat = Vec::new();
@ -146,28 +153,38 @@ impl<E: Extension> QRead<Response<E>> for Response<E> {
let mut dirty = false; let mut dirty = false;
xml.maybe_read::<Status>(&mut status, &mut dirty).await?; xml.maybe_read::<Status>(&mut status, &mut dirty).await?;
xml.maybe_push::<Href>(&mut href, &mut dirty).await?; xml.maybe_push::<Href>(&mut href, &mut dirty).await?;
xml.maybe_push::<PropStat<E>>(&mut propstat, &mut dirty).await?; xml.maybe_push::<PropStat<E>>(&mut propstat, &mut dirty)
.await?;
xml.maybe_read::<Error<E>>(&mut error, &mut dirty).await?; xml.maybe_read::<Error<E>>(&mut error, &mut dirty).await?;
xml.maybe_read::<ResponseDescription>(&mut responsedescription, &mut dirty).await?; xml.maybe_read::<ResponseDescription>(&mut responsedescription, &mut dirty)
xml.maybe_read::<Location>(&mut location, &mut dirty).await?; .await?;
xml.maybe_read::<Location>(&mut location, &mut dirty)
.await?;
if !dirty { if !dirty {
match xml.peek() { match xml.peek() {
Event::End(_) => break, Event::End(_) => break,
_ => { xml.skip().await? }, _ => xml.skip().await?,
}; };
} }
} }
xml.close().await?; xml.close().await?;
match (status, &propstat[..], &href[..]) { match (status, &propstat[..], &href[..]) {
(Some(status), &[], &[_, ..]) => Ok(Response { (Some(status), &[], &[_, ..]) => Ok(Response {
status_or_propstat: StatusOrPropstat::Status(href, status), status_or_propstat: StatusOrPropstat::Status(href, status),
error, responsedescription, location, error,
responsedescription,
location,
}), }),
(None, &[_, ..], &[_, ..]) => Ok(Response { (None, &[_, ..], &[_, ..]) => Ok(Response {
status_or_propstat: StatusOrPropstat::PropStat(href.into_iter().next().unwrap(), propstat), status_or_propstat: StatusOrPropstat::PropStat(
error, responsedescription, location, href.into_iter().next().unwrap(),
propstat,
),
error,
responsedescription,
location,
}), }),
(Some(_), &[_, ..], _) => Err(ParsingError::InvalidValue), (Some(_), &[_, ..], _) => Err(ParsingError::InvalidValue),
_ => Err(ParsingError::MissingChild), _ => Err(ParsingError::MissingChild),
@ -179,14 +196,17 @@ impl<E: Extension> QRead<PropStat<E>> for PropStat<E> {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
xml.open(DAV_URN, "propstat").await?; xml.open(DAV_URN, "propstat").await?;
let (mut m_any_prop, mut m_status, mut error, mut responsedescription) = (None, None, None, None); let (mut m_any_prop, mut m_status, mut error, mut responsedescription) =
(None, None, None, None);
loop { loop {
let mut dirty = false; let mut dirty = false;
xml.maybe_read::<AnyProp<E>>(&mut m_any_prop, &mut dirty).await?; xml.maybe_read::<AnyProp<E>>(&mut m_any_prop, &mut dirty)
.await?;
xml.maybe_read::<Status>(&mut m_status, &mut dirty).await?; xml.maybe_read::<Status>(&mut m_status, &mut dirty).await?;
xml.maybe_read::<Error<E>>(&mut error, &mut dirty).await?; xml.maybe_read::<Error<E>>(&mut error, &mut dirty).await?;
xml.maybe_read::<ResponseDescription>(&mut responsedescription, &mut dirty).await?; xml.maybe_read::<ResponseDescription>(&mut responsedescription, &mut dirty)
.await?;
if !dirty { if !dirty {
match xml.peek() { match xml.peek() {
@ -198,7 +218,12 @@ impl<E: Extension> QRead<PropStat<E>> for PropStat<E> {
xml.close().await?; xml.close().await?;
match (m_any_prop, m_status) { match (m_any_prop, m_status) {
(Some(prop), Some(status)) => Ok(PropStat { prop, status, error, responsedescription }), (Some(prop), Some(status)) => Ok(PropStat {
prop,
status,
error,
responsedescription,
}),
_ => Err(ParsingError::MissingChild), _ => Err(ParsingError::MissingChild),
} }
} }
@ -208,8 +233,12 @@ impl QRead<Status> for Status {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
xml.open(DAV_URN, "status").await?; xml.open(DAV_URN, "status").await?;
let fullcode = xml.tag_string().await?; let fullcode = xml.tag_string().await?;
let txtcode = fullcode.splitn(3, ' ').nth(1).ok_or(ParsingError::InvalidValue)?; let txtcode = fullcode
let code = http::status::StatusCode::from_bytes(txtcode.as_bytes()).or(Err(ParsingError::InvalidValue))?; .splitn(3, ' ')
.nth(1)
.ok_or(ParsingError::InvalidValue)?;
let code = http::status::StatusCode::from_bytes(txtcode.as_bytes())
.or(Err(ParsingError::InvalidValue))?;
xml.close().await?; xml.close().await?;
Ok(Status(code)) Ok(Status(code))
} }
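The `<D:status>` element carries a full HTTP status line such as `HTTP/1.1 200 OK`, and the reader above keeps only the numeric code. A standalone sketch of the same extraction with the `http` crate (function name is illustrative):

```rust
use http::status::StatusCode;

// Take the second whitespace-separated field of the status line and parse it.
fn parse_status_line(line: &str) -> Option<StatusCode> {
    let code = line.splitn(3, ' ').nth(1)?;
    StatusCode::from_bytes(code.as_bytes()).ok()
}

fn main() {
    assert_eq!(parse_status_line("HTTP/1.1 200 OK"), Some(StatusCode::OK));
    assert_eq!(parse_status_line("garbage"), None);
}
```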
@ -263,27 +292,55 @@ impl<E: Extension> QRead<Set<E>> for Set<E> {
impl<E: Extension> QRead<Violation<E>> for Violation<E> { impl<E: Extension> QRead<Violation<E>> for Violation<E> {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "lock-token-matches-request-uri").await?.is_some() { if xml
.maybe_open(DAV_URN, "lock-token-matches-request-uri")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
Ok(Violation::LockTokenMatchesRequestUri) Ok(Violation::LockTokenMatchesRequestUri)
} else if xml.maybe_open(DAV_URN, "lock-token-submitted").await?.is_some() { } else if xml
.maybe_open(DAV_URN, "lock-token-submitted")
.await?
.is_some()
{
let links = xml.collect::<Href>().await?; let links = xml.collect::<Href>().await?;
xml.close().await?; xml.close().await?;
Ok(Violation::LockTokenSubmitted(links)) Ok(Violation::LockTokenSubmitted(links))
} else if xml.maybe_open(DAV_URN, "no-conflicting-lock").await?.is_some() { } else if xml
.maybe_open(DAV_URN, "no-conflicting-lock")
.await?
.is_some()
{
let links = xml.collect::<Href>().await?; let links = xml.collect::<Href>().await?;
xml.close().await?; xml.close().await?;
Ok(Violation::NoConflictingLock(links)) Ok(Violation::NoConflictingLock(links))
} else if xml.maybe_open(DAV_URN, "no-external-entities").await?.is_some() { } else if xml
.maybe_open(DAV_URN, "no-external-entities")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
Ok(Violation::NoExternalEntities) Ok(Violation::NoExternalEntities)
} else if xml.maybe_open(DAV_URN, "preserved-live-properties").await?.is_some() { } else if xml
.maybe_open(DAV_URN, "preserved-live-properties")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
Ok(Violation::PreservedLiveProperties) Ok(Violation::PreservedLiveProperties)
} else if xml.maybe_open(DAV_URN, "propfind-finite-depth").await?.is_some() { } else if xml
.maybe_open(DAV_URN, "propfind-finite-depth")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
Ok(Violation::PropfindFiniteDepth) Ok(Violation::PropfindFiniteDepth)
} else if xml.maybe_open(DAV_URN, "cannot-modify-protected-property").await?.is_some() { } else if xml
.maybe_open(DAV_URN, "cannot-modify-protected-property")
.await?
.is_some()
{
xml.close().await?; xml.close().await?;
Ok(Violation::CannotModifyProtectedProperty) Ok(Violation::CannotModifyProtectedProperty)
} else { } else {
@ -323,7 +380,7 @@ impl<E: Extension> QRead<AnyProperty<E>> for AnyProperty<E> {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
match Property::qread(xml).await { match Property::qread(xml).await {
Err(ParsingError::Recoverable) => (), Err(ParsingError::Recoverable) => (),
otherwise => return otherwise.map(Self::Value) otherwise => return otherwise.map(Self::Value),
} }
PropertyRequest::qread(xml).await.map(Self::Request) PropertyRequest::qread(xml).await.map(Self::Request)
} }
@ -335,7 +392,11 @@ impl<E: Extension> QRead<PropertyRequest<E>> for PropertyRequest<E> {
Some(PropertyRequest::CreationDate) Some(PropertyRequest::CreationDate)
} else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() { } else if xml.maybe_open(DAV_URN, "displayname").await?.is_some() {
Some(PropertyRequest::DisplayName) Some(PropertyRequest::DisplayName)
} else if xml.maybe_open(DAV_URN, "getcontentlanguage").await?.is_some() { } else if xml
.maybe_open(DAV_URN, "getcontentlanguage")
.await?
.is_some()
{
Some(PropertyRequest::GetContentLanguage) Some(PropertyRequest::GetContentLanguage)
} else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() { } else if xml.maybe_open(DAV_URN, "getcontentlength").await?.is_some() {
Some(PropertyRequest::GetContentLength) Some(PropertyRequest::GetContentLength)
@ -359,8 +420,10 @@ impl<E: Extension> QRead<PropertyRequest<E>> for PropertyRequest<E> {
Some(pr) => { Some(pr) => {
xml.close().await?; xml.close().await?;
Ok(pr) Ok(pr)
}, }
None => E::PropertyRequest::qread(xml).await.map(PropertyRequest::Extension), None => E::PropertyRequest::qread(xml)
.await
.map(PropertyRequest::Extension),
} }
} }
} }
@ -368,46 +431,86 @@ impl<E: Extension> QRead<PropertyRequest<E>> for PropertyRequest<E> {
impl<E: Extension> QRead<Property<E>> for Property<E> { impl<E: Extension> QRead<Property<E>> for Property<E> {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
// Core WebDAV properties // Core WebDAV properties
if xml.maybe_open_start(DAV_URN, "creationdate").await?.is_some() { if xml
.maybe_open_start(DAV_URN, "creationdate")
.await?
.is_some()
{
let datestr = xml.tag_string().await?; let datestr = xml.tag_string().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(datestr.as_str())?)) return Ok(Property::CreationDate(DateTime::parse_from_rfc3339(
} else if xml.maybe_open_start(DAV_URN, "displayname").await?.is_some() { datestr.as_str(),
)?));
} else if xml
.maybe_open_start(DAV_URN, "displayname")
.await?
.is_some()
{
let name = xml.tag_string().await?; let name = xml.tag_string().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::DisplayName(name)) return Ok(Property::DisplayName(name));
} else if xml.maybe_open_start(DAV_URN, "getcontentlanguage").await?.is_some() { } else if xml
.maybe_open_start(DAV_URN, "getcontentlanguage")
.await?
.is_some()
{
let lang = xml.tag_string().await?; let lang = xml.tag_string().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::GetContentLanguage(lang)) return Ok(Property::GetContentLanguage(lang));
} else if xml.maybe_open_start(DAV_URN, "getcontentlength").await?.is_some() { } else if xml
.maybe_open_start(DAV_URN, "getcontentlength")
.await?
.is_some()
{
let cl = xml.tag_string().await?.parse::<u64>()?; let cl = xml.tag_string().await?.parse::<u64>()?;
xml.close().await?; xml.close().await?;
return Ok(Property::GetContentLength(cl)) return Ok(Property::GetContentLength(cl));
} else if xml.maybe_open_start(DAV_URN, "getcontenttype").await?.is_some() { } else if xml
.maybe_open_start(DAV_URN, "getcontenttype")
.await?
.is_some()
{
let ct = xml.tag_string().await?; let ct = xml.tag_string().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::GetContentType(ct)) return Ok(Property::GetContentType(ct));
} else if xml.maybe_open_start(DAV_URN, "getetag").await?.is_some() { } else if xml.maybe_open_start(DAV_URN, "getetag").await?.is_some() {
let etag = xml.tag_string().await?; let etag = xml.tag_string().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::GetEtag(etag)) return Ok(Property::GetEtag(etag));
} else if xml.maybe_open_start(DAV_URN, "getlastmodified").await?.is_some() { } else if xml
.maybe_open_start(DAV_URN, "getlastmodified")
.await?
.is_some()
{
let datestr = xml.tag_string().await?; let datestr = xml.tag_string().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822(datestr.as_str())?)) return Ok(Property::GetLastModified(DateTime::parse_from_rfc2822(
} else if xml.maybe_open_start(DAV_URN, "lockdiscovery").await?.is_some() { datestr.as_str(),
)?));
} else if xml
.maybe_open_start(DAV_URN, "lockdiscovery")
.await?
.is_some()
{
let acc = xml.collect::<ActiveLock>().await?; let acc = xml.collect::<ActiveLock>().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::LockDiscovery(acc)) return Ok(Property::LockDiscovery(acc));
} else if xml.maybe_open_start(DAV_URN, "resourcetype").await?.is_some() { } else if xml
.maybe_open_start(DAV_URN, "resourcetype")
.await?
.is_some()
{
let acc = xml.collect::<ResourceType<E>>().await?; let acc = xml.collect::<ResourceType<E>>().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::ResourceType(acc)) return Ok(Property::ResourceType(acc));
} else if xml.maybe_open_start(DAV_URN, "supportedlock").await?.is_some() { } else if xml
.maybe_open_start(DAV_URN, "supportedlock")
.await?
.is_some()
{
let acc = xml.collect::<LockEntry>().await?; let acc = xml.collect::<LockEntry>().await?;
xml.close().await?; xml.close().await?;
return Ok(Property::SupportedLock(acc)) return Ok(Property::SupportedLock(acc));
} }
// Option 2: an extension property, delegating // Option 2: an extension property, delegating
@ -418,31 +521,49 @@ impl<E: Extension> QRead<Property<E>> for Property<E> {
impl QRead<ActiveLock> for ActiveLock { impl QRead<ActiveLock> for ActiveLock {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
xml.open(DAV_URN, "activelock").await?; xml.open(DAV_URN, "activelock").await?;
let (mut m_scope, mut m_type, mut m_depth, mut owner, mut timeout, mut locktoken, mut m_root) = let (
(None, None, None, None, None, None, None); mut m_scope,
mut m_type,
mut m_depth,
mut owner,
mut timeout,
mut locktoken,
mut m_root,
) = (None, None, None, None, None, None, None);
loop { loop {
let mut dirty = false; let mut dirty = false;
xml.maybe_read::<LockScope>(&mut m_scope, &mut dirty).await?; xml.maybe_read::<LockScope>(&mut m_scope, &mut dirty)
.await?;
xml.maybe_read::<LockType>(&mut m_type, &mut dirty).await?; xml.maybe_read::<LockType>(&mut m_type, &mut dirty).await?;
xml.maybe_read::<Depth>(&mut m_depth, &mut dirty).await?; xml.maybe_read::<Depth>(&mut m_depth, &mut dirty).await?;
xml.maybe_read::<Owner>(&mut owner, &mut dirty).await?; xml.maybe_read::<Owner>(&mut owner, &mut dirty).await?;
xml.maybe_read::<Timeout>(&mut timeout, &mut dirty).await?; xml.maybe_read::<Timeout>(&mut timeout, &mut dirty).await?;
xml.maybe_read::<LockToken>(&mut locktoken, &mut dirty).await?; xml.maybe_read::<LockToken>(&mut locktoken, &mut dirty)
.await?;
xml.maybe_read::<LockRoot>(&mut m_root, &mut dirty).await?; xml.maybe_read::<LockRoot>(&mut m_root, &mut dirty).await?;
if !dirty { if !dirty {
match xml.peek() { match xml.peek() {
Event::End(_) => break, Event::End(_) => break,
_ => { xml.skip().await?; }, _ => {
xml.skip().await?;
}
} }
} }
} }
xml.close().await?; xml.close().await?;
match (m_scope, m_type, m_depth, m_root) { match (m_scope, m_type, m_depth, m_root) {
(Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => (Some(lockscope), Some(locktype), Some(depth), Some(lockroot)) => Ok(ActiveLock {
Ok(ActiveLock { lockscope, locktype, depth, owner, timeout, locktoken, lockroot }), lockscope,
locktype,
depth,
owner,
timeout,
locktoken,
lockroot,
}),
_ => Err(ParsingError::MissingChild), _ => Err(ParsingError::MissingChild),
} }
} }
@ -465,7 +586,7 @@ impl QRead<Depth> for Depth {
impl QRead<Owner> for Owner { impl QRead<Owner> for Owner {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
xml.open(DAV_URN, "owner").await?; xml.open(DAV_URN, "owner").await?;
let mut owner = Owner::Unknown; let mut owner = Owner::Unknown;
loop { loop {
match xml.peek() { match xml.peek() {
@ -475,17 +596,21 @@ impl QRead<Owner> for Owner {
owner = Owner::Txt(txt); owner = Owner::Txt(txt);
} }
} }
Event::Start(_) | Event::Empty(_) => { Event::Start(_) | Event::Empty(_) => match Href::qread(xml).await {
match Href::qread(xml).await { Ok(href) => {
Ok(href) => { owner = Owner::Href(href); }, owner = Owner::Href(href);
Err(ParsingError::Recoverable) => { xml.skip().await?; },
Err(e) => return Err(e),
} }
} Err(ParsingError::Recoverable) => {
xml.skip().await?;
}
Err(e) => return Err(e),
},
Event::End(_) => break, Event::End(_) => break,
_ => { xml.skip().await?; }, _ => {
xml.skip().await?;
}
} }
}; }
xml.close().await?; xml.close().await?;
Ok(owner) Ok(owner)
} }
@ -495,7 +620,7 @@ impl QRead<Timeout> for Timeout {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
const SEC_PFX: &str = "Second-"; const SEC_PFX: &str = "Second-";
xml.open(DAV_URN, "timeout").await?; xml.open(DAV_URN, "timeout").await?;
let timeout = match xml.tag_string().await?.as_str() { let timeout = match xml.tag_string().await?.as_str() {
"Infinite" => Timeout::Infinite, "Infinite" => Timeout::Infinite,
seconds => match seconds.strip_prefix(SEC_PFX) { seconds => match seconds.strip_prefix(SEC_PFX) {
@ -531,10 +656,12 @@ impl<E: Extension> QRead<ResourceType<E>> for ResourceType<E> {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> { async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "collection").await?.is_some() { if xml.maybe_open(DAV_URN, "collection").await?.is_some() {
xml.close().await?; xml.close().await?;
return Ok(ResourceType::Collection) return Ok(ResourceType::Collection);
} }
E::ResourceType::qread(xml).await.map(ResourceType::Extension) E::ResourceType::qread(xml)
.await
.map(ResourceType::Extension)
} }
} }
@ -545,8 +672,10 @@ impl QRead<LockEntry> for LockEntry {
loop { loop {
let mut dirty = false; let mut dirty = false;
xml.maybe_read::<LockScope>(&mut maybe_scope, &mut dirty).await?; xml.maybe_read::<LockScope>(&mut maybe_scope, &mut dirty)
xml.maybe_read::<LockType>(&mut maybe_type, &mut dirty).await?; .await?;
xml.maybe_read::<LockType>(&mut maybe_type, &mut dirty)
.await?;
if !dirty { if !dirty {
match xml.peek() { match xml.peek() {
Event::End(_) => break, Event::End(_) => break,
@ -557,7 +686,10 @@ impl QRead<LockEntry> for LockEntry {
xml.close().await?; xml.close().await?;
match (maybe_scope, maybe_type) { match (maybe_scope, maybe_type) {
(Some(lockscope), Some(locktype)) => Ok(LockEntry { lockscope, locktype }), (Some(lockscope), Some(locktype)) => Ok(LockEntry {
lockscope,
locktype,
}),
_ => Err(ParsingError::MissingChild), _ => Err(ParsingError::MissingChild),
} }
} }
@ -570,12 +702,12 @@ impl QRead<LockScope> for LockScope {
let lockscope = loop { let lockscope = loop {
if xml.maybe_open(DAV_URN, "exclusive").await?.is_some() { if xml.maybe_open(DAV_URN, "exclusive").await?.is_some() {
xml.close().await?; xml.close().await?;
break LockScope::Exclusive break LockScope::Exclusive;
} }
if xml.maybe_open(DAV_URN, "shared").await?.is_some() { if xml.maybe_open(DAV_URN, "shared").await?.is_some() {
xml.close().await?; xml.close().await?;
break LockScope::Shared break LockScope::Shared;
} }
xml.skip().await?; xml.skip().await?;
@ -593,7 +725,7 @@ impl QRead<LockType> for LockType {
let locktype = loop { let locktype = loop {
if xml.maybe_open(DAV_URN, "write").await?.is_some() { if xml.maybe_open(DAV_URN, "write").await?.is_some() {
xml.close().await?; xml.close().await?;
break LockType::Write break LockType::Write;
} }
xml.skip().await?; xml.skip().await?;
@ -616,8 +748,8 @@ impl QRead<Href> for Href {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use chrono::{FixedOffset, TimeZone};
use crate::realization::Core; use crate::realization::Core;
use chrono::{FixedOffset, TimeZone};
use quick_xml::reader::NsReader; use quick_xml::reader::NsReader;
#[tokio::test] #[tokio::test]
@ -630,8 +762,10 @@ mod tests {
</D:propfind> </D:propfind>
"#; "#;
let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
let got = rdr.find::<PropFind::<Core>>().await.unwrap(); .await
.unwrap();
let got = rdr.find::<PropFind<Core>>().await.unwrap();
assert_eq!(got, PropFind::<Core>::PropName); assert_eq!(got, PropFind::<Core>::PropName);
} }
@ -654,18 +788,23 @@ mod tests {
</D:propfind> </D:propfind>
"#; "#;
let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
let got = rdr.find::<PropFind::<Core>>().await.unwrap(); .await
.unwrap();
let got = rdr.find::<PropFind<Core>>().await.unwrap();
assert_eq!(got, PropFind::Prop(PropName(vec![ assert_eq!(
PropertyRequest::DisplayName, got,
PropertyRequest::GetContentLength, PropFind::Prop(PropName(vec![
PropertyRequest::GetContentType, PropertyRequest::DisplayName,
PropertyRequest::GetEtag, PropertyRequest::GetContentLength,
PropertyRequest::GetLastModified, PropertyRequest::GetContentType,
PropertyRequest::ResourceType, PropertyRequest::GetEtag,
PropertyRequest::SupportedLock, PropertyRequest::GetLastModified,
]))); PropertyRequest::ResourceType,
PropertyRequest::SupportedLock,
]))
);
} }
#[tokio::test] #[tokio::test]
@ -677,17 +816,19 @@ mod tests {
</D:lock-token-submitted> </D:lock-token-submitted>
</D:error>"#; </D:error>"#;
let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
let got = rdr.find::<Error::<Core>>().await.unwrap(); .await
.unwrap();
let got = rdr.find::<Error<Core>>().await.unwrap();
assert_eq!(got, Error(vec![ assert_eq!(
Violation::LockTokenSubmitted(vec![ got,
Href("/locked/".into()) Error(vec![Violation::LockTokenSubmitted(vec![Href(
]) "/locked/".into()
])); )])])
);
} }
#[tokio::test] #[tokio::test]
async fn rfc_propertyupdate() { async fn rfc_propertyupdate() {
let src = r#"<?xml version="1.0" encoding="utf-8" ?> let src = r#"<?xml version="1.0" encoding="utf-8" ?>
@ -706,13 +847,18 @@ mod tests {
</D:remove> </D:remove>
</D:propertyupdate>"#; </D:propertyupdate>"#;
let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
let got = rdr.find::<PropertyUpdate::<Core>>().await.unwrap(); .await
.unwrap();
let got = rdr.find::<PropertyUpdate<Core>>().await.unwrap();
assert_eq!(got, PropertyUpdate(vec![ assert_eq!(
PropertyUpdateItem::Set(Set(PropValue(vec![]))), got,
PropertyUpdateItem::Remove(Remove(PropName(vec![]))), PropertyUpdate(vec![
])); PropertyUpdateItem::Set(Set(PropValue(vec![]))),
PropertyUpdateItem::Remove(Remove(PropName(vec![]))),
])
);
} }
#[tokio::test] #[tokio::test]
@ -728,14 +874,21 @@ mod tests {
</D:lockinfo> </D:lockinfo>
"#; "#;
let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
.await
.unwrap();
let got = rdr.find::<LockInfo>().await.unwrap(); let got = rdr.find::<LockInfo>().await.unwrap();
assert_eq!(got, LockInfo { assert_eq!(
lockscope: LockScope::Exclusive, got,
locktype: LockType::Write, LockInfo {
owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), lockscope: LockScope::Exclusive,
}); locktype: LockType::Write,
owner: Some(Owner::Href(Href(
"http://example.org/~ejw/contact.html".into()
))),
}
);
} }
#[tokio::test] #[tokio::test]
@ -777,59 +930,63 @@ mod tests {
</multistatus> </multistatus>
"#; "#;
let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
let got = rdr.find::<Multistatus::<Core>>().await.unwrap(); .await
.unwrap();
let got = rdr.find::<Multistatus<Core>>().await.unwrap();
assert_eq!(got, Multistatus { assert_eq!(
responses: vec![ got,
Response { Multistatus {
status_or_propstat: StatusOrPropstat::PropStat( responses: vec![
Href("http://www.example.com/container/".into()), Response {
vec![PropStat { status_or_propstat: StatusOrPropstat::PropStat(
prop: AnyProp(vec![ Href("http://www.example.com/container/".into()),
AnyProperty::Request(PropertyRequest::CreationDate), vec![PropStat {
AnyProperty::Request(PropertyRequest::DisplayName), prop: AnyProp(vec![
AnyProperty::Request(PropertyRequest::ResourceType), AnyProperty::Request(PropertyRequest::CreationDate),
AnyProperty::Request(PropertyRequest::SupportedLock), AnyProperty::Request(PropertyRequest::DisplayName),
]), AnyProperty::Request(PropertyRequest::ResourceType),
status: Status(http::status::StatusCode::OK), AnyProperty::Request(PropertyRequest::SupportedLock),
error: None, ]),
responsedescription: None, status: Status(http::status::StatusCode::OK),
}], error: None,
), responsedescription: None,
error: None, }],
responsedescription: None, ),
location: None, error: None,
}, responsedescription: None,
Response { location: None,
status_or_propstat: StatusOrPropstat::PropStat( },
Href("http://www.example.com/container/front.html".into()), Response {
vec![PropStat { status_or_propstat: StatusOrPropstat::PropStat(
prop: AnyProp(vec![ Href("http://www.example.com/container/front.html".into()),
AnyProperty::Request(PropertyRequest::CreationDate), vec![PropStat {
AnyProperty::Request(PropertyRequest::DisplayName), prop: AnyProp(vec![
AnyProperty::Request(PropertyRequest::GetContentLength), AnyProperty::Request(PropertyRequest::CreationDate),
AnyProperty::Request(PropertyRequest::GetContentType), AnyProperty::Request(PropertyRequest::DisplayName),
AnyProperty::Request(PropertyRequest::GetEtag), AnyProperty::Request(PropertyRequest::GetContentLength),
AnyProperty::Request(PropertyRequest::GetLastModified), AnyProperty::Request(PropertyRequest::GetContentType),
AnyProperty::Request(PropertyRequest::ResourceType), AnyProperty::Request(PropertyRequest::GetEtag),
AnyProperty::Request(PropertyRequest::SupportedLock), AnyProperty::Request(PropertyRequest::GetLastModified),
]), AnyProperty::Request(PropertyRequest::ResourceType),
status: Status(http::status::StatusCode::OK), AnyProperty::Request(PropertyRequest::SupportedLock),
error: None, ]),
responsedescription: None, status: Status(http::status::StatusCode::OK),
}], error: None,
), responsedescription: None,
error: None, }],
responsedescription: None, ),
location: None, error: None,
}, responsedescription: None,
], location: None,
responsedescription: None, },
}); ],
responsedescription: None,
}
);
} }
#[tokio::test] #[tokio::test]
async fn rfc_multistatus_value() { async fn rfc_multistatus_value() {
let src = r#" let src = r#"
@ -888,78 +1045,103 @@ mod tests {
</D:response> </D:response>
</D:multistatus>"#; </D:multistatus>"#;
let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = Reader::new(NsReader::from_reader(src.as_bytes()))
let got = rdr.find::<Multistatus::<Core>>().await.unwrap(); .await
.unwrap();
let got = rdr.find::<Multistatus<Core>>().await.unwrap();
assert_eq!(got, Multistatus { assert_eq!(
responses: vec![ got,
Response { Multistatus {
status_or_propstat: StatusOrPropstat::PropStat( responses: vec![
Href("/container/".into()), Response {
vec![PropStat { status_or_propstat: StatusOrPropstat::PropStat(
prop: AnyProp(vec![ Href("/container/".into()),
AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 17, 42, 21).unwrap())), vec![PropStat {
AnyProperty::Value(Property::DisplayName("Example collection".into())), prop: AnyProp(vec![
AnyProperty::Value(Property::ResourceType(vec![ResourceType::Collection])), AnyProperty::Value(Property::CreationDate(
AnyProperty::Value(Property::SupportedLock(vec![ FixedOffset::west_opt(8 * 3600)
LockEntry { .unwrap()
lockscope: LockScope::Exclusive, .with_ymd_and_hms(1997, 12, 01, 17, 42, 21)
locktype: LockType::Write, .unwrap()
}, )),
LockEntry { AnyProperty::Value(Property::DisplayName(
lockscope: LockScope::Shared, "Example collection".into()
locktype: LockType::Write, )),
}, AnyProperty::Value(Property::ResourceType(vec![
])), ResourceType::Collection
]), ])),
status: Status(http::status::StatusCode::OK), AnyProperty::Value(Property::SupportedLock(vec![
error: None, LockEntry {
responsedescription: None, lockscope: LockScope::Exclusive,
}], locktype: LockType::Write,
), },
error: None, LockEntry {
responsedescription: None, lockscope: LockScope::Shared,
location: None, locktype: LockType::Write,
},
}, ])),
Response { ]),
status_or_propstat: StatusOrPropstat::PropStat( status: Status(http::status::StatusCode::OK),
Href("/container/front.html".into()), error: None,
vec![PropStat { responsedescription: None,
prop: AnyProp(vec![ }],
AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600).unwrap().with_ymd_and_hms(1997, 12, 01, 18, 27, 21).unwrap())), ),
AnyProperty::Value(Property::DisplayName("Example HTML resource".into())), error: None,
AnyProperty::Value(Property::GetContentLength(4525)), responsedescription: None,
AnyProperty::Value(Property::GetContentType("text/html".into())), location: None,
AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), },
AnyProperty::Value(Property::GetLastModified(FixedOffset::west_opt(0).unwrap().with_ymd_and_hms(1998, 01, 12, 09, 25, 56).unwrap())), Response {
//@FIXME know bug, can't disambiguate between an empty resource status_or_propstat: StatusOrPropstat::PropStat(
//type value and a request resource type Href("/container/front.html".into()),
AnyProperty::Request(PropertyRequest::ResourceType), vec![PropStat {
AnyProperty::Value(Property::SupportedLock(vec![ prop: AnyProp(vec![
LockEntry { AnyProperty::Value(Property::CreationDate(
lockscope: LockScope::Exclusive, FixedOffset::west_opt(8 * 3600)
locktype: LockType::Write, .unwrap()
}, .with_ymd_and_hms(1997, 12, 01, 18, 27, 21)
LockEntry { .unwrap()
lockscope: LockScope::Shared, )),
locktype: LockType::Write, AnyProperty::Value(Property::DisplayName(
}, "Example HTML resource".into()
])), )),
]), AnyProperty::Value(Property::GetContentLength(4525)),
status: Status(http::status::StatusCode::OK), AnyProperty::Value(Property::GetContentType(
error: None, "text/html".into()
responsedescription: None, )),
}], AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())),
), AnyProperty::Value(Property::GetLastModified(
error: None, FixedOffset::west_opt(0)
responsedescription: None, .unwrap()
location: None, .with_ymd_and_hms(1998, 01, 12, 09, 25, 56)
.unwrap()
}, )),
], //@FIXME known bug, can't disambiguate between an empty resource
responsedescription: None, //type value and a request resource type
}); AnyProperty::Request(PropertyRequest::ResourceType),
AnyProperty::Value(Property::SupportedLock(vec![
LockEntry {
lockscope: LockScope::Exclusive,
locktype: LockType::Write,
},
LockEntry {
lockscope: LockScope::Shared,
locktype: LockType::Write,
},
])),
]),
status: Status(http::status::StatusCode::OK),
error: None,
responsedescription: None,
}],
),
error: None,
responsedescription: None,
location: None,
},
],
responsedescription: None,
}
);
} }
} }
View file
@ -1,8 +1,7 @@
use quick_xml::Error as QError;
use quick_xml::events::{Event, BytesText};
use super::types::*; use super::types::*;
use super::xml::{Node, Writer,QWrite,IWrite}; use super::xml::{IWrite, Node, QWrite, Writer};
use quick_xml::events::{BytesText, Event};
use quick_xml::Error as QError;
// --- XML ROOTS // --- XML ROOTS
@ -16,15 +15,17 @@ impl<E: Extension> QWrite for PropFind<E> {
match self { match self {
Self::PropName => { Self::PropName => {
let empty_propname = xml.create_dav_element("propname"); let empty_propname = xml.create_dav_element("propname");
xml.q.write_event_async(Event::Empty(empty_propname)).await? xml.q
}, .write_event_async(Event::Empty(empty_propname))
.await?
}
Self::AllProp(maybe_include) => { Self::AllProp(maybe_include) => {
let empty_allprop = xml.create_dav_element("allprop"); let empty_allprop = xml.create_dav_element("allprop");
xml.q.write_event_async(Event::Empty(empty_allprop)).await?; xml.q.write_event_async(Event::Empty(empty_allprop)).await?;
if let Some(include) = maybe_include { if let Some(include) = maybe_include {
include.qwrite(xml).await?; include.qwrite(xml).await?;
} }
}, }
Self::Prop(propname) => propname.qwrite(xml).await?, Self::Prop(propname) => propname.qwrite(xml).await?,
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
@ -45,9 +46,8 @@ impl<E: Extension> QWrite for PropertyUpdate<E> {
} }
} }
/// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE /// PROPFIND RESPONSE, PROPPATCH RESPONSE, COPY RESPONSE, MOVE RESPONSE
/// DELETE RESPONSE, /// DELETE RESPONSE,
impl<E: Extension> QWrite for Multistatus<E> { impl<E: Extension> QWrite for Multistatus<E> {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let start = xml.create_dav_element("multistatus"); let start = xml.create_dav_element("multistatus");
@ -140,7 +140,6 @@ impl<E: Extension> QWrite for Remove<E> {
} }
} }
impl<E: Extension> QWrite for PropName<E> { impl<E: Extension> QWrite for PropName<E> {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let start = xml.create_dav_element("prop"); let start = xml.create_dav_element("prop");
@ -176,14 +175,15 @@ impl<E: Extension> QWrite for AnyProperty<E> {
} }
} }
impl QWrite for Href { impl QWrite for Href {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> { async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let start = xml.create_dav_element("href"); let start = xml.create_dav_element("href");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(&self.0)))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
@ -216,9 +216,9 @@ impl<E: Extension> QWrite for StatusOrPropstat<E> {
href.qwrite(xml).await?; href.qwrite(xml).await?;
} }
status.qwrite(xml).await status.qwrite(xml).await
}, }
Self::PropStat(href, propstat_list) => { Self::PropStat(href, propstat_list) => {
href.qwrite(xml).await?; href.qwrite(xml).await?;
for propstat in propstat_list.iter() { for propstat in propstat_list.iter() {
propstat.qwrite(xml).await?; propstat.qwrite(xml).await?;
} }
@ -235,8 +235,14 @@ impl QWrite for Status {
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
let txt = format!("HTTP/1.1 {} {}", self.0.as_str(), self.0.canonical_reason().unwrap_or("No reason")); let txt = format!(
xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?; "HTTP/1.1 {} {}",
self.0.as_str(),
self.0.canonical_reason().unwrap_or("No reason")
);
xml.q
.write_event_async(Event::Text(BytesText::new(&txt)))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
@ -250,7 +256,9 @@ impl QWrite for ResponseDescription {
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(&self.0))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(&self.0)))
.await?;
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
@ -296,62 +304,76 @@ impl<E: Extension> QWrite for Property<E> {
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(&date.to_rfc3339())))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
DisplayName(name) => { DisplayName(name) => {
// <D:displayname>Example collection</D:displayname> // <D:displayname>Example collection</D:displayname>
let start = xml.create_dav_element("displayname"); let start = xml.create_dav_element("displayname");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(name))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(name)))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
GetContentLanguage(lang) => { GetContentLanguage(lang) => {
let start = xml.create_dav_element("getcontentlanguage"); let start = xml.create_dav_element("getcontentlanguage");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(lang))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(lang)))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
GetContentLength(len) => { GetContentLength(len) => {
// <D:getcontentlength>4525</D:getcontentlength> // <D:getcontentlength>4525</D:getcontentlength>
let start = xml.create_dav_element("getcontentlength"); let start = xml.create_dav_element("getcontentlength");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(&len.to_string()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(&len.to_string())))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
GetContentType(ct) => { GetContentType(ct) => {
// <D:getcontenttype>text/html</D:getcontenttype> // <D:getcontenttype>text/html</D:getcontenttype>
let start = xml.create_dav_element("getcontenttype"); let start = xml.create_dav_element("getcontenttype");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(&ct))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(&ct)))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
GetEtag(et) => { GetEtag(et) => {
// <D:getetag>"zzyzx"</D:getetag> // <D:getetag>"zzyzx"</D:getetag>
let start = xml.create_dav_element("getetag"); let start = xml.create_dav_element("getetag");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(et))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(et)))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
GetLastModified(date) => { GetLastModified(date) => {
// <D:getlastmodified>Mon, 12 Jan 1998 09:25:56 GMT</D:getlastmodified> // <D:getlastmodified>Mon, 12 Jan 1998 09:25:56 GMT</D:getlastmodified>
let start = xml.create_dav_element("getlastmodified"); let start = xml.create_dav_element("getlastmodified");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822()))).await?; xml.q
.write_event_async(Event::Text(BytesText::new(&date.to_rfc2822())))
.await?;
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
LockDiscovery(many_locks) => { LockDiscovery(many_locks) => {
// <D:lockdiscovery><D:activelock> ... </D:activelock></D:lockdiscovery> // <D:lockdiscovery><D:activelock> ... </D:activelock></D:lockdiscovery>
let start = xml.create_dav_element("lockdiscovery"); let start = xml.create_dav_element("lockdiscovery");
@ -362,17 +384,17 @@ impl<E: Extension> QWrite for Property<E> {
lock.qwrite(xml).await?; lock.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
}, }
ResourceType(many_types) => { ResourceType(many_types) => {
// <D:resourcetype><D:collection/></D:resourcetype> // <D:resourcetype><D:collection/></D:resourcetype>
// <D:resourcetype/> // <D:resourcetype/>
// <x:resourcetype xmlns:x="DAV:"> // <x:resourcetype xmlns:x="DAV:">
// <x:collection/> // <x:collection/>
// <f:search-results xmlns:f="http://www.example.com/ns"/> // <f:search-results xmlns:f="http://www.example.com/ns"/>
// </x:resourcetype> // </x:resourcetype>
let start = xml.create_dav_element("resourcetype"); let start = xml.create_dav_element("resourcetype");
if many_types.is_empty() { if many_types.is_empty() {
xml.q.write_event_async(Event::Empty(start)).await?; xml.q.write_event_async(Event::Empty(start)).await?;
@ -384,7 +406,7 @@ impl<E: Extension> QWrite for Property<E> {
} }
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
} }
}, }
SupportedLock(many_entries) => { SupportedLock(many_entries) => {
// <D:supportedlock/> // <D:supportedlock/>
@ -401,7 +423,7 @@ impl<E: Extension> QWrite for Property<E> {
} }
xml.q.write_event_async(Event::End(end)).await?; xml.q.write_event_async(Event::End(end)).await?;
} }
}, }
Extension(inner) => inner.qwrite(xml).await?, Extension(inner) => inner.qwrite(xml).await?,
}; };
Ok(()) Ok(())
@ -413,8 +435,10 @@ impl<E: Extension> QWrite for ResourceType<E> {
match self { match self {
Self::Collection => { Self::Collection => {
let empty_collection = xml.create_dav_element("collection"); let empty_collection = xml.create_dav_element("collection");
xml.q.write_event_async(Event::Empty(empty_collection)).await xml.q
}, .write_event_async(Event::Empty(empty_collection))
.await
}
Self::Extension(inner) => inner.qwrite(xml).await, Self::Extension(inner) => inner.qwrite(xml).await,
} }
} }
@ -425,7 +449,7 @@ impl<E: Extension> QWrite for Include<E> {
let start = xml.create_dav_element("include"); let start = xml.create_dav_element("include");
let end = start.to_end(); let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
for prop in self.0.iter() { for prop in self.0.iter() {
prop.qwrite(xml).await?; prop.qwrite(xml).await?;
} }
@ -505,8 +529,8 @@ impl QWrite for LockType {
Self::Write => { Self::Write => {
let empty_write = xml.create_dav_element("write"); let empty_write = xml.create_dav_element("write");
xml.q.write_event_async(Event::Empty(empty_write)).await? xml.q.write_event_async(Event::Empty(empty_write)).await?
}, }
}; };
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
@ -521,12 +545,12 @@ impl QWrite for LockScope {
Self::Exclusive => { Self::Exclusive => {
let empty_tag = xml.create_dav_element("exclusive"); let empty_tag = xml.create_dav_element("exclusive");
xml.q.write_event_async(Event::Empty(empty_tag)).await? xml.q.write_event_async(Event::Empty(empty_tag)).await?
}, }
Self::Shared => { Self::Shared => {
let empty_tag = xml.create_dav_element("shared"); let empty_tag = xml.create_dav_element("shared");
xml.q.write_event_async(Event::Empty(empty_tag)).await? xml.q.write_event_async(Event::Empty(empty_tag)).await?
}, }
}; };
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
} }
@ -538,7 +562,11 @@ impl QWrite for Owner {
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
match self { match self {
Self::Txt(txt) => xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await?, Self::Txt(txt) => {
xml.q
.write_event_async(Event::Text(BytesText::new(&txt)))
.await?
}
Self::Href(href) => href.qwrite(xml).await?, Self::Href(href) => href.qwrite(xml).await?,
Self::Unknown => (), Self::Unknown => (),
} }
@ -553,9 +581,21 @@ impl QWrite for Depth {
xml.q.write_event_async(Event::Start(start.clone())).await?; xml.q.write_event_async(Event::Start(start.clone())).await?;
match self { match self {
Self::Zero => xml.q.write_event_async(Event::Text(BytesText::new("0"))).await?, Self::Zero => {
Self::One => xml.q.write_event_async(Event::Text(BytesText::new("1"))).await?, xml.q
Self::Infinity => xml.q.write_event_async(Event::Text(BytesText::new("infinity"))).await?, .write_event_async(Event::Text(BytesText::new("0")))
.await?
}
Self::One => {
xml.q
.write_event_async(Event::Text(BytesText::new("1")))
.await?
}
Self::Infinity => {
xml.q
.write_event_async(Event::Text(BytesText::new("infinity")))
.await?
}
}; };
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
@ -570,9 +610,15 @@ impl QWrite for Timeout {
match self { match self {
Self::Seconds(count) => { Self::Seconds(count) => {
let txt = format!("Second-{}", count); let txt = format!("Second-{}", count);
xml.q.write_event_async(Event::Text(BytesText::new(&txt))).await? xml.q
}, .write_event_async(Event::Text(BytesText::new(&txt)))
Self::Infinite => xml.q.write_event_async(Event::Text(BytesText::new("Infinite"))).await? .await?
}
Self::Infinite => {
xml.q
.write_event_async(Event::Text(BytesText::new("Infinite")))
.await?
}
}; };
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
} }
@ -620,8 +666,10 @@ impl<E: Extension> QWrite for Violation<E> {
}; };
match self { match self {
Violation::LockTokenMatchesRequestUri => atom("lock-token-matches-request-uri").await, Violation::LockTokenMatchesRequestUri => atom("lock-token-matches-request-uri").await,
Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => atom("lock-token-submitted").await, Violation::LockTokenSubmitted(hrefs) if hrefs.is_empty() => {
atom("lock-token-submitted").await
}
Violation::LockTokenSubmitted(hrefs) => { Violation::LockTokenSubmitted(hrefs) => {
let start = xml.create_dav_element("lock-token-submitted"); let start = xml.create_dav_element("lock-token-submitted");
let end = start.to_end(); let end = start.to_end();
@ -631,8 +679,10 @@ impl<E: Extension> QWrite for Violation<E> {
href.qwrite(xml).await?; href.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => atom("no-conflicting-lock").await, Violation::NoConflictingLock(hrefs) if hrefs.is_empty() => {
atom("no-conflicting-lock").await
}
Violation::NoConflictingLock(hrefs) => { Violation::NoConflictingLock(hrefs) => {
let start = xml.create_dav_element("no-conflicting-lock"); let start = xml.create_dav_element("no-conflicting-lock");
let end = start.to_end(); let end = start.to_end();
@ -642,11 +692,13 @@ impl<E: Extension> QWrite for Violation<E> {
href.qwrite(xml).await?; href.qwrite(xml).await?;
} }
xml.q.write_event_async(Event::End(end)).await xml.q.write_event_async(Event::End(end)).await
}, }
Violation::NoExternalEntities => atom("no-external-entities").await, Violation::NoExternalEntities => atom("no-external-entities").await,
Violation::PreservedLiveProperties => atom("preserved-live-properties").await, Violation::PreservedLiveProperties => atom("preserved-live-properties").await,
Violation::PropfindFiniteDepth => atom("propfind-finite-depth").await, Violation::PropfindFiniteDepth => atom("propfind-finite-depth").await,
Violation::CannotModifyProtectedProperty => atom("cannot-modify-protected-property").await, Violation::CannotModifyProtectedProperty => {
atom("cannot-modify-protected-property").await
}
Violation::Extension(inner) => inner.qwrite(xml).await, Violation::Extension(inner) => inner.qwrite(xml).await,
} }
} }
@ -654,30 +706,32 @@ impl<E: Extension> QWrite for Violation<E> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use super::super::xml; use super::super::xml;
use super::*;
use crate::realization::Core; use crate::realization::Core;
use tokio::io::AsyncWriteExt; use tokio::io::AsyncWriteExt;
/// To run only the unit tests and avoid the behavior ones: /// To run only the unit tests and avoid the behavior ones:
/// cargo test --bin aerogramme /// cargo test --bin aerogramme
async fn serialize(elem: &impl QWrite) -> String { async fn serialize(elem: &impl QWrite) -> String {
let mut buffer = Vec::new(); let mut buffer = Vec::new();
let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer); let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4); let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ]; let ns_to_apply = vec![("xmlns:D".into(), "DAV:".into())];
let mut writer = Writer { q, ns_to_apply }; let mut writer = Writer { q, ns_to_apply };
elem.qwrite(&mut writer).await.expect("xml serialization"); elem.qwrite(&mut writer).await.expect("xml serialization");
tokio_buffer.flush().await.expect("tokio buffer flush"); tokio_buffer.flush().await.expect("tokio buffer flush");
let got = std::str::from_utf8(buffer.as_slice()).unwrap(); let got = std::str::from_utf8(buffer.as_slice()).unwrap();
return got.into() return got.into();
} }
async fn deserialize<T: xml::Node<T>>(src: &str) -> T { async fn deserialize<T: xml::Node<T>>(src: &str) -> T {
let mut rdr = xml::Reader::new(quick_xml::reader::NsReader::from_reader(src.as_bytes())).await.unwrap(); let mut rdr = xml::Reader::new(quick_xml::reader::NsReader::from_reader(src.as_bytes()))
.await
.unwrap();
rdr.find().await.unwrap() rdr.find().await.unwrap()
} }
@ -688,15 +742,18 @@ mod tests {
let got = serialize(&orig).await; let got = serialize(&orig).await;
let expected = r#"<D:href xmlns:D="DAV:">/SOGo/dav/so/</D:href>"#; let expected = r#"<D:href xmlns:D="DAV:">/SOGo/dav/so/</D:href>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<Href>(got.as_str()).await, orig) assert_eq!(deserialize::<Href>(got.as_str()).await, orig)
} }
#[tokio::test] #[tokio::test]
async fn basic_multistatus() { async fn basic_multistatus() {
let orig = Multistatus::<Core> { let orig = Multistatus::<Core> {
responses: vec![], responses: vec![],
responsedescription: Some(ResponseDescription("Hello world".into())) responsedescription: Some(ResponseDescription("Hello world".into())),
}; };
let got = serialize(&orig).await; let got = serialize(&orig).await;
@ -704,18 +761,18 @@ mod tests {
<D:responsedescription>Hello world</D:responsedescription> <D:responsedescription>Hello world</D:responsedescription>
</D:multistatus>"#; </D:multistatus>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig)
} }
#[tokio::test] #[tokio::test]
async fn rfc_error_delete_locked() { async fn rfc_error_delete_locked() {
let orig = Error::<Core>(vec![ let orig = Error::<Core>(vec![Violation::LockTokenSubmitted(vec![Href(
Violation::LockTokenSubmitted(vec![ "/locked/".into(),
Href("/locked/".into()) )])]);
])
]);
let got = serialize(&orig).await; let got = serialize(&orig).await;
let expected = r#"<D:error xmlns:D="DAV:"> let expected = r#"<D:error xmlns:D="DAV:">
@ -724,7 +781,10 @@ mod tests {
</D:lock-token-submitted> </D:lock-token-submitted>
</D:error>"#; </D:error>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<Error<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<Error<Core>>(got.as_str()).await, orig)
} }
@ -738,7 +798,10 @@ mod tests {
<D:propname/> <D:propname/>
</D:propfind>"#; </D:propfind>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<PropFind::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<PropFind::<Core>>(got.as_str()).await, orig)
} }
@ -759,7 +822,7 @@ mod tests {
status: Status(http::status::StatusCode::OK), status: Status(http::status::StatusCode::OK),
error: None, error: None,
responsedescription: None, responsedescription: None,
}] }],
), ),
error: None, error: None,
responsedescription: None, responsedescription: None,
@ -782,8 +845,8 @@ mod tests {
status: Status(http::status::StatusCode::OK), status: Status(http::status::StatusCode::OK),
error: None, error: None,
responsedescription: None, responsedescription: None,
} }],
]), ),
error: None, error: None,
responsedescription: None, responsedescription: None,
location: None, location: None,
@ -825,8 +888,10 @@ mod tests {
</D:response> </D:response>
</D:multistatus>"#; </D:multistatus>"#;
assert_eq!(
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); &got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig)
} }
@ -835,17 +900,20 @@ mod tests {
let orig = PropFind::<Core>::AllProp(None); let orig = PropFind::<Core>::AllProp(None);
let got = serialize(&orig).await; let got = serialize(&orig).await;
let expected = r#"<D:propfind xmlns:D="DAV:"> let expected = r#"<D:propfind xmlns:D="DAV:">
<D:allprop/> <D:allprop/>
</D:propfind>"#; </D:propfind>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<PropFind::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<PropFind::<Core>>(got.as_str()).await, orig)
} }
#[tokio::test] #[tokio::test]
async fn rfc_allprop_res() { async fn rfc_allprop_res() {
use chrono::{FixedOffset,TimeZone}; use chrono::{FixedOffset, TimeZone};
let orig = Multistatus::<Core> { let orig = Multistatus::<Core> {
responses: vec![ responses: vec![
@ -853,28 +921,34 @@ mod tests {
status_or_propstat: StatusOrPropstat::PropStat( status_or_propstat: StatusOrPropstat::PropStat(
Href("/container/".into()), Href("/container/".into()),
vec![PropStat { vec![PropStat {
prop: AnyProp(vec![ prop: AnyProp(vec![
AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600) AnyProperty::Value(Property::CreationDate(
.unwrap() FixedOffset::west_opt(8 * 3600)
.with_ymd_and_hms(1997, 12, 1, 17, 42, 21) .unwrap()
.unwrap())), .with_ymd_and_hms(1997, 12, 1, 17, 42, 21)
AnyProperty::Value(Property::DisplayName("Example collection".into())), .unwrap(),
AnyProperty::Value(Property::ResourceType(vec![ResourceType::Collection])), )),
AnyProperty::Value(Property::SupportedLock(vec![ AnyProperty::Value(Property::DisplayName(
LockEntry { "Example collection".into(),
lockscope: LockScope::Exclusive, )),
locktype: LockType::Write, AnyProperty::Value(Property::ResourceType(vec![
}, ResourceType::Collection,
LockEntry { ])),
lockscope: LockScope::Shared, AnyProperty::Value(Property::SupportedLock(vec![
locktype: LockType::Write, LockEntry {
}, lockscope: LockScope::Exclusive,
])), locktype: LockType::Write,
]), },
status: Status(http::status::StatusCode::OK), LockEntry {
error: None, lockscope: LockScope::Shared,
responsedescription: None, locktype: LockType::Write,
}] },
])),
]),
status: Status(http::status::StatusCode::OK),
error: None,
responsedescription: None,
}],
), ),
error: None, error: None,
responsedescription: None, responsedescription: None,
@ -884,37 +958,43 @@ mod tests {
status_or_propstat: StatusOrPropstat::PropStat( status_or_propstat: StatusOrPropstat::PropStat(
Href("/container/front.html".into()), Href("/container/front.html".into()),
vec![PropStat { vec![PropStat {
prop: AnyProp(vec![ prop: AnyProp(vec![
AnyProperty::Value(Property::CreationDate(FixedOffset::west_opt(8 * 3600) AnyProperty::Value(Property::CreationDate(
.unwrap() FixedOffset::west_opt(8 * 3600)
.with_ymd_and_hms(1997, 12, 1, 18, 27, 21) .unwrap()
.unwrap())), .with_ymd_and_hms(1997, 12, 1, 18, 27, 21)
AnyProperty::Value(Property::DisplayName("Example HTML resource".into())), .unwrap(),
AnyProperty::Value(Property::GetContentLength(4525)), )),
AnyProperty::Value(Property::GetContentType("text/html".into())), AnyProperty::Value(Property::DisplayName(
AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())), "Example HTML resource".into(),
AnyProperty::Value(Property::GetLastModified(FixedOffset::east_opt(0) )),
.unwrap() AnyProperty::Value(Property::GetContentLength(4525)),
.with_ymd_and_hms(1998, 1, 12, 9, 25, 56) AnyProperty::Value(Property::GetContentType("text/html".into())),
.unwrap())), AnyProperty::Value(Property::GetEtag(r#""zzyzx""#.into())),
                        //@FIXME known bug, can't disambiguate between an empty resource                                AnyProperty::Value(Property::GetLastModified(
//type value and a request resource type FixedOffset::east_opt(0)
AnyProperty::Request(PropertyRequest::ResourceType), .unwrap()
AnyProperty::Value(Property::SupportedLock(vec![ .with_ymd_and_hms(1998, 1, 12, 9, 25, 56)
LockEntry { .unwrap(),
lockscope: LockScope::Exclusive, )),
                            locktype: LockType::Write,                                       //@FIXME known bug, can't disambiguate between an empty resource
}, //type value and a request resource type
LockEntry { AnyProperty::Request(PropertyRequest::ResourceType),
lockscope: LockScope::Shared, AnyProperty::Value(Property::SupportedLock(vec![
locktype: LockType::Write, LockEntry {
}, lockscope: LockScope::Exclusive,
])), locktype: LockType::Write,
]), },
status: Status(http::status::StatusCode::OK), LockEntry {
error: None, lockscope: LockScope::Shared,
responsedescription: None, locktype: LockType::Write,
}] },
])),
]),
status: Status(http::status::StatusCode::OK),
error: None,
responsedescription: None,
}],
), ),
error: None, error: None,
responsedescription: None, responsedescription: None,
@ -993,15 +1073,18 @@ mod tests {
</D:response> </D:response>
</D:multistatus>"#; </D:multistatus>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig)
} }
#[tokio::test] #[tokio::test]
async fn rfc_allprop_include() { async fn rfc_allprop_include() {
let orig = PropFind::<Core>::AllProp(Some(Include(vec![ let orig = PropFind::<Core>::AllProp(Some(Include(vec![
PropertyRequest::DisplayName, PropertyRequest::DisplayName,
PropertyRequest::ResourceType, PropertyRequest::ResourceType,
]))); ])));
let got = serialize(&orig).await; let got = serialize(&orig).await;
@ -1014,19 +1097,20 @@ mod tests {
</D:include> </D:include>
</D:propfind>"#; </D:propfind>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<PropFind::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<PropFind::<Core>>(got.as_str()).await, orig)
} }
#[tokio::test] #[tokio::test]
async fn rfc_propertyupdate() { async fn rfc_propertyupdate() {
let orig = PropertyUpdate::<Core>(vec![ let orig = PropertyUpdate::<Core>(vec![
PropertyUpdateItem::Set(Set(PropValue(vec![ PropertyUpdateItem::Set(Set(PropValue(vec![Property::GetContentLanguage(
Property::GetContentLanguage("fr-FR".into()), "fr-FR".into(),
]))), )]))),
PropertyUpdateItem::Remove(Remove(PropName(vec![ PropertyUpdateItem::Remove(Remove(PropName(vec![PropertyRequest::DisplayName]))),
PropertyRequest::DisplayName,
]))),
]); ]);
let got = serialize(&orig).await; let got = serialize(&orig).await;
@ -1043,8 +1127,14 @@ mod tests {
</D:remove> </D:remove>
</D:propertyupdate>"#; </D:propertyupdate>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
assert_eq!(deserialize::<PropertyUpdate::<Core>>(got.as_str()).await, orig) &got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(
deserialize::<PropertyUpdate::<Core>>(got.as_str()).await,
orig
)
} }
#[tokio::test] #[tokio::test]
@ -1053,7 +1143,7 @@ mod tests {
responses: vec![Response { responses: vec![Response {
status_or_propstat: StatusOrPropstat::Status( status_or_propstat: StatusOrPropstat::Status(
vec![Href("http://www.example.com/container/resource3".into())], vec![Href("http://www.example.com/container/resource3".into())],
Status(http::status::StatusCode::from_u16(423).unwrap()) Status(http::status::StatusCode::from_u16(423).unwrap()),
), ),
error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])), error: Some(Error(vec![Violation::LockTokenSubmitted(vec![])])),
responsedescription: None, responsedescription: None,
@ -1074,7 +1164,10 @@ mod tests {
</D:response> </D:response>
</D:multistatus>"#; </D:multistatus>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<Multistatus::<Core>>(got.as_str()).await, orig)
} }
@ -1083,7 +1176,9 @@ mod tests {
let orig = LockInfo { let orig = LockInfo {
lockscope: LockScope::Exclusive, lockscope: LockScope::Exclusive,
locktype: LockType::Write, locktype: LockType::Write,
owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), owner: Some(Owner::Href(Href(
"http://example.org/~ejw/contact.html".into(),
))),
}; };
let got = serialize(&orig).await; let got = serialize(&orig).await;
@ -1100,23 +1195,30 @@ mod tests {
</D:owner> </D:owner>
</D:lockinfo>"#; </D:lockinfo>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<LockInfo>(got.as_str()).await, orig) assert_eq!(deserialize::<LockInfo>(got.as_str()).await, orig)
} }
#[tokio::test] #[tokio::test]
async fn rfc_simple_lock_response() { async fn rfc_simple_lock_response() {
let orig = PropValue::<Core>(vec![ let orig = PropValue::<Core>(vec![Property::LockDiscovery(vec![ActiveLock {
Property::LockDiscovery(vec![ActiveLock { lockscope: LockScope::Exclusive,
lockscope: LockScope::Exclusive, locktype: LockType::Write,
locktype: LockType::Write, depth: Depth::Infinity,
depth: Depth::Infinity, owner: Some(Owner::Href(Href(
owner: Some(Owner::Href(Href("http://example.org/~ejw/contact.html".into()))), "http://example.org/~ejw/contact.html".into(),
timeout: Some(Timeout::Seconds(604800)), ))),
locktoken: Some(LockToken(Href("urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into()))), timeout: Some(Timeout::Seconds(604800)),
lockroot: LockRoot(Href("http://example.com/workspace/webdav/proposal.doc".into())), locktoken: Some(LockToken(Href(
}]), "urn:uuid:e71d4fae-5dec-22d6-fea5-00a0c91e6be4".into(),
]); ))),
lockroot: LockRoot(Href(
"http://example.com/workspace/webdav/proposal.doc".into(),
)),
}])]);
let got = serialize(&orig).await; let got = serialize(&orig).await;
@ -1144,7 +1246,10 @@ mod tests {
</D:lockdiscovery> </D:lockdiscovery>
</D:prop>"#; </D:prop>"#;
assert_eq!(&got, expected, "\n---GOT---\n{got}\n---EXP---\n{expected}\n"); assert_eq!(
&got, expected,
"\n---GOT---\n{got}\n---EXP---\n{expected}\n"
);
assert_eq!(deserialize::<PropValue::<Core>>(got.as_str()).await, orig) assert_eq!(deserialize::<PropValue::<Core>>(got.as_str()).await, orig)
} }
} }


@ -10,10 +10,10 @@ pub enum ParsingError {
TagNotFound, TagNotFound,
InvalidValue, InvalidValue,
Utf8Error(std::str::Utf8Error), Utf8Error(std::str::Utf8Error),
QuickXml(quick_xml::Error), QuickXml(quick_xml::Error),
Chrono(chrono::format::ParseError), Chrono(chrono::format::ParseError),
Int(std::num::ParseIntError), Int(std::num::ParseIntError),
Eof Eof,
} }
impl std::fmt::Display for ParsingError { impl std::fmt::Display for ParsingError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {


@ -7,19 +7,19 @@ pub mod error;
pub mod xml; pub mod xml;
// webdav // webdav
pub mod types;
pub mod encoder;
pub mod decoder; pub mod decoder;
pub mod encoder;
pub mod types;
// calendar // calendar
pub mod caltypes;
pub mod calencoder;
pub mod caldecoder; pub mod caldecoder;
pub mod calencoder;
pub mod caltypes;
// acl (wip) // acl (wip)
pub mod acltypes;
pub mod aclencoder;
pub mod acldecoder; pub mod acldecoder;
pub mod aclencoder;
pub mod acltypes;
// versioning (wip) // versioning (wip)
mod versioningtypes; mod versioningtypes;


@ -1,8 +1,8 @@
use super::types as dav;
use super::caltypes as cal;
use super::acltypes as acl; use super::acltypes as acl;
use super::xml; use super::caltypes as cal;
use super::error; use super::error;
use super::types as dav;
use super::xml;
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Disabled(()); pub struct Disabled(());
@ -12,12 +12,15 @@ impl xml::QRead<Disabled> for Disabled {
} }
} }
impl xml::QWrite for Disabled { impl xml::QWrite for Disabled {
async fn qwrite(&self, _xml: &mut xml::Writer<impl xml::IWrite>) -> Result<(), quick_xml::Error> { async fn qwrite(
&self,
_xml: &mut xml::Writer<impl xml::IWrite>,
) -> Result<(), quick_xml::Error> {
unreachable!() unreachable!()
} }
} }
/// The base WebDAV /// The base WebDAV
/// ///
/// Any extension is disabled through an object we can't build /// Any extension is disabled through an object we can't build
/// due to a private inner element. /// due to a private inner element.
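A minimal sketch of the pattern described here, assuming only standard Rust (the names below are illustrative, not the crate's actual items): a tuple struct whose single field is private has no public constructor, so code outside its module can name the type but never build a value of it.

    // Sketch: an "unconstructible" marker type used to switch an extension off.
    mod ext {
        // The field is private, so `ext::Disabled(())` does not compile elsewhere.
        #[derive(Debug, PartialEq, Clone)]
        pub struct Disabled(());
    }

    fn main() {
        // let _nope = ext::Disabled(()); // error[E0603]: tuple struct constructor is private
        println!("Disabled can be referenced, but never instantiated, outside `ext`");
    }

Any trait impl that plugs such a type into an associated-type slot advertises the slot while guaranteeing it can never carry data.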
@ -33,8 +36,7 @@ impl dav::Extension for Core {
// WebDAV with the base Calendar implementation (RFC4791) // WebDAV with the base Calendar implementation (RFC4791)
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Calendar {} pub struct Calendar {}
impl dav::Extension for Calendar impl dav::Extension for Calendar {
{
type Error = cal::Violation; type Error = cal::Violation;
type Property = cal::Property; type Property = cal::Property;
type PropertyRequest = cal::PropertyRequest; type PropertyRequest = cal::PropertyRequest;
@ -44,8 +46,7 @@ impl dav::Extension for Calendar
// ACL // ACL
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Acl {} pub struct Acl {}
impl dav::Extension for Acl impl dav::Extension for Acl {
{
type Error = Disabled; type Error = Disabled;
type Property = acl::Property; type Property = acl::Property;
type PropertyRequest = acl::PropertyRequest; type PropertyRequest = acl::PropertyRequest;
@ -77,7 +78,10 @@ impl xml::QRead<Property> for Property {
} }
} }
impl xml::QWrite for Property { impl xml::QWrite for Property {
async fn qwrite(&self, xml: &mut xml::Writer<impl xml::IWrite>) -> Result<(), quick_xml::Error> { async fn qwrite(
&self,
xml: &mut xml::Writer<impl xml::IWrite>,
) -> Result<(), quick_xml::Error> {
match self { match self {
Self::Cal(c) => c.qwrite(xml).await, Self::Cal(c) => c.qwrite(xml).await,
Self::Acl(a) => a.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await,
@ -96,11 +100,16 @@ impl xml::QRead<PropertyRequest> for PropertyRequest {
Err(error::ParsingError::Recoverable) => (), Err(error::ParsingError::Recoverable) => (),
otherwise => return otherwise.map(PropertyRequest::Cal), otherwise => return otherwise.map(PropertyRequest::Cal),
} }
acl::PropertyRequest::qread(xml).await.map(PropertyRequest::Acl) acl::PropertyRequest::qread(xml)
.await
.map(PropertyRequest::Acl)
} }
} }
impl xml::QWrite for PropertyRequest { impl xml::QWrite for PropertyRequest {
async fn qwrite(&self, xml: &mut xml::Writer<impl xml::IWrite>) -> Result<(), quick_xml::Error> { async fn qwrite(
&self,
xml: &mut xml::Writer<impl xml::IWrite>,
) -> Result<(), quick_xml::Error> {
match self { match self {
Self::Cal(c) => c.qwrite(xml).await, Self::Cal(c) => c.qwrite(xml).await,
Self::Acl(a) => a.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await,
@ -123,7 +132,10 @@ impl xml::QRead<ResourceType> for ResourceType {
} }
} }
impl xml::QWrite for ResourceType { impl xml::QWrite for ResourceType {
async fn qwrite(&self, xml: &mut xml::Writer<impl xml::IWrite>) -> Result<(), quick_xml::Error> { async fn qwrite(
&self,
xml: &mut xml::Writer<impl xml::IWrite>,
) -> Result<(), quick_xml::Error> {
match self { match self {
Self::Cal(c) => c.qwrite(xml).await, Self::Cal(c) => c.qwrite(xml).await,
Self::Acl(a) => a.qwrite(xml).await, Self::Acl(a) => a.qwrite(xml).await,


@ -1,8 +1,8 @@
#![allow(dead_code)] #![allow(dead_code)]
use std::fmt::Debug; use std::fmt::Debug;
use chrono::{DateTime,FixedOffset};
use super::xml; use super::xml;
use chrono::{DateTime, FixedOffset};
/// It's how we implement a DAV extension /// It's how we implement a DAV extension
/// (That's the dark magic part...) /// (That's the dark magic part...)
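For orientation, a rough, hedged sketch of the associated-type plumbing this comment alludes to (trait and type names are simplified stand-ins, not the exact aero-dav definitions): one trait bundles the pluggable pieces, a realization picks concrete types for every slot, and generic structures are written once against the trait.

    use std::fmt::Debug;

    // Stand-in for the extension trait: each slot is an associated type.
    trait Extension {
        type Error: Debug;
        type Property: Debug;
    }

    #[derive(Debug)]
    struct NoExt; // plays the role of a disabled slot

    struct CoreOnly;
    impl Extension for CoreOnly {
        type Error = NoExt;
        type Property = NoExt;
    }

    // Generic containers stay agnostic of which realization is active.
    struct MultistatusLike<E: Extension> {
        properties: Vec<E::Property>,
    }

    fn main() {
        let m: MultistatusLike<CoreOnly> = MultistatusLike { properties: vec![] };
        println!("{} extension properties", m.properties.len());
    }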
@ -42,7 +42,7 @@ pub struct ActiveLock {
/// ///
/// <!ELEMENT collection EMPTY > /// <!ELEMENT collection EMPTY >
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct Collection{} pub struct Collection {}
/// 14.4 depth XML Element /// 14.4 depth XML Element
/// ///
@ -58,7 +58,7 @@ pub struct Collection{}
pub enum Depth { pub enum Depth {
Zero, Zero,
One, One,
Infinity Infinity,
} }
/// 14.5 error XML Element /// 14.5 error XML Element
@ -153,7 +153,6 @@ pub enum Violation<E: Extension> {
/// PROPFIND requests on collections. /// PROPFIND requests on collections.
PropfindFiniteDepth, PropfindFiniteDepth,
/// Name: cannot-modify-protected-property /// Name: cannot-modify-protected-property
/// ///
/// Use with: 403 Forbidden /// Use with: 403 Forbidden
@ -172,7 +171,7 @@ pub enum Violation<E: Extension> {
/// Name: exclusive /// Name: exclusive
/// ///
/// Purpose: Specifies an exclusive lock. /// Purpose: Specifies an exclusive lock.
/// ///
/// <!ELEMENT exclusive EMPTY > /// <!ELEMENT exclusive EMPTY >
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct Exclusive {} pub struct Exclusive {}
@ -193,7 +192,6 @@ pub struct Exclusive {}
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Href(pub String); pub struct Href(pub String);
/// 14.8. include XML Element /// 14.8. include XML Element
/// ///
/// Name: include /// Name: include
@ -280,7 +278,7 @@ pub struct LockRoot(pub Href);
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum LockScope { pub enum LockScope {
Exclusive, Exclusive,
Shared Shared,
} }
/// 14.14. locktoken XML Element /// 14.14. locktoken XML Element
@ -288,7 +286,7 @@ pub enum LockScope {
/// Name: locktoken /// Name: locktoken
/// ///
/// Purpose: The lock token associated with a lock. /// Purpose: The lock token associated with a lock.
/// ///
/// Description: The href contains a single lock token URI, which /// Description: The href contains a single lock token URI, which
/// refers to the lock. /// refers to the lock.
/// ///
@ -314,7 +312,7 @@ pub enum LockType {
/// ///
/// ///
/// <!ELEMENT write EMPTY > /// <!ELEMENT write EMPTY >
Write Write,
} }
/// 14.16. multistatus XML Element /// 14.16. multistatus XML Element
@ -477,7 +475,6 @@ pub struct PropStat<E: Extension> {
pub responsedescription: Option<ResponseDescription>, pub responsedescription: Option<ResponseDescription>,
} }
/// 14.23. remove XML Element /// 14.23. remove XML Element
/// ///
/// Name: remove /// Name: remove
@ -579,15 +576,14 @@ pub struct Set<E: Extension>(pub PropValue<E>);
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct Shared {} pub struct Shared {}
/// 14.28. status XML Element /// 14.28. status XML Element
/// ///
/// Name: status /// Name: status
/// ///
/// Purpose: Holds a single HTTP status-line. /// Purpose: Holds a single HTTP status-line.
/// ///
/// Value: status-line (defined in Section 6.1 of [RFC2616]) /// Value: status-line (defined in Section 6.1 of [RFC2616])
/// ///
/// <!ELEMENT status (#PCDATA) > /// <!ELEMENT status (#PCDATA) >
//@FIXME: Better typing is possible with an enum for example //@FIXME: Better typing is possible with an enum for example
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
@ -624,7 +620,6 @@ pub enum Timeout {
Infinite, Infinite,
} }
/// 15. DAV Properties /// 15. DAV Properties
/// ///
/// For DAV properties, the name of the property is also the same as the /// For DAV properties, the name of the property is also the same as the
@ -704,7 +699,7 @@ pub enum Property<E: Extension> {
CreationDate(DateTime<FixedOffset>), CreationDate(DateTime<FixedOffset>),
/// 15.2. displayname Property /// 15.2. displayname Property
/// ///
/// Name: displayname /// Name: displayname
/// ///
/// Purpose: Provides a name for the resource that is suitable for /// Purpose: Provides a name for the resource that is suitable for
@ -734,7 +729,6 @@ pub enum Property<E: Extension> {
/// <!ELEMENT displayname (#PCDATA) > /// <!ELEMENT displayname (#PCDATA) >
DisplayName(String), DisplayName(String),
/// 15.3. getcontentlanguage Property /// 15.3. getcontentlanguage Property
/// ///
/// Name: getcontentlanguage /// Name: getcontentlanguage
@ -893,7 +887,6 @@ pub enum Property<E: Extension> {
/// <!ELEMENT lockdiscovery (activelock)* > /// <!ELEMENT lockdiscovery (activelock)* >
LockDiscovery(Vec<ActiveLock>), LockDiscovery(Vec<ActiveLock>),
/// 15.9. resourcetype Property /// 15.9. resourcetype Property
/// ///
/// Name: resourcetype /// Name: resourcetype
@ -920,7 +913,7 @@ pub enum Property<E: Extension> {
/// type. /// type.
/// ///
/// Example: (fictional example to show extensibility) /// Example: (fictional example to show extensibility)
/// ///
/// <x:resourcetype xmlns:x="DAV:"> /// <x:resourcetype xmlns:x="DAV:">
/// <x:collection/> /// <x:collection/>
/// <f:search-results xmlns:f="http://www.example.com/ns"/> /// <f:search-results xmlns:f="http://www.example.com/ns"/>


@ -1,8 +1,8 @@
use futures::Future; use futures::Future;
use quick_xml::events::{Event, BytesStart}; use quick_xml::events::{BytesStart, Event};
use quick_xml::name::ResolveResult; use quick_xml::name::ResolveResult;
use quick_xml::reader::NsReader; use quick_xml::reader::NsReader;
use tokio::io::{AsyncWrite, AsyncBufRead}; use tokio::io::{AsyncBufRead, AsyncWrite};
use super::error::ParsingError; use super::error::ParsingError;
@ -17,7 +17,10 @@ pub trait IRead = AsyncBufRead + Unpin;
// Serialization/Deserialization traits // Serialization/Deserialization traits
pub trait QWrite { pub trait QWrite {
fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> impl Future<Output = Result<(), quick_xml::Error>> + Send; fn qwrite(
&self,
xml: &mut Writer<impl IWrite>,
) -> impl Future<Output = Result<(), quick_xml::Error>> + Send;
} }
pub trait QRead<T> { pub trait QRead<T> {
fn qread(xml: &mut Reader<impl IRead>) -> impl Future<Output = Result<T, ParsingError>>; fn qread(xml: &mut Reader<impl IRead>) -> impl Future<Output = Result<T, ParsingError>>;
@ -44,7 +47,11 @@ impl<T: IWrite> Writer<T> {
fn create_ns_element(&mut self, ns: &str, name: &str) -> BytesStart<'static> { fn create_ns_element(&mut self, ns: &str, name: &str) -> BytesStart<'static> {
let mut start = BytesStart::new(format!("{}:{}", ns, name)); let mut start = BytesStart::new(format!("{}:{}", ns, name));
if !self.ns_to_apply.is_empty() { if !self.ns_to_apply.is_empty() {
start.extend_attributes(self.ns_to_apply.iter().map(|(k, n)| (k.as_str(), n.as_str()))); start.extend_attributes(
self.ns_to_apply
.iter()
.map(|(k, n)| (k.as_str(), n.as_str())),
);
self.ns_to_apply.clear() self.ns_to_apply.clear()
} }
start start
@ -66,16 +73,26 @@ impl<T: IRead> Reader<T> {
let parents = vec![]; let parents = vec![];
let prev = Event::Eof; let prev = Event::Eof;
buf.clear(); buf.clear();
Ok(Self { cur, prev, parents, rdr, buf }) Ok(Self {
cur,
prev,
parents,
rdr,
buf,
})
} }
/// read one more tag /// read one more tag
/// do not expose it publicly /// do not expose it publicly
async fn next(&mut self) -> Result<Event<'static>, ParsingError> { async fn next(&mut self) -> Result<Event<'static>, ParsingError> {
let evt = self.rdr.read_event_into_async(&mut self.buf).await?.into_owned(); let evt = self
self.buf.clear(); .rdr
self.prev = std::mem::replace(&mut self.cur, evt); .read_event_into_async(&mut self.buf)
Ok(self.prev.clone()) .await?
.into_owned();
self.buf.clear();
self.prev = std::mem::replace(&mut self.cur, evt);
Ok(self.prev.clone())
} }
/// skip a node at current level /// skip a node at current level
@ -84,9 +101,12 @@ impl<T: IRead> Reader<T> {
//println!("skipping inside node {:?} value {:?}", self.parents.last(), self.cur); //println!("skipping inside node {:?} value {:?}", self.parents.last(), self.cur);
match &self.cur { match &self.cur {
Event::Start(b) => { Event::Start(b) => {
let _span = self.rdr.read_to_end_into_async(b.to_end().name(), &mut self.buf).await?; let _span = self
.rdr
.read_to_end_into_async(b.to_end().name(), &mut self.buf)
.await?;
self.next().await self.next().await
}, }
Event::End(_) => Err(ParsingError::WrongToken), Event::End(_) => Err(ParsingError::WrongToken),
Event::Eof => Err(ParsingError::Eof), Event::Eof => Err(ParsingError::Eof),
_ => self.next().await, _ => self.next().await,
@ -100,13 +120,13 @@ impl<T: IRead> Reader<T> {
Event::End(be) => be.name(), Event::End(be) => be.name(),
_ => return false, _ => return false,
}; };
let (extr_ns, local) = self.rdr.resolve_element(qname); let (extr_ns, local) = self.rdr.resolve_element(qname);
if local.into_inner() != key.as_bytes() { if local.into_inner() != key.as_bytes() {
return false return false;
} }
match extr_ns { match extr_ns {
ResolveResult::Bound(v) => v.into_inner() == ns, ResolveResult::Bound(v) => v.into_inner() == ns,
_ => false, _ => false,
@ -142,7 +162,7 @@ impl<T: IRead> Reader<T> {
Event::CData(unescaped) => { Event::CData(unescaped) => {
acc.push_str(std::str::from_utf8(unescaped.as_ref())?); acc.push_str(std::str::from_utf8(unescaped.as_ref())?);
self.next().await? self.next().await?
}, }
Event::Text(escaped) => { Event::Text(escaped) => {
acc.push_str(escaped.unescape()?.as_ref()); acc.push_str(escaped.unescape()?.as_ref());
self.next().await? self.next().await?
@ -153,33 +173,41 @@ impl<T: IRead> Reader<T> {
} }
} }
pub async fn maybe_read<N: Node<N>>(&mut self, t: &mut Option<N>, dirty: &mut bool) -> Result<(), ParsingError> { pub async fn maybe_read<N: Node<N>>(
&mut self,
t: &mut Option<N>,
dirty: &mut bool,
) -> Result<(), ParsingError> {
if !self.parent_has_child() { if !self.parent_has_child() {
return Ok(()) return Ok(());
} }
match N::qread(self).await { match N::qread(self).await {
Ok(v) => { Ok(v) => {
*t = Some(v); *t = Some(v);
*dirty = true; *dirty = true;
Ok(()) Ok(())
}, }
Err(ParsingError::Recoverable) => Ok(()), Err(ParsingError::Recoverable) => Ok(()),
Err(e) => Err(e), Err(e) => Err(e),
} }
} }
pub async fn maybe_push<N: Node<N>>(&mut self, t: &mut Vec<N>, dirty: &mut bool) -> Result<(), ParsingError> { pub async fn maybe_push<N: Node<N>>(
&mut self,
t: &mut Vec<N>,
dirty: &mut bool,
) -> Result<(), ParsingError> {
if !self.parent_has_child() { if !self.parent_has_child() {
return Ok(()) return Ok(());
} }
match N::qread(self).await { match N::qread(self).await {
Ok(v) => { Ok(v) => {
t.push(v); t.push(v);
*dirty = true; *dirty = true;
Ok(()) Ok(())
}, }
Err(ParsingError::Recoverable) => Ok(()), Err(ParsingError::Recoverable) => Ok(()),
Err(e) => Err(e), Err(e) => Err(e),
} }
@ -220,7 +248,7 @@ impl<T: IRead> Reader<T> {
pub async fn collect<N: Node<N>>(&mut self) -> Result<Vec<N>, ParsingError> { pub async fn collect<N: Node<N>>(&mut self) -> Result<Vec<N>, ParsingError> {
let mut acc = Vec::new(); let mut acc = Vec::new();
if !self.parent_has_child() { if !self.parent_has_child() {
return Ok(acc) return Ok(acc);
} }
loop { loop {
@ -229,7 +257,7 @@ impl<T: IRead> Reader<T> {
Event::End(_) => return Ok(acc), Event::End(_) => return Ok(acc),
_ => { _ => {
self.skip().await?; self.skip().await?;
}, }
}, },
Ok(v) => acc.push(v), Ok(v) => acc.push(v),
Err(e) => return Err(e), Err(e) => return Err(e),
@ -242,13 +270,13 @@ impl<T: IRead> Reader<T> {
let evt = match self.peek() { let evt = match self.peek() {
Event::Empty(_) if self.is_tag(ns, key) => { Event::Empty(_) if self.is_tag(ns, key) => {
// hack to make `prev_attr` works // hack to make `prev_attr` works
// here we duplicate the current tag // here we duplicate the current tag
// as in other words, we virtually moved one token // as in other words, we virtually moved one token
// which is useful for prev_attr and any logic based on // which is useful for prev_attr and any logic based on
// self.prev + self.open() on empty nodes // self.prev + self.open() on empty nodes
self.prev = self.cur.clone(); self.prev = self.cur.clone();
self.cur.clone() self.cur.clone()
}, }
Event::Start(_) if self.is_tag(ns, key) => self.next().await?, Event::Start(_) if self.is_tag(ns, key) => self.next().await?,
_ => return Err(ParsingError::Recoverable), _ => return Err(ParsingError::Recoverable),
}; };
@ -258,7 +286,11 @@ impl<T: IRead> Reader<T> {
Ok(evt) Ok(evt)
} }
pub async fn open_start(&mut self, ns: &[u8], key: &str) -> Result<Event<'static>, ParsingError> { pub async fn open_start(
&mut self,
ns: &[u8],
key: &str,
) -> Result<Event<'static>, ParsingError> {
//println!("try open start tag {:?}, on {:?}", key, self.peek()); //println!("try open start tag {:?}, on {:?}", key, self.peek());
let evt = match self.peek() { let evt = match self.peek() {
Event::Start(_) if self.is_tag(ns, key) => self.next().await?, Event::Start(_) if self.is_tag(ns, key) => self.next().await?,
@ -270,7 +302,11 @@ impl<T: IRead> Reader<T> {
Ok(evt) Ok(evt)
} }
pub async fn maybe_open(&mut self, ns: &[u8], key: &str) -> Result<Option<Event<'static>>, ParsingError> { pub async fn maybe_open(
&mut self,
ns: &[u8],
key: &str,
) -> Result<Option<Event<'static>>, ParsingError> {
match self.open(ns, key).await { match self.open(ns, key).await {
Ok(v) => Ok(Some(v)), Ok(v) => Ok(Some(v)),
Err(ParsingError::Recoverable) => Ok(None), Err(ParsingError::Recoverable) => Ok(None),
@ -278,7 +314,11 @@ impl<T: IRead> Reader<T> {
} }
} }
pub async fn maybe_open_start(&mut self, ns: &[u8], key: &str) -> Result<Option<Event<'static>>, ParsingError> { pub async fn maybe_open_start(
&mut self,
ns: &[u8],
key: &str,
) -> Result<Option<Event<'static>>, ParsingError> {
match self.open_start(ns, key).await { match self.open_start(ns, key).await {
Ok(v) => Ok(Some(v)), Ok(v) => Ok(Some(v)),
Err(ParsingError::Recoverable) => Ok(None), Err(ParsingError::Recoverable) => Ok(None),
@ -289,9 +329,12 @@ impl<T: IRead> Reader<T> {
pub fn prev_attr(&self, attr: &str) -> Option<String> { pub fn prev_attr(&self, attr: &str) -> Option<String> {
match &self.prev { match &self.prev {
Event::Start(bs) | Event::Empty(bs) => match bs.try_get_attribute(attr) { Event::Start(bs) | Event::Empty(bs) => match bs.try_get_attribute(attr) {
Ok(Some(attr)) => attr.decode_and_unescape_value(&self.rdr).ok().map(|v| v.into_owned()), Ok(Some(attr)) => attr
.decode_and_unescape_value(&self.rdr)
.ok()
.map(|v| v.into_owned()),
_ => None, _ => None,
} },
_ => None, _ => None,
} }
} }
@ -303,7 +346,7 @@ impl<T: IRead> Reader<T> {
// Handle the empty case // Handle the empty case
if !self.parent_has_child() { if !self.parent_has_child() {
self.parents.pop(); self.parents.pop();
return self.next().await return self.next().await;
} }
// Handle the start/end case // Handle the start/end case
@ -311,11 +354,10 @@ impl<T: IRead> Reader<T> {
match self.peek() { match self.peek() {
Event::End(_) => { Event::End(_) => {
self.parents.pop(); self.parents.pop();
return self.next().await return self.next().await;
}, }
_ => self.skip().await?, _ => self.skip().await?,
}; };
} }
} }
} }


@ -35,3 +35,4 @@ smtp-message.workspace = true
smtp-server.workspace = true smtp-server.workspace = true
tracing.workspace = true tracing.workspace = true
quick-xml.workspace = true quick-xml.workspace = true
icalendar.workspace = true


@ -1,26 +1,30 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use hyper::{Request, Response, body::Bytes}; use futures::sink::SinkExt;
use hyper::body::Incoming;
use http_body_util::Full;
use futures::stream::StreamExt; use futures::stream::StreamExt;
use futures::stream::TryStreamExt; use futures::stream::TryStreamExt;
use http_body_util::BodyStream;
use http_body_util::StreamBody;
use http_body_util::combinators::UnsyncBoxBody; use http_body_util::combinators::UnsyncBoxBody;
use hyper::body::Frame;
use tokio_util::sync::PollSender;
use std::io::{Error, ErrorKind};
use futures::sink::SinkExt;
use tokio_util::io::{SinkWriter, CopyToBytes};
use http_body_util::BodyExt; use http_body_util::BodyExt;
use http_body_util::BodyStream;
use http_body_util::Full;
use http_body_util::StreamBody;
use hyper::body::Frame;
use hyper::body::Incoming;
use hyper::{body::Bytes, Request, Response};
use std::io::{Error, ErrorKind};
use tokio_util::io::{CopyToBytes, SinkWriter};
use tokio_util::sync::PollSender;
use aero_dav::types as dav;
use aero_dav::xml as dxml;
use super::controller::HttpResponse; use super::controller::HttpResponse;
use super::node::PutPolicy; use super::node::PutPolicy;
use aero_dav::types as dav;
use aero_dav::xml as dxml;
pub(crate) fn depth(req: &Request<impl hyper::body::Body>) -> dav::Depth { pub(crate) fn depth(req: &Request<impl hyper::body::Body>) -> dav::Depth {
match req.headers().get("Depth").map(hyper::header::HeaderValue::to_str) { match req
.headers()
.get("Depth")
.map(hyper::header::HeaderValue::to_str)
{
Some(Ok("0")) => dav::Depth::Zero, Some(Ok("0")) => dav::Depth::Zero,
Some(Ok("1")) => dav::Depth::One, Some(Ok("1")) => dav::Depth::One,
Some(Ok("Infinity")) => dav::Depth::Infinity, Some(Ok("Infinity")) => dav::Depth::Infinity,
@ -29,20 +33,28 @@ pub(crate) fn depth(req: &Request<impl hyper::body::Body>) -> dav::Depth {
} }
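A hedged sketch of a unit test one could add next to this Depth helper; it reuses http_body_util::Empty and hyper's re-exported Bytes (both already imported elsewhere in this file) as a stand-in request body, which is an assumption rather than part of this patch:

    #[cfg(test)]
    mod depth_header_tests {
        use super::*;
        use http_body_util::Empty;
        use hyper::body::Bytes;

        #[test]
        fn depth_one_is_recognized() {
            // Build a minimal PROPFIND-like request carrying `Depth: 1`.
            let req = hyper::Request::builder()
                .method("PROPFIND")
                .uri("/calendar/")
                .header("Depth", "1")
                .body(Empty::<Bytes>::new())
                .unwrap();
            assert!(matches!(depth(&req), dav::Depth::One));
        }
    }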
pub(crate) fn put_policy(req: &Request<impl hyper::body::Body>) -> Result<PutPolicy> { pub(crate) fn put_policy(req: &Request<impl hyper::body::Body>) -> Result<PutPolicy> {
if let Some(maybe_txt_etag) = req.headers().get("If-Match").map(hyper::header::HeaderValue::to_str) { if let Some(maybe_txt_etag) = req
.headers()
.get("If-Match")
.map(hyper::header::HeaderValue::to_str)
{
let etag = maybe_txt_etag?; let etag = maybe_txt_etag?;
let dquote_count = etag.chars().filter(|c| *c == '"').count(); let dquote_count = etag.chars().filter(|c| *c == '"').count();
if dquote_count != 2 { if dquote_count != 2 {
bail!("Either If-Match value is invalid or it's not supported (only single etag is supported)"); bail!("Either If-Match value is invalid or it's not supported (only single etag is supported)");
} }
return Ok(PutPolicy::ReplaceEtag(etag.into())) return Ok(PutPolicy::ReplaceEtag(etag.into()));
} }
if let Some(maybe_txt_etag) = req.headers().get("If-None-Match").map(hyper::header::HeaderValue::to_str) { if let Some(maybe_txt_etag) = req
.headers()
.get("If-None-Match")
.map(hyper::header::HeaderValue::to_str)
{
let etag = maybe_txt_etag?; let etag = maybe_txt_etag?;
if etag == "*" { if etag == "*" {
return Ok(PutPolicy::CreateOnly) return Ok(PutPolicy::CreateOnly);
} }
bail!("Either If-None-Match value is invalid or it's not supported (only asterisk is supported)") bail!("Either If-None-Match value is invalid or it's not supported (only asterisk is supported)")
} }
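To make the conditional-PUT semantics explicit, here is a small self-contained sketch of the RFC 7232 behaviour these headers encode (the enum and helper below are illustrative stand-ins, not the crate's actual PutPolicy or node API):

    // Illustrative only: If-Match pins an etag, If-None-Match: * demands absence.
    #[derive(Debug, PartialEq)]
    enum Policy {
        CreateOnly,          // If-None-Match: *
        ReplaceEtag(String), // If-Match: "<etag>"
        Unconditional,       // no conditional header sent
    }

    fn write_is_allowed(policy: &Policy, existing_etag: Option<&str>) -> bool {
        match (policy, existing_etag) {
            (Policy::CreateOnly, None) => true,      // create only if nothing exists yet
            (Policy::CreateOnly, Some(_)) => false,  // would map to 412 Precondition Failed
            (Policy::ReplaceEtag(wanted), Some(current)) => wanted.as_str() == current,
            (Policy::ReplaceEtag(_), None) => false, // nothing to replace
            (Policy::Unconditional, _) => true,
        }
    }

    fn main() {
        assert!(write_is_allowed(&Policy::CreateOnly, None));
        assert!(!write_is_allowed(&Policy::ReplaceEtag("\"zzyzx\"".into()), Some("\"other\"")));
        println!("conditional PUT checks pass");
    }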
@ -54,7 +66,10 @@ pub(crate) fn text_body(txt: &'static str) -> UnsyncBoxBody<Bytes, std::io::Erro
UnsyncBoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {})) UnsyncBoxBody::new(Full::new(Bytes::from(txt)).map_err(|e| match e {}))
} }
pub(crate) fn serialize<T: dxml::QWrite + Send + 'static>(status_ok: hyper::StatusCode, elem: T) -> Result<HttpResponse> { pub(crate) fn serialize<T: dxml::QWrite + Send + 'static>(
status_ok: hyper::StatusCode,
elem: T,
) -> Result<HttpResponse> {
let (tx, rx) = tokio::sync::mpsc::channel::<Bytes>(1); let (tx, rx) = tokio::sync::mpsc::channel::<Bytes>(1);
// Build the writer // Build the writer
@ -62,10 +77,21 @@ pub(crate) fn serialize<T: dxml::QWrite + Send + 'static>(status_ok: hyper::Stat
let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe)); let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe));
let mut writer = SinkWriter::new(CopyToBytes::new(sink)); let mut writer = SinkWriter::new(CopyToBytes::new(sink));
let q = quick_xml::writer::Writer::new_with_indent(&mut writer, b' ', 4); let q = quick_xml::writer::Writer::new_with_indent(&mut writer, b' ', 4);
let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()), ("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()) ]; let ns_to_apply = vec![
("xmlns:D".into(), "DAV:".into()),
("xmlns:C".into(), "urn:ietf:params:xml:ns:caldav".into()),
];
let mut qwriter = dxml::Writer { q, ns_to_apply }; let mut qwriter = dxml::Writer { q, ns_to_apply };
let decl = quick_xml::events::BytesDecl::from_start(quick_xml::events::BytesStart::from_content("xml version=\"1.0\" encoding=\"utf-8\"", 0)); let decl =
match qwriter.q.write_event_async(quick_xml::events::Event::Decl(decl)).await { quick_xml::events::BytesDecl::from_start(quick_xml::events::BytesStart::from_content(
"xml version=\"1.0\" encoding=\"utf-8\"",
0,
));
match qwriter
.q
.write_event_async(quick_xml::events::Event::Decl(decl))
.await
{
Ok(_) => (), Ok(_) => (),
Err(e) => tracing::error!(err=?e, "unable to write XML declaration <?xml ... >"), Err(e) => tracing::error!(err=?e, "unable to write XML declaration <?xml ... >"),
} }
@ -75,7 +101,6 @@ pub(crate) fn serialize<T: dxml::QWrite + Send + 'static>(status_ok: hyper::Stat
} }
}); });
// Build the reader // Build the reader
let recv = tokio_stream::wrappers::ReceiverStream::new(rx); let recv = tokio_stream::wrappers::ReceiverStream::new(rx);
let stream = StreamBody::new(recv.map(|v| Ok(Frame::data(v)))); let stream = StreamBody::new(recv.map(|v| Ok(Frame::data(v))));
@ -89,7 +114,6 @@ pub(crate) fn serialize<T: dxml::QWrite + Send + 'static>(status_ok: hyper::Stat
Ok(response) Ok(response)
} }
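The function above bridges quick_xml's async writer to a hyper body through an mpsc channel. A distilled, standalone sketch of that bridge follows; it assumes the same crates this file already depends on (tokio, tokio-util, tokio-stream, futures, http-body-util, hyper 1.x) and is meant as orientation, not a drop-in replacement:

    use futures::{sink::SinkExt, stream::StreamExt};
    use http_body_util::StreamBody;
    use hyper::body::{Bytes, Frame};
    use std::io::{Error, ErrorKind};
    use tokio::io::AsyncWriteExt;
    use tokio_util::io::{CopyToBytes, SinkWriter};
    use tokio_util::sync::PollSender;

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        let (tx, rx) = tokio::sync::mpsc::channel::<Bytes>(1);

        // Producer side: anything that writes to an AsyncWrite feeds the channel.
        tokio::task::spawn(async move {
            let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe));
            let mut writer = SinkWriter::new(CopyToBytes::new(sink));
            writer.write_all(b"<?xml version=\"1.0\"?>").await.unwrap();
            writer.flush().await.unwrap();
        });

        // Consumer side: the receiver becomes a streaming body made of data frames.
        let recv = tokio_stream::wrappers::ReceiverStream::new(rx);
        let _body = StreamBody::new(recv.map(|chunk| Ok::<_, Error>(Frame::data(chunk))));
        Ok(())
    }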
/// Deserialize a request body to an XML request /// Deserialize a request body to an XML request
pub(crate) async fn deserialize<T: dxml::Node<T>>(req: Request<Incoming>) -> Result<T> { pub(crate) async fn deserialize<T: dxml::Node<T>>(req: Request<Incoming>) -> Result<T> {
let stream_of_frames = BodyStream::new(req.into_body()); let stream_of_frames = BodyStream::new(req.into_body());
@ -97,7 +121,10 @@ pub(crate) async fn deserialize<T: dxml::Node<T>>(req: Request<Incoming>) -> Res
.map_ok(|frame| frame.into_data()) .map_ok(|frame| frame.into_data())
.map(|obj| match obj { .map(|obj| match obj {
Ok(Ok(v)) => Ok(v), Ok(Ok(v)) => Ok(v),
Ok(Err(_)) => Err(std::io::Error::new(std::io::ErrorKind::Other, "conversion error")), Ok(Err(_)) => Err(std::io::Error::new(
std::io::ErrorKind::Other,
"conversion error",
)),
Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)), Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)),
}); });
let async_read = tokio_util::io::StreamReader::new(stream_of_bytes); let async_read = tokio_util::io::StreamReader::new(stream_of_bytes);
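The body-to-reader conversion above can be exercised in isolation. A minimal sketch, assuming an in-memory stream stands in for the hyper Incoming body (purely for illustration):

    use futures::stream;
    use hyper::body::Bytes;
    use tokio::io::AsyncBufReadExt;
    use tokio_util::io::StreamReader;

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        // Each item plays the role of one decoded body frame.
        let chunks = vec![
            Ok::<_, std::io::Error>(Bytes::from_static(b"<D:propfind xmlns:D=\"DAV:\">")),
            Ok(Bytes::from_static(b"<D:propname/></D:propfind>")),
        ];
        let mut async_read = StreamReader::new(stream::iter(chunks));

        // Any AsyncBufRead consumer (such as the XML Reader) can now drain it.
        let mut collected = String::new();
        async_read.read_line(&mut collected).await?;
        assert!(collected.contains("propname"));
        Ok(())
    }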


@ -1,21 +1,21 @@
use anyhow::Result; use anyhow::Result;
use http_body_util::combinators::{UnsyncBoxBody, BoxBody}; use futures::stream::{StreamExt, TryStreamExt};
use hyper::body::Incoming; use http_body_util::combinators::{BoxBody, UnsyncBoxBody};
use hyper::{Request, Response, body::Bytes};
use http_body_util::BodyStream; use http_body_util::BodyStream;
use http_body_util::StreamBody; use http_body_util::StreamBody;
use hyper::body::Frame; use hyper::body::Frame;
use futures::stream::{StreamExt, TryStreamExt}; use hyper::body::Incoming;
use hyper::{body::Bytes, Request, Response};
use aero_collections::user::User; use aero_collections::user::User;
use aero_dav::types as dav;
use aero_dav::realization::All;
use aero_dav::caltypes as cal; use aero_dav::caltypes as cal;
use aero_dav::realization::All;
use aero_dav::types as dav;
use crate::dav::codec::{serialize, deserialize, depth, text_body}; use crate::dav::codec;
use crate::dav::codec::{depth, deserialize, serialize, text_body};
use crate::dav::node::{DavNode, PutPolicy}; use crate::dav::node::{DavNode, PutPolicy};
use crate::dav::resource::RootNode; use crate::dav::resource::RootNode;
use crate::dav::codec;
pub(super) type ArcUser = std::sync::Arc<User>; pub(super) type ArcUser = std::sync::Arc<User>;
pub(super) type HttpResponse = Response<UnsyncBoxBody<Bytes, std::io::Error>>; pub(super) type HttpResponse = Response<UnsyncBoxBody<Bytes, std::io::Error>>;
@ -39,19 +39,22 @@ pub(crate) struct Controller {
req: Request<Incoming>, req: Request<Incoming>,
} }
impl Controller { impl Controller {
pub(crate) async fn route(user: std::sync::Arc<User>, req: Request<Incoming>) -> Result<HttpResponse> { pub(crate) async fn route(
user: std::sync::Arc<User>,
req: Request<Incoming>,
) -> Result<HttpResponse> {
let path = req.uri().path().to_string(); let path = req.uri().path().to_string();
let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect(); let path_segments: Vec<_> = path.split("/").filter(|s| *s != "").collect();
let method = req.method().as_str().to_uppercase(); let method = req.method().as_str().to_uppercase();
let can_create = matches!(method.as_str(), "PUT" | "MKCOL" | "MKCALENDAR"); let can_create = matches!(method.as_str(), "PUT" | "MKCOL" | "MKCALENDAR");
let node = match (RootNode {}).fetch(&user, &path_segments, can_create).await{ let node = match (RootNode {}).fetch(&user, &path_segments, can_create).await {
Ok(v) => v, Ok(v) => v,
Err(e) => { Err(e) => {
tracing::warn!(err=?e, "dav node fetch failed"); tracing::warn!(err=?e, "dav node fetch failed");
return Ok(Response::builder() return Ok(Response::builder()
.status(404) .status(404)
.body(codec::text_body("Resource not found"))?) .body(codec::text_body("Resource not found"))?);
} }
}; };
@ -80,7 +83,6 @@ impl Controller {
} }
} }
// --- Per-method functions --- // --- Per-method functions ---
    /// REPORT was first described in the "Versioning Extension" of WebDAV /// REPORT was first described in the "Versioning Extension" of WebDAV
@ -89,7 +91,7 @@ impl Controller {
    /// Note: the current implementation is not generic at all; it is heavily tied to CalDAV. /// Note: the current implementation is not generic at all; it is heavily tied to CalDAV.
/// A rewrite would be required to make it more generic (with the extension system that has /// A rewrite would be required to make it more generic (with the extension system that has
/// been introduced in aero-dav) /// been introduced in aero-dav)
async fn report(self) -> Result<HttpResponse> { async fn report(self) -> Result<HttpResponse> {
let status = hyper::StatusCode::from_u16(207)?; let status = hyper::StatusCode::from_u16(207)?;
let report = match deserialize::<cal::Report<All>>(self.req).await { let report = match deserialize::<cal::Report<All>>(self.req).await {
@ -97,54 +99,75 @@ impl Controller {
Err(e) => { Err(e) => {
tracing::error!(err=?e, "unable to decode REPORT body"); tracing::error!(err=?e, "unable to decode REPORT body");
return Ok(Response::builder() return Ok(Response::builder()
.status(400) .status(400)
.body(text_body("Bad request"))?) .body(text_body("Bad request"))?);
} }
}; };
// Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary // Internal representation that will handle processed request
// list of URLs let (mut ok_node, mut not_found) = (Vec::new(), Vec::new());
// @FIXME let calprop: Option<cal::CalendarSelector<All>>;
let multiget = match report {
cal::Report::Multiget(m) => m, // Extracting request information
cal::Report::Query(q) => todo!(), match report {
cal::Report::FreeBusy(_) => return Ok(Response::builder() cal::Report::Multiget(m) => {
.status(501) // Multiget is really like a propfind where Depth: 0|1|Infinity is replaced by an arbitrary
.body(text_body("Not implemented"))?), // list of URLs
// Getting the list of nodes
for h in m.href.into_iter() {
let maybe_collected_node = match Path::new(h.0.as_str()) {
Ok(Path::Abs(p)) => RootNode {}
.fetch(&self.user, p.as_slice(), false)
.await
.or(Err(h)),
Ok(Path::Rel(p)) => self
.node
.fetch(&self.user, p.as_slice(), false)
.await
.or(Err(h)),
Err(_) => Err(h),
};
match maybe_collected_node {
Ok(v) => ok_node.push(v),
Err(h) => not_found.push(h),
};
}
calprop = m.selector;
}
cal::Report::Query(q) => {
calprop = q.selector;
ok_node = apply_filter(&self.user, self.node.children(&self.user).await, q.filter)
.try_collect()
.await?;
}
cal::Report::FreeBusy(_) => {
return Ok(Response::builder()
.status(501)
.body(text_body("Not implemented"))?)
}
}; };
// Getting the list of nodes
let (mut ok_node, mut not_found) = (Vec::new(), Vec::new());
for h in multiget.href.into_iter() {
let maybe_collected_node = match Path::new(h.0.as_str()) {
Ok(Path::Abs(p)) => RootNode{}.fetch(&self.user, p.as_slice(), false).await.or(Err(h)),
Ok(Path::Rel(p)) => self.node.fetch(&self.user, p.as_slice(), false).await.or(Err(h)),
Err(_) => Err(h),
};
match maybe_collected_node {
Ok(v) => ok_node.push(v),
Err(h) => not_found.push(h),
};
}
// Getting props // Getting props
let props = match multiget.selector { let props = match calprop {
None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())), None | Some(cal::CalendarSelector::AllProp) => Some(dav::PropName(ALLPROP.to_vec())),
Some(cal::CalendarSelector::PropName) => None, Some(cal::CalendarSelector::PropName) => None,
Some(cal::CalendarSelector::Prop(inner)) => Some(inner), Some(cal::CalendarSelector::Prop(inner)) => Some(inner),
}; };
serialize(status, Self::multistatus(&self.user, ok_node, not_found, props).await) serialize(
status,
Self::multistatus(&self.user, ok_node, not_found, props).await,
)
} }
/// PROPFIND is the standard way to fetch WebDAV properties /// PROPFIND is the standard way to fetch WebDAV properties
async fn propfind(self) -> Result<HttpResponse> { async fn propfind(self) -> Result<HttpResponse> {
let depth = depth(&self.req); let depth = depth(&self.req);
if matches!(depth, dav::Depth::Infinity) { if matches!(depth, dav::Depth::Infinity) {
return Ok(Response::builder() return Ok(Response::builder()
.status(501) .status(501)
.body(text_body("Depth: Infinity not implemented"))?) .body(text_body("Depth: Infinity not implemented"))?);
} }
let status = hyper::StatusCode::from_u16(207)?; let status = hyper::StatusCode::from_u16(207)?;
@ -153,7 +176,9 @@ impl Controller {
// request body MUST be treated as if it were an 'allprop' request. // request body MUST be treated as if it were an 'allprop' request.
        // @FIXME here we handle any invalid body as an allprop: an empty request is thus correctly // @FIXME here we handle any invalid body as an allprop: an empty request is thus correctly
        // handled, but a corrupted request is also silently treated as allprop. // handled, but a corrupted request is also silently treated as allprop.
let propfind = deserialize::<dav::PropFind<All>>(self.req).await.unwrap_or_else(|_| dav::PropFind::<All>::AllProp(None)); let propfind = deserialize::<dav::PropFind<All>>(self.req)
.await
.unwrap_or_else(|_| dav::PropFind::<All>::AllProp(None));
tracing::debug!(recv=?propfind, "inferred propfind request"); tracing::debug!(recv=?propfind, "inferred propfind request");
// Collect nodes as PROPFIND is not limited to the targeted node // Collect nodes as PROPFIND is not limited to the targeted node
@ -170,29 +195,36 @@ impl Controller {
dav::PropFind::AllProp(Some(dav::Include(mut include))) => { dav::PropFind::AllProp(Some(dav::Include(mut include))) => {
include.extend_from_slice(&ALLPROP); include.extend_from_slice(&ALLPROP);
Some(dav::PropName(include)) Some(dav::PropName(include))
}, }
dav::PropFind::Prop(inner) => Some(inner), dav::PropFind::Prop(inner) => Some(inner),
}; };
// Not Found is currently impossible considering the way we designed this function // Not Found is currently impossible considering the way we designed this function
let not_found = vec![]; let not_found = vec![];
serialize(status, Self::multistatus(&self.user, nodes, not_found, propname).await) serialize(
status,
Self::multistatus(&self.user, nodes, not_found, propname).await,
)
} }
async fn put(self) -> Result<HttpResponse> { async fn put(self) -> Result<HttpResponse> {
let put_policy = codec::put_policy(&self.req)?; let put_policy = codec::put_policy(&self.req)?;
let stream_of_frames = BodyStream::new(self.req.into_body()); let stream_of_frames = BodyStream::new(self.req.into_body());
let stream_of_bytes = stream_of_frames let stream_of_bytes = stream_of_frames
.map_ok(|frame| frame.into_data()) .map_ok(|frame| frame.into_data())
.map(|obj| match obj { .map(|obj| match obj {
Ok(Ok(v)) => Ok(v), Ok(Ok(v)) => Ok(v),
Ok(Err(_)) => Err(std::io::Error::new(std::io::ErrorKind::Other, "conversion error")), Ok(Err(_)) => Err(std::io::Error::new(
Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)), std::io::ErrorKind::Other,
}).boxed(); "conversion error",
)),
Err(err) => Err(std::io::Error::new(std::io::ErrorKind::Other, err)),
})
.boxed();
let etag = self.node.put(put_policy, stream_of_bytes).await?; let etag = self.node.put(put_policy, stream_of_bytes).await?;
let response = Response::builder() let response = Response::builder()
.status(201) .status(201)
.header("ETag", etag) .header("ETag", etag)
@ -202,7 +234,7 @@ impl Controller {
Ok(response) Ok(response)
} }
async fn get(self) -> Result<HttpResponse> { async fn get(self) -> Result<HttpResponse> {
let stream_body = StreamBody::new(self.node.content().map_ok(|v| Frame::data(v))); let stream_body = StreamBody::new(self.node.content().map_ok(|v| Frame::data(v)));
let boxed_body = UnsyncBoxBody::new(stream_body); let boxed_body = UnsyncBoxBody::new(stream_body);
@ -227,17 +259,33 @@ impl Controller {
// --- Common utility functions --- // --- Common utility functions ---
/// Build a multistatus response from a list of DavNodes /// Build a multistatus response from a list of DavNodes
async fn multistatus(user: &ArcUser, nodes: Vec<Box<dyn DavNode>>, not_found: Vec<dav::Href>, props: Option<dav::PropName<All>>) -> dav::Multistatus<All> { async fn multistatus(
user: &ArcUser,
nodes: Vec<Box<dyn DavNode>>,
not_found: Vec<dav::Href>,
props: Option<dav::PropName<All>>,
) -> dav::Multistatus<All> {
// Collect properties on existing objects // Collect properties on existing objects
let mut responses: Vec<dav::Response<All>> = match props { let mut responses: Vec<dav::Response<All>> = match props {
Some(props) => futures::stream::iter(nodes).then(|n| n.response_props(user, props.clone())).collect().await, Some(props) => {
None => nodes.into_iter().map(|n| n.response_propname(user)).collect(), futures::stream::iter(nodes)
.then(|n| n.response_props(user, props.clone()))
.collect()
.await
}
None => nodes
.into_iter()
.map(|n| n.response_propname(user))
.collect(),
}; };
// Register not found objects only if relevant // Register not found objects only if relevant
if !not_found.is_empty() { if !not_found.is_empty() {
responses.push(dav::Response { responses.push(dav::Response {
status_or_propstat: dav::StatusOrPropstat::Status(not_found, dav::Status(hyper::StatusCode::NOT_FOUND)), status_or_propstat: dav::StatusOrPropstat::Status(
not_found,
dav::Status(hyper::StatusCode::NOT_FOUND),
),
error: None, error: None,
location: None, location: None,
responsedescription: None, responsedescription: None,
@ -252,7 +300,6 @@ impl Controller {
} }
} }
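//@NOTE a Multi-Status body (HTTP 207, RFC 4918 section 13) carries one <D:response> per
// resource: properties that resolved are grouped under a 200 propstat, requested but
// unknown ones under a 404 propstat, and hrefs that could not be resolved at all are
// reported with a plain 404 status, as done in `multistatus` above.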
/// Path is voluntarily feature-limited /// Path is voluntarily feature-limited
/// compared to the expressiveness of a UNIX path /// compared to the expressiveness of a UNIX path
/// For example, getting the parent with ../ is not supported, schemes are not supported, etc. /// For example, getting the parent with ../ is not supported, schemes are not supported, etc.
@ -271,8 +318,39 @@ impl<'a> Path<'a> {
let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect(); let path_segments: Vec<_> = path.split("/").filter(|s| *s != "" && *s != ".").collect();
if path.starts_with("/") { if path.starts_with("/") {
return Ok(Path::Abs(path_segments)) return Ok(Path::Abs(path_segments));
} }
Ok(Path::Rel(path_segments)) Ok(Path::Rel(path_segments))
} }
} }
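//@NOTE with the parsing above, "/alice/calendar/evt.ics" yields Path::Abs(["alice",
// "calendar", "evt.ics"]) while "alice/./calendar" yields Path::Rel(["alice", "calendar"]);
// "." segments are dropped, ".." is kept verbatim (it does not act as a parent reference),
// and no scheme handling is performed (illustrative paths).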
//@FIXME move somewhere else
//@FIXME naive implementation, must be refactored later
use futures::stream::Stream;
use icalendar;
fn apply_filter(
user: &ArcUser,
nodes: Vec<Box<dyn DavNode>>,
filter: cal::Filter,
) -> impl Stream<Item = std::result::Result<Box<dyn DavNode>, std::io::Error>> {
futures::stream::iter(nodes).filter_map(|single_node| async move {
// Get ICS
let chunks: Vec<_> = match single_node.content().try_collect().await {
Ok(v) => v,
Err(e) => return Some(Err(e)),
};
let raw_ics = chunks.iter().fold(String::new(), |mut acc, single_chunk| {
let str_fragment = std::str::from_utf8(single_chunk.as_ref());
acc.extend(str_fragment);
acc
});
// Parse ICS
let ics = icalendar::parser::read_calendar(&raw_ics).unwrap();
        // Do checks: the query filter is not evaluated yet (`ics` is currently unused)
        // Object has been kept unconditionally for now
Some(Ok(single_node))
})
}
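// A rough sketch, not part of the current implementation, of what the "Do checks" step
// could become once the filter is applied: parse without panicking on malformed ICS and
// reduce the decision to a boolean. `wanted_component` is a stand-in for whatever the real
// RFC 4791 comp-filter walk would extract from `cal::Filter`; it is an assumed parameter,
// not an existing API.
fn keep_node_sketch(
    raw_ics: &str,
    wanted_component: &str,
) -> std::result::Result<bool, std::io::Error> {
    // Surface parse failures as an error instead of panicking like the unwrap above
    let _parsed = icalendar::parser::read_calendar(raw_ics)
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?;
    // Naive placeholder for the comp-filter / prop-filter / time-range evaluation:
    // keep the object if it mentions the requested component at all.
    Ok(raw_ics.contains(&format!("BEGIN:{}", wanted_component)))
}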

View file

@ -1,10 +1,10 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use base64::Engine; use base64::Engine;
use hyper::{Request, Response};
use hyper::body::Incoming; use hyper::body::Incoming;
use hyper::{Request, Response};
use aero_user::login::ArcLoginProvider;
use aero_collections::user::User; use aero_collections::user::User;
use aero_user::login::ArcLoginProvider;
use super::codec::text_body; use super::codec::text_body;
use super::controller::HttpResponse; use super::controller::HttpResponse;
@ -13,7 +13,7 @@ type ArcUser = std::sync::Arc<User>;
pub(super) async fn auth<'a>( pub(super) async fn auth<'a>(
login: ArcLoginProvider, login: ArcLoginProvider,
req: Request<Incoming>, req: Request<Incoming>,
next: impl Fn(ArcUser, Request<Incoming>) -> futures::future::BoxFuture<'a, Result<HttpResponse>>, next: impl Fn(ArcUser, Request<Incoming>) -> futures::future::BoxFuture<'a, Result<HttpResponse>>,
) -> Result<HttpResponse> { ) -> Result<HttpResponse> {
let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) { let auth_val = match req.headers().get(hyper::header::AUTHORIZATION) {
@ -23,8 +23,8 @@ pub(super) async fn auth<'a>(
return Ok(Response::builder() return Ok(Response::builder()
.status(401) .status(401)
.header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"")
.body(text_body("Missing Authorization field"))?) .body(text_body("Missing Authorization field"))?);
}, }
}; };
let b64_creds_maybe_padded = match auth_val.split_once(" ") { let b64_creds_maybe_padded = match auth_val.split_once(" ") {
@ -33,8 +33,8 @@ pub(super) async fn auth<'a>(
tracing::info!("Unsupported authorization field"); tracing::info!("Unsupported authorization field");
return Ok(Response::builder() return Ok(Response::builder()
.status(400) .status(400)
.body(text_body("Unsupported Authorization field"))?) .body(text_body("Unsupported Authorization field"))?);
}, }
}; };
    // base64urlencoded may have trailing equals, base64urlsafe has none // base64urlencoded may have trailing equals, base64urlsafe has none
@ -44,22 +44,22 @@ pub(super) async fn auth<'a>(
// Decode base64 // Decode base64
let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?; let creds = base64::engine::general_purpose::STANDARD_NO_PAD.decode(b64_creds_clean)?;
let str_creds = std::str::from_utf8(&creds)?; let str_creds = std::str::from_utf8(&creds)?;
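    // e.g. with a padded value: base64("alice:hunter2") is "YWxpY2U6aHVudGVyMg==";
    // after trimming the trailing '=' a STANDARD_NO_PAD decode yields b"alice:hunter2",
    // which is then split on ':' below (illustrative credentials, not real ones).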
// Split username and password // Split username and password
let (username, password) = str_creds let (username, password) = str_creds.split_once(':').ok_or(anyhow!(
.split_once(':') "Missing colon in Authorization, can't split decoded value into a username/password pair"
.ok_or(anyhow!("Missing colon in Authorization, can't split decoded value into a username/password pair"))?; ))?;
// Call login provider // Call login provider
let creds = match login.login(username, password).await { let creds = match login.login(username, password).await {
Ok(c) => c, Ok(c) => c,
Err(_) => { Err(_) => {
tracing::info!(user=username, "Wrong credentials"); tracing::info!(user = username, "Wrong credentials");
return Ok(Response::builder() return Ok(Response::builder()
.status(401) .status(401)
.header("WWW-Authenticate", "Basic realm=\"Aerogramme\"") .header("WWW-Authenticate", "Basic realm=\"Aerogramme\"")
.body(text_body("Wrong credentials"))?) .body(text_body("Wrong credentials"))?);
}, }
}; };
// Build a user // Build a user

View file

@ -1,6 +1,6 @@
mod middleware;
mod controller;
mod codec; mod codec;
mod controller;
mod middleware;
mod node; mod node;
mod resource; mod resource;
@ -8,19 +8,19 @@ use std::net::SocketAddr;
use std::sync::Arc; use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use futures::future::FutureExt;
use futures::stream::{FuturesUnordered, StreamExt};
use hyper::rt::{Read, Write};
use hyper::server::conn::http1 as http;
use hyper::service::service_fn; use hyper::service::service_fn;
use hyper::{Request, Response}; use hyper::{Request, Response};
use hyper::server::conn::http1 as http;
use hyper::rt::{Read, Write};
use hyper_util::rt::TokioIo; use hyper_util::rt::TokioIo;
use futures::stream::{FuturesUnordered, StreamExt}; use rustls_pemfile::{certs, private_key};
use tokio::io::{AsyncRead, AsyncWrite};
use tokio::net::TcpListener; use tokio::net::TcpListener;
use tokio::net::TcpStream;
use tokio::sync::watch; use tokio::sync::watch;
use tokio_rustls::TlsAcceptor; use tokio_rustls::TlsAcceptor;
use tokio::net::TcpStream;
use futures::future::FutureExt;
use tokio::io::{AsyncRead, AsyncWrite};
use rustls_pemfile::{certs, private_key};
use aero_user::config::{DavConfig, DavUnsecureConfig}; use aero_user::config::{DavConfig, DavUnsecureConfig};
use aero_user::login::ArcLoginProvider; use aero_user::login::ArcLoginProvider;
@ -90,7 +90,7 @@ impl Server {
Ok(v) => v, Ok(v) => v,
Err(e) => { Err(e) => {
tracing::error!(err=?e, "TLS acceptor failed"); tracing::error!(err=?e, "TLS acceptor failed");
continue continue;
} }
}; };
@ -100,21 +100,31 @@ impl Server {
                    //arbitrarily bound //arbitrarily bound
//@FIXME replace with a handler supporting http2 and TLS //@FIXME replace with a handler supporting http2 and TLS
match http::Builder::new().serve_connection(stream, service_fn(|req: Request<hyper::body::Incoming>| { match http::Builder::new()
let login = login.clone(); .serve_connection(
tracing::info!("{:?} {:?}", req.method(), req.uri()); stream,
async { service_fn(|req: Request<hyper::body::Incoming>| {
match middleware::auth(login, req, |user, request| async { Controller::route(user, request).await }.boxed()).await { let login = login.clone();
Ok(v) => Ok(v), tracing::info!("{:?} {:?}", req.method(), req.uri());
Err(e) => { async {
tracing::error!(err=?e, "internal error"); match middleware::auth(login, req, |user, request| {
Response::builder() async { Controller::route(user, request).await }.boxed()
.status(500) })
.body(codec::text_body("Internal error")) .await
}, {
} Ok(v) => Ok(v),
} Err(e) => {
})).await { tracing::error!(err=?e, "internal error");
Response::builder()
.status(500)
.body(codec::text_body("Internal error"))
}
}
}
}),
)
.await
{
Err(e) => tracing::warn!(err=?e, "connection failed"), Err(e) => tracing::warn!(err=?e, "connection failed"),
Ok(()) => tracing::trace!("connection terminated with success"), Ok(()) => tracing::trace!("connection terminated with success"),
} }
@ -149,7 +159,6 @@ impl Server {
// </D:prop> // </D:prop>
// </D:propfind> // </D:propfind>
// <D:propfind xmlns:D='DAV:' xmlns:A='http://apple.com/ns/ical/' xmlns:C='urn:ietf:params:xml:ns:caldav'> // <D:propfind xmlns:D='DAV:' xmlns:A='http://apple.com/ns/ical/' xmlns:C='urn:ietf:params:xml:ns:caldav'>
// <D:prop> // <D:prop>
// <D:resourcetype/> // <D:resourcetype/>

View file

@ -1,16 +1,17 @@
use anyhow::Result; use anyhow::Result;
use futures::stream::{BoxStream, StreamExt};
use futures::future::{BoxFuture, FutureExt}; use futures::future::{BoxFuture, FutureExt};
use futures::stream::{BoxStream, StreamExt};
use hyper::body::Bytes; use hyper::body::Bytes;
use aero_dav::types as dav;
use aero_dav::realization::All;
use aero_collections::davdag::Etag; use aero_collections::davdag::Etag;
use aero_dav::realization::All;
use aero_dav::types as dav;
use super::controller::ArcUser; use super::controller::ArcUser;
pub(crate) type Content<'a> = BoxStream<'a, std::result::Result<Bytes, std::io::Error>>; pub(crate) type Content<'a> = BoxStream<'a, std::result::Result<Bytes, std::io::Error>>;
pub(crate) type PropertyStream<'a> = BoxStream<'a, std::result::Result<dav::Property<All>, dav::PropertyRequest<All>>>; pub(crate) type PropertyStream<'a> =
BoxStream<'a, std::result::Result<dav::Property<All>, dav::PropertyRequest<All>>>;
pub(crate) enum PutPolicy { pub(crate) enum PutPolicy {
OverwriteAll, OverwriteAll,
@ -25,7 +26,12 @@ pub(crate) trait DavNode: Send {
/// This node direct children /// This node direct children
fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>>; fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>>;
/// Recursively fetch a child (progress inside the filesystem hierarchy) /// Recursively fetch a child (progress inside the filesystem hierarchy)
fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result<Box<dyn DavNode>>>; fn fetch<'a>(
&self,
user: &'a ArcUser,
path: &'a [&str],
create: bool,
) -> BoxFuture<'a, Result<Box<dyn DavNode>>>;
// node properties // node properties
/// Get the path /// Get the path
@ -36,13 +42,17 @@ pub(crate) trait DavNode: Send {
fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static>; fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static>;
//fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> Vec<dav::AnyProperty<All>>; //fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> Vec<dav::AnyProperty<All>>;
/// Put an element (create or update) /// Put an element (create or update)
fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>>; fn put<'a>(
&'a self,
policy: PutPolicy,
stream: Content<'a>,
) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>>;
/// Content type of the element /// Content type of the element
fn content_type(&self) -> &str; fn content_type(&self) -> &str;
/// Get ETag /// Get ETag
fn etag(&self) -> BoxFuture<Option<Etag>>; fn etag(&self) -> BoxFuture<Option<Etag>>;
/// Get content /// Get content
fn content(&self) -> Content<'static>; fn content<'a>(&self) -> Content<'a>;
/// Delete /// Delete
fn delete(&self) -> BoxFuture<std::result::Result<(), std::io::Error>>; fn delete(&self) -> BoxFuture<std::result::Result<(), std::io::Error>>;
@ -52,24 +62,32 @@ pub(crate) trait DavNode: Send {
fn response_propname(&self, user: &ArcUser) -> dav::Response<All> { fn response_propname(&self, user: &ArcUser) -> dav::Response<All> {
dav::Response { dav::Response {
status_or_propstat: dav::StatusOrPropstat::PropStat( status_or_propstat: dav::StatusOrPropstat::PropStat(
dav::Href(self.path(user)), dav::Href(self.path(user)),
vec![ vec![dav::PropStat {
dav::PropStat { status: dav::Status(hyper::StatusCode::OK),
status: dav::Status(hyper::StatusCode::OK), prop: dav::AnyProp(
prop: dav::AnyProp(self.supported_properties(user).0.into_iter().map(dav::AnyProperty::Request).collect()), self.supported_properties(user)
error: None, .0
responsedescription: None, .into_iter()
} .map(dav::AnyProperty::Request)
], .collect(),
),
error: None,
responsedescription: None,
}],
), ),
error: None, error: None,
location: None, location: None,
responsedescription: None responsedescription: None,
} }
} }
/// Utility function to get a prop response from a node & a list of propname /// Utility function to get a prop response from a node & a list of propname
fn response_props(&self, user: &ArcUser, props: dav::PropName<All>) -> BoxFuture<'static, dav::Response<All>> { fn response_props(
&self,
user: &ArcUser,
props: dav::PropName<All>,
) -> BoxFuture<'static, dav::Response<All>> {
//@FIXME we should make the DAV parsed object a stream... //@FIXME we should make the DAV parsed object a stream...
let mut result_stream = self.properties(user, props); let mut result_stream = self.properties(user, props);
let path = self.path(user); let path = self.path(user);
@ -87,8 +105,8 @@ pub(crate) trait DavNode: Send {
// If at least one property has been found on this object, adding a HTTP 200 propstat to // If at least one property has been found on this object, adding a HTTP 200 propstat to
// the response // the response
if !found.is_empty() { if !found.is_empty() {
prop_desc.push(dav::PropStat { prop_desc.push(dav::PropStat {
status: dav::Status(hyper::StatusCode::OK), status: dav::Status(hyper::StatusCode::OK),
prop: dav::AnyProp(found), prop: dav::AnyProp(found),
error: None, error: None,
responsedescription: None, responsedescription: None,
@ -98,8 +116,8 @@ pub(crate) trait DavNode: Send {
// If at least one property can't be found on this object, adding a HTTP 404 propstat to // If at least one property can't be found on this object, adding a HTTP 404 propstat to
// the response // the response
if !not_found.is_empty() { if !not_found.is_empty() {
prop_desc.push(dav::PropStat { prop_desc.push(dav::PropStat {
status: dav::Status(hyper::StatusCode::NOT_FOUND), status: dav::Status(hyper::StatusCode::NOT_FOUND),
prop: dav::AnyProp(not_found), prop: dav::AnyProp(not_found),
error: None, error: None,
responsedescription: None, responsedescription: None,
@ -111,9 +129,9 @@ pub(crate) trait DavNode: Send {
status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(path), prop_desc), status_or_propstat: dav::StatusOrPropstat::PropStat(dav::Href(path), prop_desc),
error: None, error: None,
location: None, location: None,
responsedescription: None responsedescription: None,
} }
}.boxed() }
.boxed()
} }
} }

View file

@ -2,23 +2,32 @@ use std::sync::Arc;
type ArcUser = std::sync::Arc<User>; type ArcUser = std::sync::Arc<User>;
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use futures::stream::{TryStreamExt, StreamExt};
use futures::io::AsyncReadExt; use futures::io::AsyncReadExt;
use futures::stream::{StreamExt, TryStreamExt};
use futures::{future::BoxFuture, future::FutureExt}; use futures::{future::BoxFuture, future::FutureExt};
use aero_collections::{user::User, calendar::Calendar, davdag::{BlobId, Etag}}; use aero_collections::{
use aero_dav::types as dav; calendar::Calendar,
use aero_dav::caltypes as cal; davdag::{BlobId, Etag},
user::User,
};
use aero_dav::acltypes as acl; use aero_dav::acltypes as acl;
use aero_dav::realization::{All, self as all}; use aero_dav::caltypes as cal;
use aero_dav::realization::{self as all, All};
use aero_dav::types as dav;
use crate::dav::node::{DavNode, PutPolicy, Content};
use super::node::PropertyStream; use super::node::PropertyStream;
use crate::dav::node::{Content, DavNode, PutPolicy};
#[derive(Clone)] #[derive(Clone)]
pub(crate) struct RootNode {} pub(crate) struct RootNode {}
impl DavNode for RootNode { impl DavNode for RootNode {
fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result<Box<dyn DavNode>>> { fn fetch<'a>(
&self,
user: &'a ArcUser,
path: &'a [&str],
create: bool,
) -> BoxFuture<'a, Result<Box<dyn DavNode>>> {
if path.len() == 0 { if path.len() == 0 {
let this = self.clone(); let this = self.clone();
return async { Ok(Box::new(this) as Box<dyn DavNode>) }.boxed(); return async { Ok(Box::new(this) as Box<dyn DavNode>) }.boxed();
@ -34,7 +43,7 @@ impl DavNode for RootNode {
} }
fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> { fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> {
async { vec![Box::new(HomeNode { }) as Box<dyn DavNode>] }.boxed() async { vec![Box::new(HomeNode {}) as Box<dyn DavNode>] }.boxed()
} }
fn path(&self, user: &ArcUser) -> String { fn path(&self, user: &ArcUser) -> String {
@ -46,33 +55,53 @@ impl DavNode for RootNode {
dav::PropertyRequest::DisplayName, dav::PropertyRequest::DisplayName,
dav::PropertyRequest::ResourceType, dav::PropertyRequest::ResourceType,
dav::PropertyRequest::GetContentType, dav::PropertyRequest::GetContentType,
dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)), dav::PropertyRequest::Extension(all::PropertyRequest::Acl(
acl::PropertyRequest::CurrentUserPrincipal,
)),
]) ])
} }
fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> { fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> {
let user = user.clone(); let user = user.clone();
futures::stream::iter(prop.0).map(move |n| { futures::stream::iter(prop.0)
let prop = match n { .map(move |n| {
dav::PropertyRequest::DisplayName => dav::Property::DisplayName("DAV Root".to_string()), let prop = match n {
dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ dav::PropertyRequest::DisplayName => {
dav::ResourceType::Collection, dav::Property::DisplayName("DAV Root".to_string())
]), }
dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), dav::PropertyRequest::ResourceType => {
dav::PropertyRequest::Extension(all::PropertyRequest::Acl(acl::PropertyRequest::CurrentUserPrincipal)) => dav::Property::ResourceType(vec![dav::ResourceType::Collection])
dav::Property::Extension(all::Property::Acl(acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(HomeNode{}.path(&user)))))), }
v => return Err(v), dav::PropertyRequest::GetContentType => {
}; dav::Property::GetContentType("httpd/unix-directory".into())
Ok(prop) }
}).boxed() dav::PropertyRequest::Extension(all::PropertyRequest::Acl(
acl::PropertyRequest::CurrentUserPrincipal,
)) => dav::Property::Extension(all::Property::Acl(
acl::Property::CurrentUserPrincipal(acl::User::Authenticated(dav::Href(
HomeNode {}.path(&user),
))),
)),
v => return Err(v),
};
Ok(prop)
})
.boxed()
} }
fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> { fn put<'a>(
&'a self,
_policy: PutPolicy,
stream: Content<'a>,
) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> {
futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed()
} }
fn content(&self) -> Content<'static> { fn content<'a>(&self) -> Content<'a> {
futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() futures::stream::once(futures::future::err(std::io::Error::from(
std::io::ErrorKind::Unsupported,
)))
.boxed()
} }
fn content_type(&self) -> &str { fn content_type(&self) -> &str {
@ -91,29 +120,37 @@ impl DavNode for RootNode {
#[derive(Clone)] #[derive(Clone)]
pub(crate) struct HomeNode {} pub(crate) struct HomeNode {}
impl DavNode for HomeNode { impl DavNode for HomeNode {
fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result<Box<dyn DavNode>>> { fn fetch<'a>(
&self,
user: &'a ArcUser,
path: &'a [&str],
create: bool,
) -> BoxFuture<'a, Result<Box<dyn DavNode>>> {
if path.len() == 0 { if path.len() == 0 {
let node = Box::new(self.clone()) as Box<dyn DavNode>; let node = Box::new(self.clone()) as Box<dyn DavNode>;
return async { Ok(node) }.boxed() return async { Ok(node) }.boxed();
} }
if path[0] == "calendar" { if path[0] == "calendar" {
return async move { return async move {
let child = Box::new(CalendarListNode::new(user).await?); let child = Box::new(CalendarListNode::new(user).await?);
child.fetch(user, &path[1..], create).await child.fetch(user, &path[1..], create).await
}.boxed(); }
.boxed();
} }
//@NOTE: we can't create a node at this level //@NOTE: we can't create a node at this level
async { Err(anyhow!("Not found")) }.boxed() async { Err(anyhow!("Not found")) }.boxed()
} }
fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> { fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> {
async { async {
CalendarListNode::new(user).await CalendarListNode::new(user)
.await
.map(|c| vec![Box::new(c) as Box<dyn DavNode>]) .map(|c| vec![Box::new(c) as Box<dyn DavNode>])
.unwrap_or(vec![]) .unwrap_or(vec![])
}.boxed() }
.boxed()
} }
fn path(&self, user: &ArcUser) -> String { fn path(&self, user: &ArcUser) -> String {
@ -125,38 +162,58 @@ impl DavNode for HomeNode {
dav::PropertyRequest::DisplayName, dav::PropertyRequest::DisplayName,
dav::PropertyRequest::ResourceType, dav::PropertyRequest::ResourceType,
dav::PropertyRequest::GetContentType, dav::PropertyRequest::GetContentType,
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)), dav::PropertyRequest::Extension(all::PropertyRequest::Cal(
cal::PropertyRequest::CalendarHomeSet,
)),
]) ])
} }
fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> { fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> {
let user = user.clone(); let user = user.clone();
futures::stream::iter(prop.0).map(move |n| { futures::stream::iter(prop.0)
let prop = match n { .map(move |n| {
dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} home", user.username)), let prop = match n {
dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ dav::PropertyRequest::DisplayName => {
dav::ResourceType::Collection, dav::Property::DisplayName(format!("{} home", user.username))
dav::ResourceType::Extension(all::ResourceType::Acl(acl::ResourceType::Principal)), }
]), dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![
dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), dav::ResourceType::Collection,
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarHomeSet)) => dav::ResourceType::Extension(all::ResourceType::Acl(
dav::Property::Extension(all::Property::Cal(cal::Property::CalendarHomeSet(dav::Href( acl::ResourceType::Principal,
//@FIXME we are hardcoding the calendar path, instead we would want to use )),
//objects ]),
format!("/{}/calendar/", user.username) dav::PropertyRequest::GetContentType => {
)))), dav::Property::GetContentType("httpd/unix-directory".into())
v => return Err(v), }
}; dav::PropertyRequest::Extension(all::PropertyRequest::Cal(
Ok(prop) cal::PropertyRequest::CalendarHomeSet,
}).boxed() )) => dav::Property::Extension(all::Property::Cal(
cal::Property::CalendarHomeSet(dav::Href(
//@FIXME we are hardcoding the calendar path, instead we would want to use
//objects
format!("/{}/calendar/", user.username),
)),
)),
v => return Err(v),
};
Ok(prop)
})
.boxed()
} }
fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> { fn put<'a>(
&'a self,
_policy: PutPolicy,
stream: Content<'a>,
) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> {
futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed()
} }
fn content(&self) -> Content<'static> { fn content<'a>(&self) -> Content<'a> {
futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() futures::stream::once(futures::future::err(std::io::Error::from(
std::io::ErrorKind::Unsupported,
)))
.boxed()
} }
fn content_type(&self) -> &str { fn content_type(&self) -> &str {
@ -183,7 +240,12 @@ impl CalendarListNode {
} }
} }
impl DavNode for CalendarListNode { impl DavNode for CalendarListNode {
fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result<Box<dyn DavNode>>> { fn fetch<'a>(
&self,
user: &'a ArcUser,
path: &'a [&str],
create: bool,
) -> BoxFuture<'a, Result<Box<dyn DavNode>>> {
if path.len() == 0 { if path.len() == 0 {
let node = Box::new(self.clone()) as Box<dyn DavNode>; let node = Box::new(self.clone()) as Box<dyn DavNode>;
return async { Ok(node) }.boxed(); return async { Ok(node) }.boxed();
@ -191,13 +253,18 @@ impl DavNode for CalendarListNode {
async move { async move {
//@FIXME: we should create a node if the open returns a "not found". //@FIXME: we should create a node if the open returns a "not found".
let cal = user.calendars.open(user, path[0]).await?.ok_or(anyhow!("Not found"))?; let cal = user
let child = Box::new(CalendarNode { .calendars
.open(user, path[0])
.await?
.ok_or(anyhow!("Not found"))?;
let child = Box::new(CalendarNode {
col: cal, col: cal,
calname: path[0].to_string() calname: path[0].to_string(),
}); });
child.fetch(user, &path[1..], create).await child.fetch(user, &path[1..], create).await
}.boxed() }
.boxed()
} }
fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> { fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> {
@ -206,18 +273,23 @@ impl DavNode for CalendarListNode {
//@FIXME maybe we want to be lazy here?! //@FIXME maybe we want to be lazy here?!
futures::stream::iter(list.iter()) futures::stream::iter(list.iter())
.filter_map(|name| async move { .filter_map(|name| async move {
user.calendars.open(user, name).await user.calendars
.open(user, name)
.await
.ok() .ok()
.flatten() .flatten()
.map(|v| (name, v)) .map(|v| (name, v))
}) })
.map(|(name, cal)| Box::new(CalendarNode { .map(|(name, cal)| {
col: cal, Box::new(CalendarNode {
calname: name.to_string(), col: cal,
}) as Box<dyn DavNode>) calname: name.to_string(),
}) as Box<dyn DavNode>
})
.collect::<Vec<Box<dyn DavNode>>>() .collect::<Vec<Box<dyn DavNode>>>()
.await .await
}.boxed() }
.boxed()
} }
fn path(&self, user: &ArcUser) -> String { fn path(&self, user: &ArcUser) -> String {
@ -234,23 +306,38 @@ impl DavNode for CalendarListNode {
fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> { fn properties(&self, user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> {
let user = user.clone(); let user = user.clone();
futures::stream::iter(prop.0).map(move |n| { futures::stream::iter(prop.0)
let prop = match n { .map(move |n| {
dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} calendars", user.username)), let prop = match n {
dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![dav::ResourceType::Collection]), dav::PropertyRequest::DisplayName => {
dav::PropertyRequest::GetContentType => dav::Property::GetContentType("httpd/unix-directory".into()), dav::Property::DisplayName(format!("{} calendars", user.username))
v => return Err(v), }
}; dav::PropertyRequest::ResourceType => {
Ok(prop) dav::Property::ResourceType(vec![dav::ResourceType::Collection])
}).boxed() }
dav::PropertyRequest::GetContentType => {
dav::Property::GetContentType("httpd/unix-directory".into())
}
v => return Err(v),
};
Ok(prop)
})
.boxed()
} }
fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> { fn put<'a>(
&'a self,
_policy: PutPolicy,
stream: Content<'a>,
) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> {
futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed()
} }
fn content(&self) -> Content<'static> { fn content<'a>(&self) -> Content<'a> {
futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() futures::stream::once(futures::future::err(std::io::Error::from(
std::io::ErrorKind::Unsupported,
)))
.boxed()
} }
fn content_type(&self) -> &str { fn content_type(&self) -> &str {
@ -272,17 +359,22 @@ pub(crate) struct CalendarNode {
calname: String, calname: String,
} }
impl DavNode for CalendarNode { impl DavNode for CalendarNode {
fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result<Box<dyn DavNode>>> { fn fetch<'a>(
&self,
user: &'a ArcUser,
path: &'a [&str],
create: bool,
) -> BoxFuture<'a, Result<Box<dyn DavNode>>> {
if path.len() == 0 { if path.len() == 0 {
let node = Box::new(self.clone()) as Box<dyn DavNode>; let node = Box::new(self.clone()) as Box<dyn DavNode>;
return async { Ok(node) }.boxed() return async { Ok(node) }.boxed();
} }
let col = self.col.clone(); let col = self.col.clone();
let calname = self.calname.clone(); let calname = self.calname.clone();
async move { async move {
match (col.dag().await.idx_by_filename.get(path[0]), create) { match (col.dag().await.idx_by_filename.get(path[0]), create) {
(Some(blob_id), _) => { (Some(blob_id), _) => {
let child = Box::new(EventNode { let child = Box::new(EventNode {
col: col.clone(), col: col.clone(),
calname, calname,
@ -290,7 +382,7 @@ impl DavNode for CalendarNode {
blob_id: *blob_id, blob_id: *blob_id,
}); });
child.fetch(user, &path[1..], create).await child.fetch(user, &path[1..], create).await
}, }
(None, true) => { (None, true) => {
let child = Box::new(CreateEventNode { let child = Box::new(CreateEventNode {
col: col.clone(), col: col.clone(),
@ -298,11 +390,11 @@ impl DavNode for CalendarNode {
filename: path[0].to_string(), filename: path[0].to_string(),
}); });
child.fetch(user, &path[1..], create).await child.fetch(user, &path[1..], create).await
}, }
_ => Err(anyhow!("Not found")), _ => Err(anyhow!("Not found")),
} }
}
}.boxed() .boxed()
} }
fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> { fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> {
@ -310,15 +402,21 @@ impl DavNode for CalendarNode {
let calname = self.calname.clone(); let calname = self.calname.clone();
async move { async move {
col.dag().await.idx_by_filename.iter().map(|(filename, blob_id)| { col.dag()
Box::new(EventNode { .await
col: col.clone(), .idx_by_filename
calname: calname.clone(), .iter()
filename: filename.to_string(), .map(|(filename, blob_id)| {
blob_id: *blob_id, Box::new(EventNode {
}) as Box<dyn DavNode> col: col.clone(),
}).collect() calname: calname.clone(),
}.boxed() filename: filename.to_string(),
blob_id: *blob_id,
}) as Box<dyn DavNode>
})
.collect()
}
.boxed()
} }
fn path(&self, user: &ArcUser) -> String { fn path(&self, user: &ArcUser) -> String {
@ -330,38 +428,58 @@ impl DavNode for CalendarNode {
dav::PropertyRequest::DisplayName, dav::PropertyRequest::DisplayName,
dav::PropertyRequest::ResourceType, dav::PropertyRequest::ResourceType,
dav::PropertyRequest::GetContentType, dav::PropertyRequest::GetContentType,
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)), dav::PropertyRequest::Extension(all::PropertyRequest::Cal(
cal::PropertyRequest::SupportedCalendarComponentSet,
)),
]) ])
} }
fn properties(&self, _user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> { fn properties(&self, _user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> {
let calname = self.calname.to_string(); let calname = self.calname.to_string();
futures::stream::iter(prop.0).map(move |n| { futures::stream::iter(prop.0)
let prop = match n { .map(move |n| {
dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} calendar", calname)), let prop = match n {
dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![ dav::PropertyRequest::DisplayName => {
dav::ResourceType::Collection, dav::Property::DisplayName(format!("{} calendar", calname))
dav::ResourceType::Extension(all::ResourceType::Cal(cal::ResourceType::Calendar)), }
]), dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![
//dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())), dav::ResourceType::Collection,
//@FIXME seems wrong but seems to be what Thunderbird expects... dav::ResourceType::Extension(all::ResourceType::Cal(
dav::PropertyRequest::GetContentType => dav::Property::GetContentType("text/calendar".into()), cal::ResourceType::Calendar,
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::SupportedCalendarComponentSet)) )),
=> dav::Property::Extension(all::Property::Cal(cal::Property::SupportedCalendarComponentSet(vec![ ]),
cal::CompSupport(cal::Component::VEvent), //dav::PropertyRequest::GetContentType => dav::AnyProperty::Value(dav::Property::GetContentType("httpd/unix-directory".into())),
]))), //@FIXME seems wrong but seems to be what Thunderbird expects...
v => return Err(v), dav::PropertyRequest::GetContentType => {
}; dav::Property::GetContentType("text/calendar".into())
Ok(prop) }
}).boxed() dav::PropertyRequest::Extension(all::PropertyRequest::Cal(
cal::PropertyRequest::SupportedCalendarComponentSet,
)) => dav::Property::Extension(all::Property::Cal(
cal::Property::SupportedCalendarComponentSet(vec![cal::CompSupport(
cal::Component::VEvent,
)]),
)),
v => return Err(v),
};
Ok(prop)
})
.boxed()
} }
fn put<'a>(&'a self, _policy: PutPolicy, _stream: Content<'a>) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> { fn put<'a>(
&'a self,
_policy: PutPolicy,
_stream: Content<'a>,
) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> {
futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed() futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported)).boxed()
} }
fn content<'a>(&'a self) -> Content<'static> { fn content<'a>(&self) -> Content<'a> {
futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() futures::stream::once(futures::future::err(std::io::Error::from(
std::io::ErrorKind::Unsupported,
)))
.boxed()
} }
fn content_type(&self) -> &str { fn content_type(&self) -> &str {
@ -386,13 +504,23 @@ pub(crate) struct EventNode {
} }
impl DavNode for EventNode { impl DavNode for EventNode {
fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result<Box<dyn DavNode>>> { fn fetch<'a>(
&self,
user: &'a ArcUser,
path: &'a [&str],
create: bool,
) -> BoxFuture<'a, Result<Box<dyn DavNode>>> {
if path.len() == 0 { if path.len() == 0 {
let node = Box::new(self.clone()) as Box<dyn DavNode>; let node = Box::new(self.clone()) as Box<dyn DavNode>;
return async { Ok(node) }.boxed() return async { Ok(node) }.boxed();
} }
async { Err(anyhow!("Not supported: can't create a child on an event node")) }.boxed() async {
Err(anyhow!(
"Not supported: can't create a child on an event node"
))
}
.boxed()
} }
fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> { fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> {
@ -400,7 +528,10 @@ impl DavNode for EventNode {
} }
fn path(&self, user: &ArcUser) -> String { fn path(&self, user: &ArcUser) -> String {
format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) format!(
"/{}/calendar/{}/{}",
user.username, self.calname, self.filename
)
} }
fn supported_properties(&self, user: &ArcUser) -> dav::PropName<All> { fn supported_properties(&self, user: &ArcUser) -> dav::PropName<All> {
@ -408,66 +539,106 @@ impl DavNode for EventNode {
dav::PropertyRequest::DisplayName, dav::PropertyRequest::DisplayName,
dav::PropertyRequest::ResourceType, dav::PropertyRequest::ResourceType,
dav::PropertyRequest::GetEtag, dav::PropertyRequest::GetEtag,
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()))), dav::PropertyRequest::Extension(all::PropertyRequest::Cal(
cal::PropertyRequest::CalendarData(cal::CalendarDataRequest::default()),
)),
]) ])
} }
fn properties(&self, _user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> { fn properties(&self, _user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> {
let this = self.clone(); let this = self.clone();
futures::stream::iter(prop.0).then(move |n| { futures::stream::iter(prop.0)
let this = this.clone(); .then(move |n| {
let this = this.clone();
async move { async move {
let prop = match &n { let prop = match &n {
dav::PropertyRequest::DisplayName => dav::Property::DisplayName(format!("{} event", this.filename)), dav::PropertyRequest::DisplayName => {
dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![]), dav::Property::DisplayName(format!("{} event", this.filename))
dav::PropertyRequest::GetContentType => dav::Property::GetContentType("text/calendar".into()), }
dav::PropertyRequest::GetEtag => { dav::PropertyRequest::ResourceType => dav::Property::ResourceType(vec![]),
let etag = this.etag().await.ok_or(n.clone())?; dav::PropertyRequest::GetContentType => {
dav::Property::GetEtag(etag) dav::Property::GetContentType("text/calendar".into())
}, }
dav::PropertyRequest::Extension(all::PropertyRequest::Cal(cal::PropertyRequest::CalendarData(_req))) => { dav::PropertyRequest::GetEtag => {
let ics = String::from_utf8(this.col.get(this.blob_id).await.or(Err(n.clone()))?).or(Err(n.clone()))?; let etag = this.etag().await.ok_or(n.clone())?;
dav::Property::GetEtag(etag)
dav::Property::Extension(all::Property::Cal(cal::Property::CalendarData(cal::CalendarDataPayload { }
mime: None, dav::PropertyRequest::Extension(all::PropertyRequest::Cal(
payload: ics, cal::PropertyRequest::CalendarData(_req),
}))) )) => {
}, let ics = String::from_utf8(
_ => return Err(n), this.col.get(this.blob_id).await.or(Err(n.clone()))?,
}; )
Ok(prop) .or(Err(n.clone()))?;
}
}).boxed() dav::Property::Extension(all::Property::Cal(
cal::Property::CalendarData(cal::CalendarDataPayload {
mime: None,
payload: ics,
}),
))
}
_ => return Err(n),
};
Ok(prop)
}
})
.boxed()
} }
fn put<'a>(&'a self, policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> { fn put<'a>(
&'a self,
policy: PutPolicy,
stream: Content<'a>,
) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> {
async { async {
let existing_etag = self.etag().await.ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Etag error"))?; let existing_etag = self
.etag()
.await
.ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Etag error"))?;
match policy { match policy {
PutPolicy::CreateOnly => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), PutPolicy::CreateOnly => {
PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists)), return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists))
_ => () }
PutPolicy::ReplaceEtag(etag) if etag != existing_etag.as_str() => {
return Err(std::io::Error::from(std::io::ErrorKind::AlreadyExists))
}
_ => (),
}; };
//@FIXME for now, our storage interface does not allow streaming, //@FIXME for now, our storage interface does not allow streaming,
// so we load everything in memory // so we load everything in memory
let mut evt = Vec::new(); let mut evt = Vec::new();
let mut reader = stream.into_async_read(); let mut reader = stream.into_async_read();
reader.read_to_end(&mut evt).await.or(Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe)))?; reader
let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; .read_to_end(&mut evt)
self.col.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; .await
.or(Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe)))?;
let (_token, entry) = self
.col
.put(self.filename.as_str(), evt.as_ref())
.await
.or(Err(std::io::ErrorKind::Interrupted))?;
self.col
.opportunistic_sync()
.await
.or(Err(std::io::ErrorKind::ConnectionReset))?;
Ok(entry.2) Ok(entry.2)
}.boxed() }
.boxed()
} }
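    //@NOTE the policy check above gives PUT conditional-update semantics: CreateOnly
    // refuses to overwrite an existing event and ReplaceEtag refuses to overwrite one
    // whose ETag no longer matches what the client last saw (presumably derived from the
    // If-None-Match / If-Match request headers in codec::put_policy).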
fn content<'a>(&'a self) -> Content<'static> { fn content<'a>(&self) -> Content<'a> {
//@FIXME for now, our storage interface does not allow streaming, //@FIXME for now, our storage interface does not allow streaming,
// so we load everything in memory // so we load everything in memory
let calendar = self.col.clone(); let calendar = self.col.clone();
let blob_id = self.blob_id.clone(); let blob_id = self.blob_id.clone();
let r = async move { let r = async move {
let content = calendar.get(blob_id).await.or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted))); let content = calendar
.get(blob_id)
.await
.or(Err(std::io::Error::from(std::io::ErrorKind::Interrupted)));
Ok(hyper::body::Bytes::from(content?)) Ok(hyper::body::Bytes::from(content?))
}; };
futures::stream::once(Box::pin(r)).boxed() futures::stream::once(Box::pin(r)).boxed()
@ -481,8 +652,14 @@ impl DavNode for EventNode {
let calendar = self.col.clone(); let calendar = self.col.clone();
async move { async move {
calendar.dag().await.table.get(&self.blob_id).map(|(_, _, etag)| etag.to_string()) calendar
}.boxed() .dag()
.await
.table
.get(&self.blob_id)
.map(|(_, _, etag)| etag.to_string())
}
.boxed()
} }
fn delete(&self) -> BoxFuture<std::result::Result<(), std::io::Error>> { fn delete(&self) -> BoxFuture<std::result::Result<(), std::io::Error>> {
@ -494,12 +671,16 @@ impl DavNode for EventNode {
Ok(v) => v, Ok(v) => v,
Err(e) => { Err(e) => {
tracing::error!(err=?e, "delete event node"); tracing::error!(err=?e, "delete event node");
return Err(std::io::Error::from(std::io::ErrorKind::Interrupted)) return Err(std::io::Error::from(std::io::ErrorKind::Interrupted));
}, }
}; };
calendar.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; calendar
.opportunistic_sync()
.await
.or(Err(std::io::ErrorKind::ConnectionReset))?;
Ok(()) Ok(())
}.boxed() }
.boxed()
} }
} }
@ -510,13 +691,23 @@ pub(crate) struct CreateEventNode {
filename: String, filename: String,
} }
impl DavNode for CreateEventNode { impl DavNode for CreateEventNode {
fn fetch<'a>(&self, user: &'a ArcUser, path: &'a [&str], create: bool) -> BoxFuture<'a, Result<Box<dyn DavNode>>> { fn fetch<'a>(
&self,
user: &'a ArcUser,
path: &'a [&str],
create: bool,
) -> BoxFuture<'a, Result<Box<dyn DavNode>>> {
if path.len() == 0 { if path.len() == 0 {
let node = Box::new(self.clone()) as Box<dyn DavNode>; let node = Box::new(self.clone()) as Box<dyn DavNode>;
return async { Ok(node) }.boxed() return async { Ok(node) }.boxed();
} }
async { Err(anyhow!("Not supported: can't create a child on an event node")) }.boxed() async {
Err(anyhow!(
"Not supported: can't create a child on an event node"
))
}
.boxed()
} }
fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> { fn children<'a>(&self, user: &'a ArcUser) -> BoxFuture<'a, Vec<Box<dyn DavNode>>> {
@ -524,33 +715,51 @@ impl DavNode for CreateEventNode {
} }
fn path(&self, user: &ArcUser) -> String { fn path(&self, user: &ArcUser) -> String {
format!("/{}/calendar/{}/{}", user.username, self.calname, self.filename) format!(
"/{}/calendar/{}/{}",
user.username, self.calname, self.filename
)
} }
fn supported_properties(&self, user: &ArcUser) -> dav::PropName<All> { fn supported_properties(&self, user: &ArcUser) -> dav::PropName<All> {
dav::PropName(vec![]) dav::PropName(vec![])
} }
fn properties(&self, _user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> { fn properties(&self, _user: &ArcUser, prop: dav::PropName<All>) -> PropertyStream<'static> {
futures::stream::iter(vec![]).boxed() futures::stream::iter(vec![]).boxed()
} }
fn put<'a>(&'a self, _policy: PutPolicy, stream: Content<'a>) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> { fn put<'a>(
&'a self,
_policy: PutPolicy,
stream: Content<'a>,
) -> BoxFuture<'a, std::result::Result<Etag, std::io::Error>> {
        //@NOTE: policy might not be needed here: whatever we put, there are no known entries here //@NOTE: policy might not be needed here: whatever we put, there are no known entries here
async { async {
//@FIXME for now, our storage interface does not allow for streaming //@FIXME for now, our storage interface does not allow for streaming
let mut evt = Vec::new(); let mut evt = Vec::new();
let mut reader = stream.into_async_read(); let mut reader = stream.into_async_read();
reader.read_to_end(&mut evt).await.unwrap(); reader.read_to_end(&mut evt).await.unwrap();
let (_token, entry) = self.col.put(self.filename.as_str(), evt.as_ref()).await.or(Err(std::io::ErrorKind::Interrupted))?; let (_token, entry) = self
self.col.opportunistic_sync().await.or(Err(std::io::ErrorKind::ConnectionReset))?; .col
.put(self.filename.as_str(), evt.as_ref())
.await
.or(Err(std::io::ErrorKind::Interrupted))?;
self.col
.opportunistic_sync()
.await
.or(Err(std::io::ErrorKind::ConnectionReset))?;
Ok(entry.2) Ok(entry.2)
}.boxed() }
.boxed()
} }
fn content(&self) -> Content<'static> { fn content<'a>(&self) -> Content<'a> {
futures::stream::once(futures::future::err(std::io::Error::from(std::io::ErrorKind::Unsupported))).boxed() futures::stream::once(futures::future::err(std::io::Error::from(
std::io::ErrorKind::Unsupported,
)))
.boxed()
} }
fn content_type(&self) -> &str { fn content_type(&self) -> &str {

View file

@ -4,8 +4,8 @@ use imap_codec::imap_types::core::AString;
use imap_codec::imap_types::response::Code; use imap_codec::imap_types::response::Code;
use imap_codec::imap_types::secret::Secret; use imap_codec::imap_types::secret::Secret;
use aero_user::login::ArcLoginProvider;
use aero_collections::user::User; use aero_collections::user::User;
use aero_user::login::ArcLoginProvider;
use crate::imap::capability::ServerCapability; use crate::imap::capability::ServerCapability;
use crate::imap::command::anystate; use crate::imap::command::anystate;

View file

@@ -14,10 +14,10 @@ use imap_codec::imap_types::mailbox::{ListMailbox, Mailbox as MailboxCodec};
 use imap_codec::imap_types::response::{Code, CodeOther, Data};
 use imap_codec::imap_types::status::{StatusDataItem, StatusDataItemName};
-use aero_collections::mail::uidindex::*;
-use aero_collections::user::User;
-use aero_collections::mail::IMF;
 use aero_collections::mail::namespace::MAILBOX_HIERARCHY_DELIMITER as MBX_HIER_DELIM_RAW;
+use aero_collections::mail::uidindex::*;
+use aero_collections::mail::IMF;
+use aero_collections::user::User;
 use crate::imap::capability::{ClientCapability, ServerCapability};
 use crate::imap::command::{anystate, MailboxName};

@@ -17,14 +17,14 @@ use std::net::SocketAddr;
 use anyhow::{anyhow, bail, Result};
 use futures::stream::{FuturesUnordered, StreamExt};
-use tokio::net::TcpListener;
-use tokio::sync::mpsc;
-use tokio::sync::watch;
 use imap_codec::imap_types::response::{Code, CommandContinuationRequest, Response, Status};
 use imap_codec::imap_types::{core::Text, response::Greeting};
 use imap_flow::server::{ServerFlow, ServerFlowEvent, ServerFlowOptions};
 use imap_flow::stream::AnyStream;
 use rustls_pemfile::{certs, private_key};
+use tokio::net::TcpListener;
+use tokio::sync::mpsc;
+use tokio::sync::watch;
 use tokio_rustls::TlsAcceptor;
 use aero_user::config::{ImapConfig, ImapUnsecureConfig};

@@ -10,16 +10,16 @@ use futures::{
     stream::{FuturesOrdered, FuturesUnordered},
     StreamExt,
 };
+use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode};
+use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata};
 use tokio::net::TcpListener;
 use tokio::select;
 use tokio::sync::watch;
 use tokio_util::compat::*;
-use smtp_message::{DataUnescaper, Email, EscapedDataReader, Reply, ReplyCode};
-use smtp_server::{reply, Config, ConnectionMetadata, Decision, MailMetadata};
+use aero_collections::mail::incoming::EncryptedMessage;
 use aero_user::config::*;
 use aero_user::login::*;
-use aero_collections::mail::incoming::EncryptedMessage;
 pub struct LmtpServer {
     bind_addr: SocketAddr,

@@ -8,9 +8,9 @@ use tokio::net::{TcpListener, TcpStream};
 use tokio::sync::watch;
 use tokio_util::bytes::BytesMut;
+use aero_sasl::{decode::client_command, encode::Encode, flow::State};
 use aero_user::config::AuthConfig;
 use aero_user::login::ArcLoginProvider;
-use aero_sasl::{flow::State, decode::client_command, encode::Encode};
 pub struct AuthServer {
     login_provider: ArcLoginProvider,

@@ -1,8 +1,8 @@
 use futures::Future;
 use rand::prelude::*;
-use super::types::*;
 use super::decode::auth_plain;
+use super::types::*;
 #[derive(Debug)]
 pub enum AuthRes {
@@ -29,10 +29,10 @@
     }
     async fn try_auth_plain<X, F>(&self, data: &[u8], login: X) -> AuthRes
     where
         X: FnOnce(String, String) -> F,
-        F: Future<Output=bool>,
+        F: Future<Output = bool>,
     {
         // Check that we can extract user's login+pass
         let (ubin, pbin) = match auth_plain(&data) {
             Ok(([], (authz, user, pass))) if authz == user || authz == EMPTY_AUTHZ => (user, pass),
@@ -65,10 +65,10 @@
         }
     }
-    pub async fn progress<F,X>(&mut self, cmd: ClientCommand, login: X)
+    pub async fn progress<F, X>(&mut self, cmd: ClientCommand, login: X)
     where
         X: FnOnce(String, String) -> F,
-        F: Future<Output=bool>,
+        F: Future<Output = bool>,
     {
         let new_state = 'state: {
             match (std::mem::replace(self, State::Error), cmd) {
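Both try_auth_plain() and progress() take the credential check as a generic async callback (X: FnOnce(String, String) -> F with F: Future<Output = bool>), which keeps the SASL state machine independent of any concrete login backend. A rough usage sketch under that assumption; the module paths and verify_credentials below are placeholders, not documented API:

```rust
use aero_sasl::flow::State; // assumed path for the `flow` module above
use aero_sasl::types::ClientCommand; // assumed path for the `types` module

// Placeholder credential check; any async fn (String, String) -> bool fits the bound.
async fn verify_credentials(_user: String, _pass: String) -> bool {
    false
}

// Drive the state machine one command forward with our credential check.
async fn drive(state: &mut State, cmd: ClientCommand) {
    state
        .progress(cmd, |user, pass| verify_credentials(user, pass))
        .await;
}
```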

@@ -1,3 +1,6 @@
+pub mod decode;
+pub mod encode;
+pub mod flow;
 /// Seek compatibility with the Dovecot Authentication Protocol
 ///
 /// ## Trace
@@ -38,6 +41,3 @@
 /// https://doc.dovecot.org/configuration_manual/howto/simple_virtual_install/#simple-virtual-install-smtp-auth
 /// https://doc.dovecot.org/configuration_manual/howto/postfix_and_dovecot_sasl/#howto-postfix-and-dovecot-sasl
 pub mod types;
-pub mod encode;
-pub mod decode;
-pub mod flow;

@@ -159,5 +159,3 @@ pub enum ServerCommand {
         extra_parameters: Vec<Vec<u8>>,
     },
 }

@@ -11,7 +11,6 @@ pub struct CompanionConfig {
     pub pid: Option<PathBuf>,
     pub imap: ImapUnsecureConfig,
     // @FIXME Add DAV
     #[serde(flatten)]
     pub users: LoginStaticConfig,
 }

@@ -2,9 +2,9 @@ use async_trait::async_trait;
 use ldap3::{LdapConnAsync, Scope, SearchEntry};
 use log::debug;
-use super::*;
 use crate::config::*;
 use crate::storage;
+use super::*;
 pub struct LdapLoginProvider {
     ldap_server: String,

@@ -2,7 +2,7 @@ use std::collections::BTreeMap;
 use std::ops::Bound::{self, Excluded, Included, Unbounded};
 use std::sync::RwLock;
-use sodiumoxide::{hex, crypto::hash};
+use sodiumoxide::{crypto::hash, hex};
 use tokio::sync::Notify;
 use crate::storage::*;

@@ -7,9 +7,9 @@ use anyhow::{bail, Context, Result};
 use clap::{Parser, Subcommand};
 use nix::{sys::signal, unistd::Pid};
-use crate::server::Server;
 use aero_user::config::*;
 use aero_user::login::{static_provider::*, *};
+use crate::server::Server;
 #[derive(Parser, Debug)]
 #[clap(author, version, about, long_about = None)]

@@ -7,13 +7,13 @@ use futures::try_join;
 use log::*;
 use tokio::sync::watch;
-use aero_user::config::*;
-use aero_user::login::ArcLoginProvider;
-use aero_user::login::{demo_provider::*, ldap_provider::*, static_provider::*};
-use aero_proto::sasl as auth;
 use aero_proto::dav;
 use aero_proto::imap;
 use aero_proto::lmtp::*;
+use aero_proto::sasl as auth;
+use aero_user::config::*;
+use aero_user::login::ArcLoginProvider;
+use aero_user::login::{demo_provider::*, ldap_provider::*, static_provider::*};
 pub struct Server {
     lmtp_server: Option<Arc<LmtpServer>>,