format + WIP calendar-query
parent 6b9542088c
commit 32dfd25f57
46 changed files with 2453 additions and 1446 deletions

Cargo.lock (generated)
@@ -90,6 +90,7 @@ dependencies = [
"http-body-util",
"hyper 1.2.0",
"hyper-util",
"icalendar",
"imap-codec",
"imap-flow",
"quick-xml",

@@ -4,12 +4,12 @@ use anyhow::{anyhow, bail, Result};
use tokio::sync::RwLock;

use aero_bayou::Bayou;
use aero_user::login::Credentials;
use aero_user::cryptoblob::{self, gen_key, Key};
use aero_user::login::Credentials;
use aero_user::storage::{self, BlobRef, BlobVal, Store};

use crate::davdag::{BlobId, DavDag, IndexEntry, SyncChange, Token};
use crate::unique_ident::*;
use crate::davdag::{DavDag, IndexEntry, Token, BlobId, SyncChange};

pub struct Calendar {
pub(super) id: UniqueIdent,

@@ -17,10 +17,7 @@ pub struct Calendar {
}

impl Calendar {
pub(crate) async fn open(
creds: &Credentials,
id: UniqueIdent,
) -> Result<Self> {
pub(crate) async fn open(creds: &Credentials, id: UniqueIdent) -> Result<Self> {
let bayou_path = format!("calendar/dag/{}", id);
let cal_path = format!("calendar/events/{}", id);

@@ -126,7 +123,7 @@ impl CalendarInternal {
async fn put<'a>(&mut self, name: &str, evt: &'a [u8]) -> Result<(Token, IndexEntry)> {
let message_key = gen_key();
let blob_id = gen_ident();

let encrypted_msg_key = cryptoblob::seal(&message_key.as_ref(), &self.encryption_key)?;
let key_header = base64::engine::general_purpose::STANDARD.encode(&encrypted_msg_key);

@@ -138,9 +135,7 @@ impl CalendarInternal {
)
.with_meta(MESSAGE_KEY.to_string(), key_header);

let etag = self.storage
.blob_insert(blob_val)
.await?;
let etag = self.storage.blob_insert(blob_val).await?;

// Add entry to Bayou
let entry: IndexEntry = (blob_id, name.to_string(), etag);

@@ -181,7 +176,7 @@ impl CalendarInternal {
let heads = davstate.heads_vec();
let token = match heads.as_slice() {
[ token ] => *token,
[token] => *token,
_ => {
let op_mg = davstate.op_merge();
let token = op_mg.token();

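The two `put` hunks above show the calendar write path: generate a fresh per-event key, seal it with the calendar's encryption key, and attach the sealed key to the encrypted blob as a MESSAGE_KEY metadata header. A minimal sketch of that key-wrapping step; the base64 call is the one visible in the diff, while the `seal` stub below only stands in for the crate-specific `aero_user::cryptoblob::seal`:

use base64::Engine;

// Hypothetical stand-in for aero_user::cryptoblob::seal: the real function encrypts
// `plain` under `key`; this XOR stub only marks the step in the flow.
fn seal(plain: &[u8], key: &[u8; 32]) -> Vec<u8> {
    plain.iter().zip(key.iter().cycle()).map(|(p, k)| p ^ k).collect()
}

// Wrap a fresh per-event key under the calendar's long-lived key, then encode it
// so it can travel with the encrypted event blob as a metadata header.
fn wrap_message_key(message_key: &[u8; 32], calendar_key: &[u8; 32]) -> String {
    let sealed = seal(message_key, calendar_key);
    base64::engine::general_purpose::STANDARD.encode(&sealed)
}

fn main() {
    let header = wrap_message_key(&[7u8; 32], &[42u8; 32]);
    println!("MESSAGE_KEY header: {header}");
}
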
@@ -1,16 +1,16 @@
use anyhow::{bail, Result};
use std::collections::{HashMap, BTreeMap};
use std::sync::{Weak, Arc};
use std::collections::{BTreeMap, HashMap};
use std::sync::{Arc, Weak};

use serde::{Deserialize, Serialize};

use aero_bayou::timestamp::now_msec;
use aero_user::storage;
use aero_user::cryptoblob::{open_deserialize, seal_serialize};
use aero_user::storage;

use super::Calendar;
use crate::unique_ident::{gen_ident, UniqueIdent};
use crate::user::User;
use super::Calendar;

pub(crate) const CAL_LIST_PK: &str = "calendars";
pub(crate) const CAL_LIST_SK: &str = "list";

@@ -46,7 +46,7 @@ impl CalendarNs {
}

let cal = Arc::new(Calendar::open(&user.creds, id).await?);

let mut cache = self.0.lock().unwrap();
if let Some(concurrent_cal) = cache.get(&id).and_then(Weak::upgrade) {
drop(cal); // we worked for nothing but at least we didn't starve someone else

@@ -117,13 +117,15 @@ impl CalendarNs {
CalendarExists::Created(_) => (),
}
list.save(user, ct).await?;

Ok(())
}

/// Has calendar
pub async fn has(&self, user: &Arc<User>, name: &str) -> Result<bool> {
CalendarList::load(user).await.map(|(list, _)| list.has(name))
CalendarList::load(user)
.await
.map(|(list, _)| list.has(name))
}
}

@@ -161,7 +163,8 @@ impl CalendarList {
for v in row_vals {
if let storage::Alternative::Value(vbytes) = v {
let list2 = open_deserialize::<CalendarList>(&vbytes, &user.creds.keys.master)?;
let list2 =
open_deserialize::<CalendarList>(&vbytes, &user.creds.keys.master)?;
list.merge(list2);
}
}

@@ -200,7 +203,7 @@ impl CalendarList {
/// (Don't forget to save if it returns CalendarExists::Created)
fn create(&mut self, name: &str) -> CalendarExists {
if let Some(CalendarListEntry {
id_lww: (_, Some(id))
id_lww: (_, Some(id)),
}) = self.0.get(name)
{
return CalendarExists::Existed(*id);

@@ -222,9 +225,10 @@ impl CalendarList {
/// For a given calendar name, get its Unique Identifier
fn get(&self, name: &str) -> Option<UniqueIdent> {
self.0.get(name).map(|CalendarListEntry {
id_lww: (_, ident),
}| *ident).flatten()
self.0
.get(name)
.map(|CalendarListEntry { id_lww: (_, ident) }| *ident)
.flatten()
}

/// Check if a given calendar name exists

@@ -271,9 +275,7 @@ impl CalendarList {
(now_msec(), id)
}
}
Some(CalendarListEntry {
id_lww,
}) => {
Some(CalendarListEntry { id_lww }) => {
if id_lww.1 == id {
// Entry is already equals to the requested id (Option<UniqueIdent)
// Nothing to do

@@ -281,20 +283,15 @@ impl CalendarList {
} else {
// Entry does not equal to what we know internally
// We update the Last Write Win CRDT here with the new id value
(
std::cmp::max(id_lww.0 + 1, now_msec()),
id,
)
(std::cmp::max(id_lww.0 + 1, now_msec()), id)
}
}
};

// If we did not return here, that's because we have to update
// something in our internal index.
self.0.insert(
name.into(),
CalendarListEntry { id_lww: (ts, id) },
);
self.0
.insert(name.into(), CalendarListEntry { id_lww: (ts, id) });
Some(())
}

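The create/update hunks above treat each calendar-list entry as a last-write-wins register: a local update stamps itself with max(old timestamp + 1, now_msec()) so it always supersedes the value it replaces, and merging two copies keeps the highest timestamp. A self-contained sketch of that register using only std types (the names Lww, set and merge are illustrative, not the crate's API):

use std::time::{SystemTime, UNIX_EPOCH};

// Illustrative stand-in for aero_bayou::timestamp::now_msec().
fn now_msec() -> u64 {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_millis() as u64
}

// A last-write-wins register: a value tagged with the time it was written.
#[derive(Debug, Clone)]
struct Lww<T> {
    ts: u64,
    value: T,
}

impl<T: Clone> Lww<T> {
    fn new(value: T) -> Self {
        Lww { ts: now_msec(), value }
    }

    // Local overwrite: bump past the stored timestamp even if the clock went
    // backwards, mirroring `std::cmp::max(id_lww.0 + 1, now_msec())` above.
    fn set(&mut self, value: T) {
        self.ts = std::cmp::max(self.ts + 1, now_msec());
        self.value = value;
    }

    // Merge a remote copy: the highest timestamp wins.
    fn merge(&mut self, other: &Self) {
        if other.ts > self.ts {
            self.ts = other.ts;
            self.value = other.value.clone();
        }
    }
}

fn main() {
    let mut local = Lww::new(Some("calendar-id-1"));
    let mut remote = local.clone();
    remote.set(None); // a concurrent unlink on another replica
    local.merge(&remote);
    assert_eq!(local.value, None); // the later write wins
}

The monotonic bump on local writes is what lets a rename issued twice in a row on the same replica still order correctly, even when both writes land within the same millisecond.
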
@@ -1,6 +1,6 @@
use anyhow::{bail, Result};
use im::{ordset, OrdMap, OrdSet};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use im::{OrdMap, OrdSet, ordset};

use aero_bayou::*;

@@ -26,7 +26,6 @@ pub struct DavDag {
pub idx_by_filename: OrdMap<FileName, BlobId>,

// ------------ Below this line, data is ephemeral, ie. not checkpointed

/// Partial synchronization graph
pub ancestors: OrdMap<Token, OrdSet<Token>>,

@@ -84,7 +83,7 @@ impl DavDag {
// HELPER functions

pub fn heads_vec(&self) -> Vec<Token> {
self.heads.clone().into_iter().collect()
self.heads.clone().into_iter().collect()
}

/// A sync descriptor

@@ -99,7 +98,7 @@ impl DavDag {
// We can't capture all missing events if we are not connected
// to all sinks of the graph,
// ie. if we don't already know all the sinks,
// ie. if we are missing so much history that
// ie. if we are missing so much history that
// the event log has been transformed into a checkpoint
if !self.origins.is_subset(already_known.clone()) {
bail!("Not enough history to produce a correct diff, a full resync is needed");

@@ -124,7 +123,7 @@ impl DavDag {
if all_known.insert(cursor).is_some() {
// Item already processed
continue
continue;
}

// Collect parents

@@ -167,7 +166,8 @@ impl DavDag {
self.idx_by_filename.remove(filename);

// Record the change in the ephemeral synchronization map
self.change.insert(sync_token, SyncChange::NotFound(filename.to_string()));
self.change
.insert(sync_token, SyncChange::NotFound(filename.to_string()));

// Finally clear item from the source of trust
self.table.remove(blob_id);

@@ -179,10 +179,13 @@ impl DavDag {
// --- Update ANCESTORS
// We register ancestors as it is required for the sync algorithm
self.ancestors.insert(*child, parents.iter().fold(ordset![], |mut acc, p| {
acc.insert(*p);
acc
}));
self.ancestors.insert(
*child,
parents.iter().fold(ordset![], |mut acc, p| {
acc.insert(*p);
acc
}),
);

// --- Update ORIGINS
// If this event has no parents, it's an origin

@@ -192,11 +195,13 @@ impl DavDag {
// --- Update HEADS
// Remove from HEADS this event's parents
parents.iter().for_each(|par| { self.heads.remove(par); });
parents.iter().for_each(|par| {
self.heads.remove(par);
});

// This event becomes a new HEAD in turn
self.heads.insert(*child);

// --- Update ALL NODES
self.all_nodes.insert(*child);
}

@@ -217,16 +222,16 @@ impl BayouState for DavDag {
fn apply(&self, op: &Self::Op) -> Self {
let mut new = self.clone();

match op {
DavDagOp::Put(sync_desc, entry) => {
new.sync_dag(sync_desc);
new.register(Some(sync_desc.1), entry.clone());
},
}
DavDagOp::Delete(sync_desc, blob_id) => {
new.sync_dag(sync_desc);
new.unregister(sync_desc.1, blob_id);
},
}
DavDagOp::Merge(sync_desc) => {
new.sync_dag(sync_desc);
}

@@ -252,7 +257,9 @@ impl<'de> Deserialize<'de> for DavDag {
let mut davdag = DavDag::default();

// Build the table + index
val.items.into_iter().for_each(|entry| davdag.register(None, entry));
val.items
.into_iter()
.for_each(|entry| davdag.register(None, entry));

// Initialize the synchronization DAG with its roots
val.heads.into_iter().for_each(|ident| {

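The register hunks above keep three ephemeral views of the synchronization DAG up to date when an event arrives: its parents are recorded as ancestors, a parentless event becomes an origin, and the event replaces its parents in the set of heads. A compact model of that bookkeeping with std collections instead of the im crate (type and field names are simplified for the sketch):

use std::collections::{BTreeMap, BTreeSet};

type Token = u64; // stand-in for the crate's UniqueIdent-based Token

#[derive(Default)]
struct SyncDag {
    ancestors: BTreeMap<Token, BTreeSet<Token>>,
    origins: BTreeSet<Token>,
    heads: BTreeSet<Token>,
    all_nodes: BTreeSet<Token>,
}

impl SyncDag {
    fn register(&mut self, parents: &[Token], child: Token) {
        // Ancestors: remember which events this one directly follows.
        self.ancestors.insert(child, parents.iter().copied().collect());

        // Origins: an event with no parents is a root of the graph.
        if parents.is_empty() {
            self.origins.insert(child);
        }

        // Heads: the parents are no longer frontier events, the child is.
        for p in parents {
            self.heads.remove(p);
        }
        self.heads.insert(child);

        self.all_nodes.insert(child);
    }
}

fn main() {
    let mut dag = SyncDag::default();
    dag.register(&[], 1);
    dag.register(&[1], 2);
    dag.register(&[1], 3); // concurrent with event 2
    assert_eq!(dag.heads, BTreeSet::from([2, 3]));
    assert_eq!(dag.origins, BTreeSet::from([1]));
}

Two concurrent events leave two heads behind, which is exactly the case the op_merge path in the calendar hunk collapses back into a single token.
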
@@ -1,5 +1,5 @@
pub mod unique_ident;
pub mod davdag;
pub mod user;
pub mod mail;
pub mod calendar;
pub mod davdag;
pub mod mail;
pub mod unique_ident;
pub mod user;

@@ -8,16 +8,16 @@ use futures::{future::BoxFuture, FutureExt};
use tokio::sync::watch;
use tracing::{debug, error, info, warn};

use aero_bayou::timestamp::now_msec;
use aero_user::cryptoblob;
use aero_user::login::{Credentials, PublicCredentials};
use aero_user::storage;
use aero_bayou::timestamp::now_msec;

use crate::mail::mailbox::Mailbox;
use crate::mail::uidindex::ImapUidvalidity;
use crate::mail::IMF;
use crate::unique_ident::*;
use crate::user::User;
use crate::mail::IMF;

const INCOMING_PK: &str = "incoming";
const INCOMING_LOCK_SK: &str = "lock";

@@ -2,15 +2,15 @@ use anyhow::{anyhow, bail, Result};
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock;

use aero_bayou::timestamp::now_msec;
use aero_bayou::Bayou;
use aero_user::cryptoblob::{self, gen_key, open_deserialize, seal_serialize, Key};
use aero_user::login::Credentials;
use aero_user::storage::{self, BlobRef, BlobVal, RowRef, RowVal, Selector, Store};
use aero_bayou::Bayou;
use aero_bayou::timestamp::now_msec;

use crate::unique_ident::*;
use crate::mail::uidindex::*;
use crate::mail::IMF;
use crate::unique_ident::*;

pub struct Mailbox {
pub(super) id: UniqueIdent,

@@ -1,9 +1,9 @@
pub mod incoming;
pub mod mailbox;
pub mod namespace;
pub mod query;
pub mod snapshot;
pub mod uidindex;
pub mod namespace;

// Internet Message Format
// aka RFC 822 - RFC 2822 - RFC 5322

@@ -104,7 +104,11 @@ impl MailboxList {
/// Ensures mailbox `name` maps to id `id`.
/// If it already mapped to that, returns None.
/// If a change had to be done, returns Some(new uidvalidity in mailbox).
pub(crate) fn set_mailbox(&mut self, name: &str, id: Option<UniqueIdent>) -> Option<ImapUidvalidity> {
pub(crate) fn set_mailbox(
&mut self,
name: &str,
id: Option<UniqueIdent>,
) -> Option<ImapUidvalidity> {
let (ts, id, uidvalidity) = match self.0.get_mut(name) {
None => {
if id.is_none() {

@@ -2,10 +2,10 @@ use std::sync::Arc;
use anyhow::Result;

use crate::unique_ident::UniqueIdent;
use super::mailbox::Mailbox;
use super::query::{Query, QueryScope};
use super::uidindex::UidIndex;
use crate::unique_ident::UniqueIdent;

/// A Frozen Mailbox has a snapshot of the current mailbox
/// state that is desynchronized with the real mailbox state.

@@ -3,8 +3,8 @@ use std::num::{NonZeroU32, NonZeroU64};
use im::{HashMap, OrdMap, OrdSet};
use serde::{Deserialize, Deserializer, Serialize, Serializer};

use aero_bayou::*;
use crate::unique_ident::UniqueIdent;
use aero_bayou::*;

pub type ModSeq = NonZeroU64;
pub type ImapUid = NonZeroU32;

@@ -9,12 +9,15 @@ use aero_user::cryptoblob::{open_deserialize, seal_serialize};
use aero_user::login::Credentials;
use aero_user::storage;

use crate::calendar::namespace::CalendarNs;
use crate::mail::incoming::incoming_mail_watch_process;
use crate::mail::mailbox::Mailbox;
use crate::mail::namespace::{
CreatedMailbox, MailboxList, ARCHIVE, DRAFTS, INBOX, MAILBOX_HIERARCHY_DELIMITER,
MAILBOX_LIST_PK, MAILBOX_LIST_SK, SENT, TRASH,
};
use crate::mail::uidindex::ImapUidvalidity;
use crate::unique_ident::UniqueIdent;
use crate::mail::namespace::{MAILBOX_HIERARCHY_DELIMITER, INBOX, DRAFTS, ARCHIVE, SENT, TRASH, MAILBOX_LIST_PK, MAILBOX_LIST_SK,MailboxList,CreatedMailbox};
use crate::calendar::namespace::CalendarNs;

//@FIXME User should be totally rewriten
// to extract the local mailbox list

@@ -1,79 +1,79 @@
#![no_main]

use libfuzzer_sys::fuzz_target;
use libfuzzer_sys::arbitrary;
use libfuzzer_sys::arbitrary::Arbitrary;
use libfuzzer_sys::fuzz_target;

use aero_dav::{types, realization, xml};
use aero_dav::{realization, types, xml};
use quick_xml::reader::NsReader;
use tokio::runtime::Runtime;
use tokio::io::AsyncWriteExt;
use tokio::runtime::Runtime;

// Split this file
const tokens: [&str; 63] = [
"0",
"1",
"activelock",
"allprop",
"encoding",
"utf-8",
"http://ns.example.com/boxschema/",
"HTTP/1.1 200 OK",
"1997-12-01T18:27:21-08:00",
"Mon, 12 Jan 1998 09:25:56 GMT",
"\"abcdef\"",
"cannot-modify-protected-property",
"collection",
"creationdate",
"DAV:",
"D",
"C",
"xmlns:D",
"depth",
"displayname",
"error",
"exclusive",
"getcontentlanguage",
"getcontentlength",
"getcontenttype",
"getetag",
"getlastmodified",
"href",
"include",
"Infinite",
"infinity",
"location",
"lockdiscovery",
"lockentry",
"lockinfo",
"lockroot",
"lockscope",
"locktoken",
"lock-token-matches-request-uri",
"lock-token-submitted",
"locktype",
"multistatus",
"no-conflicting-lock",
"no-external-entities",
"owner",
"preserved-live-properties",
"prop",
"propertyupdate",
"propfind",
"propfind-finite-depth",
"propname",
"propstat",
"remove",
"resourcetype",
"response",
"responsedescription",
"set",
"shared",
"status",
"supportedlock",
"text/html",
"timeout",
"write",
];

#[derive(Arbitrary)]

@@ -106,7 +106,7 @@ impl Tag {
acc.push_str("D:");
acc.push_str(self.name.serialize().as_str());

if let Some((k,v)) = &self.attr {
if let Some((k, v)) = &self.attr {
acc.push_str(" ");
acc.push_str(k.serialize().as_str());
acc.push_str("=\"");

@@ -123,7 +123,6 @@ impl Tag {
}
}

#[derive(Arbitrary)]
enum XmlNode {
//@FIXME: build RFC3339 and RFC822 Dates with chrono based on timestamps

@@ -145,9 +144,14 @@ impl XmlNode {
let stag = tag.start();
match children.is_empty() {
true => format!("<{}/>", stag),
false => format!("<{}>{}</{}>", stag, children.iter().map(|v| v.serialize()).collect::<String>(), tag.end()),
false => format!(
"<{}>{}</{}>",
stag,
children.iter().map(|v| v.serialize()).collect::<String>(),
tag.end()
),
}
},
}
Self::Number(v) => format!("{}", v),
Self::Text(v) => v.serialize(),
}

@@ -158,19 +162,22 @@ async fn serialize(elem: &impl xml::QWrite) -> Vec<u8> {
let mut buffer = Vec::new();
let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
let q = quick_xml::writer::Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
let ns_to_apply = vec![ ("xmlns:D".into(), "DAV:".into()) ];
let ns_to_apply = vec![("xmlns:D".into(), "DAV:".into())];
let mut writer = xml::Writer { q, ns_to_apply };

elem.qwrite(&mut writer).await.expect("xml serialization");
tokio_buffer.flush().await.expect("tokio buffer flush");

return buffer
return buffer;
}

type Object = types::Multistatus<realization::Core, types::PropValue<realization::Core>>;

fuzz_target!(|nodes: XmlNode| {
let gen = format!("<D:multistatus xmlns:D=\"DAV:\">{}<D:/multistatus>", nodes.serialize());
let gen = format!(
"<D:multistatus xmlns:D=\"DAV:\">{}<D:/multistatus>",
nodes.serialize()
);
//println!("--------\n{}", gen);
let data = gen.as_bytes();

@@ -191,7 +198,9 @@ fuzz_target!(|nodes: XmlNode| {
let my_serialization = serialize(&reference).await;

// 3. De-serialize my serialization
let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice())).await.expect("XML Reader init");
let mut rdr2 = xml::Reader::new(NsReader::from_reader(my_serialization.as_slice()))
.await
.expect("XML Reader init");
let comparison = rdr2.find::<Object>().await.expect("Deserialize again");

// 4. Both the first decoding and last decoding must be identical

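The fuzz target's serialize() helper above drives quick_xml's tokio-backed writer: events are written into a BufWriter over a Vec<u8>, which is then flushed and returned. A standalone sketch of the same pattern, assuming quick_xml is built with its async-tokio feature and tokio with the rt and macros features; the crate's own xml::Writer wrapper and qwrite trait are left out:

use quick_xml::events::{BytesEnd, BytesStart, BytesText, Event};
use quick_xml::Writer;
use tokio::io::AsyncWriteExt;

#[tokio::main]
async fn main() -> Result<(), quick_xml::Error> {
    let mut buffer = Vec::new();
    let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
    // Indent with four spaces, as the fuzz target's serialize() helper does.
    let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4);

    // <D:displayname>Personal calendar</D:displayname>
    writer
        .write_event_async(Event::Start(BytesStart::new("D:displayname")))
        .await?;
    writer
        .write_event_async(Event::Text(BytesText::new("Personal calendar")))
        .await?;
    writer
        .write_event_async(Event::End(BytesEnd::new("D:displayname")))
        .await?;

    drop(writer); // release the borrow on the buffered writer before flushing
    tokio_buffer.flush().await.expect("tokio buffer flush");
    drop(tokio_buffer); // release the borrow on `buffer` before reading it

    println!("{}", String::from_utf8_lossy(&buffer));
    Ok(())
}
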
@@ -1,23 +1,31 @@
use super::acltypes::*;
use super::types as dav;
use super::xml::{QRead, Reader, IRead, DAV_URN};
use super::error::ParsingError;
use super::types as dav;
use super::xml::{IRead, QRead, Reader, DAV_URN};

impl QRead<Property> for Property {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open_start(DAV_URN, "owner").await?.is_some() {
let href = xml.find().await?;
xml.close().await?;
return Ok(Self::Owner(href))
return Ok(Self::Owner(href));
}
if xml.maybe_open_start(DAV_URN, "current-user-principal").await?.is_some() {
if xml
.maybe_open_start(DAV_URN, "current-user-principal")
.await?
.is_some()
{
let user = xml.find().await?;
xml.close().await?;
return Ok(Self::CurrentUserPrincipal(user))
return Ok(Self::CurrentUserPrincipal(user));
}
if xml.maybe_open_start(DAV_URN, "current-user-privilege-set").await?.is_some() {
if xml
.maybe_open_start(DAV_URN, "current-user-privilege-set")
.await?
.is_some()
{
xml.close().await?;
return Ok(Self::CurrentUserPrivilegeSet(vec![]))
return Ok(Self::CurrentUserPrivilegeSet(vec![]));
}

Err(ParsingError::Recoverable)

@@ -28,17 +36,25 @@ impl QRead<PropertyRequest> for PropertyRequest {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "owner").await?.is_some() {
xml.close().await?;
return Ok(Self::Owner)
return Ok(Self::Owner);
}

if xml.maybe_open(DAV_URN, "current-user-principal").await?.is_some() {
if xml
.maybe_open(DAV_URN, "current-user-principal")
.await?
.is_some()
{
xml.close().await?;
return Ok(Self::CurrentUserPrincipal)
return Ok(Self::CurrentUserPrincipal);
}

if xml.maybe_open(DAV_URN, "current-user-privilege-set").await?.is_some() {
if xml
.maybe_open(DAV_URN, "current-user-privilege-set")
.await?
.is_some()
{
xml.close().await?;
return Ok(Self::CurrentUserPrivilegeSet)
return Ok(Self::CurrentUserPrivilegeSet);
}

Err(ParsingError::Recoverable)

@@ -49,7 +65,7 @@ impl QRead<ResourceType> for ResourceType {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "principal").await?.is_some() {
xml.close().await?;
return Ok(Self::Principal)
return Ok(Self::Principal);
}
Err(ParsingError::Recoverable)
}

@@ -60,7 +76,7 @@ impl QRead<User> for User {
async fn qread(xml: &mut Reader<impl IRead>) -> Result<Self, ParsingError> {
if xml.maybe_open(DAV_URN, "unauthenticated").await?.is_some() {
xml.close().await?;
return Ok(Self::Unauthenticated)
return Ok(Self::Unauthenticated);
}

dav::Href::qread(xml).await.map(Self::Authenticated)

@@ -1,9 +1,9 @@
use quick_xml::Error as QError;
use quick_xml::events::Event;
use quick_xml::Error as QError;

use super::acltypes::*;
use super::xml::{QWrite, Writer, IWrite};
use super::error::ParsingError;
use super::xml::{IWrite, QWrite, Writer};

impl QWrite for Property {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {

@@ -14,18 +14,18 @@ impl QWrite for Property {
xml.q.write_event_async(Event::Start(start.clone())).await?;
href.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::CurrentUserPrincipal(user) => {
let start = xml.create_dav_element("current-user-principal");
let end = start.to_end();
xml.q.write_event_async(Event::Start(start.clone())).await?;
user.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::CurrentUserPrivilegeSet(_) => {
let empty_tag = xml.create_dav_element("current-user-privilege-set");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}
}
}
}

@@ -64,7 +64,7 @@ impl QWrite for User {
Self::Unauthenticated => {
let tag = xml.create_dav_element("unauthenticated");
xml.q.write_event_async(Event::Empty(tag)).await
},
}
Self::Authenticated(href) => href.qwrite(xml).await,
}
}

@@ -2,14 +2,12 @@ use super::types as dav;
//RFC covered: RFC3744 (ACL core) + RFC5397 (ACL Current Principal Extension)

//@FIXME required for a full CalDAV implementation
// See section 6. of the CalDAV RFC
// It seems mainly required for free-busy that I will not implement now.
// It can also be used for discovering main calendar, not sure it is used.
// Note: it is used by Thunderbird

#[derive(Debug, PartialEq, Clone)]
pub enum PropertyRequest {
Owner,

File diff suppressed because it is too large

@@ -1,10 +1,9 @@
use quick_xml::events::{BytesText, Event};
use quick_xml::Error as QError;
use quick_xml::events::{Event, BytesText};

use super::caltypes::*;
use super::xml::{Node, QWrite, IWrite, Writer};
use super::types::Extension;
use super::xml::{IWrite, Node, QWrite, Writer};

// ==================== Calendar Types Serialization =========================

@@ -54,7 +53,7 @@ impl<E: Extension> QWrite for CalendarQuery<E> {
selector.qwrite(xml).await?;
}
self.filter.qwrite(xml).await?;
if let Some(tz) = &self.timezone {
if let Some(tz) = &self.timezone {
tz.qwrite(xml).await?;
}
xml.q.write_event_async(Event::End(end)).await

@@ -106,8 +105,8 @@ impl QWrite for PropertyRequest {
Self::MinDateTime => atom("min-date-time").await,
Self::MaxDateTime => atom("max-date-time").await,
Self::MaxInstances => atom("max-instances").await,
Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await,
Self::SupportedCollationSet => atom("supported-collation-set").await,
Self::MaxAttendeesPerInstance => atom("max-attendees-per-instance").await,
Self::SupportedCollationSet => atom("supported-collation-set").await,
Self::CalendarData(req) => req.qwrite(xml).await,
}
}

@@ -130,17 +129,21 @@ impl QWrite for Property {
let end = start.to_end();

xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(text))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(text)))
.await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::CalendarTimezone(payload) => {
let start = xml.create_cal_element("calendar-timezone");
let end = start.to_end();

xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(payload))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(payload)))
.await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::SupportedCalendarComponentSet(many_comp) => {
let start = xml.create_cal_element("supported-calendar-component-set");
let end = start.to_end();

@@ -150,7 +153,7 @@ impl QWrite for Property {
comp.qwrite(xml).await?;
}
xml.q.write_event_async(Event::End(end)).await
},
}
Self::SupportedCalendarData(many_mime) => {
let start = xml.create_cal_element("supported-calendar-data");
let end = start.to_end();

@@ -160,49 +163,59 @@ impl QWrite for Property {
mime.qwrite(xml).await?;
}
xml.q.write_event_async(Event::End(end)).await
},
}
Self::MaxResourceSize(bytes) => {
let start = xml.create_cal_element("max-resource-size");
let end = start.to_end();

xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str()))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(bytes.to_string().as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::MinDateTime(dt) => {
let start = xml.create_cal_element("min-date-time");
let end = start.to_end();

let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT));
xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(dtstr.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::MaxDateTime(dt) => {
let start = xml.create_cal_element("max-date-time");
let end = start.to_end();

let dtstr = format!("{}", dt.format(ICAL_DATETIME_FMT));
xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(dtstr.as_str()))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(dtstr.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::MaxInstances(count) => {
let start = xml.create_cal_element("max-instances");
let end = start.to_end();

xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(count.to_string().as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::MaxAttendeesPerInstance(count) => {
let start = xml.create_cal_element("max-attendees-per-instance");
let end = start.to_end();

xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(count.to_string().as_str()))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(count.to_string().as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::SupportedCollationSet(many_collations) => {
let start = xml.create_cal_element("supported-collation-set");
let end = start.to_end();

@@ -211,8 +224,8 @@ impl QWrite for Property {
for collation in many_collations.iter() {
collation.qwrite(xml).await?;
}
xml.q.write_event_async(Event::End(end)).await
},
xml.q.write_event_async(Event::End(end)).await
}
Self::CalendarData(inner) => inner.qwrite(xml).await,
}
}

@@ -225,7 +238,7 @@ impl QWrite for ResourceType {
Self::Calendar => {
let empty_tag = xml.create_cal_element("calendar");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}
}
}
}

@@ -245,7 +258,7 @@ impl QWrite for Violation {
Self::NeedPrivileges => {
let empty_tag = xml.create_dav_element("need-privileges");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}

// Regular CalDAV errors
Self::ResourceMustBeNull => atom("resource-must-be-null").await,

@@ -262,7 +275,7 @@ impl QWrite for Violation {
xml.q.write_event_async(Event::Start(start.clone())).await?;
href.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await
},
}
Self::MaxResourceSize => atom("max-resource-size").await,
Self::MinDateTime => atom("min-date-time").await,
Self::MaxDateTime => atom("max-date-time").await,

@@ -284,13 +297,12 @@ impl QWrite for Violation {
param_item.qwrite(xml).await?;
}
xml.q.write_event_async(Event::End(end)).await
},
}
Self::NumberOfMatchesWithinLimits => atom("number-of-matches-within-limits").await,
}
}
}

// ---------------------------- Inner XML ------------------------------------
impl QWrite for SupportedCollation {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {

@@ -300,19 +312,20 @@ impl QWrite for SupportedCollation {
xml.q.write_event_async(Event::Start(start.clone())).await?;
self.0.qwrite(xml).await?;
xml.q.write_event_async(Event::End(end)).await

}
}

impl QWrite for Collation {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let col = match self {
Self::AsciiCaseMap => "i;ascii-casemap",
Self::Octet => "i;octet",
Self::Unknown(v) => v.as_str(),
Self::AsciiCaseMap => "i;ascii-casemap",
Self::Octet => "i;octet",
Self::Unknown(v) => v.as_str(),
};

xml.q.write_event_async(Event::Text(BytesText::new(col))).await
xml.q
.write_event_async(Event::Text(BytesText::new(col)))
.await
}
}

@@ -332,7 +345,9 @@ impl QWrite for CalendarDataPayload {
let end = start.to_end();

xml.q.write_event_async(Event::Start(start.clone())).await?;
xml.q.write_event_async(Event::Text(BytesText::new(self.payload.as_str()))).await?;
xml.q
.write_event_async(Event::Text(BytesText::new(self.payload.as_str())))
.await?;
xml.q.write_event_async(Event::End(end)).await
}
}

@@ -347,7 +362,7 @@ impl QWrite for CalendarDataRequest {
// Empty tag
if self.comp.is_none() && self.recurrence.is_none() && self.limit_freebusy_set.is_none() {
return xml.q.write_event_async(Event::Empty(start.clone())).await
return xml.q.write_event_async(Event::Empty(start.clone())).await;
}

let end = start.to_end();

@@ -392,7 +407,7 @@ impl QWrite for Comp {
comp_kind.qwrite(xml).await?;
}
xml.q.write_event_async(Event::End(end)).await
},
}
}
}
}

@@ -411,7 +426,7 @@ impl QWrite for CompKind {
Self::AllComp => {
let empty_tag = xml.create_cal_element("allcomp");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}
Self::Comp(many_comp) => {
for comp in many_comp.iter() {
// Required: recursion in an async fn requires boxing

@@ -420,7 +435,10 @@ impl QWrite for CompKind {
// For more information about this error, try `rustc --explain E0391`.
// https://github.com/rust-lang/rust/issues/78649
#[inline(always)]
fn recurse<'a>(comp: &'a Comp, xml: &'a mut Writer<impl IWrite>) -> futures::future::BoxFuture<'a, Result<(), QError>> {
fn recurse<'a>(
comp: &'a Comp,
xml: &'a mut Writer<impl IWrite>,
) -> futures::future::BoxFuture<'a, Result<(), QError>> {
Box::pin(comp.qwrite(xml))
}
recurse(comp, xml).await?;

@@ -437,7 +455,7 @@ impl QWrite for PropKind {
Self::AllProp => {
let empty_tag = xml.create_cal_element("allprop");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}
Self::Prop(many_prop) => {
for prop in many_prop.iter() {
prop.qwrite(xml).await?;

@@ -473,8 +491,14 @@ impl QWrite for RecurrenceModifier {
impl QWrite for Expand {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let mut empty = xml.create_cal_element("expand");
empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str()));
empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str()));
empty.push_attribute((
"start",
format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(),
));
empty.push_attribute((
"end",
format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(),
));
xml.q.write_event_async(Event::Empty(empty)).await
}
}

@@ -482,8 +506,14 @@ impl QWrite for Expand {
impl QWrite for LimitRecurrenceSet {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let mut empty = xml.create_cal_element("limit-recurrence-set");
empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str()));
empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str()));
empty.push_attribute((
"start",
format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(),
));
empty.push_attribute((
"end",
format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(),
));
xml.q.write_event_async(Event::Empty(empty)).await
}
}

@@ -491,8 +521,14 @@ impl QWrite for LimitRecurrenceSet {
impl QWrite for LimitFreebusySet {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
let mut empty = xml.create_cal_element("limit-freebusy-set");
empty.push_attribute(("start", format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str()));
empty.push_attribute(("end", format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str()));
empty.push_attribute((
"start",
format!("{}", self.0.format(ICAL_DATETIME_FMT)).as_str(),
));
empty.push_attribute((
"end",
format!("{}", self.1.format(ICAL_DATETIME_FMT)).as_str(),
));
xml.q.write_event_async(Event::Empty(empty)).await
}
}

@@ -503,11 +539,11 @@ impl<E: Extension> QWrite for CalendarSelector<E> {
Self::AllProp => {
let empty_tag = xml.create_dav_element("allprop");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}
Self::PropName => {
let empty_tag = xml.create_dav_element("propname");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}
Self::Prop(prop) => prop.qwrite(xml).await,
}
}

@@ -534,10 +570,10 @@ impl QWrite for CompFilter {
impl QWrite for CompFilterRules {
async fn qwrite(&self, xml: &mut Writer<impl IWrite>) -> Result<(), QError> {
match self {
Self::IsNotDefined => {
Self::IsNotDefined => {
let empty_tag = xml.create_dav_element("is-not-defined");
xml.q.write_event_async(Event::Empty(empty_tag)).await
},
}
Self::Matches(cfm) => cfm.qwrite(xml).await,
}
}

@@ -559,7 +595,10 @@ impl QWrite for CompFilterMatch {
// For more information about this error, try `rustc --explain E0391`.
// https://github.com/rust-lang/rust/issues/78649
#[inline(always)]
fn recurse<'a>(comp: &'a CompFilter, xml: &'a mut Writer<impl IWrite>) -> futures::future::BoxFuture<'a, Result<(), QError>> {
fn recurse<'a>(
comp: &'a CompFilter,
xml: &'a mut Writer<
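In the expand, limit-recurrence-set and limit-freebusy-set hunks earlier, the start and end bounds are chrono datetimes rendered through the crate-internal ICAL_DATETIME_FMT constant and pushed as attributes on an empty element. A standalone sketch of that pattern; the format string below is an assumption chosen to match the RFC 4791 UTC date-time shape, since the real constant is not shown in this diff:

use chrono::{TimeZone, Utc};
use quick_xml::events::BytesStart;

fn main() {
    // Assumed format string; the crate's ICAL_DATETIME_FMT is internal.
    const ICAL_DATETIME_FMT: &str = "%Y%m%dT%H%M%SZ";

    let start = Utc.with_ymd_and_hms(2024, 3, 1, 0, 0, 0).unwrap();
    let end = Utc.with_ymd_and_hms(2024, 3, 31, 23, 59, 59).unwrap();

    // Build <C:expand start="..." end="..."/> the way the qwrite impls do:
    // render each bound, then push it as an attribute on the empty element.
    let mut empty = BytesStart::new("C:expand");
    empty.push_attribute(("start", format!("{}", start.format(ICAL_DATETIME_FMT)).as_str()));
    empty.push_attribute(("end", format!("{}", end.format(ICAL_DATETIME_FMT)).as_str()));

    println!("{} attributes on <C:expand/>", empty.attributes().count());
    println!("start={} end={}", start.format(ICAL_DATETIME_FMT), end.format(ICAL_DATETIME_FMT));
}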