Fix even more simple clippy suggestions

James Musselman 2024-12-16 23:25:53 -06:00
parent 9270577b68
commit 6116e0151c
Signed by: Musselman
GPG key ID: 1DAEFF35ECB5D6DB
13 changed files with 1331 additions and 945 deletions
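The 13 file diffs that follow are mostly mechanical reformatting plus small clippy-driven rewrites. As a condensed, self-contained sketch of the recurring patterns (hypothetical toy types and names, not kitchen-fridge's own API):

// A condensed illustration of the clippy idioms applied throughout this commit:
// negation instead of `== false`, `matches!` instead of a bool-returning match,
// compound assignment, `Option::as_deref`, and collecting map keys directly.
use std::collections::HashMap;

enum Item {
    Event(String),
    Task(String),
}

fn name(item: &Item) -> &str {
    match item {
        Item::Event(name) | Item::Task(name) => name,
    }
}

fn main() {
    let mut items: HashMap<String, Item> = HashMap::new();
    items.insert("task-1".into(), Item::Task("Water the plants".into()));
    items.insert("event-1".into(), Item::Event("Dentist appointment".into()));

    // clippy::bool_comparison: `!x` rather than `x == false`
    if !items.contains_key("task-2") {
        println!("task-2 is not in the calendar");
    }

    // clippy::match_like_matches_macro: `matches!` rather than a match returning true/false
    let is_event = matches!(items.get("event-1"), Some(Item::Event(_)));
    println!("event-1 is an event: {}", is_event);

    // Collect keys directly rather than `.iter().map(|(url, _)| url.clone())`
    let urls: Vec<String> = items.keys().cloned().collect();
    println!("{} item(s): {:?}", urls.len(), urls);

    // clippy::assign_op_pattern: `value -= 1` rather than `value = value - 1`
    let mut remaining_successes: u32 = 3;
    remaining_successes -= 1;
    println!("remaining successes: {}", remaining_successes);

    // `Option<String>` to `Option<&str>`: `as_deref()` rather than `.as_ref().map(|s| s.as_str())`
    let summary: Option<String> = Some("SUMMARY".to_string());
    let summary_ref: Option<&str> = summary.as_deref();
    println!("{} -> {:?}", name(&items["task-1"]), summary_ref);
}

Each of these idioms corresponds to before/after pairs visible in the hunks below.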

@ -1,38 +1,45 @@
//! This is an example of how kitchen-fridge can be used //! This is an example of how kitchen-fridge can be used
use chrono::{Utc}; use chrono::Utc;
use url::Url; use url::Url;
use kitchen_fridge::traits::CalDavSource;
use kitchen_fridge::calendar::SupportedComponents; use kitchen_fridge::calendar::SupportedComponents;
use kitchen_fridge::Item;
use kitchen_fridge::Task;
use kitchen_fridge::task::CompletionStatus; use kitchen_fridge::task::CompletionStatus;
use kitchen_fridge::CalDavProvider;
use kitchen_fridge::traits::BaseCalendar; use kitchen_fridge::traits::BaseCalendar;
use kitchen_fridge::traits::CalDavSource;
use kitchen_fridge::traits::CompleteCalendar; use kitchen_fridge::traits::CompleteCalendar;
use kitchen_fridge::utils::pause; use kitchen_fridge::utils::pause;
use kitchen_fridge::CalDavProvider;
use kitchen_fridge::Item;
use kitchen_fridge::Task;
mod shared; mod shared;
use shared::initial_sync; use shared::initial_sync;
use shared::{URL, USERNAME, EXAMPLE_EXISTING_CALENDAR_URL, EXAMPLE_CREATED_CALENDAR_URL}; use shared::{EXAMPLE_CREATED_CALENDAR_URL, EXAMPLE_EXISTING_CALENDAR_URL, URL, USERNAME};
const CACHE_FOLDER: &str = "test_cache/provider_sync"; const CACHE_FOLDER: &str = "test_cache/provider_sync";
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
env_logger::init(); env_logger::init();
println!("This example show how to sync a remote server with a local cache, using a Provider."); println!("This example show how to sync a remote server with a local cache, using a Provider.");
println!("Make sure you have edited the constants in the 'shared.rs' file to include correct URLs and credentials."); println!("Make sure you have edited the constants in the 'shared.rs' file to include correct URLs and credentials.");
println!("You can also set the RUST_LOG environment variable to display more info about the sync."); println!(
"You can also set the RUST_LOG environment variable to display more info about the sync."
);
println!(); println!();
println!("This will use the following settings:"); println!("This will use the following settings:");
println!(" * URL = {}", URL); println!(" * URL = {}", URL);
println!(" * USERNAME = {}", USERNAME); println!(" * USERNAME = {}", USERNAME);
println!(" * EXAMPLE_EXISTING_CALENDAR_URL = {}", EXAMPLE_EXISTING_CALENDAR_URL); println!(
println!(" * EXAMPLE_CREATED_CALENDAR_URL = {}", EXAMPLE_CREATED_CALENDAR_URL); " * EXAMPLE_EXISTING_CALENDAR_URL = {}",
EXAMPLE_EXISTING_CALENDAR_URL
);
println!(
" * EXAMPLE_CREATED_CALENDAR_URL = {}",
EXAMPLE_CREATED_CALENDAR_URL
);
pause(); pause();
let mut provider = initial_sync(CACHE_FOLDER).await; let mut provider = initial_sync(CACHE_FOLDER).await;
@ -47,32 +54,56 @@ async fn add_items_and_sync_again(provider: &mut CalDavProvider) {
// Create a new calendar... // Create a new calendar...
let new_calendar_url: Url = EXAMPLE_CREATED_CALENDAR_URL.parse().unwrap(); let new_calendar_url: Url = EXAMPLE_CREATED_CALENDAR_URL.parse().unwrap();
let new_calendar_name = "A brave new calendar".to_string(); let new_calendar_name = "A brave new calendar".to_string();
if let Err(_err) = provider.local_mut() if let Err(_err) = provider
.create_calendar(new_calendar_url.clone(), new_calendar_name.clone(), SupportedComponents::TODO, Some("#ff8000".parse().unwrap())) .local_mut()
.await { .create_calendar(
println!("Unable to add calendar, maybe it exists already. We're not adding it after all."); new_calendar_url.clone(),
new_calendar_name.clone(),
SupportedComponents::TODO,
Some("#ff8000".parse().unwrap()),
)
.await
{
println!("Unable to add calendar, maybe it exists already. We're not adding it after all.");
} }
// ...and add a task in it // ...and add a task in it
let new_name = "This is a new task in a new calendar"; let new_name = "This is a new task in a new calendar";
let new_task = Task::new(String::from(new_name), true, &new_calendar_url); let new_task = Task::new(String::from(new_name), true, &new_calendar_url);
provider.local().get_calendar(&new_calendar_url).await.unwrap() provider
.lock().unwrap().add_item(Item::Task(new_task)).await.unwrap(); .local()
.get_calendar(&new_calendar_url)
.await
.unwrap()
.lock()
.unwrap()
.add_item(Item::Task(new_task))
.await
.unwrap();
// Also create a task in a previously existing calendar // Also create a task in a previously existing calendar
let changed_calendar_url: Url = EXAMPLE_EXISTING_CALENDAR_URL.parse().unwrap(); let changed_calendar_url: Url = EXAMPLE_EXISTING_CALENDAR_URL.parse().unwrap();
let new_task_name = "This is a new task we're adding as an example, with ÜTF-8 characters"; let new_task_name = "This is a new task we're adding as an example, with ÜTF-8 characters";
let new_task = Task::new(String::from(new_task_name), false, &changed_calendar_url); let new_task = Task::new(String::from(new_task_name), false, &changed_calendar_url);
let new_url = new_task.url().clone(); let new_url = new_task.url().clone();
provider.local().get_calendar(&changed_calendar_url).await.unwrap() provider
.lock().unwrap().add_item(Item::Task(new_task)).await.unwrap(); .local()
.get_calendar(&changed_calendar_url)
.await
.unwrap()
.lock()
.unwrap()
.add_item(Item::Task(new_task))
.await
.unwrap();
if !(provider.sync().await) { if !(provider.sync().await) {
log::warn!("Sync did not complete, see the previous log lines for more info. You can safely start a new sync. The new task may not have been synced."); log::warn!("Sync did not complete, see the previous log lines for more info. You can safely start a new sync. The new task may not have been synced.");
} else { } else {
println!("Done syncing the new task '{}' and the new calendar '{}'", new_task_name, new_calendar_name); println!(
"Done syncing the new task '{}' and the new calendar '{}'",
new_task_name, new_calendar_name
);
} }
provider.local().save_to_folder().unwrap(); provider.local().save_to_folder().unwrap();
@ -82,14 +113,22 @@ async fn add_items_and_sync_again(provider: &mut CalDavProvider) {
async fn complete_item_and_sync_again( async fn complete_item_and_sync_again(
provider: &mut CalDavProvider, provider: &mut CalDavProvider,
changed_calendar_url: &Url, changed_calendar_url: &Url,
url_to_complete: &Url) url_to_complete: &Url,
{ ) {
println!("\nNow, we'll mark this last task as completed, and run the sync again."); println!("\nNow, we'll mark this last task as completed, and run the sync again.");
pause(); pause();
let completion_status = CompletionStatus::Completed(Some(Utc::now())); let completion_status = CompletionStatus::Completed(Some(Utc::now()));
provider.local().get_calendar(changed_calendar_url).await.unwrap() provider
.lock().unwrap().get_item_by_url_mut(url_to_complete).await.unwrap() .local()
.get_calendar(changed_calendar_url)
.await
.unwrap()
.lock()
.unwrap()
.get_item_by_url_mut(url_to_complete)
.await
.unwrap()
.unwrap_task_mut() .unwrap_task_mut()
.set_completion_status(completion_status); .set_completion_status(completion_status);
@ -106,15 +145,22 @@ async fn complete_item_and_sync_again(
async fn remove_items_and_sync_again( async fn remove_items_and_sync_again(
provider: &mut CalDavProvider, provider: &mut CalDavProvider,
changed_calendar_url: &Url, changed_calendar_url: &Url,
id_to_remove: &Url) id_to_remove: &Url,
{ ) {
println!("\nNow, we'll delete this last task, and run the sync again."); println!("\nNow, we'll delete this last task, and run the sync again.");
pause(); pause();
// Remove the task we had created // Remove the task we had created
provider.local().get_calendar(changed_calendar_url).await.unwrap() provider
.lock().unwrap() .local()
.mark_for_deletion(id_to_remove).await.unwrap(); .get_calendar(changed_calendar_url)
.await
.unwrap()
.lock()
.unwrap()
.mark_for_deletion(id_to_remove)
.await
.unwrap();
if !(provider.sync().await) { if !(provider.sync().await) {
log::warn!("Sync did not complete, see the previous log lines for more info. You can safely start a new sync. The new task may not have been synced."); log::warn!("Sync did not complete, see the previous log lines for more info. You can safely start a new sync. The new task may not have been synced.");

@ -1,21 +1,20 @@
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::error::Error; use std::error::Error;
use serde::{Deserialize, Serialize};
use async_trait::async_trait; use async_trait::async_trait;
use csscolorparser::Color; use csscolorparser::Color;
use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use crate::calendar::SupportedComponents;
use crate::item::SyncStatus; use crate::item::SyncStatus;
use crate::traits::{BaseCalendar, CompleteCalendar}; use crate::traits::{BaseCalendar, CompleteCalendar};
use crate::calendar::SupportedComponents;
use crate::Item; use crate::Item;
#[cfg(feature = "local_calendar_mocks_remote_calendars")]
use std::sync::{Arc, Mutex};
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
use crate::mock_behaviour::MockBehaviour; use crate::mock_behaviour::MockBehaviour;
#[cfg(feature = "local_calendar_mocks_remote_calendars")]
use std::sync::{Arc, Mutex};
/// A calendar used by the [`cache`](crate::cache) module /// A calendar used by the [`cache`](crate::cache) module
/// ///
@ -41,11 +40,12 @@ impl CachedCalendar {
self.mock_behaviour = mock_behaviour; self.mock_behaviour = mock_behaviour;
} }
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
fn add_item_maybe_mocked(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> { fn add_item_maybe_mocked(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> {
if self.mock_behaviour.is_some() { if self.mock_behaviour.is_some() {
self.mock_behaviour.as_ref().map_or(Ok(()), |b| b.lock().unwrap().can_add_item())?; self.mock_behaviour
.as_ref()
.map_or(Ok(()), |b| b.lock().unwrap().can_add_item())?;
self.add_or_update_item_force_synced(item) self.add_or_update_item_force_synced(item)
} else { } else {
self.regular_add_or_update_item(item) self.regular_add_or_update_item(item)
@ -55,7 +55,9 @@ impl CachedCalendar {
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
fn update_item_maybe_mocked(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> { fn update_item_maybe_mocked(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> {
if self.mock_behaviour.is_some() { if self.mock_behaviour.is_some() {
self.mock_behaviour.as_ref().map_or(Ok(()), |b| b.lock().unwrap().can_update_item())?; self.mock_behaviour
.as_ref()
.map_or(Ok(()), |b| b.lock().unwrap().can_update_item())?;
self.add_or_update_item_force_synced(item) self.add_or_update_item_force_synced(item)
} else { } else {
self.regular_add_or_update_item(item) self.regular_add_or_update_item(item)
@ -72,7 +74,10 @@ impl CachedCalendar {
/// Add or update an item, but force a "synced" SyncStatus. This is the normal behaviour that would happen on a server /// Add or update an item, but force a "synced" SyncStatus. This is the normal behaviour that would happen on a server
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
fn add_or_update_item_force_synced(&mut self, mut item: Item) -> Result<SyncStatus, Box<dyn Error>> { fn add_or_update_item_force_synced(
&mut self,
mut item: Item,
) -> Result<SyncStatus, Box<dyn Error>> {
log::debug!("Adding or updating an item, but forces a synced SyncStatus"); log::debug!("Adding or updating an item, but forces a synced SyncStatus");
match item.sync_status() { match item.sync_status() {
SyncStatus::Synced(_) => (), SyncStatus::Synced(_) => (),
@ -85,21 +90,23 @@ impl CachedCalendar {
/// Some kind of equality check /// Some kind of equality check
#[cfg(any(test, feature = "integration_tests"))] #[cfg(any(test, feature = "integration_tests"))]
pub async fn has_same_observable_content_as(&self, other: &CachedCalendar) -> Result<bool, Box<dyn Error>> { pub async fn has_same_observable_content_as(
&self,
other: &CachedCalendar,
) -> Result<bool, Box<dyn Error>> {
if self.name != other.name if self.name != other.name
|| self.url != other.url || self.url != other.url
|| self.supported_components != other.supported_components || self.supported_components != other.supported_components
|| self.color != other.color || self.color != other.color
{ {
log::debug!("Calendar properties mismatch"); log::debug!("Calendar properties mismatch");
return Ok(false); return Ok(false);
} }
let items_l = self.get_items().await?; let items_l = self.get_items().await?;
let items_r = other.get_items().await?; let items_r = other.get_items().await?;
if crate::utils::keys_are_the_same(&items_l, &items_r) == false { if !crate::utils::keys_are_the_same(&items_l, &items_r) {
log::debug!("Different keys for items"); log::debug!("Different keys for items");
return Ok(false); return Ok(false);
} }
@ -108,7 +115,7 @@ impl CachedCalendar {
Some(c) => c, Some(c) => c,
None => return Err("should not happen, we've just tested keys are the same".into()), None => return Err("should not happen, we've just tested keys are the same".into()),
}; };
if item_l.has_same_observable_content_as(&item_r) == false { if !item_l.has_same_observable_content_as(item_r) {
log::debug!("Different items for URL {}:", url_l); log::debug!("Different items for URL {}:", url_l);
log::debug!("{:#?}", item_l); log::debug!("{:#?}", item_l);
log::debug!("{:#?}", item_r); log::debug!("{:#?}", item_r);
@ -121,26 +128,25 @@ impl CachedCalendar {
/// The non-async version of [`Self::get_item_urls`] /// The non-async version of [`Self::get_item_urls`]
pub fn get_item_urls_sync(&self) -> Result<HashSet<Url>, Box<dyn Error>> { pub fn get_item_urls_sync(&self) -> Result<HashSet<Url>, Box<dyn Error>> {
Ok(self.items.iter() Ok(self.items.keys().cloned().collect())
.map(|(url, _)| url.clone())
.collect()
)
} }
/// The non-async version of [`Self::get_items`] /// The non-async version of [`Self::get_items`]
pub fn get_items_sync(&self) -> Result<HashMap<Url, &Item>, Box<dyn Error>> { pub fn get_items_sync(&self) -> Result<HashMap<Url, &Item>, Box<dyn Error>> {
Ok(self.items.iter() Ok(self
.items
.iter()
.map(|(url, item)| (url.clone(), item)) .map(|(url, item)| (url.clone(), item))
.collect() .collect())
)
} }
/// The non-async version of [`Self::get_items_mut`] /// The non-async version of [`Self::get_items_mut`]
pub fn get_items_mut_sync(&mut self) -> Result<HashMap<Url, &mut Item>, Box<dyn Error>> { pub fn get_items_mut_sync(&mut self) -> Result<HashMap<Url, &mut Item>, Box<dyn Error>> {
Ok(self.items.iter_mut() Ok(self
.items
.iter_mut()
.map(|(url, item)| (url.clone(), item)) .map(|(url, item)| (url.clone(), item))
.collect() .collect())
)
} }
/// The non-async version of [`Self::get_item_by_url`] /// The non-async version of [`Self::get_item_by_url`]
@ -167,8 +173,12 @@ impl CachedCalendar {
/// The non-async version of [`Self::update_item`] /// The non-async version of [`Self::update_item`]
pub fn update_item_sync(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> { pub fn update_item_sync(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> {
if self.items.contains_key(item.url()) == false { if !self.items.contains_key(item.url()) {
return Err(format!("Item {:?} cannot be updated, it does not already exist", item.url()).into()); return Err(format!(
"Item {:?} cannot be updated, it does not already exist",
item.url()
)
.into());
} }
#[cfg(not(feature = "local_calendar_mocks_remote_calendars"))] #[cfg(not(feature = "local_calendar_mocks_remote_calendars"))]
return self.regular_add_or_update_item(item); return self.regular_add_or_update_item(item);
@ -185,20 +195,20 @@ impl CachedCalendar {
match item.sync_status() { match item.sync_status() {
SyncStatus::Synced(prev_ss) => { SyncStatus::Synced(prev_ss) => {
let prev_ss = prev_ss.clone(); let prev_ss = prev_ss.clone();
item.set_sync_status( SyncStatus::LocallyDeleted(prev_ss)); item.set_sync_status(SyncStatus::LocallyDeleted(prev_ss));
}, }
SyncStatus::LocallyModified(prev_ss) => { SyncStatus::LocallyModified(prev_ss) => {
let prev_ss = prev_ss.clone(); let prev_ss = prev_ss.clone();
item.set_sync_status( SyncStatus::LocallyDeleted(prev_ss)); item.set_sync_status(SyncStatus::LocallyDeleted(prev_ss));
}, }
SyncStatus::LocallyDeleted(prev_ss) => { SyncStatus::LocallyDeleted(prev_ss) => {
let prev_ss = prev_ss.clone(); let prev_ss = prev_ss.clone();
item.set_sync_status( SyncStatus::LocallyDeleted(prev_ss)); item.set_sync_status(SyncStatus::LocallyDeleted(prev_ss));
}, }
SyncStatus::NotSynced => { SyncStatus::NotSynced => {
// This was never synced to the server, we can safely delete it as soon as now // This was never synced to the server, we can safely delete it as soon as now
self.items.remove(item_url); self.items.remove(item_url);
}, }
}; };
Ok(()) Ok(())
} }
@ -209,13 +219,11 @@ impl CachedCalendar {
pub fn immediately_delete_item_sync(&mut self, item_url: &Url) -> Result<(), Box<dyn Error>> { pub fn immediately_delete_item_sync(&mut self, item_url: &Url) -> Result<(), Box<dyn Error>> {
match self.items.remove(item_url) { match self.items.remove(item_url) {
None => Err(format!("Item {} is absent from this calendar", item_url).into()), None => Err(format!("Item {} is absent from this calendar", item_url).into()),
Some(_) => Ok(()) Some(_) => Ok(()),
} }
} }
} }
#[async_trait] #[async_trait]
impl BaseCalendar for CachedCalendar { impl BaseCalendar for CachedCalendar {
fn name(&self) -> &str { fn name(&self) -> &str {
@ -245,9 +253,17 @@ impl BaseCalendar for CachedCalendar {
#[async_trait] #[async_trait]
impl CompleteCalendar for CachedCalendar { impl CompleteCalendar for CachedCalendar {
fn new(name: String, url: Url, supported_components: SupportedComponents, color: Option<Color>) -> Self { fn new(
name: String,
url: Url,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Self {
Self { Self {
name, url, supported_components, color, name,
url,
supported_components,
color,
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
mock_behaviour: None, mock_behaviour: None,
items: HashMap::new(), items: HashMap::new(),
@ -283,25 +299,33 @@ impl CompleteCalendar for CachedCalendar {
} }
} }
// This class can be used to mock a remote calendar for integration tests // This class can be used to mock a remote calendar for integration tests
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
use crate::{item::VersionTag, use crate::{item::VersionTag, resource::Resource, traits::DavCalendar};
traits::DavCalendar,
resource::Resource};
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
#[async_trait] #[async_trait]
impl DavCalendar for CachedCalendar { impl DavCalendar for CachedCalendar {
fn new(name: String, resource: Resource, supported_components: SupportedComponents, color: Option<Color>) -> Self { fn new(
crate::traits::CompleteCalendar::new(name, resource.url().clone(), supported_components, color) name: String,
resource: Resource,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Self {
crate::traits::CompleteCalendar::new(
name,
resource.url().clone(),
supported_components,
color,
)
} }
async fn get_item_version_tags(&self) -> Result<HashMap<Url, VersionTag>, Box<dyn Error>> { async fn get_item_version_tags(&self) -> Result<HashMap<Url, VersionTag>, Box<dyn Error>> {
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
self.mock_behaviour.as_ref().map_or(Ok(()), |b| b.lock().unwrap().can_get_item_version_tags())?; self.mock_behaviour
.as_ref()
.map_or(Ok(()), |b| b.lock().unwrap().can_get_item_version_tags())?;
use crate::item::SyncStatus; use crate::item::SyncStatus;
@ -311,7 +335,10 @@ impl DavCalendar for CachedCalendar {
let vt = match item.sync_status() { let vt = match item.sync_status() {
SyncStatus::Synced(vt) => vt.clone(), SyncStatus::Synced(vt) => vt.clone(),
_ => { _ => {
panic!("Mock calendars must contain only SyncStatus::Synced. Got {:?}", item); panic!(
"Mock calendars must contain only SyncStatus::Synced. Got {:?}",
item
);
} }
}; };
result.insert(url.clone(), vt); result.insert(url.clone(), vt);
@ -322,7 +349,9 @@ impl DavCalendar for CachedCalendar {
async fn get_item_by_url(&self, url: &Url) -> Result<Option<Item>, Box<dyn Error>> { async fn get_item_by_url(&self, url: &Url) -> Result<Option<Item>, Box<dyn Error>> {
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
self.mock_behaviour.as_ref().map_or(Ok(()), |b| b.lock().unwrap().can_get_item_by_url())?; self.mock_behaviour
.as_ref()
.map_or(Ok(()), |b| b.lock().unwrap().can_get_item_by_url())?;
Ok(self.items.get(url).cloned()) Ok(self.items.get(url).cloned())
} }
@ -337,7 +366,9 @@ impl DavCalendar for CachedCalendar {
async fn delete_item(&mut self, item_url: &Url) -> Result<(), Box<dyn Error>> { async fn delete_item(&mut self, item_url: &Url) -> Result<(), Box<dyn Error>> {
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
self.mock_behaviour.as_ref().map_or(Ok(()), |b| b.lock().unwrap().can_delete_item())?; self.mock_behaviour
.as_ref()
.map_or(Ok(()), |b| b.lock().unwrap().can_delete_item())?;
self.immediately_delete_item(item_url).await self.immediately_delete_item(item_url).await
} }

@ -1,7 +1,7 @@
//! Calendar events (iCal `VEVENT` items) //! Calendar events (iCal `VEVENT` items)
use serde::{Deserialize, Serialize};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use crate::item::SyncStatus; use crate::item::SyncStatus;
@ -56,3 +56,9 @@ impl Event {
unimplemented!(); unimplemented!();
} }
} }
impl Default for Event {
fn default() -> Self {
Self::new()
}
}

@ -2,36 +2,41 @@
use std::error::Error; use std::error::Error;
use ical::parser::ical::component::{IcalCalendar, IcalEvent, IcalTodo};
use chrono::{DateTime, TimeZone, Utc}; use chrono::{DateTime, TimeZone, Utc};
use ical::parser::ical::component::{IcalCalendar, IcalEvent, IcalTodo};
use url::Url; use url::Url;
use crate::Item;
use crate::item::SyncStatus; use crate::item::SyncStatus;
use crate::Task;
use crate::task::CompletionStatus; use crate::task::CompletionStatus;
use crate::Event; use crate::Event;
use crate::Item;
use crate::Task;
/// Parse an iCal file into the internal representation [`crate::Item`] /// Parse an iCal file into the internal representation [`crate::Item`]
pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<Item, Box<dyn Error>> { pub fn parse(
content: &str,
item_url: Url,
sync_status: SyncStatus,
) -> Result<Item, Box<dyn Error>> {
let mut reader = ical::IcalParser::new(content.as_bytes()); let mut reader = ical::IcalParser::new(content.as_bytes());
let parsed_item = match reader.next() { let parsed_item = match reader.next() {
None => return Err(format!("Invalid iCal data to parse for item {}", item_url).into()), None => return Err(format!("Invalid iCal data to parse for item {}", item_url).into()),
Some(item) => match item { Some(item) => match item {
Err(err) => return Err(format!("Unable to parse iCal data for item {}: {}", item_url, err).into()), Err(err) => {
return Err(
format!("Unable to parse iCal data for item {}: {}", item_url, err).into(),
)
}
Ok(item) => item, Ok(item) => item,
} },
}; };
let ical_prod_id = extract_ical_prod_id(&parsed_item) let ical_prod_id = extract_ical_prod_id(&parsed_item)
.map(|s| s.to_string()) .map(|s| s.to_string())
.unwrap_or_else(|| super::default_prod_id()); .unwrap_or_else(super::default_prod_id);
let item = match assert_single_type(&parsed_item)? { let item = match assert_single_type(&parsed_item)? {
CurrentType::Event(_) => { CurrentType::Event(_) => Item::Event(Event::new()),
Item::Event(Event::new())
},
CurrentType::Todo(todo) => { CurrentType::Todo(todo) => {
let mut name = None; let mut name = None;
@ -44,8 +49,8 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<It
for prop in &todo.properties { for prop in &todo.properties {
match prop.name.as_str() { match prop.name.as_str() {
"SUMMARY" => { name = prop.value.clone() }, "SUMMARY" => name = prop.value.clone(),
"UID" => { uid = prop.value.clone() }, "UID" => uid = prop.value.clone(),
"DTSTAMP" => { "DTSTAMP" => {
// The property can be specified once, but is not mandatory // The property can be specified once, but is not mandatory
// "This property specifies the date and time that the information associated with // "This property specifies the date and time that the information associated with
@ -53,7 +58,7 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<It
// "In the case of an iCalendar object that doesn't specify a "METHOD" // "In the case of an iCalendar object that doesn't specify a "METHOD"
// property [e.g.: VTODO and VEVENT], this property is equivalent to the "LAST-MODIFIED" property". // property [e.g.: VTODO and VEVENT], this property is equivalent to the "LAST-MODIFIED" property".
last_modified = parse_date_time_from_property(&prop.value); last_modified = parse_date_time_from_property(&prop.value);
}, }
"LAST-MODIFIED" => { "LAST-MODIFIED" => {
// The property can be specified once, but is not mandatory // The property can be specified once, but is not mandatory
// "This property specifies the date and time that the information associated with // "This property specifies the date and time that the information associated with
@ -66,11 +71,11 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<It
// "This property defines the date and time that a to-do was // "This property defines the date and time that a to-do was
// actually completed." // actually completed."
completion_date = parse_date_time_from_property(&prop.value) completion_date = parse_date_time_from_property(&prop.value)
}, }
"CREATED" => { "CREATED" => {
// The property can be specified once, but is not mandatory // The property can be specified once, but is not mandatory
creation_date = parse_date_time_from_property(&prop.value) creation_date = parse_date_time_from_property(&prop.value)
}, }
"STATUS" => { "STATUS" => {
// Possible values: // Possible values:
// "NEEDS-ACTION" ;Indicates to-do needs action. // "NEEDS-ACTION" ;Indicates to-do needs action.
@ -97,7 +102,13 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<It
}; };
let last_modified = match last_modified { let last_modified = match last_modified {
Some(dt) => dt, Some(dt) => dt,
None => return Err(format!("Missing DTSTAMP for item {}, but this is required by RFC5545", item_url).into()), None => {
return Err(format!(
"Missing DTSTAMP for item {}, but this is required by RFC5545",
item_url
)
.into())
}
}; };
let completion_status = match completed { let completion_status = match completed {
false => { false => {
@ -105,15 +116,24 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<It
log::warn!("Task {:?} has an inconsistent content: its STATUS is not completed, yet it has a COMPLETED timestamp at {:?}", uid, completion_date); log::warn!("Task {:?} has an inconsistent content: its STATUS is not completed, yet it has a COMPLETED timestamp at {:?}", uid, completion_date);
} }
CompletionStatus::Uncompleted CompletionStatus::Uncompleted
}, }
true => CompletionStatus::Completed(completion_date), true => CompletionStatus::Completed(completion_date),
}; };
Item::Task(Task::new_with_parameters(name, uid, item_url, completion_status, sync_status, creation_date, last_modified, ical_prod_id, extra_parameters)) Item::Task(Task::new_with_parameters(
}, name,
uid,
item_url,
completion_status,
sync_status,
creation_date,
last_modified,
ical_prod_id,
extra_parameters,
))
}
}; };
// What to do with multiple items? // What to do with multiple items?
if reader.next().map(|r| r.is_ok()) == Some(true) { if reader.next().map(|r| r.is_ok()) == Some(true) {
return Err("Parsing multiple items are not supported".into()); return Err("Parsing multiple items are not supported".into());
@ -123,33 +143,30 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<It
} }
fn parse_date_time(dt: &str) -> Result<DateTime<Utc>, chrono::format::ParseError> { fn parse_date_time(dt: &str) -> Result<DateTime<Utc>, chrono::format::ParseError> {
Utc.datetime_from_str(dt, "%Y%m%dT%H%M%SZ") Utc.datetime_from_str(dt, "%Y%m%dT%H%M%SZ")
.or_else(|_err| Utc.datetime_from_str(dt, "%Y%m%dT%H%M%S") ) .or_else(|_err| Utc.datetime_from_str(dt, "%Y%m%dT%H%M%S"))
} }
fn parse_date_time_from_property(value: &Option<String>) -> Option<DateTime<Utc>> { fn parse_date_time_from_property(value: &Option<String>) -> Option<DateTime<Utc>> {
value.as_ref() value.as_ref().and_then(|s| {
.and_then(|s| { parse_date_time(s)
parse_date_time(s)
.map_err(|err| { .map_err(|err| {
log::warn!("Invalid timestamp: {}", s); log::warn!("Invalid timestamp: {}", s);
err err
}) })
.ok() .ok()
}) })
} }
fn extract_ical_prod_id(item: &IcalCalendar) -> Option<&str> { fn extract_ical_prod_id(item: &IcalCalendar) -> Option<&str> {
for prop in &item.properties { for prop in &item.properties {
if &prop.name == "PRODID" { if &prop.name == "PRODID" {
return prop.value.as_ref().map(|s| s.as_str()) return prop.value.as_deref();
} }
} }
None None
} }
enum CurrentType<'a> { enum CurrentType<'a> {
Event(&'a IcalEvent), Event(&'a IcalEvent),
Todo(&'a IcalTodo), Todo(&'a IcalTodo),
@ -176,10 +193,9 @@ fn assert_single_type<'a>(item: &'a IcalCalendar) -> Result<CurrentType<'a>, Box
} }
} }
return Err("Only a single TODO or a single EVENT is supported".into()); Err("Only a single TODO or a single EVENT is supported".into())
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
const EXAMPLE_ICAL: &str = r#"BEGIN:VCALENDAR const EXAMPLE_ICAL: &str = r#"BEGIN:VCALENDAR
@ -195,7 +211,7 @@ END:VTODO
END:VCALENDAR END:VCALENDAR
"#; "#;
const EXAMPLE_ICAL_COMPLETED: &str = r#"BEGIN:VCALENDAR const EXAMPLE_ICAL_COMPLETED: &str = r#"BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//Nextcloud Tasks v0.13.6 PRODID:-//Nextcloud Tasks v0.13.6
BEGIN:VTODO BEGIN:VTODO
@ -211,7 +227,7 @@ END:VTODO
END:VCALENDAR END:VCALENDAR
"#; "#;
const EXAMPLE_ICAL_COMPLETED_WITHOUT_A_COMPLETION_DATE: &str = r#"BEGIN:VCALENDAR const EXAMPLE_ICAL_COMPLETED_WITHOUT_A_COMPLETION_DATE: &str = r#"BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
PRODID:-//Nextcloud Tasks v0.13.6 PRODID:-//Nextcloud Tasks v0.13.6
BEGIN:VTODO BEGIN:VTODO
@ -261,11 +277,17 @@ END:VCALENDAR
assert_eq!(task.name(), "Do not forget to do this"); assert_eq!(task.name(), "Do not forget to do this");
assert_eq!(task.url(), &item_url); assert_eq!(task.url(), &item_url);
assert_eq!(task.uid(), "0633de27-8c32-42be-bcb8-63bc879c6185@some-domain.com"); assert_eq!(
assert_eq!(task.completed(), false); task.uid(),
"0633de27-8c32-42be-bcb8-63bc879c6185@some-domain.com"
);
assert!(!task.completed());
assert_eq!(task.completion_status(), &CompletionStatus::Uncompleted); assert_eq!(task.completion_status(), &CompletionStatus::Uncompleted);
assert_eq!(task.sync_status(), &sync_status); assert_eq!(task.sync_status(), &sync_status);
assert_eq!(task.last_modified(), &Utc.ymd(2021, 03, 21).and_hms(0, 16, 0)); assert_eq!(
task.last_modified(),
&Utc.ymd(2021, 03, 21).and_hms(0, 16, 0)
);
} }
#[test] #[test]
@ -274,11 +296,19 @@ END:VCALENDAR
let sync_status = SyncStatus::Synced(version_tag); let sync_status = SyncStatus::Synced(version_tag);
let item_url: Url = "http://some.id/for/testing".parse().unwrap(); let item_url: Url = "http://some.id/for/testing".parse().unwrap();
let item = parse(EXAMPLE_ICAL_COMPLETED, item_url.clone(), sync_status.clone()).unwrap(); let item = parse(
EXAMPLE_ICAL_COMPLETED,
item_url.clone(),
sync_status.clone(),
)
.unwrap();
let task = item.unwrap_task(); let task = item.unwrap_task();
assert_eq!(task.completed(), true); assert!(task.completed());
assert_eq!(task.completion_status(), &CompletionStatus::Completed(Some(Utc.ymd(2021, 04, 02).and_hms(8, 15, 57)))); assert_eq!(
task.completion_status(),
&CompletionStatus::Completed(Some(Utc.ymd(2021, 04, 02).and_hms(8, 15, 57)))
);
} }
#[test] #[test]
@ -287,10 +317,15 @@ END:VCALENDAR
let sync_status = SyncStatus::Synced(version_tag); let sync_status = SyncStatus::Synced(version_tag);
let item_url: Url = "http://some.id/for/testing".parse().unwrap(); let item_url: Url = "http://some.id/for/testing".parse().unwrap();
let item = parse(EXAMPLE_ICAL_COMPLETED_WITHOUT_A_COMPLETION_DATE, item_url.clone(), sync_status.clone()).unwrap(); let item = parse(
EXAMPLE_ICAL_COMPLETED_WITHOUT_A_COMPLETION_DATE,
item_url.clone(),
sync_status.clone(),
)
.unwrap();
let task = item.unwrap_task(); let task = item.unwrap_task();
assert_eq!(task.completed(), true); assert!(task.completed());
assert_eq!(task.completion_status(), &CompletionStatus::Completed(None)); assert_eq!(task.completion_status(), &CompletionStatus::Completed(None));
} }

@ -1,10 +1,9 @@
//! CalDAV items (todo, events, journals...) //! CalDAV items (todo, events, journals...)
// TODO: move Event and Task to nest them in crate::items::calendar::Calendar? // TODO: move Event and Task to nest them in crate::items::calendar::Calendar?
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use chrono::{DateTime, Utc};
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub enum Item { pub enum Item {
@ -21,7 +20,7 @@ macro_rules! synthetise_common_getter {
Item::Task(t) => t.$property_name(), Item::Task(t) => t.$property_name(),
} }
} }
} };
} }
impl Item { impl Item {
@ -41,17 +40,11 @@ impl Item {
} }
pub fn is_event(&self) -> bool { pub fn is_event(&self) -> bool {
match &self { matches!(self, Item::Event(_))
Item::Event(_) => true,
_ => false,
}
} }
pub fn is_task(&self) -> bool { pub fn is_task(&self) -> bool {
match &self { matches!(self, Item::Task(_))
Item::Task(_) => true,
_ => false,
}
} }
/// Returns a mutable reference to the inner Task /// Returns a mutable reference to the inner Task
@ -80,19 +73,16 @@ impl Item {
pub fn has_same_observable_content_as(&self, other: &Item) -> bool { pub fn has_same_observable_content_as(&self, other: &Item) -> bool {
match (self, other) { match (self, other) {
(Item::Event(s), Item::Event(o)) => s.has_same_observable_content_as(o), (Item::Event(s), Item::Event(o)) => s.has_same_observable_content_as(o),
(Item::Task(s), Item::Task(o)) => s.has_same_observable_content_as(o), (Item::Task(s), Item::Task(o)) => s.has_same_observable_content_as(o),
_ => false, _ => false,
} }
} }
} }
/// A VersionTag is basically a CalDAV `ctag` or `etag`. Whenever it changes, this means the data has changed. /// A VersionTag is basically a CalDAV `ctag` or `etag`. Whenever it changes, this means the data has changed.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VersionTag { pub struct VersionTag {
tag: String tag: String,
} }
impl From<String> for VersionTag { impl From<String> for VersionTag {
@ -115,8 +105,6 @@ impl VersionTag {
} }
} }
/// Describes whether this item has been synced already, or modified since the last time it was synced /// Describes whether this item has been synced already, or modified since the last time it was synced
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SyncStatus { pub enum SyncStatus {

@ -61,7 +61,9 @@ impl MockBehaviour {
} }
pub fn can_get_calendars(&mut self) -> Result<(), Box<dyn Error>> { pub fn can_get_calendars(&mut self) -> Result<(), Box<dyn Error>> {
if self.is_suspended { return Ok(()) } if self.is_suspended {
return Ok(());
}
decrement(&mut self.get_calendars_behaviour, "get_calendars") decrement(&mut self.get_calendars_behaviour, "get_calendars")
} }
// pub fn can_get_calendar(&mut self) -> Result<(), Box<dyn Error>> { // pub fn can_get_calendar(&mut self) -> Result<(), Box<dyn Error>> {
@ -69,50 +71,66 @@ impl MockBehaviour {
// decrement(&mut self.get_calendar_behaviour, "get_calendar") // decrement(&mut self.get_calendar_behaviour, "get_calendar")
// } // }
pub fn can_create_calendar(&mut self) -> Result<(), Box<dyn Error>> { pub fn can_create_calendar(&mut self) -> Result<(), Box<dyn Error>> {
if self.is_suspended { return Ok(()) } if self.is_suspended {
return Ok(());
}
decrement(&mut self.create_calendar_behaviour, "create_calendar") decrement(&mut self.create_calendar_behaviour, "create_calendar")
} }
pub fn can_add_item(&mut self) -> Result<(), Box<dyn Error>> { pub fn can_add_item(&mut self) -> Result<(), Box<dyn Error>> {
if self.is_suspended { return Ok(()) } if self.is_suspended {
return Ok(());
}
decrement(&mut self.add_item_behaviour, "add_item") decrement(&mut self.add_item_behaviour, "add_item")
} }
pub fn can_update_item(&mut self) -> Result<(), Box<dyn Error>> { pub fn can_update_item(&mut self) -> Result<(), Box<dyn Error>> {
if self.is_suspended { return Ok(()) } if self.is_suspended {
return Ok(());
}
decrement(&mut self.update_item_behaviour, "update_item") decrement(&mut self.update_item_behaviour, "update_item")
} }
pub fn can_get_item_version_tags(&mut self) -> Result<(), Box<dyn Error>> { pub fn can_get_item_version_tags(&mut self) -> Result<(), Box<dyn Error>> {
if self.is_suspended { return Ok(()) } if self.is_suspended {
decrement(&mut self.get_item_version_tags_behaviour, "get_item_version_tags") return Ok(());
}
decrement(
&mut self.get_item_version_tags_behaviour,
"get_item_version_tags",
)
} }
pub fn can_get_item_by_url(&mut self) -> Result<(), Box<dyn Error>> { pub fn can_get_item_by_url(&mut self) -> Result<(), Box<dyn Error>> {
if self.is_suspended { return Ok(()) } if self.is_suspended {
return Ok(());
}
decrement(&mut self.get_item_by_url_behaviour, "get_item_by_url") decrement(&mut self.get_item_by_url_behaviour, "get_item_by_url")
} }
pub fn can_delete_item(&mut self) -> Result<(), Box<dyn Error>> { pub fn can_delete_item(&mut self) -> Result<(), Box<dyn Error>> {
if self.is_suspended { return Ok(()) } if self.is_suspended {
return Ok(());
}
decrement(&mut self.delete_item_behaviour, "delete_item") decrement(&mut self.delete_item_behaviour, "delete_item")
} }
} }
/// Return Ok(()) in case the value is `(1+, _)` or `(_, 0)`, or return Err and decrement otherwise /// Return Ok(()) in case the value is `(1+, _)` or `(_, 0)`, or return Err and decrement otherwise
fn decrement(value: &mut (u32, u32), descr: &str) -> Result<(), Box<dyn Error>> { fn decrement(value: &mut (u32, u32), descr: &str) -> Result<(), Box<dyn Error>> {
let remaining_successes = value.0; let remaining_successes = value.0;
let remaining_failures = value.1; let remaining_failures = value.1;
if remaining_successes > 0 { if remaining_successes > 0 {
value.0 = value.0 - 1; value.0 -= 1;
log::debug!("Mock behaviour: allowing a {} ({:?})", descr, value); log::debug!("Mock behaviour: allowing a {} ({:?})", descr, value);
Ok(()) Ok(())
} else if remaining_failures > 0 {
value.1 -= 1;
log::debug!("Mock behaviour: failing a {} ({:?})", descr, value);
Err(format!(
"Mocked behaviour requires this {} to fail this time. ({:?})",
descr, value
)
.into())
} else { } else {
if remaining_failures > 0 { log::debug!("Mock behaviour: allowing a {} ({:?})", descr, value);
value.1 = value.1 - 1; Ok(())
log::debug!("Mock behaviour: failing a {} ({:?})", descr, value);
Err(format!("Mocked behaviour requires this {} to fail this time. ({:?})", descr, value).into())
} else {
log::debug!("Mock behaviour: allowing a {} ({:?})", descr, value);
Ok(())
}
} }
} }
@ -140,9 +158,9 @@ mod test {
assert!(now.can_get_calendars().is_ok()); assert!(now.can_get_calendars().is_ok());
assert!(now.can_create_calendar().is_ok()); assert!(now.can_create_calendar().is_ok());
let mut custom = MockBehaviour{ let mut custom = MockBehaviour {
get_calendars_behaviour: (0,1), get_calendars_behaviour: (0, 1),
create_calendar_behaviour: (1,3), create_calendar_behaviour: (1, 3),
..MockBehaviour::default() ..MockBehaviour::default()
}; };
assert!(custom.can_get_calendars().is_err()); assert!(custom.can_get_calendars().is_err());

@ -2,18 +2,18 @@
//! //!
//! It is also responsible for syncing them together //! It is also responsible for syncing them together
use std::error::Error;
use std::collections::HashSet; use std::collections::HashSet;
use std::error::Error;
use std::fmt::{Display, Formatter};
use std::marker::PhantomData; use std::marker::PhantomData;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::fmt::{Display, Formatter};
use url::Url;
use itertools::Itertools; use itertools::Itertools;
use url::Url;
use crate::traits::{BaseCalendar, CalDavSource, DavCalendar};
use crate::traits::CompleteCalendar;
use crate::item::SyncStatus; use crate::item::SyncStatus;
use crate::traits::CompleteCalendar;
use crate::traits::{BaseCalendar, CalDavSource, DavCalendar};
pub mod sync_progress; pub mod sync_progress;
use sync_progress::SyncProgress; use sync_progress::SyncProgress;
@ -42,7 +42,6 @@ impl Display for BatchDownloadType {
} }
} }
/// A data source that combines two `CalDavSource`s, which is able to sync both sources. /// A data source that combines two `CalDavSource`s, which is able to sync both sources.
/// ///
/// Usually, you will only need to use a provider between a server and a local cache, that is to say a [`CalDavProvider`](crate::CalDavProvider), i.e. a `Provider<Cache, CachedCalendar, Client, RemoteCalendar>`. \ /// Usually, you will only need to use a provider between a server and a local cache, that is to say a [`CalDavProvider`](crate::CalDavProvider), i.e. a `Provider<Cache, CachedCalendar, Client, RemoteCalendar>`. \
@ -76,21 +75,30 @@ where
/// `remote` is usually a [`Client`](crate::client::Client), `local` is usually a [`Cache`](crate::cache::Cache). /// `remote` is usually a [`Client`](crate::client::Client), `local` is usually a [`Cache`](crate::cache::Cache).
/// However, both can be interchangeable. The only difference is that `remote` always wins in case of a sync conflict /// However, both can be interchangeable. The only difference is that `remote` always wins in case of a sync conflict
pub fn new(remote: R, local: L) -> Self { pub fn new(remote: R, local: L) -> Self {
Self { remote, local, Self {
phantom_t: PhantomData, phantom_u: PhantomData, remote,
local,
phantom_t: PhantomData,
phantom_u: PhantomData,
} }
} }
/// Returns the data source described as `local` /// Returns the data source described as `local`
pub fn local(&self) -> &L { &self.local } pub fn local(&self) -> &L {
&self.local
}
/// Returns the data source described as `local` /// Returns the data source described as `local`
pub fn local_mut(&mut self) -> &mut L { &mut self.local } pub fn local_mut(&mut self) -> &mut L {
&mut self.local
}
/// Returns the data source described as `remote`. /// Returns the data source described as `remote`.
/// ///
/// Apart from tests, there are very few (if any) reasons to access `remote` directly. /// Apart from tests, there are very few (if any) reasons to access `remote` directly.
/// Usually, you should rather use the `local` source, which (usually) is a much faster local cache. /// Usually, you should rather use the `local` source, which (usually) is a much faster local cache.
/// To be sure `local` accurately mirrors the `remote` source, you can run [`Provider::sync`] /// To be sure `local` accurately mirrors the `remote` source, you can run [`Provider::sync`]
pub fn remote(&self) -> &R { &self.remote } pub fn remote(&self) -> &R {
&self.remote
}
/// Performs a synchronisation between `local` and `remote`, and provide feeedback to the user about the progress. /// Performs a synchronisation between `local` and `remote`, and provide feeedback to the user about the progress.
/// ///
@ -117,7 +125,9 @@ where
if let Err(err) = self.run_sync_inner(progress).await { if let Err(err) = self.run_sync_inner(progress).await {
progress.error(&format!("Sync terminated because of an error: {}", err)); progress.error(&format!("Sync terminated because of an error: {}", err));
} }
progress.feedback(SyncEvent::Finished{ success: progress.is_success() }); progress.feedback(SyncEvent::Finished {
success: progress.is_success(),
});
progress.is_success() progress.is_success()
} }
@ -130,16 +140,22 @@ where
// Sync every remote calendar // Sync every remote calendar
let cals_remote = self.remote.get_calendars().await?; let cals_remote = self.remote.get_calendars().await?;
for (cal_url, cal_remote) in cals_remote { for (cal_url, cal_remote) in cals_remote {
let counterpart = match self.get_or_insert_local_counterpart_calendar(&cal_url, cal_remote.clone()).await { let counterpart = match self
.get_or_insert_local_counterpart_calendar(&cal_url, cal_remote.clone())
.await
{
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to get or insert local counterpart calendar for {} ({}). Skipping this time", cal_url, err)); progress.warn(&format!("Unable to get or insert local counterpart calendar for {} ({}). Skipping this time", cal_url, err));
continue; continue;
}, }
Ok(arc) => arc, Ok(arc) => arc,
}; };
if let Err(err) = Self::sync_calendar_pair(counterpart, cal_remote, progress).await { if let Err(err) = Self::sync_calendar_pair(counterpart, cal_remote, progress).await {
progress.warn(&format!("Unable to sync calendar {}: {}, skipping this time.", cal_url, err)); progress.warn(&format!(
"Unable to sync calendar {}: {}, skipping this time.",
cal_url, err
));
continue; continue;
} }
handled_calendars.insert(cal_url); handled_calendars.insert(cal_url);
@ -152,16 +168,22 @@ where
continue; continue;
} }
let counterpart = match self.get_or_insert_remote_counterpart_calendar(&cal_url, cal_local.clone()).await { let counterpart = match self
.get_or_insert_remote_counterpart_calendar(&cal_url, cal_local.clone())
.await
{
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to get or insert remote counterpart calendar for {} ({}). Skipping this time", cal_url, err)); progress.warn(&format!("Unable to get or insert remote counterpart calendar for {} ({}). Skipping this time", cal_url, err));
continue; continue;
}, }
Ok(arc) => arc, Ok(arc) => arc,
}; };
if let Err(err) = Self::sync_calendar_pair(cal_local, counterpart, progress).await { if let Err(err) = Self::sync_calendar_pair(cal_local, counterpart, progress).await {
progress.warn(&format!("Unable to sync calendar {}: {}, skipping this time.", cal_url, err)); progress.warn(&format!(
"Unable to sync calendar {}: {}, skipping this time.",
cal_url, err
));
continue; continue;
} }
} }
@ -171,26 +193,36 @@ where
Ok(()) Ok(())
} }
async fn get_or_insert_local_counterpart_calendar(
async fn get_or_insert_local_counterpart_calendar(&mut self, cal_url: &Url, needle: Arc<Mutex<U>>) -> Result<Arc<Mutex<T>>, Box<dyn Error>> { &mut self,
cal_url: &Url,
needle: Arc<Mutex<U>>,
) -> Result<Arc<Mutex<T>>, Box<dyn Error>> {
get_or_insert_counterpart_calendar("local", &mut self.local, cal_url, needle).await get_or_insert_counterpart_calendar("local", &mut self.local, cal_url, needle).await
} }
async fn get_or_insert_remote_counterpart_calendar(&mut self, cal_url: &Url, needle: Arc<Mutex<T>>) -> Result<Arc<Mutex<U>>, Box<dyn Error>> { async fn get_or_insert_remote_counterpart_calendar(
&mut self,
cal_url: &Url,
needle: Arc<Mutex<T>>,
) -> Result<Arc<Mutex<U>>, Box<dyn Error>> {
get_or_insert_counterpart_calendar("remote", &mut self.remote, cal_url, needle).await get_or_insert_counterpart_calendar("remote", &mut self.remote, cal_url, needle).await
} }
async fn sync_calendar_pair(
async fn sync_calendar_pair(cal_local: Arc<Mutex<T>>, cal_remote: Arc<Mutex<U>>, progress: &mut SyncProgress) -> Result<(), Box<dyn Error>> { cal_local: Arc<Mutex<T>>,
cal_remote: Arc<Mutex<U>>,
progress: &mut SyncProgress,
) -> Result<(), Box<dyn Error>> {
let mut cal_remote = cal_remote.lock().unwrap(); let mut cal_remote = cal_remote.lock().unwrap();
let mut cal_local = cal_local.lock().unwrap(); let mut cal_local = cal_local.lock().unwrap();
let cal_name = cal_local.name().to_string(); let cal_name = cal_local.name().to_string();
progress.info(&format!("Syncing calendar {}", cal_name)); progress.info(&format!("Syncing calendar {}", cal_name));
progress.reset_counter(); progress.reset_counter();
progress.feedback(SyncEvent::InProgress{ progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
items_done_already: 0, items_done_already: 0,
details: "started".to_string() details: "started".to_string(),
}); });
// Step 1 - find the differences // Step 1 - find the differences
@ -203,7 +235,7 @@ where
let mut remote_additions = HashSet::new(); let mut remote_additions = HashSet::new();
let remote_items = cal_remote.get_item_version_tags().await?; let remote_items = cal_remote.get_item_version_tags().await?;
progress.feedback(SyncEvent::InProgress{ progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
items_done_already: 0, items_done_already: 0,
details: format!("{} remote items", remote_items.len()), details: format!("{} remote items", remote_items.len()),
@ -217,24 +249,27 @@ where
// This was created on the remote // This was created on the remote
progress.debug(&format!("* {} is a remote addition", url)); progress.debug(&format!("* {} is a remote addition", url));
remote_additions.insert(url); remote_additions.insert(url);
}, }
Some(local_item) => { Some(local_item) => {
if local_items_to_handle.remove(&url) == false { if !local_items_to_handle.remove(&url) {
progress.error(&format!("Inconsistent state: missing task {} from the local tasks", url)); progress.error(&format!(
"Inconsistent state: missing task {} from the local tasks",
url
));
} }
match local_item.sync_status() { match local_item.sync_status() {
SyncStatus::NotSynced => { SyncStatus::NotSynced => {
progress.error(&format!("URL reuse between remote and local sources ({}). Ignoring this item in the sync", url)); progress.error(&format!("URL reuse between remote and local sources ({}). Ignoring this item in the sync", url));
continue; continue;
}, }
SyncStatus::Synced(local_tag) => { SyncStatus::Synced(local_tag) => {
if &remote_tag != local_tag { if &remote_tag != local_tag {
// This has been modified on the remote // This has been modified on the remote
progress.debug(&format!("* {} is a remote change", url)); progress.debug(&format!("* {} is a remote change", url));
remote_changes.insert(url); remote_changes.insert(url);
} }
}, }
SyncStatus::LocallyModified(local_tag) => { SyncStatus::LocallyModified(local_tag) => {
if &remote_tag == local_tag { if &remote_tag == local_tag {
// This has been changed locally // This has been changed locally
@ -242,10 +277,11 @@ where
local_changes.insert(url); local_changes.insert(url);
} else { } else {
progress.info(&format!("Conflict: task {} has been modified in both sources. Using the remote version.", url)); progress.info(&format!("Conflict: task {} has been modified in both sources. Using the remote version.", url));
progress.debug(&format!("* {} is considered a remote change", url)); progress
.debug(&format!("* {} is considered a remote change", url));
remote_changes.insert(url); remote_changes.insert(url);
} }
}, }
SyncStatus::LocallyDeleted(local_tag) => { SyncStatus::LocallyDeleted(local_tag) => {
if &remote_tag == local_tag { if &remote_tag == local_tag {
// This has been locally deleted // This has been locally deleted
@ -253,10 +289,11 @@ where
local_del.insert(url); local_del.insert(url);
} else { } else {
progress.info(&format!("Conflict: task {} has been locally deleted and remotely modified. Reverting to the remote version.", url)); progress.info(&format!("Conflict: task {} has been locally deleted and remotely modified. Reverting to the remote version.", url));
progress.debug(&format!("* {} is a considered a remote change", url)); progress
.debug(&format!("* {} is a considered a remote change", url));
remote_changes.insert(url); remote_changes.insert(url);
} }
}, }
} }
} }
} }
@ -267,9 +304,12 @@ where
progress.trace(&format!("##### Considering local item {}...", url)); progress.trace(&format!("##### Considering local item {}...", url));
let local_item = match cal_local.get_item_by_url(&url).await { let local_item = match cal_local.get_item_by_url(&url).await {
None => { None => {
progress.error(&format!("Inconsistent state: missing task {} from the local tasks", url)); progress.error(&format!(
"Inconsistent state: missing task {} from the local tasks",
url
));
continue; continue;
}, }
Some(item) => item, Some(item) => item,
}; };
@ -278,31 +318,33 @@ where
// This item has been removed from the remote // This item has been removed from the remote
progress.debug(&format!("# {} is a deletion from the server", url)); progress.debug(&format!("# {} is a deletion from the server", url));
remote_del.insert(url); remote_del.insert(url);
}, }
SyncStatus::NotSynced => { SyncStatus::NotSynced => {
// This item has just been locally created // This item has just been locally created
progress.debug(&format!("# {} has been locally created", url)); progress.debug(&format!("# {} has been locally created", url));
local_additions.insert(url); local_additions.insert(url);
}, }
SyncStatus::LocallyDeleted(_) => { SyncStatus::LocallyDeleted(_) => {
// This item has been deleted from both sources // This item has been deleted from both sources
progress.debug(&format!("# {} has been deleted from both sources", url)); progress.debug(&format!("# {} has been deleted from both sources", url));
remote_del.insert(url); remote_del.insert(url);
}, }
SyncStatus::LocallyModified(_) => { SyncStatus::LocallyModified(_) => {
progress.info(&format!("Conflict: item {} has been deleted from the server and locally modified. Deleting the local copy", url)); progress.info(&format!("Conflict: item {} has been deleted from the server and locally modified. Deleting the local copy", url));
remote_del.insert(url); remote_del.insert(url);
}, }
} }
} }
// Step 2 - commit changes // Step 2 - commit changes
progress.trace("Committing changes..."); progress.trace("Committing changes...");
for url_del in local_del { for url_del in local_del {
progress.debug(&format!("> Pushing local deletion {} to the server", url_del)); progress.debug(&format!(
"> Pushing local deletion {} to the server",
url_del
));
progress.increment_counter(1); progress.increment_counter(1);
progress.feedback(SyncEvent::InProgress{ progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
items_done_already: progress.counter(), items_done_already: progress.counter(),
details: Self::item_name(&cal_local, &url_del).await, details: Self::item_name(&cal_local, &url_del).await,
@ -310,21 +352,27 @@ where
match cal_remote.delete_item(&url_del).await { match cal_remote.delete_item(&url_del).await {
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to delete remote item {}: {}", url_del, err)); progress.warn(&format!(
}, "Unable to delete remote item {}: {}",
url_del, err
));
}
Ok(()) => { Ok(()) => {
// Change the local copy from "marked to deletion" to "actually deleted" // Change the local copy from "marked to deletion" to "actually deleted"
if let Err(err) = cal_local.immediately_delete_item(&url_del).await { if let Err(err) = cal_local.immediately_delete_item(&url_del).await {
progress.error(&format!("Unable to permanently delete local item {}: {}", url_del, err)); progress.error(&format!(
"Unable to permanently delete local item {}: {}",
url_del, err
));
} }
}, }
} }
} }
for url_del in remote_del { for url_del in remote_del {
progress.debug(&format!("> Applying remote deletion {} locally", url_del)); progress.debug(&format!("> Applying remote deletion {} locally", url_del));
progress.increment_counter(1); progress.increment_counter(1);
progress.feedback(SyncEvent::InProgress{ progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
items_done_already: progress.counter(), items_done_already: progress.counter(),
details: Self::item_name(&cal_local, &url_del).await, details: Self::item_name(&cal_local, &url_del).await,
@ -339,22 +387,26 @@ where
&mut *cal_local, &mut *cal_local,
&mut *cal_remote, &mut *cal_remote,
progress, progress,
&cal_name &cal_name,
).await; )
.await;
Self::apply_remote_changes( Self::apply_remote_changes(
remote_changes, remote_changes,
&mut *cal_local, &mut *cal_local,
&mut *cal_remote, &mut *cal_remote,
progress, progress,
&cal_name &cal_name,
).await; )
.await;
for url_add in local_additions { for url_add in local_additions {
progress.debug(&format!("> Pushing local addition {} to the server", url_add)); progress.debug(&format!(
"> Pushing local addition {} to the server",
url_add
));
progress.increment_counter(1); progress.increment_counter(1);
progress.feedback(SyncEvent::InProgress{ progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
items_done_already: progress.counter(), items_done_already: progress.counter(),
details: Self::item_name(&cal_local, &url_add).await, details: Self::item_name(&cal_local, &url_add).await,
@ -363,23 +415,29 @@ where
None => { None => {
progress.error(&format!("Inconsistency: created item {} has been marked for upload but is locally missing", url_add)); progress.error(&format!("Inconsistency: created item {} has been marked for upload but is locally missing", url_add));
continue; continue;
}, }
Some(item) => { Some(item) => {
match cal_remote.add_item(item.clone()).await { match cal_remote.add_item(item.clone()).await {
Err(err) => progress.error(&format!("Unable to add item {} to remote calendar: {}", url_add, err)), Err(err) => progress.error(&format!(
"Unable to add item {} to remote calendar: {}",
url_add, err
)),
Ok(new_ss) => { Ok(new_ss) => {
// Update local sync status // Update local sync status
item.set_sync_status(new_ss); item.set_sync_status(new_ss);
}, }
} }
}, }
}; };
} }
for url_change in local_changes { for url_change in local_changes {
progress.debug(&format!("> Pushing local change {} to the server", url_change)); progress.debug(&format!(
"> Pushing local change {} to the server",
url_change
));
progress.increment_counter(1); progress.increment_counter(1);
progress.feedback(SyncEvent::InProgress{ progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
items_done_already: progress.counter(), items_done_already: progress.counter(),
details: Self::item_name(&cal_local, &url_change).await, details: Self::item_name(&cal_local, &url_change).await,
@ -388,14 +446,17 @@ where
None => { None => {
progress.error(&format!("Inconsistency: modified item {} has been marked for upload but is locally missing", url_change)); progress.error(&format!("Inconsistency: modified item {} has been marked for upload but is locally missing", url_change));
continue; continue;
}, }
Some(item) => { Some(item) => {
match cal_remote.update_item(item.clone()).await { match cal_remote.update_item(item.clone()).await {
Err(err) => progress.error(&format!("Unable to update item {} in remote calendar: {}", url_change, err)), Err(err) => progress.error(&format!(
"Unable to update item {} in remote calendar: {}",
url_change, err
)),
Ok(new_ss) => { Ok(new_ss) => {
// Update local sync status // Update local sync status
item.set_sync_status(new_ss); item.set_sync_status(new_ss);
}, }
}; };
} }
}; };
@ -404,9 +465,12 @@ where
Ok(()) Ok(())
} }
async fn item_name(cal: &T, url: &Url) -> String { async fn item_name(cal: &T, url: &Url) -> String {
cal.get_item_by_url(url).await.map(|item| item.name()).unwrap_or_default().to_string() cal.get_item_by_url(url)
.await
.map(|item| item.name())
.unwrap_or_default()
.to_string()
} }
async fn apply_remote_additions( async fn apply_remote_additions(
@ -414,10 +478,22 @@ where
cal_local: &mut T, cal_local: &mut T,
cal_remote: &mut U, cal_remote: &mut U,
progress: &mut SyncProgress, progress: &mut SyncProgress,
cal_name: &str cal_name: &str,
) { ) {
for batch in remote_additions.drain().chunks(DOWNLOAD_BATCH_SIZE).into_iter() { for batch in remote_additions
Self::fetch_batch_and_apply(BatchDownloadType::RemoteAdditions, batch, cal_local, cal_remote, progress, cal_name).await; .drain()
.chunks(DOWNLOAD_BATCH_SIZE)
.into_iter()
{
Self::fetch_batch_and_apply(
BatchDownloadType::RemoteAdditions,
batch,
cal_local,
cal_remote,
progress,
cal_name,
)
.await;
} }
} }
@ -426,10 +502,22 @@ where
cal_local: &mut T, cal_local: &mut T,
cal_remote: &mut U, cal_remote: &mut U,
progress: &mut SyncProgress, progress: &mut SyncProgress,
cal_name: &str cal_name: &str,
) { ) {
for batch in remote_changes.drain().chunks(DOWNLOAD_BATCH_SIZE).into_iter() { for batch in remote_changes
Self::fetch_batch_and_apply(BatchDownloadType::RemoteChanges, batch, cal_local, cal_remote, progress, cal_name).await; .drain()
.chunks(DOWNLOAD_BATCH_SIZE)
.into_iter()
{
Self::fetch_batch_and_apply(
BatchDownloadType::RemoteChanges,
batch,
cal_local,
cal_remote,
progress,
cal_name,
)
.await;
} }
} }
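Both helpers above batch the remote URLs with itertools' `chunks` adapter before fetching them. Below is a minimal, standalone sketch of that batching pattern, not taken from this crate: the batch size 4 stands in for the real `DOWNLOAD_BATCH_SIZE`, and plain strings stand in for `Url`s.

use itertools::Itertools;

fn main() {
    let urls: Vec<String> = (0..10)
        .map(|i| format!("https://example.org/item/{}.ics", i))
        .collect();
    // `chunks` is lazy; iterate over a reference to the IntoChunks adapter it returns.
    let batches = urls.into_iter().chunks(4);
    for batch in &batches {
        let batch: Vec<String> = batch.collect();
        println!("would fetch a batch of {} items: {:?}", batch.len(), batch);
    }
}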
@ -439,60 +527,74 @@ where
cal_local: &mut T, cal_local: &mut T,
cal_remote: &mut U, cal_remote: &mut U,
progress: &mut SyncProgress, progress: &mut SyncProgress,
cal_name: &str cal_name: &str,
) { ) {
progress.debug(&format!("> Applying a batch of {} locally", batch_type) /* too bad Chunks does not implement ExactSizeIterator, that could provide useful debug info. See https://github.com/rust-itertools/itertools/issues/171 */); progress.debug(&format!("> Applying a batch of {} locally", batch_type) /* too bad Chunks does not implement ExactSizeIterator, that could provide useful debug info. See https://github.com/rust-itertools/itertools/issues/171 */);
let list_of_additions: Vec<Url> = remote_additions.map(|url| url.clone()).collect(); let list_of_additions: Vec<Url> = remote_additions.map(|url| url.clone()).collect();
match cal_remote.get_items_by_url(&list_of_additions).await { match cal_remote.get_items_by_url(&list_of_additions).await {
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to get the batch of {} {:?}: {}. Skipping them.", batch_type, list_of_additions, err)); progress.warn(&format!(
}, "Unable to get the batch of {} {:?}: {}. Skipping them.",
batch_type, list_of_additions, err
));
}
Ok(items) => { Ok(items) => {
for item in items { for item in items {
match item { match item {
None => { None => {
progress.error(&format!("Inconsistency: an item from the batch has vanished from the remote end")); progress.error("Inconsistency: an item from the batch has vanished from the remote end");
continue; continue;
}, }
Some(new_item) => { Some(new_item) => {
let local_update_result = match batch_type { let local_update_result = match batch_type {
BatchDownloadType::RemoteAdditions => cal_local.add_item(new_item.clone()).await, BatchDownloadType::RemoteAdditions => {
BatchDownloadType::RemoteChanges => cal_local.update_item(new_item.clone()).await, cal_local.add_item(new_item.clone()).await
}
BatchDownloadType::RemoteChanges => {
cal_local.update_item(new_item.clone()).await
}
}; };
if let Err(err) = local_update_result { if let Err(err) = local_update_result {
progress.error(&format!("Not able to add item {} to local calendar: {}", new_item.url(), err)); progress.error(&format!(
"Not able to add item {} to local calendar: {}",
new_item.url(),
err
));
} }
}, }
} }
} }
// Notifying every item at the same time would not make sense. Let's notify only one of them // Notifying every item at the same time would not make sense. Let's notify only one of them
let one_item_name = match list_of_additions.get(0) { let one_item_name = match list_of_additions.first() {
Some(url) => Self::item_name(&cal_local, &url).await, Some(url) => Self::item_name(cal_local, url).await,
None => String::from("<unable to get the name of the first batched item>"), None => String::from("<unable to get the name of the first batched item>"),
}; };
progress.increment_counter(list_of_additions.len()); progress.increment_counter(list_of_additions.len());
progress.feedback(SyncEvent::InProgress{ progress.feedback(SyncEvent::InProgress {
calendar: cal_name.to_string(), calendar: cal_name.to_string(),
items_done_already: progress.counter(), items_done_already: progress.counter(),
details: one_item_name, details: one_item_name,
}); });
}, }
} }
} }
} }
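Two of the clippy cleanups in the hunk above are `get_first` (prefer `slice::first()` over `.get(0)`) and `useless_format` (pass a string literal directly when there is nothing to interpolate). A small illustrative sketch, with a made-up `log_first` helper rather than the crate's `SyncProgress`:

fn log_first(urls: &[String]) {
    // clippy::get_first: `.first()` reads better than `.get(0)`.
    match urls.first() {
        Some(url) => println!("first batched item: {}", url),
        // clippy::useless_format would fire on `format!("no items in this batch")`.
        None => println!("no items in this batch"),
    }
}

fn main() {
    log_first(&["https://example.org/a.ics".to_string()]);
    log_first(&[]);
}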
async fn get_or_insert_counterpart_calendar<H, N, I>(
async fn get_or_insert_counterpart_calendar<H, N, I>(haystack_descr: &str, haystack: &mut H, cal_url: &Url, needle: Arc<Mutex<N>>) haystack_descr: &str,
-> Result<Arc<Mutex<I>>, Box<dyn Error>> haystack: &mut H,
cal_url: &Url,
needle: Arc<Mutex<N>>,
) -> Result<Arc<Mutex<I>>, Box<dyn Error>>
where where
H: CalDavSource<I>, H: CalDavSource<I>,
I: BaseCalendar, I: BaseCalendar,
N: BaseCalendar, N: BaseCalendar,
{ {
loop { loop {
if let Some(cal) = haystack.get_calendar(&cal_url).await { if let Some(cal) = haystack.get_calendar(cal_url).await {
break Ok(cal); break Ok(cal);
} }
@ -502,14 +604,11 @@ where
let name = src.name().to_string(); let name = src.name().to_string();
let supported_comps = src.supported_components(); let supported_comps = src.supported_components();
let color = src.color(); let color = src.color();
if let Err(err) = haystack.create_calendar( if let Err(err) = haystack
cal_url.clone(), .create_calendar(cal_url.clone(), name, supported_comps, color.cloned())
name, .await
supported_comps, {
color.cloned(),
).await{
return Err(err); return Err(err);
} }
} }
} }


@ -10,17 +10,27 @@ pub struct Resource {
impl Resource { impl Resource {
pub fn new(url: Url, username: String, password: String) -> Self { pub fn new(url: Url, username: String, password: String) -> Self {
Self { url, username, password } Self {
url,
username,
password,
}
} }
pub fn url(&self) -> &Url { &self.url } pub fn url(&self) -> &Url {
pub fn username(&self) -> &String { &self.username } &self.url
pub fn password(&self) -> &String { &self.password } }
pub fn username(&self) -> &String {
&self.username
}
pub fn password(&self) -> &String {
&self.password
}
/// Build a new Resource by keeping the same credentials, scheme and server from `base` but changing the path part /// Build a new Resource by keeping the same credentials, scheme and server from `base` but changing the path part
pub fn combine(&self, new_path: &str) -> Resource { pub fn combine(&self, new_path: &str) -> Resource {
let mut built = (*self).clone(); let mut built = (*self).clone();
built.url.set_path(&new_path); built.url.set_path(new_path);
built built
} }
} }
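The `combine` method documented above keeps the scheme, host and credentials but swaps the path; the clippy fix in it is `needless_borrow` (`set_path(new_path)` instead of `set_path(&new_path)`, since `new_path` is already a `&str`). A hedged sketch of the underlying `url` call, with a made-up base URL:

use url::Url;

fn main() {
    // Stand-in for the URL stored inside a Resource.
    let mut base = Url::parse("https://dav.example.org/calendars/alice/").unwrap();
    let new_path = "/calendars/alice/tasks/";
    // clippy::needless_borrow: `set_path` already takes a `&str`, no extra `&` needed.
    base.set_path(new_path);
    println!("combined URL: {}", base);
}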


@ -1,20 +1,24 @@
//! To-do tasks (iCal `VTODO` item) //! To-do tasks (iCal `VTODO` item)
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use ical::property::Property; use ical::property::Property;
use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use uuid::Uuid;
use crate::item::SyncStatus; use crate::item::SyncStatus;
use crate::utils::random_url; use crate::utils::random_url;
/// RFC5545 defines the completion as several optional fields, yet some combinations make no sense. /**
/// This enum provides an API that forbids such impossible combinations. RFC5545 defines the completion as several optional fields, yet some combinations make no sense.
/// This enum provides an API that forbids such impossible combinations.
/// * `COMPLETED` is an optional timestamp that tells whether this task is completed
/// * `STATUS` is an optional field, that can be set to `NEEDS-ACTION`, `COMPLETED`, or others. * `COMPLETED` is an optional timestamp that tells whether this task is completed
/// Even though having a `COMPLETED` date but a `STATUS:NEEDS-ACTION` is theoretically possible, it obviously makes no sense. This API ensures this cannot happen * `STATUS` is an optional field, that can be set to `NEEDS-ACTION`, `COMPLETED`, or others.
Even though having a `COMPLETED` date but a `STATUS:NEEDS-ACTION` is theoretically possible, it obviously makes no sense. This API ensures this cannot happen
*/
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CompletionStatus { pub enum CompletionStatus {
Completed(Option<DateTime<Utc>>), Completed(Option<DateTime<Utc>>),
@ -22,10 +26,7 @@ pub enum CompletionStatus {
} }
impl CompletionStatus { impl CompletionStatus {
pub fn is_completed(&self) -> bool { pub fn is_completed(&self) -> bool {
match self { matches!(self, CompletionStatus::Completed(_))
CompletionStatus::Completed(_) => true,
_ => false,
}
} }
} }
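`is_completed` now uses the `matches!` macro, which is what clippy's `match_like_matches_macro` lint suggests when a `match` only maps one pattern to `true` and everything else to `false`. An equivalent standalone sketch with a simplified status enum (not the crate's real `CompletionStatus`):

#[derive(Debug)]
enum Status {
    Completed(Option<u32>),
    Uncompleted,
}

fn is_completed(status: &Status) -> bool {
    // Expands to the same two-arm `match` clippy flagged, in a single expression.
    matches!(status, Status::Completed(_))
}

fn main() {
    assert!(is_completed(&Status::Completed(Some(42))));
    assert!(!is_completed(&Status::Uncompleted));
    println!("matches! behaves like the original match");
}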
@ -53,7 +54,6 @@ pub struct Task {
/// The display name of the task /// The display name of the task
name: String, name: String,
/// The PRODID, as defined in iCal files /// The PRODID, as defined in iCal files
ical_prod_id: String, ical_prod_id: String,
@ -62,7 +62,6 @@ pub struct Task {
extra_parameters: Vec<Property>, extra_parameters: Vec<Property>,
} }
impl Task { impl Task {
/// Create a brand new Task that is not on a server yet. /// Create a brand new Task that is not on a server yet.
/// This will pick a new (random) task ID. /// This will pick a new (random) task ID.
@ -73,20 +72,37 @@ impl Task {
let new_creation_date = Some(Utc::now()); let new_creation_date = Some(Utc::now());
let new_last_modified = Utc::now(); let new_last_modified = Utc::now();
let new_completion_status = if completed { let new_completion_status = if completed {
CompletionStatus::Completed(Some(Utc::now())) CompletionStatus::Completed(Some(Utc::now()))
} else { CompletionStatus::Uncompleted }; } else {
CompletionStatus::Uncompleted
};
let ical_prod_id = crate::ical::default_prod_id(); let ical_prod_id = crate::ical::default_prod_id();
let extra_parameters = Vec::new(); let extra_parameters = Vec::new();
Self::new_with_parameters(name, new_uid, new_url, new_completion_status, new_sync_status, new_creation_date, new_last_modified, ical_prod_id, extra_parameters) Self::new_with_parameters(
name,
new_uid,
new_url,
new_completion_status,
new_sync_status,
new_creation_date,
new_last_modified,
ical_prod_id,
extra_parameters,
)
} }
/// Create a new Task instance, that may be synced on the server already /// Create a new Task instance, that may be synced on the server already
pub fn new_with_parameters(name: String, uid: String, new_url: Url, pub fn new_with_parameters(
completion_status: CompletionStatus, name: String,
sync_status: SyncStatus, creation_date: Option<DateTime<Utc>>, last_modified: DateTime<Utc>, uid: String,
ical_prod_id: String, extra_parameters: Vec<Property>, new_url: Url,
) -> Self completion_status: CompletionStatus,
{ sync_status: SyncStatus,
creation_date: Option<DateTime<Utc>>,
last_modified: DateTime<Utc>,
ical_prod_id: String,
extra_parameters: Vec<Property>,
) -> Self {
Self { Self {
url: new_url, url: new_url,
uid, uid,
@ -100,20 +116,40 @@ impl Task {
} }
} }
pub fn url(&self) -> &Url { &self.url } pub fn url(&self) -> &Url {
pub fn uid(&self) -> &str { &self.uid } &self.url
pub fn name(&self) -> &str { &self.name } }
pub fn completed(&self) -> bool { self.completion_status.is_completed() } pub fn uid(&self) -> &str {
pub fn ical_prod_id(&self) -> &str { &self.ical_prod_id } &self.uid
pub fn sync_status(&self) -> &SyncStatus { &self.sync_status } }
pub fn last_modified(&self) -> &DateTime<Utc> { &self.last_modified } pub fn name(&self) -> &str {
pub fn creation_date(&self) -> Option<&DateTime<Utc>> { self.creation_date.as_ref() } &self.name
pub fn completion_status(&self) -> &CompletionStatus { &self.completion_status } }
pub fn extra_parameters(&self) -> &[Property] { &self.extra_parameters } pub fn completed(&self) -> bool {
self.completion_status.is_completed()
}
pub fn ical_prod_id(&self) -> &str {
&self.ical_prod_id
}
pub fn sync_status(&self) -> &SyncStatus {
&self.sync_status
}
pub fn last_modified(&self) -> &DateTime<Utc> {
&self.last_modified
}
pub fn creation_date(&self) -> Option<&DateTime<Utc>> {
self.creation_date.as_ref()
}
pub fn completion_status(&self) -> &CompletionStatus {
&self.completion_status
}
pub fn extra_parameters(&self) -> &[Property] {
&self.extra_parameters
}
#[cfg(any(test, feature = "integration_tests"))] #[cfg(any(test, feature = "integration_tests"))]
pub fn has_same_observable_content_as(&self, other: &Task) -> bool { pub fn has_same_observable_content_as(&self, other: &Task) -> bool {
self.url == other.url self.url == other.url
&& self.uid == other.uid && self.uid == other.uid
&& self.name == other.name && self.name == other.name
// sync status must be the same variant, but we ignore its embedded version tag // sync status must be the same variant, but we ignore its embedded version tag
@ -129,15 +165,13 @@ impl Task {
fn update_sync_status(&mut self) { fn update_sync_status(&mut self) {
match &self.sync_status { match &self.sync_status {
SyncStatus::NotSynced => return, SyncStatus::NotSynced | SyncStatus::LocallyModified(_) => (),
SyncStatus::LocallyModified(_) => return,
SyncStatus::Synced(prev_vt) => { SyncStatus::Synced(prev_vt) => {
self.sync_status = SyncStatus::LocallyModified(prev_vt.clone()); self.sync_status = SyncStatus::LocallyModified(prev_vt.clone());
} }
SyncStatus::LocallyDeleted(_) => { SyncStatus::LocallyDeleted(_) => {
log::warn!("Trying to update an item that has previously been deleted. These changes will probably be ignored at next sync."); log::warn!("Trying to update an item that has previously been deleted. These changes will probably be ignored at next sync.");
return; }
},
} }
} }
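The change above merges the two arms that did nothing into a single `|` pattern and drops the redundant `return`s, the kind of rewrite clippy's `match_same_arms` and `needless_return` lints push towards. A reduced sketch with a simplified `SyncStatus` carrying a plain `u32` instead of the crate's version tag:

#[derive(Debug, Clone, Copy, PartialEq)]
enum SyncStatus {
    NotSynced,
    Synced(u32),
    LocallyModified(u32),
    LocallyDeleted(u32),
}

fn next_status(status: SyncStatus) -> SyncStatus {
    match status {
        // One arm for the variants that need no change, instead of two identical ones.
        SyncStatus::NotSynced | SyncStatus::LocallyModified(_) => status,
        SyncStatus::Synced(version) => SyncStatus::LocallyModified(version),
        SyncStatus::LocallyDeleted(_) => {
            eprintln!("updating an item that was already deleted locally");
            status
        }
    }
}

fn main() {
    assert_eq!(next_status(SyncStatus::Synced(3)), SyncStatus::LocallyModified(3));
    assert_eq!(next_status(SyncStatus::NotSynced), SyncStatus::NotSynced);
}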
@ -145,7 +179,6 @@ impl Task {
self.last_modified = Utc::now(); self.last_modified = Utc::now();
} }
/// Rename a task. /// Rename a task.
/// This updates its "last modified" field /// This updates its "last modified" field
pub fn set_name(&mut self, new_name: String) { pub fn set_name(&mut self, new_name: String) {
@ -169,7 +202,10 @@ impl Task {
} }
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
/// Set the completion status, but forces a "master" SyncStatus, just like CalDAV servers are always "masters" /// Set the completion status, but forces a "master" SyncStatus, just like CalDAV servers are always "masters"
pub fn mock_remote_calendar_set_completion_status(&mut self, new_completion_status: CompletionStatus) { pub fn mock_remote_calendar_set_completion_status(
&mut self,
new_completion_status: CompletionStatus,
) {
self.sync_status = SyncStatus::random_synced(); self.sync_status = SyncStatus::random_synced();
self.completion_status = new_completion_status; self.completion_status = new_completion_status;
} }


@ -1,17 +1,17 @@
//! Traits used by multiple structs in this crate //! Traits used by multiple structs in this crate
use std::error::Error;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::error::Error;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use async_trait::async_trait; use async_trait::async_trait;
use csscolorparser::Color; use csscolorparser::Color;
use url::Url; use url::Url;
use crate::item::SyncStatus;
use crate::item::Item;
use crate::item::VersionTag;
use crate::calendar::SupportedComponents; use crate::calendar::SupportedComponents;
use crate::item::Item;
use crate::item::SyncStatus;
use crate::item::VersionTag;
use crate::resource::Resource; use crate::resource::Resource;
/// This trait must be implemented by data sources (either local caches or remote CalDAV clients) /// This trait must be implemented by data sources (either local caches or remote CalDAV clients)
@ -25,8 +25,13 @@ pub trait CalDavSource<T: BaseCalendar> {
/// Returns the calendar matching the URL /// Returns the calendar matching the URL
async fn get_calendar(&self, url: &Url) -> Option<Arc<Mutex<T>>>; async fn get_calendar(&self, url: &Url) -> Option<Arc<Mutex<T>>>;
/// Create a calendar if it did not exist, and return it /// Create a calendar if it did not exist, and return it
async fn create_calendar(&mut self, url: Url, name: String, supported_components: SupportedComponents, color: Option<Color>) async fn create_calendar(
-> Result<Arc<Mutex<T>>, Box<dyn Error>>; &mut self,
url: Url,
name: String,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Result<Arc<Mutex<T>>, Box<dyn Error>>;
// Removing a calendar is not supported yet // Removing a calendar is not supported yet
} }
@ -59,23 +64,29 @@ pub trait BaseCalendar {
/// Returns whether this calDAV calendar supports to-do items /// Returns whether this calDAV calendar supports to-do items
fn supports_todo(&self) -> bool { fn supports_todo(&self) -> bool {
self.supported_components().contains(crate::calendar::SupportedComponents::TODO) self.supported_components()
.contains(crate::calendar::SupportedComponents::TODO)
} }
/// Returns whether this calDAV calendar supports calendar items /// Returns whether this calDAV calendar supports calendar items
fn supports_events(&self) -> bool { fn supports_events(&self) -> bool {
self.supported_components().contains(crate::calendar::SupportedComponents::EVENT) self.supported_components()
.contains(crate::calendar::SupportedComponents::EVENT)
} }
} }
/// Functions available for calendars that are backed by a CalDAV server /// Functions available for calendars that are backed by a CalDAV server
/// ///
/// Note that some concrete types (e.g. [`crate::calendar::cached_calendar::CachedCalendar`]) can also provide non-async versions of these functions /// Note that some concrete types (e.g. [`crate::calendar::cached_calendar::CachedCalendar`]) can also provide non-async versions of these functions
#[async_trait] #[async_trait]
pub trait DavCalendar : BaseCalendar { pub trait DavCalendar: BaseCalendar {
/// Create a new calendar /// Create a new calendar
fn new(name: String, resource: Resource, supported_components: SupportedComponents, color: Option<Color>) -> Self; fn new(
name: String,
resource: Resource,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Self;
/// Get the URLs and the version tags of every item in this calendar /// Get the URLs and the version tags of every item in this calendar
async fn get_item_version_tags(&self) -> Result<HashMap<Url, VersionTag>, Box<dyn Error>>; async fn get_item_version_tags(&self) -> Result<HashMap<Url, VersionTag>, Box<dyn Error>>;
@ -93,25 +104,27 @@ pub trait DavCalendar : BaseCalendar {
/// Get the URLs of all current items in this calendar /// Get the URLs of all current items in this calendar
async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>> { async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>> {
let items = self.get_item_version_tags().await?; let items = self.get_item_version_tags().await?;
Ok(items.iter() Ok(items.keys().cloned().collect())
.map(|(url, _tag)| url.clone())
.collect())
} }
// Note: the CalDAV protocol could also enable to do this: // Note: the CalDAV protocol could also enable to do this:
// fn get_current_version(&self) -> CTag // fn get_current_version(&self) -> CTag
} }
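The default `get_item_urls` above now collects `items.keys().cloned()` rather than iterating `(url, _tag)` pairs and mapping the value away, the shape clippy's `iter_kv_map` lint recommends. A standalone sketch with plain strings standing in for `Url` and `VersionTag`:

use std::collections::{HashMap, HashSet};

fn main() {
    let mut version_tags: HashMap<String, String> = HashMap::new();
    version_tags.insert("https://example.org/task-1.ics".to_string(), "etag-1".to_string());
    version_tags.insert("https://example.org/task-2.ics".to_string(), "etag-2".to_string());

    // Instead of `version_tags.iter().map(|(url, _tag)| url.clone()).collect()`.
    let urls: HashSet<String> = version_tags.keys().cloned().collect();
    assert_eq!(urls.len(), 2);
    println!("{} item URLs", urls.len());
}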
/// Functions available for calendars we have full knowledge of /// Functions available for calendars we have full knowledge of
/// ///
/// Usually, these are local calendars fully backed by a local folder /// Usually, these are local calendars fully backed by a local folder
/// ///
/// Note that some concrete types (e.g. [`crate::calendar::cached_calendar::CachedCalendar`]) can also provide non-async versions of these functions /// Note that some concrete types (e.g. [`crate::calendar::cached_calendar::CachedCalendar`]) can also provide non-async versions of these functions
#[async_trait] #[async_trait]
pub trait CompleteCalendar : BaseCalendar { pub trait CompleteCalendar: BaseCalendar {
/// Create a new calendar /// Create a new calendar
fn new(name: String, url: Url, supported_components: SupportedComponents, color: Option<Color>) -> Self; fn new(
name: String,
url: Url,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Self;
/// Get the URLs of all current items in this calendar /// Get the URLs of all current items in this calendar
async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>>; async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>>;


@ -1,17 +1,17 @@
//! Some utility functions //! Some utility functions
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::sync::{Arc, Mutex};
use std::hash::Hash; use std::hash::Hash;
use std::io::{stdin, stdout, Read, Write}; use std::io::{stdin, stdout, Read, Write};
use std::sync::{Arc, Mutex};
use minidom::Element; use minidom::Element;
use url::Url; use url::Url;
use crate::item::SyncStatus;
use crate::traits::CompleteCalendar; use crate::traits::CompleteCalendar;
use crate::traits::DavCalendar; use crate::traits::DavCalendar;
use crate::Item; use crate::Item;
use crate::item::SyncStatus;
/// Walks an XML tree and returns every element that has the given name /// Walks an XML tree and returns every element that has the given name
pub fn find_elems<S: AsRef<str>>(root: &Element, searched_name: S) -> Vec<&Element> { pub fn find_elems<S: AsRef<str>>(root: &Element, searched_name: S) -> Vec<&Element> {
@ -49,14 +49,10 @@ pub fn find_elem<S: AsRef<str>>(root: &Element, searched_name: S) -> Option<&Ele
None None
} }
pub fn print_xml(element: &Element) { pub fn print_xml(element: &Element) {
let mut writer = std::io::stdout(); let mut writer = std::io::stdout();
let mut xml_writer = minidom::quick_xml::Writer::new_with_indent( let mut xml_writer = minidom::quick_xml::Writer::new_with_indent(std::io::stdout(), 0x20, 4);
std::io::stdout(),
0x20, 4
);
let _ = element.to_writer(&mut xml_writer); let _ = element.to_writer(&mut xml_writer);
let _ = writer.write(&[0x0a]); let _ = writer.write(&[0x0a]);
} }
@ -74,7 +70,7 @@ where
for (_, item) in map { for (_, item) in map {
print_task(item); print_task(item);
} }
}, }
} }
} }
} }
@ -92,28 +88,24 @@ where
for (url, version_tag) in map { for (url, version_tag) in map {
println!(" * {} (version {:?})", url, version_tag); println!(" * {} (version {:?})", url, version_tag);
} }
}, }
} }
} }
} }
pub fn print_task(item: &Item) { pub fn print_task(item: &Item) {
match item { if let Item::Task(task) = item {
Item::Task(task) => { let completion = if task.completed() { "" } else { " " };
let completion = if task.completed() { "" } else { " " }; let sync = match task.sync_status() {
let sync = match task.sync_status() { SyncStatus::NotSynced => ".",
SyncStatus::NotSynced => ".", SyncStatus::Synced(_) => "=",
SyncStatus::Synced(_) => "=", SyncStatus::LocallyModified(_) => "~",
SyncStatus::LocallyModified(_) => "~", SyncStatus::LocallyDeleted(_) => "x",
SyncStatus::LocallyDeleted(_) => "x", };
}; println!(" {}{} {}\t{}", completion, sync, task.name(), task.url());
println!(" {}{} {}\t{}", completion, sync, task.name(), task.url());
},
_ => return,
} }
} }
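`print_task` now uses `if let` instead of a `match` whose only other arm was `_ => return`, as suggested by clippy's `single_match` lint. A reduced sketch with a stand-in `Item` enum rather than the crate's:

enum Item {
    Task(String),
    Event(String),
}

fn print_task(item: &Item) {
    // clippy::single_match: only one pattern matters, the other arm was `_ => return`.
    if let Item::Task(name) = item {
        println!("task: {}", name);
    }
}

fn main() {
    print_task(&Item::Task("water the plants".to_string()));
    print_task(&Item::Event("team meeting".to_string()));
}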
/// Compare keys of two hashmaps for equality /// Compare keys of two hashmaps for equality
pub fn keys_are_the_same<T, U, V>(left: &HashMap<T, U>, right: &HashMap<T, V>) -> bool pub fn keys_are_the_same<T, U, V>(left: &HashMap<T, U>, right: &HashMap<T, V>) -> bool
where where
@ -127,7 +119,7 @@ where
let keys_l: HashSet<T> = left.keys().cloned().collect(); let keys_l: HashSet<T> = left.keys().cloned().collect();
let keys_r: HashSet<T> = right.keys().cloned().collect(); let keys_r: HashSet<T> = right.keys().cloned().collect();
let result = keys_l == keys_r; let result = keys_l == keys_r;
if result == false { if !result {
log::debug!("Keys of a map mismatch"); log::debug!("Keys of a map mismatch");
for key in keys_l { for key in keys_l {
log::debug!(" left: {}", key); log::debug!(" left: {}", key);
@ -140,7 +132,6 @@ where
result result
} }
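`keys_are_the_same` now tests `if !result` instead of `if result == false`; clippy's `bool_comparison` lint flags comparisons against boolean literals (the same lint is behind the `provider.sync().await == true` cleanup further down in this commit). A tiny sketch with a made-up `keys_match` helper:

fn keys_match(left: &[&str], right: &[&str]) -> bool {
    let result = left == right;
    // clippy::bool_comparison: write `!result`, not `result == false`.
    if !result {
        eprintln!("key sets differ: {:?} vs {:?}", left, right);
    }
    result
}

fn main() {
    assert!(keys_match(&["a", "b"], &["a", "b"]));
    assert!(!keys_match(&["a"], &[]));
}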
/// Wait for the user to press enter /// Wait for the user to press enter
pub fn pause() { pub fn pause() {
let mut stdout = stdout(); let mut stdout = stdout();
@ -149,7 +140,6 @@ pub fn pause() {
stdin().read_exact(&mut [0]).unwrap(); stdin().read_exact(&mut [0]).unwrap();
} }
/// Generate a random URL with a given prefix /// Generate a random URL with a given prefix
pub fn random_url(parent_calendar: &Url) -> Url { pub fn random_url(parent_calendar: &Url) -> Url {
let random = uuid::Uuid::new_v4().to_hyphenated().to_string(); let random = uuid::Uuid::new_v4().to_hyphenated().to_string();

File diff suppressed because it is too large


@ -6,8 +6,6 @@ use std::sync::{Arc, Mutex};
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
use kitchen_fridge::mock_behaviour::MockBehaviour; use kitchen_fridge::mock_behaviour::MockBehaviour;
/// A test that simulates a regular synchronisation between a local cache and a server. /// A test that simulates a regular synchronisation between a local cache and a server.
/// Note that this uses a second cache to "mock" a server. /// Note that this uses a second cache to "mock" a server.
struct TestFlavour { struct TestFlavour {
@ -19,22 +17,54 @@ struct TestFlavour {
#[cfg(not(feature = "local_calendar_mocks_remote_calendars"))] #[cfg(not(feature = "local_calendar_mocks_remote_calendars"))]
impl TestFlavour { impl TestFlavour {
pub fn normal() -> Self { Self{} } pub fn normal() -> Self {
pub fn first_sync_to_local() -> Self { Self{} } Self {}
pub fn first_sync_to_server() -> Self { Self{} } }
pub fn transient_task() -> Self { Self{} } pub fn first_sync_to_local() -> Self {
pub fn normal_with_errors1() -> Self { Self{} } Self {}
pub fn normal_with_errors2() -> Self { Self{} } }
pub fn normal_with_errors3() -> Self { Self{} } pub fn first_sync_to_server() -> Self {
pub fn normal_with_errors4() -> Self { Self{} } Self {}
pub fn normal_with_errors5() -> Self { Self{} } }
pub fn normal_with_errors6() -> Self { Self{} } pub fn transient_task() -> Self {
pub fn normal_with_errors7() -> Self { Self{} } Self {}
pub fn normal_with_errors8() -> Self { Self{} } }
pub fn normal_with_errors9() -> Self { Self{} } pub fn normal_with_errors1() -> Self {
pub fn normal_with_errors10() -> Self { Self{} } Self {}
pub fn normal_with_errors11() -> Self { Self{} } }
pub fn normal_with_errors12() -> Self { Self{} } pub fn normal_with_errors2() -> Self {
Self {}
}
pub fn normal_with_errors3() -> Self {
Self {}
}
pub fn normal_with_errors4() -> Self {
Self {}
}
pub fn normal_with_errors5() -> Self {
Self {}
}
pub fn normal_with_errors6() -> Self {
Self {}
}
pub fn normal_with_errors7() -> Self {
Self {}
}
pub fn normal_with_errors8() -> Self {
Self {}
}
pub fn normal_with_errors9() -> Self {
Self {}
}
pub fn normal_with_errors10() -> Self {
Self {}
}
pub fn normal_with_errors11() -> Self {
Self {}
}
pub fn normal_with_errors12() -> Self {
Self {}
}
pub async fn run(&self, _max_attempts: u32) { pub async fn run(&self, _max_attempts: u32) {
panic!("WARNING: This test required the \"integration_tests\" Cargo feature"); panic!("WARNING: This test required the \"integration_tests\" Cargo feature");
@ -81,9 +111,9 @@ impl TestFlavour {
pub fn normal_with_errors2() -> Self { pub fn normal_with_errors2() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
get_calendars_behaviour: (0,1), get_calendars_behaviour: (0, 1),
create_calendar_behaviour: (2,2), create_calendar_behaviour: (2, 2),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -92,9 +122,9 @@ impl TestFlavour {
pub fn normal_with_errors3() -> Self { pub fn normal_with_errors3() -> Self {
Self { Self {
scenarii: scenarii::scenarii_first_sync_to_server(), scenarii: scenarii::scenarii_first_sync_to_server(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
get_calendars_behaviour: (1,6), get_calendars_behaviour: (1, 6),
create_calendar_behaviour: (0,1), create_calendar_behaviour: (0, 1),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -103,8 +133,8 @@ impl TestFlavour {
pub fn normal_with_errors4() -> Self { pub fn normal_with_errors4() -> Self {
Self { Self {
scenarii: scenarii::scenarii_first_sync_to_server(), scenarii: scenarii::scenarii_first_sync_to_server(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
add_item_behaviour: (1,3), add_item_behaviour: (1, 3),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -113,8 +143,8 @@ impl TestFlavour {
pub fn normal_with_errors5() -> Self { pub fn normal_with_errors5() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
get_item_version_tags_behaviour: (0,1), get_item_version_tags_behaviour: (0, 1),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -123,8 +153,8 @@ impl TestFlavour {
pub fn normal_with_errors6() -> Self { pub fn normal_with_errors6() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
get_item_by_url_behaviour: (3,2), get_item_by_url_behaviour: (3, 2),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -133,8 +163,8 @@ impl TestFlavour {
pub fn normal_with_errors7() -> Self { pub fn normal_with_errors7() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
delete_item_behaviour: (0,2), delete_item_behaviour: (0, 2),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -143,9 +173,9 @@ impl TestFlavour {
pub fn normal_with_errors8() -> Self { pub fn normal_with_errors8() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
add_item_behaviour: (2,3), add_item_behaviour: (2, 3),
get_item_by_url_behaviour: (1,12), get_item_by_url_behaviour: (1, 12),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -154,9 +184,9 @@ impl TestFlavour {
pub fn normal_with_errors9() -> Self { pub fn normal_with_errors9() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
get_calendars_behaviour: (0,8), get_calendars_behaviour: (0, 8),
delete_item_behaviour: (1,1), delete_item_behaviour: (1, 1),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -165,11 +195,11 @@ impl TestFlavour {
pub fn normal_with_errors10() -> Self { pub fn normal_with_errors10() -> Self {
Self { Self {
scenarii: scenarii::scenarii_first_sync_to_server(), scenarii: scenarii::scenarii_first_sync_to_server(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
get_calendars_behaviour: (0,8), get_calendars_behaviour: (0, 8),
delete_item_behaviour: (1,1), delete_item_behaviour: (1, 1),
create_calendar_behaviour: (1,4), create_calendar_behaviour: (1, 4),
get_item_version_tags_behaviour: (3,1), get_item_version_tags_behaviour: (3, 1),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -178,12 +208,12 @@ impl TestFlavour {
pub fn normal_with_errors11() -> Self { pub fn normal_with_errors11() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
get_calendars_behaviour: (0,8), get_calendars_behaviour: (0, 8),
delete_item_behaviour: (1,1), delete_item_behaviour: (1, 1),
create_calendar_behaviour: (1,4), create_calendar_behaviour: (1, 4),
get_item_version_tags_behaviour: (3,1), get_item_version_tags_behaviour: (3, 1),
get_item_by_url_behaviour: (0,41), get_item_by_url_behaviour: (0, 41),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
@ -192,26 +222,29 @@ impl TestFlavour {
pub fn normal_with_errors12() -> Self { pub fn normal_with_errors12() -> Self {
Self { Self {
scenarii: scenarii::scenarii_basic(), scenarii: scenarii::scenarii_basic(),
mock_behaviour: Arc::new(Mutex::new(MockBehaviour{ mock_behaviour: Arc::new(Mutex::new(MockBehaviour {
update_item_behaviour: (0,3), update_item_behaviour: (0, 3),
..MockBehaviour::default() ..MockBehaviour::default()
})), })),
} }
} }
pub async fn run(&self, max_attempts: u32) { pub async fn run(&self, max_attempts: u32) {
self.mock_behaviour.lock().unwrap().suspend(); self.mock_behaviour.lock().unwrap().suspend();
let mut provider = scenarii::populate_test_provider_before_sync(&self.scenarii, Arc::clone(&self.mock_behaviour)).await; let mut provider = scenarii::populate_test_provider_before_sync(
&self.scenarii,
Arc::clone(&self.mock_behaviour),
)
.await;
print_provider(&provider, "before sync").await; print_provider(&provider, "before sync").await;
self.mock_behaviour.lock().unwrap().resume(); self.mock_behaviour.lock().unwrap().resume();
for attempt in 0..max_attempts { for attempt in 0..max_attempts {
println!("\nSyncing...\n"); println!("\nSyncing...\n");
if provider.sync().await == true { if provider.sync().await {
println!("Sync complete after {} attempts (multiple attempts are due to forced errors in mocked behaviour)", attempt+1); println!("Sync complete after {} attempts (multiple attempts are due to forced errors in mocked behaviour)", attempt+1);
break break;
} }
} }
self.mock_behaviour.lock().unwrap().suspend(); self.mock_behaviour.lock().unwrap().suspend();
@ -219,136 +252,160 @@ impl TestFlavour {
print_provider(&provider, "after sync").await; print_provider(&provider, "after sync").await;
// Check the contents of both sources are the same after sync // Check the contents of both sources are the same after sync
assert!(provider.remote().has_same_observable_content_as(provider.local()).await.unwrap()); assert!(provider
.remote()
.has_same_observable_content_as(provider.local())
.await
.unwrap());
// But also explicitly check that every item is expected // But also explicitly check that every item is expected
let expected_provider = scenarii::populate_test_provider_after_sync(&self.scenarii, Arc::clone(&self.mock_behaviour)).await; let expected_provider = scenarii::populate_test_provider_after_sync(
&self.scenarii,
Arc::clone(&self.mock_behaviour),
)
.await;
assert!(provider.local() .has_same_observable_content_as(expected_provider.local() ).await.unwrap()); assert!(provider
assert!(provider.remote().has_same_observable_content_as(expected_provider.remote()).await.unwrap()); .local()
.has_same_observable_content_as(expected_provider.local())
.await
.unwrap());
assert!(provider
.remote()
.has_same_observable_content_as(expected_provider.remote())
.await
.unwrap());
// Perform a second sync, even if no change has happened, just to check // Perform a second sync, even if no change has happened, just to check
println!("Syncing again"); println!("Syncing again");
provider.sync().await; provider.sync().await;
assert!(provider.local() .has_same_observable_content_as(expected_provider.local() ).await.unwrap()); assert!(provider
assert!(provider.remote().has_same_observable_content_as(expected_provider.remote()).await.unwrap()); .local()
.has_same_observable_content_as(expected_provider.local())
.await
.unwrap());
assert!(provider
.remote()
.has_same_observable_content_as(expected_provider.remote())
.await
.unwrap());
} }
} }
async fn run_flavour(flavour: TestFlavour, max_attempts: u32) { async fn run_flavour(flavour: TestFlavour, max_attempts: u32) {
let _ = env_logger::builder().is_test(true).try_init(); let _ = env_logger::builder().is_test(true).try_init();
flavour.run(max_attempts).await; flavour.run(max_attempts).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_regular_sync() { async fn test_regular_sync() {
run_flavour(TestFlavour::normal(), 1).await; run_flavour(TestFlavour::normal(), 1).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_sync_empty_initial_local() { async fn test_sync_empty_initial_local() {
run_flavour(TestFlavour::first_sync_to_local(), 1).await; run_flavour(TestFlavour::first_sync_to_local(), 1).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_sync_empty_initial_server() { async fn test_sync_empty_initial_server() {
run_flavour(TestFlavour::first_sync_to_server(), 1).await; run_flavour(TestFlavour::first_sync_to_server(), 1).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_sync_transient_task() { async fn test_sync_transient_task() {
run_flavour(TestFlavour::transient_task(), 1).await; run_flavour(TestFlavour::transient_task(), 1).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync1() { async fn test_errors_in_regular_sync1() {
run_flavour(TestFlavour::normal_with_errors1(), 100).await; run_flavour(TestFlavour::normal_with_errors1(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync2() { async fn test_errors_in_regular_sync2() {
run_flavour(TestFlavour::normal_with_errors2(), 100).await; run_flavour(TestFlavour::normal_with_errors2(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync3() { async fn test_errors_in_regular_sync3() {
run_flavour(TestFlavour::normal_with_errors3(), 100).await; run_flavour(TestFlavour::normal_with_errors3(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync4() { async fn test_errors_in_regular_sync4() {
run_flavour(TestFlavour::normal_with_errors4(), 100).await; run_flavour(TestFlavour::normal_with_errors4(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync5() { async fn test_errors_in_regular_sync5() {
run_flavour(TestFlavour::normal_with_errors5(), 100).await; run_flavour(TestFlavour::normal_with_errors5(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync6() { async fn test_errors_in_regular_sync6() {
run_flavour(TestFlavour::normal_with_errors6(), 100).await; run_flavour(TestFlavour::normal_with_errors6(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync7() { async fn test_errors_in_regular_sync7() {
run_flavour(TestFlavour::normal_with_errors7(), 100).await; run_flavour(TestFlavour::normal_with_errors7(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync8() { async fn test_errors_in_regular_sync8() {
run_flavour(TestFlavour::normal_with_errors8(), 100).await; run_flavour(TestFlavour::normal_with_errors8(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync9() { async fn test_errors_in_regular_sync9() {
run_flavour(TestFlavour::normal_with_errors9(), 100).await; run_flavour(TestFlavour::normal_with_errors9(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync10() { async fn test_errors_in_regular_sync10() {
run_flavour(TestFlavour::normal_with_errors10(), 100).await; run_flavour(TestFlavour::normal_with_errors10(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync11() { async fn test_errors_in_regular_sync11() {
run_flavour(TestFlavour::normal_with_errors11(), 100).await; run_flavour(TestFlavour::normal_with_errors11(), 100).await;
} }
#[tokio::test] #[tokio::test]
#[cfg_attr(not(feature="integration_tests"), ignore)] #[cfg_attr(not(feature = "integration_tests"), ignore)]
async fn test_errors_in_regular_sync12() { async fn test_errors_in_regular_sync12() {
run_flavour(TestFlavour::normal_with_errors12(), 100).await; run_flavour(TestFlavour::normal_with_errors12(), 100).await;
} }
#[cfg(feature = "integration_tests")] #[cfg(feature = "integration_tests")]
use kitchen_fridge::{traits::CalDavSource, use kitchen_fridge::{
provider::Provider, cache::Cache, calendar::cached_calendar::CachedCalendar, provider::Provider,
cache::Cache, traits::CalDavSource,
calendar::cached_calendar::CachedCalendar,
}; };
/// Print the contents of the provider. This is usually used for debugging /// Print the contents of the provider. This is usually used for debugging
#[allow(dead_code)] #[allow(dead_code)]
#[cfg(feature = "integration_tests")] #[cfg(feature = "integration_tests")]
async fn print_provider(provider: &Provider<Cache, CachedCalendar, Cache, CachedCalendar>, title: &str) { async fn print_provider(
provider: &Provider<Cache, CachedCalendar, Cache, CachedCalendar>,
title: &str,
) {
let cals_server = provider.remote().get_calendars().await.unwrap(); let cals_server = provider.remote().get_calendars().await.unwrap();
println!("----Server, {}-------", title); println!("----Server, {}-------", title);
kitchen_fridge::utils::print_calendar_list(&cals_server).await; kitchen_fridge::utils::print_calendar_list(&cals_server).await;