Fix even more simple clippy suggestions

James Musselman 2024-12-16 23:25:53 -06:00
parent 9270577b68
commit 6116e0151c
Signed by: Musselman
GPG key ID: 1DAEFF35ECB5D6DB
13 changed files with 1331 additions and 945 deletions

@@ -1,38 +1,45 @@
//! This is an example of how kitchen-fridge can be used

use chrono::Utc;
use url::Url;

use kitchen_fridge::calendar::SupportedComponents;
use kitchen_fridge::task::CompletionStatus;
use kitchen_fridge::traits::BaseCalendar;
use kitchen_fridge::traits::CalDavSource;
use kitchen_fridge::traits::CompleteCalendar;
use kitchen_fridge::utils::pause;
use kitchen_fridge::CalDavProvider;
use kitchen_fridge::Item;
use kitchen_fridge::Task;

mod shared;
use shared::initial_sync;
use shared::{EXAMPLE_CREATED_CALENDAR_URL, EXAMPLE_EXISTING_CALENDAR_URL, URL, USERNAME};

const CACHE_FOLDER: &str = "test_cache/provider_sync";

#[tokio::main]
async fn main() {
    env_logger::init();

    println!("This example show how to sync a remote server with a local cache, using a Provider.");
    println!("Make sure you have edited the constants in the 'shared.rs' file to include correct URLs and credentials.");
    println!(
        "You can also set the RUST_LOG environment variable to display more info about the sync."
    );
    println!();
    println!("This will use the following settings:");
    println!(" * URL = {}", URL);
    println!(" * USERNAME = {}", USERNAME);
    println!(
        " * EXAMPLE_EXISTING_CALENDAR_URL = {}",
        EXAMPLE_EXISTING_CALENDAR_URL
    );
    println!(
        " * EXAMPLE_CREATED_CALENDAR_URL = {}",
        EXAMPLE_CREATED_CALENDAR_URL
    );
    pause();

    let mut provider = initial_sync(CACHE_FOLDER).await;

@@ -47,32 +54,56 @@ async fn add_items_and_sync_again(provider: &mut CalDavProvider) {
    // Create a new calendar...
    let new_calendar_url: Url = EXAMPLE_CREATED_CALENDAR_URL.parse().unwrap();
    let new_calendar_name = "A brave new calendar".to_string();
    if let Err(_err) = provider
        .local_mut()
        .create_calendar(
            new_calendar_url.clone(),
            new_calendar_name.clone(),
            SupportedComponents::TODO,
            Some("#ff8000".parse().unwrap()),
        )
        .await
    {
        println!("Unable to add calendar, maybe it exists already. We're not adding it after all.");
    }

    // ...and add a task in it
    let new_name = "This is a new task in a new calendar";
    let new_task = Task::new(String::from(new_name), true, &new_calendar_url);
    provider
        .local()
        .get_calendar(&new_calendar_url)
        .await
        .unwrap()
        .lock()
        .unwrap()
        .add_item(Item::Task(new_task))
        .await
        .unwrap();

    // Also create a task in a previously existing calendar
    let changed_calendar_url: Url = EXAMPLE_EXISTING_CALENDAR_URL.parse().unwrap();
    let new_task_name = "This is a new task we're adding as an example, with ÜTF-8 characters";
    let new_task = Task::new(String::from(new_task_name), false, &changed_calendar_url);
    let new_url = new_task.url().clone();
    provider
        .local()
        .get_calendar(&changed_calendar_url)
        .await
        .unwrap()
        .lock()
        .unwrap()
        .add_item(Item::Task(new_task))
        .await
        .unwrap();

    if !(provider.sync().await) {
        log::warn!("Sync did not complete, see the previous log lines for more info. You can safely start a new sync. The new task may not have been synced.");
    } else {
        println!(
            "Done syncing the new task '{}' and the new calendar '{}'",
            new_task_name, new_calendar_name
        );
    }

    provider.local().save_to_folder().unwrap();

@@ -82,14 +113,22 @@ async fn add_items_and_sync_again(provider: &mut CalDavProvider) {
async fn complete_item_and_sync_again(
    provider: &mut CalDavProvider,
    changed_calendar_url: &Url,
    url_to_complete: &Url,
) {
    println!("\nNow, we'll mark this last task as completed, and run the sync again.");
    pause();

    let completion_status = CompletionStatus::Completed(Some(Utc::now()));
    provider
        .local()
        .get_calendar(changed_calendar_url)
        .await
        .unwrap()
        .lock()
        .unwrap()
        .get_item_by_url_mut(url_to_complete)
        .await
        .unwrap()
        .unwrap_task_mut()
        .set_completion_status(completion_status);

@@ -106,15 +145,22 @@ async fn complete_item_and_sync_again(
async fn remove_items_and_sync_again(
    provider: &mut CalDavProvider,
    changed_calendar_url: &Url,
    id_to_remove: &Url,
) {
    println!("\nNow, we'll delete this last task, and run the sync again.");
    pause();

    // Remove the task we had created
    provider
        .local()
        .get_calendar(changed_calendar_url)
        .await
        .unwrap()
        .lock()
        .unwrap()
        .mark_for_deletion(id_to_remove)
        .await
        .unwrap();

    if !(provider.sync().await) {
        log::warn!("Sync did not complete, see the previous log lines for more info. You can safely start a new sync. The new task may not have been synced.");

@@ -1,21 +1,20 @@
use std::collections::{HashMap, HashSet};
use std::error::Error;

use async_trait::async_trait;
use csscolorparser::Color;
use serde::{Deserialize, Serialize};
use url::Url;

use crate::calendar::SupportedComponents;
use crate::item::SyncStatus;
use crate::traits::{BaseCalendar, CompleteCalendar};
use crate::Item;

#[cfg(feature = "local_calendar_mocks_remote_calendars")]
use crate::mock_behaviour::MockBehaviour;
#[cfg(feature = "local_calendar_mocks_remote_calendars")]
use std::sync::{Arc, Mutex};

/// A calendar used by the [`cache`](crate::cache) module
///

@@ -41,11 +40,12 @@ impl CachedCalendar {
        self.mock_behaviour = mock_behaviour;
    }

    #[cfg(feature = "local_calendar_mocks_remote_calendars")]
    fn add_item_maybe_mocked(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> {
        if self.mock_behaviour.is_some() {
            self.mock_behaviour
                .as_ref()
                .map_or(Ok(()), |b| b.lock().unwrap().can_add_item())?;
            self.add_or_update_item_force_synced(item)
        } else {
            self.regular_add_or_update_item(item)

@@ -55,7 +55,9 @@ impl CachedCalendar {
    #[cfg(feature = "local_calendar_mocks_remote_calendars")]
    fn update_item_maybe_mocked(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> {
        if self.mock_behaviour.is_some() {
            self.mock_behaviour
                .as_ref()
                .map_or(Ok(()), |b| b.lock().unwrap().can_update_item())?;
            self.add_or_update_item_force_synced(item)
        } else {
            self.regular_add_or_update_item(item)

@@ -72,7 +74,10 @@ impl CachedCalendar {
    /// Add or update an item, but force a "synced" SyncStatus. This is the normal behaviour that would happen on a server
    #[cfg(feature = "local_calendar_mocks_remote_calendars")]
    fn add_or_update_item_force_synced(
        &mut self,
        mut item: Item,
    ) -> Result<SyncStatus, Box<dyn Error>> {
        log::debug!("Adding or updating an item, but forces a synced SyncStatus");
        match item.sync_status() {
            SyncStatus::Synced(_) => (),

@@ -85,7 +90,10 @@ impl CachedCalendar {
    /// Some kind of equality check
    #[cfg(any(test, feature = "integration_tests"))]
    pub async fn has_same_observable_content_as(
        &self,
        other: &CachedCalendar,
    ) -> Result<bool, Box<dyn Error>> {
        if self.name != other.name
            || self.url != other.url
            || self.supported_components != other.supported_components

@@ -95,11 +103,10 @@ impl CachedCalendar {
            return Ok(false);
        }

        let items_l = self.get_items().await?;
        let items_r = other.get_items().await?;

        if !crate::utils::keys_are_the_same(&items_l, &items_r) {
            log::debug!("Different keys for items");
            return Ok(false);
        }

@@ -108,7 +115,7 @@ impl CachedCalendar {
                Some(c) => c,
                None => return Err("should not happen, we've just tested keys are the same".into()),
            };

            if !item_l.has_same_observable_content_as(item_r) {
                log::debug!("Different items for URL {}:", url_l);
                log::debug!("{:#?}", item_l);
                log::debug!("{:#?}", item_r);

@@ -121,26 +128,25 @@ impl CachedCalendar {
    /// The non-async version of [`Self::get_item_urls`]
    pub fn get_item_urls_sync(&self) -> Result<HashSet<Url>, Box<dyn Error>> {
        Ok(self.items.keys().cloned().collect())
    }

    /// The non-async version of [`Self::get_items`]
    pub fn get_items_sync(&self) -> Result<HashMap<Url, &Item>, Box<dyn Error>> {
        Ok(self
            .items
            .iter()
            .map(|(url, item)| (url.clone(), item))
            .collect())
    }

    /// The non-async version of [`Self::get_items_mut`]
    pub fn get_items_mut_sync(&mut self) -> Result<HashMap<Url, &mut Item>, Box<dyn Error>> {
        Ok(self
            .items
            .iter_mut()
            .map(|(url, item)| (url.clone(), item))
            .collect())
    }

    /// The non-async version of [`Self::get_item_by_url`]

@@ -167,8 +173,12 @@ impl CachedCalendar {
    /// The non-async version of [`Self::update_item`]
    pub fn update_item_sync(&mut self, item: Item) -> Result<SyncStatus, Box<dyn Error>> {
        if !self.items.contains_key(item.url()) {
            return Err(format!(
                "Item {:?} cannot be updated, it does not already exist",
                item.url()
            )
            .into());
        }

        #[cfg(not(feature = "local_calendar_mocks_remote_calendars"))]
        return self.regular_add_or_update_item(item);

@@ -186,19 +196,19 @@ impl CachedCalendar {
            SyncStatus::Synced(prev_ss) => {
                let prev_ss = prev_ss.clone();
                item.set_sync_status(SyncStatus::LocallyDeleted(prev_ss));
            }
            SyncStatus::LocallyModified(prev_ss) => {
                let prev_ss = prev_ss.clone();
                item.set_sync_status(SyncStatus::LocallyDeleted(prev_ss));
            }
            SyncStatus::LocallyDeleted(prev_ss) => {
                let prev_ss = prev_ss.clone();
                item.set_sync_status(SyncStatus::LocallyDeleted(prev_ss));
            }
            SyncStatus::NotSynced => {
                // This was never synced to the server, we can safely delete it as soon as now
                self.items.remove(item_url);
            }
        };
        Ok(())
    }

@@ -209,13 +219,11 @@ impl CachedCalendar {
    pub fn immediately_delete_item_sync(&mut self, item_url: &Url) -> Result<(), Box<dyn Error>> {
        match self.items.remove(item_url) {
            None => Err(format!("Item {} is absent from this calendar", item_url).into()),
            Some(_) => Ok(()),
        }
    }
}

#[async_trait]
impl BaseCalendar for CachedCalendar {
    fn name(&self) -> &str {

@@ -245,9 +253,17 @@ impl BaseCalendar for CachedCalendar {
#[async_trait]
impl CompleteCalendar for CachedCalendar {
    fn new(
        name: String,
        url: Url,
        supported_components: SupportedComponents,
        color: Option<Color>,
    ) -> Self {
        Self {
            name,
            url,
            supported_components,
            color,
            #[cfg(feature = "local_calendar_mocks_remote_calendars")]
            mock_behaviour: None,
            items: HashMap::new(),

@@ -283,25 +299,33 @@ impl CompleteCalendar for CachedCalendar {
    }
}

// This class can be used to mock a remote calendar for integration tests
#[cfg(feature = "local_calendar_mocks_remote_calendars")]
use crate::{item::VersionTag, resource::Resource, traits::DavCalendar};

#[cfg(feature = "local_calendar_mocks_remote_calendars")]
#[async_trait]
impl DavCalendar for CachedCalendar {
    fn new(
        name: String,
        resource: Resource,
        supported_components: SupportedComponents,
        color: Option<Color>,
    ) -> Self {
        crate::traits::CompleteCalendar::new(
            name,
            resource.url().clone(),
            supported_components,
            color,
        )
    }

    async fn get_item_version_tags(&self) -> Result<HashMap<Url, VersionTag>, Box<dyn Error>> {
        #[cfg(feature = "local_calendar_mocks_remote_calendars")]
        self.mock_behaviour
            .as_ref()
            .map_or(Ok(()), |b| b.lock().unwrap().can_get_item_version_tags())?;

        use crate::item::SyncStatus;

@@ -311,7 +335,10 @@ impl DavCalendar for CachedCalendar {
            let vt = match item.sync_status() {
                SyncStatus::Synced(vt) => vt.clone(),
                _ => {
                    panic!(
                        "Mock calendars must contain only SyncStatus::Synced. Got {:?}",
                        item
                    );
                }
            };
            result.insert(url.clone(), vt);

@@ -322,7 +349,9 @@ impl DavCalendar for CachedCalendar {
    async fn get_item_by_url(&self, url: &Url) -> Result<Option<Item>, Box<dyn Error>> {
        #[cfg(feature = "local_calendar_mocks_remote_calendars")]
        self.mock_behaviour
            .as_ref()
            .map_or(Ok(()), |b| b.lock().unwrap().can_get_item_by_url())?;

        Ok(self.items.get(url).cloned())
    }

@@ -337,7 +366,9 @@ impl DavCalendar for CachedCalendar {
    async fn delete_item(&mut self, item_url: &Url) -> Result<(), Box<dyn Error>> {
        #[cfg(feature = "local_calendar_mocks_remote_calendars")]
        self.mock_behaviour
            .as_ref()
            .map_or(Ok(()), |b| b.lock().unwrap().can_delete_item())?;

        self.immediately_delete_item(item_url).await
    }
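The changes in this file follow common clippy suggestions: comparisons such as `keys_are_the_same(..) == false` become negations (clippy::bool_comparison), and the hand-written `.iter().map(|(url, _)| url.clone())` becomes `.keys().cloned()`. A minimal sketch of the same idioms, with hypothetical names rather than this crate's types:

use std::collections::{HashMap, HashSet};

// Hypothetical helper, for illustration only.
fn key_set(items: &HashMap<String, u32>) -> HashSet<String> {
    // Was: items.iter().map(|(k, _)| k.clone()).collect()
    items.keys().cloned().collect()
}

// Hypothetical helper, for illustration only.
fn is_missing(items: &HashMap<String, u32>, key: &str) -> bool {
    // Was: items.contains_key(key) == false
    !items.contains_key(key)
}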

@@ -1,7 +1,7 @@
//! Calendar events (iCal `VEVENT` items)

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use url::Url;

use crate::item::SyncStatus;

@@ -56,3 +56,9 @@ impl Event {
        unimplemented!();
    }
}

impl Default for Event {
    fn default() -> Self {
        Self::new()
    }
}
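The added `Default` impl is what clippy's `new_without_default` lint asks for: a type with an argument-free `new()` should also implement `Default`, usually by delegating to it. A standalone sketch with a hypothetical type:

// Hypothetical type, for illustration only.
struct Marker {
    label: String,
}

impl Marker {
    fn new() -> Self {
        Marker { label: String::new() }
    }
}

// clippy::new_without_default: provide Default alongside new().
impl Default for Marker {
    fn default() -> Self {
        Self::new()
    }
}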

@@ -2,36 +2,41 @@
use std::error::Error;

use chrono::{DateTime, TimeZone, Utc};
use ical::parser::ical::component::{IcalCalendar, IcalEvent, IcalTodo};
use url::Url;

use crate::item::SyncStatus;
use crate::task::CompletionStatus;
use crate::Event;
use crate::Item;
use crate::Task;

/// Parse an iCal file into the internal representation [`crate::Item`]
pub fn parse(
    content: &str,
    item_url: Url,
    sync_status: SyncStatus,
) -> Result<Item, Box<dyn Error>> {
    let mut reader = ical::IcalParser::new(content.as_bytes());
    let parsed_item = match reader.next() {
        None => return Err(format!("Invalid iCal data to parse for item {}", item_url).into()),
        Some(item) => match item {
            Err(err) => {
                return Err(
                    format!("Unable to parse iCal data for item {}: {}", item_url, err).into(),
                )
            }
            Ok(item) => item,
        },
    };

    let ical_prod_id = extract_ical_prod_id(&parsed_item)
        .map(|s| s.to_string())
        .unwrap_or_else(super::default_prod_id);

    let item = match assert_single_type(&parsed_item)? {
        CurrentType::Event(_) => Item::Event(Event::new()),
        CurrentType::Todo(todo) => {
            let mut name = None;

@@ -44,8 +49,8 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<Item, Box<dyn Error>> {
            for prop in &todo.properties {
                match prop.name.as_str() {
                    "SUMMARY" => name = prop.value.clone(),
                    "UID" => uid = prop.value.clone(),
                    "DTSTAMP" => {
                        // The property can be specified once, but is not mandatory
                        // "This property specifies the date and time that the information associated with

@@ -53,7 +58,7 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<Item, Box<dyn Error>> {
                        // "In the case of an iCalendar object that doesn't specify a "METHOD"
                        // property [e.g.: VTODO and VEVENT], this property is equivalent to the "LAST-MODIFIED" property".
                        last_modified = parse_date_time_from_property(&prop.value);
                    }
                    "LAST-MODIFIED" => {
                        // The property can be specified once, but is not mandatory
                        // "This property specifies the date and time that the information associated with

@@ -66,11 +71,11 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<Item, Box<dyn Error>> {
                        // "This property defines the date and time that a to-do was
                        // actually completed."
                        completion_date = parse_date_time_from_property(&prop.value)
                    }
                    "CREATED" => {
                        // The property can be specified once, but is not mandatory
                        creation_date = parse_date_time_from_property(&prop.value)
                    }
                    "STATUS" => {
                        // Possible values:
                        //   "NEEDS-ACTION" ;Indicates to-do needs action.

@@ -97,7 +102,13 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<Item, Box<dyn Error>> {
            };
            let last_modified = match last_modified {
                Some(dt) => dt,
                None => {
                    return Err(format!(
                        "Missing DTSTAMP for item {}, but this is required by RFC5545",
                        item_url
                    )
                    .into())
                }
            };
            let completion_status = match completed {
                false => {

@@ -105,15 +116,24 @@ pub fn parse(content: &str, item_url: Url, sync_status: SyncStatus) -> Result<Item, Box<dyn Error>> {
                        log::warn!("Task {:?} has an inconsistent content: its STATUS is not completed, yet it has a COMPLETED timestamp at {:?}", uid, completion_date);
                    }
                    CompletionStatus::Uncompleted
                }
                true => CompletionStatus::Completed(completion_date),
            };

            Item::Task(Task::new_with_parameters(
                name,
                uid,
                item_url,
                completion_status,
                sync_status,
                creation_date,
                last_modified,
                ical_prod_id,
                extra_parameters,
            ))
        }
    };

    // What to do with multiple items?
    if reader.next().map(|r| r.is_ok()) == Some(true) {
        return Err("Parsing multiple items are not supported".into());

@@ -128,8 +148,7 @@ fn parse_date_time(dt: &str) -> Result<DateTime<Utc>, chrono::format::ParseError> {
}

fn parse_date_time_from_property(value: &Option<String>) -> Option<DateTime<Utc>> {
    value.as_ref().and_then(|s| {
        parse_date_time(s)
            .map_err(|err| {
                log::warn!("Invalid timestamp: {}", s);

@@ -139,17 +158,15 @@ fn parse_date_time_from_property(value: &Option<String>) -> Option<DateTime<Utc>> {
    })
}

fn extract_ical_prod_id(item: &IcalCalendar) -> Option<&str> {
    for prop in &item.properties {
        if &prop.name == "PRODID" {
            return prop.value.as_deref();
        }
    }
    None
}

enum CurrentType<'a> {
    Event(&'a IcalEvent),
    Todo(&'a IcalTodo),

@@ -176,10 +193,9 @@ fn assert_single_type<'a>(item: &'a IcalCalendar) -> Result<CurrentType<'a>, Box<dyn Error>> {
        }
    }

    Err("Only a single TODO or a single EVENT is supported".into())
}

#[cfg(test)]
mod test {
    const EXAMPLE_ICAL: &str = r#"BEGIN:VCALENDAR

@@ -261,11 +277,17 @@ END:VCALENDAR
        assert_eq!(task.name(), "Do not forget to do this");
        assert_eq!(task.url(), &item_url);
        assert_eq!(
            task.uid(),
            "0633de27-8c32-42be-bcb8-63bc879c6185@some-domain.com"
        );
        assert!(!task.completed());
        assert_eq!(task.completion_status(), &CompletionStatus::Uncompleted);
        assert_eq!(task.sync_status(), &sync_status);
        assert_eq!(
            task.last_modified(),
            &Utc.ymd(2021, 03, 21).and_hms(0, 16, 0)
        );
    }

    #[test]

@@ -274,11 +296,19 @@ END:VCALENDAR
        let sync_status = SyncStatus::Synced(version_tag);
        let item_url: Url = "http://some.id/for/testing".parse().unwrap();

        let item = parse(
            EXAMPLE_ICAL_COMPLETED,
            item_url.clone(),
            sync_status.clone(),
        )
        .unwrap();
        let task = item.unwrap_task();
        assert!(task.completed());
        assert_eq!(
            task.completion_status(),
            &CompletionStatus::Completed(Some(Utc.ymd(2021, 04, 02).and_hms(8, 15, 57)))
        );
    }

    #[test]

@@ -287,10 +317,15 @@ END:VCALENDAR
        let sync_status = SyncStatus::Synced(version_tag);
        let item_url: Url = "http://some.id/for/testing".parse().unwrap();

        let item = parse(
            EXAMPLE_ICAL_COMPLETED_WITHOUT_A_COMPLETION_DATE,
            item_url.clone(),
            sync_status.clone(),
        )
        .unwrap();
        let task = item.unwrap_task();
        assert!(task.completed());
        assert_eq!(task.completion_status(), &CompletionStatus::Completed(None));
    }
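Several changes in this file also map to well-known lints: `as_ref().map(|s| s.as_str())` becomes `as_deref()`, `unwrap_or_else(|| f())` becomes `unwrap_or_else(f)` (redundant closure), and `assert_eq!(x, true/false)` becomes `assert!(x)` or `assert!(!x)`. A small sketch of the first two, with made-up names that are not part of this crate:

// Hypothetical helpers, for illustration only.
fn default_label() -> String {
    "unknown".to_string()
}

fn as_str_or_none(value: &Option<String>) -> Option<&str> {
    // Was: value.as_ref().map(|s| s.as_str())
    value.as_deref()
}

fn label_or_default(value: Option<String>) -> String {
    // Was: value.unwrap_or_else(|| default_label())
    value.unwrap_or_else(default_label)
}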

@@ -1,10 +1,9 @@
//! CalDAV items (todo, events, journals...)
// TODO: move Event and Task to nest them in crate::items::calendar::Calendar?

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use url::Url;

#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum Item {

@@ -21,7 +20,7 @@ macro_rules! synthetise_common_getter {
                Item::Task(t) => t.$property_name(),
            }
        }
    };
}

impl Item {

@@ -41,17 +40,11 @@ impl Item {
    }

    pub fn is_event(&self) -> bool {
        matches!(self, Item::Event(_))
    }

    pub fn is_task(&self) -> bool {
        matches!(self, Item::Task(_))
    }

    /// Returns a mutable reference to the inner Task

@@ -86,13 +79,10 @@ impl Item {
    }
}

/// A VersionTag is basically a CalDAV `ctag` or `etag`. Whenever it changes, this means the data has changed.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VersionTag {
    tag: String,
}

impl From<String> for VersionTag {

@@ -115,8 +105,6 @@ impl VersionTag {
    }
}

/// Describes whether this item has been synced already, or modified since the last time it was synced
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SyncStatus {
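The `is_event`/`is_task` rewrites rely on the `matches!` macro (clippy::match_like_matches_macro), which collapses a two-arm boolean match into one expression. An equivalent standalone sketch:

// Hypothetical enum, for illustration only.
enum Kind {
    Event,
    Task,
}

fn is_event(kind: &Kind) -> bool {
    // Was: match kind { Kind::Event => true, _ => false }
    matches!(kind, Kind::Event)
}

fn is_task(kind: &Kind) -> bool {
    matches!(kind, Kind::Task)
}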

@@ -61,7 +61,9 @@ impl MockBehaviour {
    }

    pub fn can_get_calendars(&mut self) -> Result<(), Box<dyn Error>> {
        if self.is_suspended {
            return Ok(());
        }
        decrement(&mut self.get_calendars_behaviour, "get_calendars")
    }

    // pub fn can_get_calendar(&mut self) -> Result<(), Box<dyn Error>> {

@@ -69,52 +71,68 @@ impl MockBehaviour {
    //     decrement(&mut self.get_calendar_behaviour, "get_calendar")
    // }

    pub fn can_create_calendar(&mut self) -> Result<(), Box<dyn Error>> {
        if self.is_suspended {
            return Ok(());
        }
        decrement(&mut self.create_calendar_behaviour, "create_calendar")
    }

    pub fn can_add_item(&mut self) -> Result<(), Box<dyn Error>> {
        if self.is_suspended {
            return Ok(());
        }
        decrement(&mut self.add_item_behaviour, "add_item")
    }

    pub fn can_update_item(&mut self) -> Result<(), Box<dyn Error>> {
        if self.is_suspended {
            return Ok(());
        }
        decrement(&mut self.update_item_behaviour, "update_item")
    }

    pub fn can_get_item_version_tags(&mut self) -> Result<(), Box<dyn Error>> {
        if self.is_suspended {
            return Ok(());
        }
        decrement(
            &mut self.get_item_version_tags_behaviour,
            "get_item_version_tags",
        )
    }

    pub fn can_get_item_by_url(&mut self) -> Result<(), Box<dyn Error>> {
        if self.is_suspended {
            return Ok(());
        }
        decrement(&mut self.get_item_by_url_behaviour, "get_item_by_url")
    }

    pub fn can_delete_item(&mut self) -> Result<(), Box<dyn Error>> {
        if self.is_suspended {
            return Ok(());
        }
        decrement(&mut self.delete_item_behaviour, "delete_item")
    }
}

/// Return Ok(()) in case the value is `(1+, _)` or `(_, 0)`, or return Err and decrement otherwise
fn decrement(value: &mut (u32, u32), descr: &str) -> Result<(), Box<dyn Error>> {
    let remaining_successes = value.0;
    let remaining_failures = value.1;

    if remaining_successes > 0 {
        value.0 -= 1;
        log::debug!("Mock behaviour: allowing a {} ({:?})", descr, value);
        Ok(())
    } else if remaining_failures > 0 {
        value.1 -= 1;
        log::debug!("Mock behaviour: failing a {} ({:?})", descr, value);
        Err(format!(
            "Mocked behaviour requires this {} to fail this time. ({:?})",
            descr, value
        )
        .into())
    } else {
        log::debug!("Mock behaviour: allowing a {} ({:?})", descr, value);
        Ok(())
    }
}

#[cfg(test)]
mod test {
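The rewritten `decrement` above combines two lints: `else { if … }` collapses into `else if …` (clippy::collapsible_else_if), and `x = x - 1` becomes `x -= 1` (clippy::assign_op_pattern). A reduced sketch of that control flow, independent of the crate's types:

// Simplified stand-in for the (successes, failures) counter used above.
fn decrement_sketch(value: &mut (u32, u32)) -> Result<(), String> {
    if value.0 > 0 {
        value.0 -= 1; // assign_op_pattern
        Ok(())
    } else if value.1 > 0 {
        // collapsible_else_if: previously nested as `else { if value.1 > 0 { ... } }`
        value.1 -= 1;
        Err("forced failure".to_string())
    } else {
        Ok(())
    }
}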

@ -2,18 +2,18 @@
//! //!
//! It is also responsible for syncing them together //! It is also responsible for syncing them together
use std::error::Error;
use std::collections::HashSet; use std::collections::HashSet;
use std::error::Error;
use std::fmt::{Display, Formatter};
use std::marker::PhantomData; use std::marker::PhantomData;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::fmt::{Display, Formatter};
use url::Url;
use itertools::Itertools; use itertools::Itertools;
use url::Url;
use crate::traits::{BaseCalendar, CalDavSource, DavCalendar};
use crate::traits::CompleteCalendar;
use crate::item::SyncStatus; use crate::item::SyncStatus;
use crate::traits::CompleteCalendar;
use crate::traits::{BaseCalendar, CalDavSource, DavCalendar};
pub mod sync_progress; pub mod sync_progress;
use sync_progress::SyncProgress; use sync_progress::SyncProgress;
@ -42,7 +42,6 @@ impl Display for BatchDownloadType {
} }
} }
/// A data source that combines two `CalDavSource`s, which is able to sync both sources. /// A data source that combines two `CalDavSource`s, which is able to sync both sources.
/// ///
/// Usually, you will only need to use a provider between a server and a local cache, that is to say a [`CalDavProvider`](crate::CalDavProvider), i.e. a `Provider<Cache, CachedCalendar, Client, RemoteCalendar>`. \ /// Usually, you will only need to use a provider between a server and a local cache, that is to say a [`CalDavProvider`](crate::CalDavProvider), i.e. a `Provider<Cache, CachedCalendar, Client, RemoteCalendar>`. \
@ -76,21 +75,30 @@ where
/// `remote` is usually a [`Client`](crate::client::Client), `local` is usually a [`Cache`](crate::cache::Cache). /// `remote` is usually a [`Client`](crate::client::Client), `local` is usually a [`Cache`](crate::cache::Cache).
/// However, both can be interchangeable. The only difference is that `remote` always wins in case of a sync conflict /// However, both can be interchangeable. The only difference is that `remote` always wins in case of a sync conflict
pub fn new(remote: R, local: L) -> Self { pub fn new(remote: R, local: L) -> Self {
Self { remote, local, Self {
phantom_t: PhantomData, phantom_u: PhantomData, remote,
local,
phantom_t: PhantomData,
phantom_u: PhantomData,
} }
} }
/// Returns the data source described as `local` /// Returns the data source described as `local`
pub fn local(&self) -> &L { &self.local } pub fn local(&self) -> &L {
&self.local
}
/// Returns the data source described as `local` /// Returns the data source described as `local`
pub fn local_mut(&mut self) -> &mut L { &mut self.local } pub fn local_mut(&mut self) -> &mut L {
&mut self.local
}
/// Returns the data source described as `remote`. /// Returns the data source described as `remote`.
/// ///
/// Apart from tests, there are very few (if any) reasons to access `remote` directly. /// Apart from tests, there are very few (if any) reasons to access `remote` directly.
/// Usually, you should rather use the `local` source, which (usually) is a much faster local cache. /// Usually, you should rather use the `local` source, which (usually) is a much faster local cache.
/// To be sure `local` accurately mirrors the `remote` source, you can run [`Provider::sync`] /// To be sure `local` accurately mirrors the `remote` source, you can run [`Provider::sync`]
pub fn remote(&self) -> &R { &self.remote } pub fn remote(&self) -> &R {
&self.remote
}
/// Performs a synchronisation between `local` and `remote`, and provide feeedback to the user about the progress. /// Performs a synchronisation between `local` and `remote`, and provide feeedback to the user about the progress.
/// ///
@ -117,7 +125,9 @@ where
if let Err(err) = self.run_sync_inner(progress).await { if let Err(err) = self.run_sync_inner(progress).await {
progress.error(&format!("Sync terminated because of an error: {}", err)); progress.error(&format!("Sync terminated because of an error: {}", err));
} }
progress.feedback(SyncEvent::Finished{ success: progress.is_success() }); progress.feedback(SyncEvent::Finished {
success: progress.is_success(),
});
progress.is_success() progress.is_success()
} }
@ -130,16 +140,22 @@ where
// Sync every remote calendar // Sync every remote calendar
let cals_remote = self.remote.get_calendars().await?; let cals_remote = self.remote.get_calendars().await?;
for (cal_url, cal_remote) in cals_remote { for (cal_url, cal_remote) in cals_remote {
let counterpart = match self.get_or_insert_local_counterpart_calendar(&cal_url, cal_remote.clone()).await { let counterpart = match self
.get_or_insert_local_counterpart_calendar(&cal_url, cal_remote.clone())
.await
{
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to get or insert local counterpart calendar for {} ({}). Skipping this time", cal_url, err)); progress.warn(&format!("Unable to get or insert local counterpart calendar for {} ({}). Skipping this time", cal_url, err));
continue; continue;
}, }
Ok(arc) => arc, Ok(arc) => arc,
}; };
if let Err(err) = Self::sync_calendar_pair(counterpart, cal_remote, progress).await { if let Err(err) = Self::sync_calendar_pair(counterpart, cal_remote, progress).await {
progress.warn(&format!("Unable to sync calendar {}: {}, skipping this time.", cal_url, err)); progress.warn(&format!(
"Unable to sync calendar {}: {}, skipping this time.",
cal_url, err
));
continue; continue;
} }
handled_calendars.insert(cal_url); handled_calendars.insert(cal_url);
@ -152,16 +168,22 @@ where
continue; continue;
} }
let counterpart = match self.get_or_insert_remote_counterpart_calendar(&cal_url, cal_local.clone()).await { let counterpart = match self
.get_or_insert_remote_counterpart_calendar(&cal_url, cal_local.clone())
.await
{
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to get or insert remote counterpart calendar for {} ({}). Skipping this time", cal_url, err)); progress.warn(&format!("Unable to get or insert remote counterpart calendar for {} ({}). Skipping this time", cal_url, err));
continue; continue;
}, }
Ok(arc) => arc, Ok(arc) => arc,
}; };
if let Err(err) = Self::sync_calendar_pair(cal_local, counterpart, progress).await { if let Err(err) = Self::sync_calendar_pair(cal_local, counterpart, progress).await {
progress.warn(&format!("Unable to sync calendar {}: {}, skipping this time.", cal_url, err)); progress.warn(&format!(
"Unable to sync calendar {}: {}, skipping this time.",
cal_url, err
));
continue; continue;
} }
} }
@ -171,16 +193,26 @@ where
Ok(()) Ok(())
} }
async fn get_or_insert_local_counterpart_calendar(
async fn get_or_insert_local_counterpart_calendar(&mut self, cal_url: &Url, needle: Arc<Mutex<U>>) -> Result<Arc<Mutex<T>>, Box<dyn Error>> { &mut self,
cal_url: &Url,
needle: Arc<Mutex<U>>,
) -> Result<Arc<Mutex<T>>, Box<dyn Error>> {
get_or_insert_counterpart_calendar("local", &mut self.local, cal_url, needle).await get_or_insert_counterpart_calendar("local", &mut self.local, cal_url, needle).await
} }
async fn get_or_insert_remote_counterpart_calendar(&mut self, cal_url: &Url, needle: Arc<Mutex<T>>) -> Result<Arc<Mutex<U>>, Box<dyn Error>> { async fn get_or_insert_remote_counterpart_calendar(
&mut self,
cal_url: &Url,
needle: Arc<Mutex<T>>,
) -> Result<Arc<Mutex<U>>, Box<dyn Error>> {
get_or_insert_counterpart_calendar("remote", &mut self.remote, cal_url, needle).await get_or_insert_counterpart_calendar("remote", &mut self.remote, cal_url, needle).await
} }
async fn sync_calendar_pair(
async fn sync_calendar_pair(cal_local: Arc<Mutex<T>>, cal_remote: Arc<Mutex<U>>, progress: &mut SyncProgress) -> Result<(), Box<dyn Error>> { cal_local: Arc<Mutex<T>>,
cal_remote: Arc<Mutex<U>>,
progress: &mut SyncProgress,
) -> Result<(), Box<dyn Error>> {
let mut cal_remote = cal_remote.lock().unwrap(); let mut cal_remote = cal_remote.lock().unwrap();
let mut cal_local = cal_local.lock().unwrap(); let mut cal_local = cal_local.lock().unwrap();
let cal_name = cal_local.name().to_string(); let cal_name = cal_local.name().to_string();
@ -190,7 +222,7 @@ where
progress.feedback(SyncEvent::InProgress { progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
items_done_already: 0, items_done_already: 0,
details: "started".to_string() details: "started".to_string(),
}); });
// Step 1 - find the differences // Step 1 - find the differences
@ -217,24 +249,27 @@ where
// This was created on the remote // This was created on the remote
progress.debug(&format!("* {} is a remote addition", url)); progress.debug(&format!("* {} is a remote addition", url));
remote_additions.insert(url); remote_additions.insert(url);
}, }
Some(local_item) => { Some(local_item) => {
if local_items_to_handle.remove(&url) == false { if !local_items_to_handle.remove(&url) {
progress.error(&format!("Inconsistent state: missing task {} from the local tasks", url)); progress.error(&format!(
"Inconsistent state: missing task {} from the local tasks",
url
));
} }
match local_item.sync_status() { match local_item.sync_status() {
SyncStatus::NotSynced => { SyncStatus::NotSynced => {
progress.error(&format!("URL reuse between remote and local sources ({}). Ignoring this item in the sync", url)); progress.error(&format!("URL reuse between remote and local sources ({}). Ignoring this item in the sync", url));
continue; continue;
}, }
SyncStatus::Synced(local_tag) => { SyncStatus::Synced(local_tag) => {
if &remote_tag != local_tag { if &remote_tag != local_tag {
// This has been modified on the remote // This has been modified on the remote
progress.debug(&format!("* {} is a remote change", url)); progress.debug(&format!("* {} is a remote change", url));
remote_changes.insert(url); remote_changes.insert(url);
} }
}, }
SyncStatus::LocallyModified(local_tag) => { SyncStatus::LocallyModified(local_tag) => {
if &remote_tag == local_tag { if &remote_tag == local_tag {
// This has been changed locally // This has been changed locally
@ -242,10 +277,11 @@ where
local_changes.insert(url); local_changes.insert(url);
} else { } else {
progress.info(&format!("Conflict: task {} has been modified in both sources. Using the remote version.", url)); progress.info(&format!("Conflict: task {} has been modified in both sources. Using the remote version.", url));
progress.debug(&format!("* {} is considered a remote change", url)); progress
.debug(&format!("* {} is considered a remote change", url));
remote_changes.insert(url); remote_changes.insert(url);
} }
}, }
SyncStatus::LocallyDeleted(local_tag) => { SyncStatus::LocallyDeleted(local_tag) => {
if &remote_tag == local_tag { if &remote_tag == local_tag {
// This has been locally deleted // This has been locally deleted
@ -253,10 +289,11 @@ where
local_del.insert(url); local_del.insert(url);
} else { } else {
progress.info(&format!("Conflict: task {} has been locally deleted and remotely modified. Reverting to the remote version.", url)); progress.info(&format!("Conflict: task {} has been locally deleted and remotely modified. Reverting to the remote version.", url));
progress.debug(&format!("* {} is a considered a remote change", url)); progress
.debug(&format!("* {} is a considered a remote change", url));
remote_changes.insert(url); remote_changes.insert(url);
} }
}, }
} }
} }
} }
@ -267,9 +304,12 @@ where
progress.trace(&format!("##### Considering local item {}...", url)); progress.trace(&format!("##### Considering local item {}...", url));
let local_item = match cal_local.get_item_by_url(&url).await { let local_item = match cal_local.get_item_by_url(&url).await {
None => { None => {
progress.error(&format!("Inconsistent state: missing task {} from the local tasks", url)); progress.error(&format!(
"Inconsistent state: missing task {} from the local tasks",
url
));
continue; continue;
}, }
Some(item) => item, Some(item) => item,
}; };
@ -278,29 +318,31 @@ where
// This item has been removed from the remote // This item has been removed from the remote
progress.debug(&format!("# {} is a deletion from the server", url)); progress.debug(&format!("# {} is a deletion from the server", url));
remote_del.insert(url); remote_del.insert(url);
}, }
SyncStatus::NotSynced => { SyncStatus::NotSynced => {
// This item has just been locally created // This item has just been locally created
progress.debug(&format!("# {} has been locally created", url)); progress.debug(&format!("# {} has been locally created", url));
local_additions.insert(url); local_additions.insert(url);
}, }
SyncStatus::LocallyDeleted(_) => { SyncStatus::LocallyDeleted(_) => {
// This item has been deleted from both sources // This item has been deleted from both sources
progress.debug(&format!("# {} has been deleted from both sources", url)); progress.debug(&format!("# {} has been deleted from both sources", url));
remote_del.insert(url); remote_del.insert(url);
}, }
SyncStatus::LocallyModified(_) => { SyncStatus::LocallyModified(_) => {
progress.info(&format!("Conflict: item {} has been deleted from the server and locally modified. Deleting the local copy", url)); progress.info(&format!("Conflict: item {} has been deleted from the server and locally modified. Deleting the local copy", url));
remote_del.insert(url); remote_del.insert(url);
},
} }
} }
}
// Step 2 - commit changes // Step 2 - commit changes
progress.trace("Committing changes..."); progress.trace("Committing changes...");
for url_del in local_del { for url_del in local_del {
progress.debug(&format!("> Pushing local deletion {} to the server", url_del)); progress.debug(&format!(
"> Pushing local deletion {} to the server",
url_del
));
progress.increment_counter(1); progress.increment_counter(1);
progress.feedback(SyncEvent::InProgress { progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
@ -310,14 +352,20 @@ where
match cal_remote.delete_item(&url_del).await { match cal_remote.delete_item(&url_del).await {
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to delete remote item {}: {}", url_del, err)); progress.warn(&format!(
}, "Unable to delete remote item {}: {}",
url_del, err
));
}
Ok(()) => { Ok(()) => {
// Change the local copy from "marked to deletion" to "actually deleted" // Change the local copy from "marked to deletion" to "actually deleted"
if let Err(err) = cal_local.immediately_delete_item(&url_del).await { if let Err(err) = cal_local.immediately_delete_item(&url_del).await {
progress.error(&format!("Unable to permanently delete local item {}: {}", url_del, err)); progress.error(&format!(
"Unable to permanently delete local item {}: {}",
url_del, err
));
}
} }
},
} }
} }
@ -339,20 +387,24 @@ where
&mut *cal_local, &mut *cal_local,
&mut *cal_remote, &mut *cal_remote,
progress, progress,
&cal_name &cal_name,
).await; )
.await;
Self::apply_remote_changes( Self::apply_remote_changes(
remote_changes, remote_changes,
&mut *cal_local, &mut *cal_local,
&mut *cal_remote, &mut *cal_remote,
progress, progress,
&cal_name &cal_name,
).await; )
.await;
for url_add in local_additions { for url_add in local_additions {
progress.debug(&format!("> Pushing local addition {} to the server", url_add)); progress.debug(&format!(
"> Pushing local addition {} to the server",
url_add
));
progress.increment_counter(1); progress.increment_counter(1);
progress.feedback(SyncEvent::InProgress { progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
@ -363,21 +415,27 @@ where
None => { None => {
progress.error(&format!("Inconsistency: created item {} has been marked for upload but is locally missing", url_add)); progress.error(&format!("Inconsistency: created item {} has been marked for upload but is locally missing", url_add));
continue; continue;
}, }
Some(item) => { Some(item) => {
match cal_remote.add_item(item.clone()).await { match cal_remote.add_item(item.clone()).await {
Err(err) => progress.error(&format!("Unable to add item {} to remote calendar: {}", url_add, err)), Err(err) => progress.error(&format!(
"Unable to add item {} to remote calendar: {}",
url_add, err
)),
Ok(new_ss) => { Ok(new_ss) => {
// Update local sync status // Update local sync status
item.set_sync_status(new_ss); item.set_sync_status(new_ss);
},
} }
}, }
}
}; };
} }
for url_change in local_changes { for url_change in local_changes {
progress.debug(&format!("> Pushing local change {} to the server", url_change)); progress.debug(&format!(
"> Pushing local change {} to the server",
url_change
));
progress.increment_counter(1); progress.increment_counter(1);
progress.feedback(SyncEvent::InProgress { progress.feedback(SyncEvent::InProgress {
calendar: cal_name.clone(), calendar: cal_name.clone(),
@ -388,14 +446,17 @@ where
None => { None => {
progress.error(&format!("Inconsistency: modified item {} has been marked for upload but is locally missing", url_change)); progress.error(&format!("Inconsistency: modified item {} has been marked for upload but is locally missing", url_change));
continue; continue;
}, }
Some(item) => { Some(item) => {
match cal_remote.update_item(item.clone()).await { match cal_remote.update_item(item.clone()).await {
Err(err) => progress.error(&format!("Unable to update item {} in remote calendar: {}", url_change, err)), Err(err) => progress.error(&format!(
"Unable to update item {} in remote calendar: {}",
url_change, err
)),
Ok(new_ss) => { Ok(new_ss) => {
// Update local sync status // Update local sync status
item.set_sync_status(new_ss); item.set_sync_status(new_ss);
}, }
}; };
} }
}; };
@ -404,9 +465,12 @@ where
Ok(()) Ok(())
} }
async fn item_name(cal: &T, url: &Url) -> String { async fn item_name(cal: &T, url: &Url) -> String {
cal.get_item_by_url(url).await.map(|item| item.name()).unwrap_or_default().to_string() cal.get_item_by_url(url)
.await
.map(|item| item.name())
.unwrap_or_default()
.to_string()
} }
async fn apply_remote_additions( async fn apply_remote_additions(
@ -414,10 +478,22 @@ where
cal_local: &mut T, cal_local: &mut T,
cal_remote: &mut U, cal_remote: &mut U,
progress: &mut SyncProgress, progress: &mut SyncProgress,
cal_name: &str cal_name: &str,
) { ) {
for batch in remote_additions.drain().chunks(DOWNLOAD_BATCH_SIZE).into_iter() { for batch in remote_additions
Self::fetch_batch_and_apply(BatchDownloadType::RemoteAdditions, batch, cal_local, cal_remote, progress, cal_name).await; .drain()
.chunks(DOWNLOAD_BATCH_SIZE)
.into_iter()
{
Self::fetch_batch_and_apply(
BatchDownloadType::RemoteAdditions,
batch,
cal_local,
cal_remote,
progress,
cal_name,
)
.await;
} }
} }
@ -426,10 +502,22 @@ where
cal_local: &mut T, cal_local: &mut T,
cal_remote: &mut U, cal_remote: &mut U,
progress: &mut SyncProgress, progress: &mut SyncProgress,
cal_name: &str cal_name: &str,
) { ) {
for batch in remote_changes.drain().chunks(DOWNLOAD_BATCH_SIZE).into_iter() { for batch in remote_changes
Self::fetch_batch_and_apply(BatchDownloadType::RemoteChanges, batch, cal_local, cal_remote, progress, cal_name).await; .drain()
.chunks(DOWNLOAD_BATCH_SIZE)
.into_iter()
{
Self::fetch_batch_and_apply(
BatchDownloadType::RemoteChanges,
batch,
cal_local,
cal_remote,
progress,
cal_name,
)
.await;
} }
} }
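The two helpers above drain a set of URLs and download them in fixed-size batches via itertools' `chunks`. A reduced, synchronous sketch of that batching; the batch size of 2 and the integer items are arbitrary stand-ins for `DOWNLOAD_BATCH_SIZE` and the item URLs:

use itertools::Itertools;
use std::collections::HashSet;

fn main() {
    let mut pending: HashSet<u32> = (0..5).collect();
    let mut batches = 0;
    // Drain the pending set and handle it chunk by chunk, as apply_remote_additions does.
    for batch in pending.drain().chunks(2).into_iter() {
        let batch: Vec<u32> = batch.collect();
        assert!(!batch.is_empty() && batch.len() <= 2);
        batches += 1;
    }
    assert_eq!(batches, 3);
    assert!(pending.is_empty());
}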
@ -439,37 +527,48 @@ where
cal_local: &mut T, cal_local: &mut T,
cal_remote: &mut U, cal_remote: &mut U,
progress: &mut SyncProgress, progress: &mut SyncProgress,
cal_name: &str cal_name: &str,
) { ) {
progress.debug(&format!("> Applying a batch of {} locally", batch_type) /* too bad Chunks does not implement ExactSizeIterator, that could provide useful debug info. See https://github.com/rust-itertools/itertools/issues/171 */); progress.debug(&format!("> Applying a batch of {} locally", batch_type) /* too bad Chunks does not implement ExactSizeIterator, that could provide useful debug info. See https://github.com/rust-itertools/itertools/issues/171 */);
let list_of_additions: Vec<Url> = remote_additions.map(|url| url.clone()).collect(); let list_of_additions: Vec<Url> = remote_additions.map(|url| url.clone()).collect();
match cal_remote.get_items_by_url(&list_of_additions).await { match cal_remote.get_items_by_url(&list_of_additions).await {
Err(err) => { Err(err) => {
progress.warn(&format!("Unable to get the batch of {} {:?}: {}. Skipping them.", batch_type, list_of_additions, err)); progress.warn(&format!(
}, "Unable to get the batch of {} {:?}: {}. Skipping them.",
batch_type, list_of_additions, err
));
}
Ok(items) => { Ok(items) => {
for item in items { for item in items {
match item { match item {
None => { None => {
progress.error(&format!("Inconsistency: an item from the batch has vanished from the remote end")); progress.error("Inconsistency: an item from the batch has vanished from the remote end");
continue; continue;
}, }
Some(new_item) => { Some(new_item) => {
let local_update_result = match batch_type { let local_update_result = match batch_type {
BatchDownloadType::RemoteAdditions => cal_local.add_item(new_item.clone()).await, BatchDownloadType::RemoteAdditions => {
BatchDownloadType::RemoteChanges => cal_local.update_item(new_item.clone()).await, cal_local.add_item(new_item.clone()).await
}
BatchDownloadType::RemoteChanges => {
cal_local.update_item(new_item.clone()).await
}
}; };
if let Err(err) = local_update_result { if let Err(err) = local_update_result {
progress.error(&format!("Not able to add item {} to local calendar: {}", new_item.url(), err)); progress.error(&format!(
"Not able to add item {} to local calendar: {}",
new_item.url(),
err
));
}
} }
},
} }
} }
// Notifying every item at the same time would not make sense. Let's notify only one of them // Notifying every item at the same time would not make sense. Let's notify only one of them
let one_item_name = match list_of_additions.get(0) { let one_item_name = match list_of_additions.first() {
Some(url) => Self::item_name(&cal_local, &url).await, Some(url) => Self::item_name(cal_local, url).await,
None => String::from("<unable to get the name of the first batched item>"), None => String::from("<unable to get the name of the first batched item>"),
}; };
progress.increment_counter(list_of_additions.len()); progress.increment_counter(list_of_additions.len());
@ -478,21 +577,24 @@ where
items_done_already: progress.counter(), items_done_already: progress.counter(),
details: one_item_name, details: one_item_name,
}); });
}, }
} }
} }
} }
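One of the clippy fixes in the hunk above replaces `list_of_additions.get(0)` with `.first()` (clippy::get_first). A minimal standalone illustration, with made-up URLs:

fn first_url(urls: &[String]) -> Option<&String> {
    // `urls.get(0)` triggers clippy::get_first; `first()` states the intent directly.
    urls.first()
}

fn main() {
    let urls = vec!["https://example.org/task-1.ics".to_string()];
    assert_eq!(
        first_url(&urls).map(String::as_str),
        Some("https://example.org/task-1.ics")
    );
}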
async fn get_or_insert_counterpart_calendar<H, N, I>(
async fn get_or_insert_counterpart_calendar<H, N, I>(haystack_descr: &str, haystack: &mut H, cal_url: &Url, needle: Arc<Mutex<N>>) haystack_descr: &str,
-> Result<Arc<Mutex<I>>, Box<dyn Error>> haystack: &mut H,
cal_url: &Url,
needle: Arc<Mutex<N>>,
) -> Result<Arc<Mutex<I>>, Box<dyn Error>>
where where
H: CalDavSource<I>, H: CalDavSource<I>,
I: BaseCalendar, I: BaseCalendar,
N: BaseCalendar, N: BaseCalendar,
{ {
loop { loop {
if let Some(cal) = haystack.get_calendar(&cal_url).await { if let Some(cal) = haystack.get_calendar(cal_url).await {
break Ok(cal); break Ok(cal);
} }
@ -502,14 +604,11 @@ where
let name = src.name().to_string(); let name = src.name().to_string();
let supported_comps = src.supported_components(); let supported_comps = src.supported_components();
let color = src.color(); let color = src.color();
if let Err(err) = haystack.create_calendar( if let Err(err) = haystack
cal_url.clone(), .create_calendar(cal_url.clone(), name, supported_comps, color.cloned())
name, .await
supported_comps, {
color.cloned(),
).await{
return Err(err); return Err(err);
} }
} }
} }
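`get_or_insert_counterpart_calendar` above is a get-or-create loop over an async source. A much-reduced, synchronous sketch of the same idea using a plain `HashMap` (the names and types here are hypothetical, not this crate's):

use std::collections::HashMap;

fn get_or_insert<'a>(
    haystack: &'a mut HashMap<String, Vec<String>>,
    cal_url: &str,
) -> &'a mut Vec<String> {
    // Return the existing calendar, or create an empty one under that URL.
    haystack.entry(cal_url.to_string()).or_default()
}

fn main() {
    let mut calendars: HashMap<String, Vec<String>> = HashMap::new();
    get_or_insert(&mut calendars, "https://some.calend.ar/calendar-1/")
        .push("some task".to_string());
    assert_eq!(calendars["https://some.calend.ar/calendar-1/"].len(), 1);
}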

@ -10,17 +10,27 @@ pub struct Resource {
impl Resource { impl Resource {
pub fn new(url: Url, username: String, password: String) -> Self { pub fn new(url: Url, username: String, password: String) -> Self {
Self { url, username, password } Self {
url,
username,
password,
}
} }
pub fn url(&self) -> &Url { &self.url } pub fn url(&self) -> &Url {
pub fn username(&self) -> &String { &self.username } &self.url
pub fn password(&self) -> &String { &self.password } }
pub fn username(&self) -> &String {
&self.username
}
pub fn password(&self) -> &String {
&self.password
}
/// Build a new Resource by keeping the same credentials, scheme and server from `base` but changing the path part /// Build a new Resource by keeping the same credentials, scheme and server from `base` but changing the path part
pub fn combine(&self, new_path: &str) -> Resource { pub fn combine(&self, new_path: &str) -> Resource {
let mut built = (*self).clone(); let mut built = (*self).clone();
built.url.set_path(&new_path); built.url.set_path(new_path);
built built
} }
} }
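`Resource::combine` above keeps the credentials, scheme and host and only swaps the path, which it delegates to `url::Url::set_path`. A small standalone check of that behavior (the URL is made up, and this does not use the crate's `Resource` type):

use url::Url;

fn main() {
    let mut base = Url::parse("https://dav.example.org/remote.php/dav/").unwrap();
    // Replace only the path, as combine() does on its cloned Resource.
    base.set_path("/remote.php/dav/calendars/user/personal/");
    assert_eq!(
        base.as_str(),
        "https://dav.example.org/remote.php/dav/calendars/user/personal/"
    );
    assert_eq!(base.host_str(), Some("dav.example.org"));
}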

@ -1,20 +1,24 @@
//! To-do tasks (iCal `VTODO` item) //! To-do tasks (iCal `VTODO` item)
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use ical::property::Property; use ical::property::Property;
use serde::{Deserialize, Serialize};
use url::Url; use url::Url;
use uuid::Uuid;
use crate::item::SyncStatus; use crate::item::SyncStatus;
use crate::utils::random_url; use crate::utils::random_url;
/// RFC5545 defines the completion as several optional fields, yet some combinations make no sense. /**
/// This enum provides an API that forbids such impossible combinations. RFC5545 defines the completion as several optional fields, yet some combinations make no sense.
/// This enum provides an API that forbids such impossible combinations.
/// * `COMPLETED` is an optional timestamp that tells whether this task is completed
/// * `STATUS` is an optional field, that can be set to `NEEDS-ACTION`, `COMPLETED`, or others. * `COMPLETED` is an optional timestamp that tells whether this task is completed
/// Even though having a `COMPLETED` date but a `STATUS:NEEDS-ACTION` is theoretically possible, it obviously makes no sense. This API ensures this cannot happen * `STATUS` is an optional field, that can be set to `NEEDS-ACTION`, `COMPLETED`, or others.
Even though having a `COMPLETED` date but a `STATUS:NEEDS-ACTION` is theoretically possible, it obviously makes no sense. This API ensures this cannot happen
*/
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum CompletionStatus { pub enum CompletionStatus {
Completed(Option<DateTime<Utc>>), Completed(Option<DateTime<Utc>>),
@ -22,10 +26,7 @@ pub enum CompletionStatus {
} }
impl CompletionStatus { impl CompletionStatus {
pub fn is_completed(&self) -> bool { pub fn is_completed(&self) -> bool {
match self { matches!(self, CompletionStatus::Completed(_))
CompletionStatus::Completed(_) => true,
_ => false,
}
} }
} }
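The `is_completed` hunk above is clippy's match_like_matches_macro suggestion: a two-arm boolean match becomes `matches!`. A standalone sketch with a stand-in enum (not this crate's `CompletionStatus`):

enum Status {
    Completed(Option<u64>),
    Uncompleted,
}

fn is_completed(s: &Status) -> bool {
    // Equivalent to: match s { Status::Completed(_) => true, _ => false }
    matches!(s, Status::Completed(_))
}

fn main() {
    assert!(is_completed(&Status::Completed(None)));
    assert!(!is_completed(&Status::Uncompleted));
}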
@ -53,7 +54,6 @@ pub struct Task {
/// The display name of the task /// The display name of the task
name: String, name: String,
/// The PRODID, as defined in iCal files /// The PRODID, as defined in iCal files
ical_prod_id: String, ical_prod_id: String,
@ -62,7 +62,6 @@ pub struct Task {
extra_parameters: Vec<Property>, extra_parameters: Vec<Property>,
} }
impl Task { impl Task {
/// Create a brand new Task that is not on a server yet. /// Create a brand new Task that is not on a server yet.
/// This will pick a new (random) task ID. /// This will pick a new (random) task ID.
@ -74,19 +73,36 @@ impl Task {
let new_last_modified = Utc::now(); let new_last_modified = Utc::now();
let new_completion_status = if completed { let new_completion_status = if completed {
CompletionStatus::Completed(Some(Utc::now())) CompletionStatus::Completed(Some(Utc::now()))
} else { CompletionStatus::Uncompleted }; } else {
CompletionStatus::Uncompleted
};
let ical_prod_id = crate::ical::default_prod_id(); let ical_prod_id = crate::ical::default_prod_id();
let extra_parameters = Vec::new(); let extra_parameters = Vec::new();
Self::new_with_parameters(name, new_uid, new_url, new_completion_status, new_sync_status, new_creation_date, new_last_modified, ical_prod_id, extra_parameters) Self::new_with_parameters(
name,
new_uid,
new_url,
new_completion_status,
new_sync_status,
new_creation_date,
new_last_modified,
ical_prod_id,
extra_parameters,
)
} }
/// Create a new Task instance, that may be synced on the server already /// Create a new Task instance, that may be synced on the server already
pub fn new_with_parameters(name: String, uid: String, new_url: Url, pub fn new_with_parameters(
name: String,
uid: String,
new_url: Url,
completion_status: CompletionStatus, completion_status: CompletionStatus,
sync_status: SyncStatus, creation_date: Option<DateTime<Utc>>, last_modified: DateTime<Utc>, sync_status: SyncStatus,
ical_prod_id: String, extra_parameters: Vec<Property>, creation_date: Option<DateTime<Utc>>,
) -> Self last_modified: DateTime<Utc>,
{ ical_prod_id: String,
extra_parameters: Vec<Property>,
) -> Self {
Self { Self {
url: new_url, url: new_url,
uid, uid,
@ -100,16 +116,36 @@ impl Task {
} }
} }
pub fn url(&self) -> &Url { &self.url } pub fn url(&self) -> &Url {
pub fn uid(&self) -> &str { &self.uid } &self.url
pub fn name(&self) -> &str { &self.name } }
pub fn completed(&self) -> bool { self.completion_status.is_completed() } pub fn uid(&self) -> &str {
pub fn ical_prod_id(&self) -> &str { &self.ical_prod_id } &self.uid
pub fn sync_status(&self) -> &SyncStatus { &self.sync_status } }
pub fn last_modified(&self) -> &DateTime<Utc> { &self.last_modified } pub fn name(&self) -> &str {
pub fn creation_date(&self) -> Option<&DateTime<Utc>> { self.creation_date.as_ref() } &self.name
pub fn completion_status(&self) -> &CompletionStatus { &self.completion_status } }
pub fn extra_parameters(&self) -> &[Property] { &self.extra_parameters } pub fn completed(&self) -> bool {
self.completion_status.is_completed()
}
pub fn ical_prod_id(&self) -> &str {
&self.ical_prod_id
}
pub fn sync_status(&self) -> &SyncStatus {
&self.sync_status
}
pub fn last_modified(&self) -> &DateTime<Utc> {
&self.last_modified
}
pub fn creation_date(&self) -> Option<&DateTime<Utc>> {
self.creation_date.as_ref()
}
pub fn completion_status(&self) -> &CompletionStatus {
&self.completion_status
}
pub fn extra_parameters(&self) -> &[Property] {
&self.extra_parameters
}
#[cfg(any(test, feature = "integration_tests"))] #[cfg(any(test, feature = "integration_tests"))]
pub fn has_same_observable_content_as(&self, other: &Task) -> bool { pub fn has_same_observable_content_as(&self, other: &Task) -> bool {
@ -129,15 +165,13 @@ impl Task {
fn update_sync_status(&mut self) { fn update_sync_status(&mut self) {
match &self.sync_status { match &self.sync_status {
SyncStatus::NotSynced => return, SyncStatus::NotSynced | SyncStatus::LocallyModified(_) => (),
SyncStatus::LocallyModified(_) => return,
SyncStatus::Synced(prev_vt) => { SyncStatus::Synced(prev_vt) => {
self.sync_status = SyncStatus::LocallyModified(prev_vt.clone()); self.sync_status = SyncStatus::LocallyModified(prev_vt.clone());
} }
SyncStatus::LocallyDeleted(_) => { SyncStatus::LocallyDeleted(_) => {
log::warn!("Trying to update an item that has previously been deleted. These changes will probably be ignored at next sync."); log::warn!("Trying to update an item that has previously been deleted. These changes will probably be ignored at next sync.");
return; }
},
} }
} }
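In `update_sync_status` above, the two arms that did nothing (`NotSynced` and `LocallyModified`) are merged into a single or-pattern and the trailing `return`s are dropped. A reduced, compilable sketch of the same structure, with simplified stand-in types:

#[derive(Clone)]
enum SyncStatus {
    NotSynced,
    Synced(String),
    LocallyModified(String),
    LocallyDeleted(String),
}

struct Task {
    sync_status: SyncStatus,
}

impl Task {
    fn update_sync_status(&mut self) {
        match &self.sync_status {
            // Arms with identical empty bodies collapse into one or-pattern.
            SyncStatus::NotSynced | SyncStatus::LocallyModified(_) => (),
            SyncStatus::Synced(prev_vt) => {
                self.sync_status = SyncStatus::LocallyModified(prev_vt.clone());
            }
            SyncStatus::LocallyDeleted(_) => {
                eprintln!("Updating an item that was already deleted locally");
            }
        }
    }
}

fn main() {
    let mut task = Task {
        sync_status: SyncStatus::Synced("version-1".to_string()),
    };
    task.update_sync_status();
    assert!(matches!(task.sync_status, SyncStatus::LocallyModified(_)));
}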
@ -145,7 +179,6 @@ impl Task {
self.last_modified = Utc::now(); self.last_modified = Utc::now();
} }
/// Rename a task. /// Rename a task.
/// This updates its "last modified" field /// This updates its "last modified" field
pub fn set_name(&mut self, new_name: String) { pub fn set_name(&mut self, new_name: String) {
@ -169,7 +202,10 @@ impl Task {
} }
#[cfg(feature = "local_calendar_mocks_remote_calendars")] #[cfg(feature = "local_calendar_mocks_remote_calendars")]
/// Set the completion status, but forces a "master" SyncStatus, just like CalDAV servers are always "masters" /// Set the completion status, but forces a "master" SyncStatus, just like CalDAV servers are always "masters"
pub fn mock_remote_calendar_set_completion_status(&mut self, new_completion_status: CompletionStatus) { pub fn mock_remote_calendar_set_completion_status(
&mut self,
new_completion_status: CompletionStatus,
) {
self.sync_status = SyncStatus::random_synced(); self.sync_status = SyncStatus::random_synced();
self.completion_status = new_completion_status; self.completion_status = new_completion_status;
} }

@ -1,17 +1,17 @@
//! Traits used by multiple structs in this crate //! Traits used by multiple structs in this crate
use std::error::Error;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::error::Error;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use async_trait::async_trait; use async_trait::async_trait;
use csscolorparser::Color; use csscolorparser::Color;
use url::Url; use url::Url;
use crate::item::SyncStatus;
use crate::item::Item;
use crate::item::VersionTag;
use crate::calendar::SupportedComponents; use crate::calendar::SupportedComponents;
use crate::item::Item;
use crate::item::SyncStatus;
use crate::item::VersionTag;
use crate::resource::Resource; use crate::resource::Resource;
/// This trait must be implemented by data sources (either local caches or remote CalDAV clients) /// This trait must be implemented by data sources (either local caches or remote CalDAV clients)
@ -25,8 +25,13 @@ pub trait CalDavSource<T: BaseCalendar> {
/// Returns the calendar matching the URL /// Returns the calendar matching the URL
async fn get_calendar(&self, url: &Url) -> Option<Arc<Mutex<T>>>; async fn get_calendar(&self, url: &Url) -> Option<Arc<Mutex<T>>>;
/// Create a calendar if it did not exist, and return it /// Create a calendar if it did not exist, and return it
async fn create_calendar(&mut self, url: Url, name: String, supported_components: SupportedComponents, color: Option<Color>) async fn create_calendar(
-> Result<Arc<Mutex<T>>, Box<dyn Error>>; &mut self,
url: Url,
name: String,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Result<Arc<Mutex<T>>, Box<dyn Error>>;
// Removing a calendar is not supported yet // Removing a calendar is not supported yet
} }
@ -59,23 +64,29 @@ pub trait BaseCalendar {
/// Returns whether this calDAV calendar supports to-do items /// Returns whether this calDAV calendar supports to-do items
fn supports_todo(&self) -> bool { fn supports_todo(&self) -> bool {
self.supported_components().contains(crate::calendar::SupportedComponents::TODO) self.supported_components()
.contains(crate::calendar::SupportedComponents::TODO)
} }
/// Returns whether this calDAV calendar supports calendar items /// Returns whether this calDAV calendar supports calendar items
fn supports_events(&self) -> bool { fn supports_events(&self) -> bool {
self.supported_components().contains(crate::calendar::SupportedComponents::EVENT) self.supported_components()
.contains(crate::calendar::SupportedComponents::EVENT)
} }
} }
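`supports_todo` and `supports_events` above are plain bitflag containment checks on `SupportedComponents`. A hand-rolled stand-in showing what `contains` means here (the real type is a bitflags struct; this one is only illustrative):

#[derive(Clone, Copy)]
struct SupportedComponents(u8);

impl SupportedComponents {
    const TODO: SupportedComponents = SupportedComponents(0b01);
    const EVENT: SupportedComponents = SupportedComponents(0b10);

    fn contains(self, other: SupportedComponents) -> bool {
        // All bits of `other` must be set in `self`.
        (self.0 & other.0) == other.0
    }
}

fn main() {
    let todo_only = SupportedComponents(0b01);
    assert!(todo_only.contains(SupportedComponents::TODO));
    assert!(!todo_only.contains(SupportedComponents::EVENT));
}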
/// Functions available for calendars that are backed by a CalDAV server /// Functions available for calendars that are backed by a CalDAV server
/// ///
/// Note that some concrete types (e.g. [`crate::calendar::cached_calendar::CachedCalendar`]) can also provide non-async versions of these functions /// Note that some concrete types (e.g. [`crate::calendar::cached_calendar::CachedCalendar`]) can also provide non-async versions of these functions
#[async_trait] #[async_trait]
pub trait DavCalendar: BaseCalendar { pub trait DavCalendar: BaseCalendar {
/// Create a new calendar /// Create a new calendar
fn new(name: String, resource: Resource, supported_components: SupportedComponents, color: Option<Color>) -> Self; fn new(
name: String,
resource: Resource,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Self;
/// Get the URLs and the version tags of every item in this calendar /// Get the URLs and the version tags of every item in this calendar
async fn get_item_version_tags(&self) -> Result<HashMap<Url, VersionTag>, Box<dyn Error>>; async fn get_item_version_tags(&self) -> Result<HashMap<Url, VersionTag>, Box<dyn Error>>;
@ -93,16 +104,13 @@ pub trait DavCalendar : BaseCalendar {
/// Get the URLs of all current items in this calendar /// Get the URLs of all current items in this calendar
async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>> { async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>> {
let items = self.get_item_version_tags().await?; let items = self.get_item_version_tags().await?;
Ok(items.iter() Ok(items.keys().cloned().collect())
.map(|(url, _tag)| url.clone())
.collect())
} }
// Note: the CalDAV protocol could also enable to do this: // Note: the CalDAV protocol could also enable to do this:
// fn get_current_version(&self) -> CTag // fn get_current_version(&self) -> CTag
} }
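The `get_item_urls` hunk above replaces iterating over `(url, _tag)` pairs and cloning the key with the more direct `keys().cloned()`. A standalone equivalent on a plain `HashMap` (the URLs are made up):

use std::collections::{HashMap, HashSet};

fn main() {
    let mut version_tags: HashMap<String, u32> = HashMap::new();
    version_tags.insert("https://example.org/task-1.ics".to_string(), 1);
    version_tags.insert("https://example.org/task-2.ics".to_string(), 7);

    // Same result as .iter().map(|(url, _tag)| url.clone()).collect()
    let urls: HashSet<String> = version_tags.keys().cloned().collect();
    assert_eq!(urls.len(), 2);
    assert!(urls.contains("https://example.org/task-1.ics"));
}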
/// Functions available for calendars we have full knowledge of /// Functions available for calendars we have full knowledge of
/// ///
/// Usually, these are local calendars fully backed by a local folder /// Usually, these are local calendars fully backed by a local folder
@ -111,7 +119,12 @@ pub trait DavCalendar : BaseCalendar {
#[async_trait] #[async_trait]
pub trait CompleteCalendar: BaseCalendar { pub trait CompleteCalendar: BaseCalendar {
/// Create a new calendar /// Create a new calendar
fn new(name: String, url: Url, supported_components: SupportedComponents, color: Option<Color>) -> Self; fn new(
name: String,
url: Url,
supported_components: SupportedComponents,
color: Option<Color>,
) -> Self;
/// Get the URLs of all current items in this calendar /// Get the URLs of all current items in this calendar
async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>>; async fn get_item_urls(&self) -> Result<HashSet<Url>, Box<dyn Error>>;

@ -1,17 +1,17 @@
//! Some utility functions //! Some utility functions
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::sync::{Arc, Mutex};
use std::hash::Hash; use std::hash::Hash;
use std::io::{stdin, stdout, Read, Write}; use std::io::{stdin, stdout, Read, Write};
use std::sync::{Arc, Mutex};
use minidom::Element; use minidom::Element;
use url::Url; use url::Url;
use crate::item::SyncStatus;
use crate::traits::CompleteCalendar; use crate::traits::CompleteCalendar;
use crate::traits::DavCalendar; use crate::traits::DavCalendar;
use crate::Item; use crate::Item;
use crate::item::SyncStatus;
/// Walks an XML tree and returns every element that has the given name /// Walks an XML tree and returns every element that has the given name
pub fn find_elems<S: AsRef<str>>(root: &Element, searched_name: S) -> Vec<&Element> { pub fn find_elems<S: AsRef<str>>(root: &Element, searched_name: S) -> Vec<&Element> {
@ -49,14 +49,10 @@ pub fn find_elem<S: AsRef<str>>(root: &Element, searched_name: S) -> Option<&Ele
None None
} }
pub fn print_xml(element: &Element) { pub fn print_xml(element: &Element) {
let mut writer = std::io::stdout(); let mut writer = std::io::stdout();
let mut xml_writer = minidom::quick_xml::Writer::new_with_indent( let mut xml_writer = minidom::quick_xml::Writer::new_with_indent(std::io::stdout(), 0x20, 4);
std::io::stdout(),
0x20, 4
);
let _ = element.to_writer(&mut xml_writer); let _ = element.to_writer(&mut xml_writer);
let _ = writer.write(&[0x0a]); let _ = writer.write(&[0x0a]);
} }
@ -74,7 +70,7 @@ where
for (_, item) in map { for (_, item) in map {
print_task(item); print_task(item);
} }
}, }
} }
} }
} }
@ -92,14 +88,13 @@ where
for (url, version_tag) in map { for (url, version_tag) in map {
println!(" * {} (version {:?})", url, version_tag); println!(" * {} (version {:?})", url, version_tag);
} }
}, }
} }
} }
} }
pub fn print_task(item: &Item) { pub fn print_task(item: &Item) {
match item { if let Item::Task(task) = item {
Item::Task(task) => {
let completion = if task.completed() { "" } else { " " }; let completion = if task.completed() { "" } else { " " };
let sync = match task.sync_status() { let sync = match task.sync_status() {
SyncStatus::NotSynced => ".", SyncStatus::NotSynced => ".",
@ -108,12 +103,9 @@ pub fn print_task(item: &Item) {
SyncStatus::LocallyDeleted(_) => "x", SyncStatus::LocallyDeleted(_) => "x",
}; };
println!(" {}{} {}\t{}", completion, sync, task.name(), task.url()); println!(" {}{} {}\t{}", completion, sync, task.name(), task.url());
},
_ => return,
} }
} }
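`print_task` above is clippy's single_match suggestion: a match with one useful arm and a `_ => return` becomes an `if let`. A reduced sketch with a stand-in `Item` enum:

enum Item {
    Task(String),
    Event(String),
}

fn print_task(item: &Item) {
    // Previously: match item { Item::Task(name) => println!(...), _ => return }
    if let Item::Task(name) = item {
        println!("task: {}", name);
    }
}

fn main() {
    print_task(&Item::Task("Buy milk".to_string()));
    print_task(&Item::Event("Dentist".to_string())); // prints nothing
}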
/// Compare keys of two hashmaps for equality /// Compare keys of two hashmaps for equality
pub fn keys_are_the_same<T, U, V>(left: &HashMap<T, U>, right: &HashMap<T, V>) -> bool pub fn keys_are_the_same<T, U, V>(left: &HashMap<T, U>, right: &HashMap<T, V>) -> bool
where where
@ -127,7 +119,7 @@ where
let keys_l: HashSet<T> = left.keys().cloned().collect(); let keys_l: HashSet<T> = left.keys().cloned().collect();
let keys_r: HashSet<T> = right.keys().cloned().collect(); let keys_r: HashSet<T> = right.keys().cloned().collect();
let result = keys_l == keys_r; let result = keys_l == keys_r;
if result == false { if !result {
log::debug!("Keys of a map mismatch"); log::debug!("Keys of a map mismatch");
for key in keys_l { for key in keys_l {
log::debug!(" left: {}", key); log::debug!(" left: {}", key);
@ -140,7 +132,6 @@ where
result result
} }
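`keys_are_the_same` above also picks up clippy::bool_comparison: `if result == false` becomes `if !result`. A compilable sketch of the whole helper with concrete types (the real function is generic, and `eprintln!` stands in for `log::debug!` here):

use std::collections::{HashMap, HashSet};

fn keys_are_the_same(left: &HashMap<String, u32>, right: &HashMap<String, i64>) -> bool {
    let keys_l: HashSet<&String> = left.keys().collect();
    let keys_r: HashSet<&String> = right.keys().collect();
    let result = keys_l == keys_r;
    if !result {
        eprintln!("Keys of a map mismatch");
    }
    result
}

fn main() {
    let mut left = HashMap::new();
    left.insert("x".to_string(), 1u32);
    let mut right = HashMap::new();
    right.insert("x".to_string(), 2i64);
    assert!(keys_are_the_same(&left, &right));
}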
/// Wait for the user to press enter /// Wait for the user to press enter
pub fn pause() { pub fn pause() {
let mut stdout = stdout(); let mut stdout = stdout();
@ -149,7 +140,6 @@ pub fn pause() {
stdin().read_exact(&mut [0]).unwrap(); stdin().read_exact(&mut [0]).unwrap();
} }
/// Generate a random URL with a given prefix /// Generate a random URL with a given prefix
pub fn random_url(parent_calendar: &Url) -> Url { pub fn random_url(parent_calendar: &Url) -> Url {
let random = uuid::Uuid::new_v4().to_hyphenated().to_string(); let random = uuid::Uuid::new_v4().to_hyphenated().to_string();

@ -8,27 +8,27 @@
//! This module can also check the sources after a sync contain the actual data we expect //! This module can also check the sources after a sync contain the actual data we expect
#![cfg(feature = "local_calendar_mocks_remote_calendars")] #![cfg(feature = "local_calendar_mocks_remote_calendars")]
use std::error::Error;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::error::Error;
use url::Url; use url::Url;
use chrono::Utc; use chrono::Utc;
use kitchen_fridge::cache::Cache;
use kitchen_fridge::calendar::cached_calendar::CachedCalendar;
use kitchen_fridge::calendar::SupportedComponents; use kitchen_fridge::calendar::SupportedComponents;
use kitchen_fridge::traits::CalDavSource; use kitchen_fridge::item::SyncStatus;
use kitchen_fridge::mock_behaviour::MockBehaviour;
use kitchen_fridge::provider::Provider;
use kitchen_fridge::task::CompletionStatus;
use kitchen_fridge::traits::BaseCalendar; use kitchen_fridge::traits::BaseCalendar;
use kitchen_fridge::traits::CalDavSource;
use kitchen_fridge::traits::CompleteCalendar; use kitchen_fridge::traits::CompleteCalendar;
use kitchen_fridge::traits::DavCalendar; use kitchen_fridge::traits::DavCalendar;
use kitchen_fridge::cache::Cache;
use kitchen_fridge::Item;
use kitchen_fridge::item::SyncStatus;
use kitchen_fridge::Task;
use kitchen_fridge::task::CompletionStatus;
use kitchen_fridge::calendar::cached_calendar::CachedCalendar;
use kitchen_fridge::provider::Provider;
use kitchen_fridge::mock_behaviour::MockBehaviour;
use kitchen_fridge::utils::random_url; use kitchen_fridge::utils::random_url;
use kitchen_fridge::Item;
use kitchen_fridge::Task;
pub enum LocatedState { pub enum LocatedState {
/// Item does not exist yet or does not exist anymore /// Item does not exist yet or does not exist anymore
@ -60,7 +60,6 @@ pub enum ChangeToApply {
// ChangeCalendar(Url) is useless, as long as changing a calendar is implemented as "delete in one calendar and re-create it in another one" // ChangeCalendar(Url) is useless, as long as changing a calendar is implemented as "delete in one calendar and re-create it in another one"
} }
pub struct ItemScenario { pub struct ItemScenario {
url: Url, url: Url,
initial_state: LocatedState, initial_state: LocatedState,
@ -87,12 +86,11 @@ pub struct ItemScenario {
pub fn scenarii_basic() -> Vec<ItemScenario> { pub fn scenarii_basic() -> Vec<ItemScenario> {
let mut tasks = Vec::new(); let mut tasks = Vec::new();
let first_cal = Url::from("https://some.calend.ar/calendar-1/".parse().unwrap()); let first_cal = "https://some.calend.ar/calendar-1/".parse().unwrap();
let second_cal = Url::from("https://some.calend.ar/calendar-2/".parse().unwrap()); let second_cal = "https://some.calend.ar/calendar-2/".parse().unwrap();
let third_cal = Url::from("https://some.calend.ar/calendar-3/".parse().unwrap()); let third_cal = "https://some.calend.ar/calendar-3/".parse().unwrap();
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&first_cal), url: random_url(&first_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
@ -106,11 +104,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task A"), name: String::from("Task A"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&first_cal), url: random_url(&first_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
@ -120,11 +116,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
local_changes_to_apply: Vec::new(), local_changes_to_apply: Vec::new(),
remote_changes_to_apply: vec![ChangeToApply::Remove], remote_changes_to_apply: vec![ChangeToApply::Remove],
after_sync: LocatedState::None, after_sync: LocatedState::None,
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&first_cal), url: random_url(&first_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
@ -134,29 +128,27 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
local_changes_to_apply: vec![ChangeToApply::Remove], local_changes_to_apply: vec![ChangeToApply::Remove],
remote_changes_to_apply: Vec::new(), remote_changes_to_apply: Vec::new(),
after_sync: LocatedState::None, after_sync: LocatedState::None,
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&first_cal), url: random_url(&first_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
name: String::from("Task D"), name: String::from("Task D"),
completed: false, completed: false,
}), }),
local_changes_to_apply: vec![ChangeToApply::Rename(String::from("Task D, locally renamed"))], local_changes_to_apply: vec![ChangeToApply::Rename(String::from(
"Task D, locally renamed",
))],
remote_changes_to_apply: Vec::new(), remote_changes_to_apply: Vec::new(),
after_sync: LocatedState::BothSynced(ItemState { after_sync: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
name: String::from("Task D, locally renamed"), name: String::from("Task D, locally renamed"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&first_cal), url: random_url(&first_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
@ -164,36 +156,38 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
completed: false, completed: false,
}), }),
local_changes_to_apply: Vec::new(), local_changes_to_apply: Vec::new(),
remote_changes_to_apply: vec![ChangeToApply::Rename(String::from("Task E, remotely renamed"))], remote_changes_to_apply: vec![ChangeToApply::Rename(String::from(
"Task E, remotely renamed",
))],
after_sync: LocatedState::BothSynced(ItemState { after_sync: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
name: String::from("Task E, remotely renamed"), name: String::from("Task E, remotely renamed"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&first_cal), url: random_url(&first_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
name: String::from("Task F"), name: String::from("Task F"),
completed: false, completed: false,
}), }),
local_changes_to_apply: vec![ChangeToApply::Rename(String::from("Task F, locally renamed"))], local_changes_to_apply: vec![ChangeToApply::Rename(String::from(
remote_changes_to_apply: vec![ChangeToApply::Rename(String::from("Task F, remotely renamed"))], "Task F, locally renamed",
))],
remote_changes_to_apply: vec![ChangeToApply::Rename(String::from(
"Task F, remotely renamed",
))],
// Conflict: the server wins // Conflict: the server wins
after_sync: LocatedState::BothSynced(ItemState { after_sync: LocatedState::BothSynced(ItemState {
calendar: first_cal.clone(), calendar: first_cal.clone(),
name: String::from("Task F, remotely renamed"), name: String::from("Task F, remotely renamed"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&second_cal), url: random_url(&second_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
@ -207,11 +201,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task G"), name: String::from("Task G"),
completed: true, completed: true,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&second_cal), url: random_url(&second_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
@ -225,11 +217,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task H"), name: String::from("Task H"),
completed: true, completed: true,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&second_cal), url: random_url(&second_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
@ -237,18 +227,18 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
completed: false, completed: false,
}), }),
local_changes_to_apply: vec![ChangeToApply::SetCompletion(true)], local_changes_to_apply: vec![ChangeToApply::SetCompletion(true)],
remote_changes_to_apply: vec![ChangeToApply::Rename(String::from("Task I, remotely renamed"))], remote_changes_to_apply: vec![ChangeToApply::Rename(String::from(
"Task I, remotely renamed",
))],
// Conflict, the server wins // Conflict, the server wins
after_sync: LocatedState::BothSynced(ItemState { after_sync: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
name: String::from("Task I, remotely renamed"), name: String::from("Task I, remotely renamed"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&second_cal), url: random_url(&second_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
@ -258,11 +248,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
local_changes_to_apply: vec![ChangeToApply::SetCompletion(true)], local_changes_to_apply: vec![ChangeToApply::SetCompletion(true)],
remote_changes_to_apply: vec![ChangeToApply::Remove], remote_changes_to_apply: vec![ChangeToApply::Remove],
after_sync: LocatedState::None, after_sync: LocatedState::None,
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&second_cal), url: random_url(&second_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
@ -276,11 +264,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task K"), name: String::from("Task K"),
completed: true, completed: true,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&second_cal), url: random_url(&second_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
@ -290,11 +276,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
local_changes_to_apply: vec![ChangeToApply::Remove], local_changes_to_apply: vec![ChangeToApply::Remove],
remote_changes_to_apply: vec![ChangeToApply::Remove], remote_changes_to_apply: vec![ChangeToApply::Remove],
after_sync: LocatedState::None, after_sync: LocatedState::None,
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&second_cal), url: random_url(&second_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: second_cal.clone(), calendar: second_cal.clone(),
@ -308,11 +292,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task M"), name: String::from("Task M"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&third_cal), url: random_url(&third_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: third_cal.clone(), calendar: third_cal.clone(),
@ -326,11 +308,9 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task N"), name: String::from("Task N"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&third_cal), url: random_url(&third_cal),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: third_cal.clone(), calendar: third_cal.clone(),
@ -344,12 +324,10 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task O"), name: String::from("Task O"),
completed: false, completed: false,
}), }),
} });
);
let url_p = random_url(&third_cal); let url_p = random_url(&third_cal);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: url_p.clone(), url: url_p.clone(),
initial_state: LocatedState::BothSynced(ItemState { initial_state: LocatedState::BothSynced(ItemState {
calendar: third_cal.clone(), calendar: third_cal.clone(),
@ -366,50 +344,59 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
name: String::from("Task P, locally renamed and un-completed"), name: String::from("Task P, locally renamed and un-completed"),
completed: false, completed: false,
}), }),
} });
);
let url_q = random_url(&third_cal); let url_q = random_url(&third_cal);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: url_q.clone(), url: url_q.clone(),
initial_state: LocatedState::None, initial_state: LocatedState::None,
local_changes_to_apply: Vec::new(), local_changes_to_apply: Vec::new(),
remote_changes_to_apply: vec![ChangeToApply::Create(third_cal.clone(), Item::Task( remote_changes_to_apply: vec![ChangeToApply::Create(
Task::new_with_parameters( third_cal.clone(),
Item::Task(Task::new_with_parameters(
String::from("Task Q, created on the server"), String::from("Task Q, created on the server"),
url_q.to_string(), url_q, url_q.to_string(),
url_q,
CompletionStatus::Uncompleted, CompletionStatus::Uncompleted,
SyncStatus::random_synced(), Some(Utc::now()), Utc::now(), "prod_id".to_string(), Vec::new() ) SyncStatus::random_synced(),
))], Some(Utc::now()),
Utc::now(),
"prod_id".to_string(),
Vec::new(),
)),
)],
after_sync: LocatedState::BothSynced(ItemState { after_sync: LocatedState::BothSynced(ItemState {
calendar: third_cal.clone(), calendar: third_cal.clone(),
name: String::from("Task Q, created on the server"), name: String::from("Task Q, created on the server"),
completed: false, completed: false,
}), }),
} });
);
let url_r = random_url(&third_cal); let url_r = random_url(&third_cal);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: url_r.clone(), url: url_r.clone(),
initial_state: LocatedState::None, initial_state: LocatedState::None,
local_changes_to_apply: vec![ChangeToApply::Create(third_cal.clone(), Item::Task( local_changes_to_apply: vec![ChangeToApply::Create(
Task::new_with_parameters( third_cal.clone(),
Item::Task(Task::new_with_parameters(
String::from("Task R, created locally"), String::from("Task R, created locally"),
url_r.to_string(), url_r, url_r.to_string(),
url_r,
CompletionStatus::Uncompleted, CompletionStatus::Uncompleted,
SyncStatus::NotSynced, Some(Utc::now()), Utc::now(), "prod_id".to_string(), Vec::new() ) SyncStatus::NotSynced,
))], Some(Utc::now()),
Utc::now(),
"prod_id".to_string(),
Vec::new(),
)),
)],
remote_changes_to_apply: Vec::new(), remote_changes_to_apply: Vec::new(),
after_sync: LocatedState::BothSynced(ItemState { after_sync: LocatedState::BothSynced(ItemState {
calendar: third_cal.clone(), calendar: third_cal.clone(),
name: String::from("Task R, created locally"), name: String::from("Task R, created locally"),
completed: false, completed: false,
}), }),
} });
);
tasks tasks
} }
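The calendar URLs at the top of `scenarii_basic` also lose their redundant `Url::from(...)` wrapper (clippy::useless_conversion), since `parse()` already yields a `Url`. A standalone check, using one of the fake test URLs from above:

use url::Url;

fn main() {
    // Url::from("...".parse().unwrap()) converted a Url into a Url; parse() alone is enough.
    let first_cal: Url = "https://some.calend.ar/calendar-1/".parse().unwrap();
    assert_eq!(first_cal.host_str(), Some("some.calend.ar"));
}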
@ -418,11 +405,10 @@ pub fn scenarii_basic() -> Vec<ItemScenario> {
pub fn scenarii_first_sync_to_local() -> Vec<ItemScenario> { pub fn scenarii_first_sync_to_local() -> Vec<ItemScenario> {
let mut tasks = Vec::new(); let mut tasks = Vec::new();
let cal1 = Url::from("https://some.calend.ar/first/".parse().unwrap()); let cal1 = "https://some.calend.ar/first/".parse().unwrap();
let cal2 = Url::from("https://some.calend.ar/second/".parse().unwrap()); let cal2 = "https://some.calend.ar/second/".parse().unwrap();
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&cal1), url: random_url(&cal1),
initial_state: LocatedState::Remote(ItemState { initial_state: LocatedState::Remote(ItemState {
calendar: cal1.clone(), calendar: cal1.clone(),
@ -436,11 +422,9 @@ pub fn scenarii_first_sync_to_local() -> Vec<ItemScenario> {
name: String::from("Task A1"), name: String::from("Task A1"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&cal2), url: random_url(&cal2),
initial_state: LocatedState::Remote(ItemState { initial_state: LocatedState::Remote(ItemState {
calendar: cal2.clone(), calendar: cal2.clone(),
@ -454,11 +438,9 @@ pub fn scenarii_first_sync_to_local() -> Vec<ItemScenario> {
name: String::from("Task A2"), name: String::from("Task A2"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&cal1), url: random_url(&cal1),
initial_state: LocatedState::Remote(ItemState { initial_state: LocatedState::Remote(ItemState {
calendar: cal1.clone(), calendar: cal1.clone(),
@ -472,8 +454,7 @@ pub fn scenarii_first_sync_to_local() -> Vec<ItemScenario> {
name: String::from("Task B1"), name: String::from("Task B1"),
completed: false, completed: false,
}), }),
} });
);
tasks tasks
} }
@ -482,11 +463,10 @@ pub fn scenarii_first_sync_to_local() -> Vec<ItemScenario> {
pub fn scenarii_first_sync_to_server() -> Vec<ItemScenario> { pub fn scenarii_first_sync_to_server() -> Vec<ItemScenario> {
let mut tasks = Vec::new(); let mut tasks = Vec::new();
let cal3 = Url::from("https://some.calend.ar/third/".parse().unwrap()); let cal3 = "https://some.calend.ar/third/".parse().unwrap();
let cal4 = Url::from("https://some.calend.ar/fourth/".parse().unwrap()); let cal4 = "https://some.calend.ar/fourth/".parse().unwrap();
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&cal3), url: random_url(&cal3),
initial_state: LocatedState::Local(ItemState { initial_state: LocatedState::Local(ItemState {
calendar: cal3.clone(), calendar: cal3.clone(),
@ -500,11 +480,9 @@ pub fn scenarii_first_sync_to_server() -> Vec<ItemScenario> {
name: String::from("Task A3"), name: String::from("Task A3"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&cal4), url: random_url(&cal4),
initial_state: LocatedState::Local(ItemState { initial_state: LocatedState::Local(ItemState {
calendar: cal4.clone(), calendar: cal4.clone(),
@ -518,11 +496,9 @@ pub fn scenarii_first_sync_to_server() -> Vec<ItemScenario> {
name: String::from("Task A4"), name: String::from("Task A4"),
completed: false, completed: false,
}), }),
} });
);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&cal3), url: random_url(&cal3),
initial_state: LocatedState::Local(ItemState { initial_state: LocatedState::Local(ItemState {
calendar: cal3.clone(), calendar: cal3.clone(),
@ -536,21 +512,18 @@ pub fn scenarii_first_sync_to_server() -> Vec<ItemScenario> {
name: String::from("Task B3"), name: String::from("Task B3"),
completed: false, completed: false,
}), }),
} });
);
tasks tasks
} }
/// This scenario tests a task added and deleted before a sync happens /// This scenario tests a task added and deleted before a sync happens
pub fn scenarii_transient_task() -> Vec<ItemScenario> { pub fn scenarii_transient_task() -> Vec<ItemScenario> {
let mut tasks = Vec::new(); let mut tasks = Vec::new();
let cal = Url::from("https://some.calend.ar/transient/".parse().unwrap()); let cal = "https://some.calend.ar/transient/".parse().unwrap();
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: random_url(&cal), url: random_url(&cal),
initial_state: LocatedState::Local(ItemState { initial_state: LocatedState::Local(ItemState {
calendar: cal.clone(), calendar: cal.clone(),
@ -564,65 +537,86 @@ pub fn scenarii_transient_task() -> Vec<ItemScenario> {
name: String::from("A task, so that the calendar actually exists"), name: String::from("A task, so that the calendar actually exists"),
completed: false, completed: false,
}), }),
} });
);
let url_transient = random_url(&cal); let url_transient = random_url(&cal);
tasks.push( tasks.push(ItemScenario {
ItemScenario {
url: url_transient.clone(), url: url_transient.clone(),
initial_state: LocatedState::None, initial_state: LocatedState::None,
local_changes_to_apply: vec![ local_changes_to_apply: vec![
ChangeToApply::Create(cal, Item::Task( ChangeToApply::Create(
Task::new_with_parameters( cal,
Item::Task(Task::new_with_parameters(
String::from("A transient task that will be deleted before the sync"), String::from("A transient task that will be deleted before the sync"),
url_transient.to_string(), url_transient, url_transient.to_string(),
url_transient,
CompletionStatus::Uncompleted, CompletionStatus::Uncompleted,
SyncStatus::NotSynced, Some(Utc::now()), Utc::now(), SyncStatus::NotSynced,
"prod_id".to_string(), Vec::new() ) Some(Utc::now()),
Utc::now(),
"prod_id".to_string(),
Vec::new(),
)), )),
),
ChangeToApply::Rename(String::from("A new name")), ChangeToApply::Rename(String::from("A new name")),
ChangeToApply::SetCompletion(true), ChangeToApply::SetCompletion(true),
ChangeToApply::Remove, ChangeToApply::Remove,
], ],
remote_changes_to_apply: Vec::new(), remote_changes_to_apply: Vec::new(),
after_sync: LocatedState::None, after_sync: LocatedState::None,
} });
);
tasks tasks
} }
/// Build a `Provider` that contains the data (defined in the given scenarii) before sync /// Build a `Provider` that contains the data (defined in the given scenarii) before sync
pub async fn populate_test_provider_before_sync(scenarii: &[ItemScenario], mock_behaviour: Arc<Mutex<MockBehaviour>>) -> Provider<Cache, CachedCalendar, Cache, CachedCalendar> { pub async fn populate_test_provider_before_sync(
scenarii: &[ItemScenario],
mock_behaviour: Arc<Mutex<MockBehaviour>>,
) -> Provider<Cache, CachedCalendar, Cache, CachedCalendar> {
let mut provider = populate_test_provider(scenarii, mock_behaviour, false).await; let mut provider = populate_test_provider(scenarii, mock_behaviour, false).await;
apply_changes_on_provider(&mut provider, scenarii).await; apply_changes_on_provider(&mut provider, scenarii).await;
provider provider
} }
/// Build a `Provider` that contains the data (defined in the given scenarii) after sync /// Build a `Provider` that contains the data (defined in the given scenarii) after sync
pub async fn populate_test_provider_after_sync(scenarii: &[ItemScenario], mock_behaviour: Arc<Mutex<MockBehaviour>>) -> Provider<Cache, CachedCalendar, Cache, CachedCalendar> { pub async fn populate_test_provider_after_sync(
scenarii: &[ItemScenario],
mock_behaviour: Arc<Mutex<MockBehaviour>>,
) -> Provider<Cache, CachedCalendar, Cache, CachedCalendar> {
populate_test_provider(scenarii, mock_behaviour, true).await populate_test_provider(scenarii, mock_behaviour, true).await
} }
async fn populate_test_provider(scenarii: &[ItemScenario], mock_behaviour: Arc<Mutex<MockBehaviour>>, populate_for_final_state: bool) -> Provider<Cache, CachedCalendar, Cache, CachedCalendar> { async fn populate_test_provider(
scenarii: &[ItemScenario],
mock_behaviour: Arc<Mutex<MockBehaviour>>,
populate_for_final_state: bool,
) -> Provider<Cache, CachedCalendar, Cache, CachedCalendar> {
let mut local = Cache::new(&PathBuf::from(String::from("test_cache/local/"))); let mut local = Cache::new(&PathBuf::from(String::from("test_cache/local/")));
let mut remote = Cache::new(&PathBuf::from(String::from("test_cache/remote/"))); let mut remote = Cache::new(&PathBuf::from(String::from("test_cache/remote/")));
remote.set_mock_behaviour(Some(mock_behaviour)); remote.set_mock_behaviour(Some(mock_behaviour));
// Create the initial state, as if we synced both sources in a given state // Create the initial state, as if we synced both sources in a given state
for item in scenarii { for item in scenarii {
let required_state = if populate_for_final_state { &item.after_sync } else { &item.initial_state }; let required_state = if populate_for_final_state {
&item.after_sync
} else {
&item.initial_state
};
let (state, sync_status) = match required_state { let (state, sync_status) = match required_state {
LocatedState::None => continue, LocatedState::None => continue,
LocatedState::Local(s) => { LocatedState::Local(s) => {
assert!(populate_for_final_state == false, "You are not supposed to expect an item in this state after sync"); assert!(
!populate_for_final_state,
"You are not supposed to expect an item in this state after sync"
);
(s, SyncStatus::NotSynced) (s, SyncStatus::NotSynced)
}, }
LocatedState::Remote(s) => { LocatedState::Remote(s) => {
assert!(populate_for_final_state == false, "You are not supposed to expect an item in this state after sync"); assert!(
!populate_for_final_state,
"You are not supposed to expect an item in this state after sync"
);
(s, SyncStatus::random_synced()) (s, SyncStatus::random_synced())
} }
LocatedState::BothSynced(s) => (s, SyncStatus::random_synced()), LocatedState::BothSynced(s) => (s, SyncStatus::random_synced()),
@ -634,8 +628,7 @@ async fn populate_test_provider(scenarii: &[ItemScenario], mock_behaviour: Arc<M
true => CompletionStatus::Completed(Some(now)), true => CompletionStatus::Completed(Some(now)),
}; };
let new_item = Item::Task( let new_item = Item::Task(Task::new_with_parameters(
Task::new_with_parameters(
state.name.clone(), state.name.clone(),
item.url.to_string(), item.url.to_string(),
item.url.clone(), item.url.clone(),
@ -643,28 +636,60 @@ async fn populate_test_provider(scenarii: &[ItemScenario], mock_behaviour: Arc<M
sync_status, sync_status,
Some(now), Some(now),
now, now,
"prod_id".to_string(), Vec::new(), "prod_id".to_string(),
Vec::new(),
)); ));
match required_state { match required_state {
LocatedState::None => panic!("Should not happen, we've continued already"), LocatedState::None => panic!("Should not happen, we've continued already"),
LocatedState::Local(s) => { LocatedState::Local(s) => {
get_or_insert_calendar(&mut local, &s.calendar).await.unwrap().lock().unwrap().add_item(new_item).await.unwrap(); get_or_insert_calendar(&mut local, &s.calendar)
}, .await
.unwrap()
.lock()
.unwrap()
.add_item(new_item)
.await
.unwrap();
}
LocatedState::Remote(s) => { LocatedState::Remote(s) => {
get_or_insert_calendar(&mut remote, &s.calendar).await.unwrap().lock().unwrap().add_item(new_item).await.unwrap(); get_or_insert_calendar(&mut remote, &s.calendar)
}, .await
.unwrap()
.lock()
.unwrap()
.add_item(new_item)
.await
.unwrap();
}
LocatedState::BothSynced(s) => { LocatedState::BothSynced(s) => {
get_or_insert_calendar(&mut local, &s.calendar).await.unwrap().lock().unwrap().add_item(new_item.clone()).await.unwrap(); get_or_insert_calendar(&mut local, &s.calendar)
get_or_insert_calendar(&mut remote, &s.calendar).await.unwrap().lock().unwrap().add_item(new_item).await.unwrap(); .await
}, .unwrap()
.lock()
.unwrap()
.add_item(new_item.clone())
.await
.unwrap();
get_or_insert_calendar(&mut remote, &s.calendar)
.await
.unwrap()
.lock()
.unwrap()
.add_item(new_item)
.await
.unwrap();
}
} }
} }
Provider::new(remote, local) Provider::new(remote, local)
} }
/// Apply `local_changes_to_apply` and `remote_changes_to_apply` to a provider that contains data before sync /// Apply `local_changes_to_apply` and `remote_changes_to_apply` to a provider that contains data before sync
async fn apply_changes_on_provider(provider: &mut Provider<Cache, CachedCalendar, Cache, CachedCalendar>, scenarii: &[ItemScenario]) { async fn apply_changes_on_provider(
provider: &mut Provider<Cache, CachedCalendar, Cache, CachedCalendar>,
scenarii: &[ItemScenario],
) {
// Apply changes to each item // Apply changes to each item
for item in scenarii { for item in scenarii {
let initial_calendar_url = match &item.initial_state { let initial_calendar_url = match &item.initial_state {
@ -676,19 +701,38 @@ async fn apply_changes_on_provider(provider: &mut Provider<Cache, CachedCalendar
let mut calendar_url = initial_calendar_url.clone(); let mut calendar_url = initial_calendar_url.clone();
for local_change in &item.local_changes_to_apply { for local_change in &item.local_changes_to_apply {
calendar_url = Some(apply_change(provider.local(), calendar_url, &item.url, local_change, false).await); calendar_url = Some(
apply_change(
provider.local(),
calendar_url,
&item.url,
local_change,
false,
)
.await,
);
} }
let mut calendar_url = initial_calendar_url; let mut calendar_url = initial_calendar_url;
for remote_change in &item.remote_changes_to_apply { for remote_change in &item.remote_changes_to_apply {
calendar_url = Some(apply_change(provider.remote(), calendar_url, &item.url, remote_change, true).await); calendar_url = Some(
apply_change(
provider.remote(),
calendar_url,
&item.url,
remote_change,
true,
)
.await,
);
} }
} }
} }
async fn get_or_insert_calendar(source: &mut Cache, url: &Url) async fn get_or_insert_calendar(
-> Result<Arc<Mutex<CachedCalendar>>, Box<dyn Error>> source: &mut Cache,
{ url: &Url,
) -> Result<Arc<Mutex<CachedCalendar>>, Box<dyn Error>> {
match source.get_calendar(url).await { match source.get_calendar(url).await {
Some(cal) => Ok(cal), Some(cal) => Ok(cal),
None => { None => {
@ -696,18 +740,26 @@ async fn get_or_insert_calendar(source: &mut Cache, url: &Url)
let supported_components = SupportedComponents::TODO; let supported_components = SupportedComponents::TODO;
let color = csscolorparser::parse("#ff8000").unwrap(); // TODO: we should rather have specific colors, depending on the calendars let color = csscolorparser::parse("#ff8000").unwrap(); // TODO: we should rather have specific colors, depending on the calendars
source.create_calendar( source
.create_calendar(
url.clone(), url.clone(),
new_name.to_string(), new_name.to_string(),
supported_components, supported_components,
Some(color), Some(color),
).await )
.await
} }
} }
} }
/// Apply a single change on a given source, and returns the calendar URL that was modified /// Apply a single change on a given source, and returns the calendar URL that was modified
async fn apply_change<S, C>(source: &S, calendar_url: Option<Url>, item_url: &Url, change: &ChangeToApply, is_remote: bool) -> Url async fn apply_change<S, C>(
source: &S,
calendar_url: Option<Url>,
item_url: &Url,
change: &ChangeToApply,
is_remote: bool,
) -> Url
where where
S: CalDavSource<C>, S: CalDavSource<C>,
C: CompleteCalendar + DavCalendar, // in this test, we're using a calendar that mocks both kinds C: CompleteCalendar + DavCalendar, // in this test, we're using a calendar that mocks both kinds
@ -716,21 +768,28 @@ where
Some(cal) => { Some(cal) => {
apply_changes_on_an_existing_item(source, &cal, item_url, change, is_remote).await; apply_changes_on_an_existing_item(source, &cal, item_url, change, is_remote).await;
cal cal
}, }
None => { None => create_test_item(source, change).await,
create_test_item(source, change).await
},
} }
} }
async fn apply_changes_on_an_existing_item<S, C>(source: &S, calendar_url: &Url, item_url: &Url, change: &ChangeToApply, is_remote: bool) async fn apply_changes_on_an_existing_item<S, C>(
where source: &S,
calendar_url: &Url,
item_url: &Url,
change: &ChangeToApply,
is_remote: bool,
) where
S: CalDavSource<C>, S: CalDavSource<C>,
C: CompleteCalendar + DavCalendar, // in this test, we're using a calendar that mocks both kinds C: CompleteCalendar + DavCalendar, // in this test, we're using a calendar that mocks both kinds
{ {
let cal = source.get_calendar(calendar_url).await.unwrap(); let cal = source.get_calendar(calendar_url).await.unwrap();
let mut cal = cal.lock().unwrap(); let mut cal = cal.lock().unwrap();
let task = cal.get_item_by_url_mut(item_url).await.unwrap().unwrap_task_mut(); let task = cal
.get_item_by_url_mut(item_url)
.await
.unwrap()
.unwrap_task_mut();
match change { match change {
ChangeToApply::Rename(new_name) => { ChangeToApply::Rename(new_name) => {
@ -739,7 +798,7 @@ where
} else { } else {
task.set_name(new_name.clone()); task.set_name(new_name.clone());
} }
}, }
ChangeToApply::SetCompletion(new_status) => { ChangeToApply::SetCompletion(new_status) => {
let completion_status = match new_status { let completion_status = match new_status {
false => CompletionStatus::Uncompleted, false => CompletionStatus::Uncompleted,
@ -750,16 +809,16 @@ where
} else { } else {
task.set_completion_status(completion_status); task.set_completion_status(completion_status);
} }
}, }
ChangeToApply::Remove => { ChangeToApply::Remove => {
match is_remote { match is_remote {
false => cal.mark_for_deletion(item_url).await.unwrap(), false => cal.mark_for_deletion(item_url).await.unwrap(),
true => cal.delete_item(item_url).await.unwrap(), true => cal.delete_item(item_url).await.unwrap(),
}; };
}, }
ChangeToApply::Create(_calendar_url, _item) => { ChangeToApply::Create(_calendar_url, _item) => {
panic!("This function only handles already existing items"); panic!("This function only handles already existing items");
}, }
} }
} }
@ -770,15 +829,13 @@ where
    C: CompleteCalendar + DavCalendar, // in this test, we're using a calendar that mocks both kinds
{
    match change {
-        ChangeToApply::Rename(_) |
-        ChangeToApply::SetCompletion(_) |
-        ChangeToApply::Remove => {
+        ChangeToApply::Rename(_) | ChangeToApply::SetCompletion(_) | ChangeToApply::Remove => {
            panic!("This function only creates items that do not exist yet");
        }
        ChangeToApply::Create(calendar_url, item) => {
            let cal = source.get_calendar(calendar_url).await.unwrap();
            cal.lock().unwrap().add_item(item.clone()).await.unwrap();
            calendar_url.clone()
-        },
+        }
    }
}
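As an aside for readers skimming these hunks: the `None` arm above is the usual cleanup of collapsing a single-expression match arm. A minimal, self-contained sketch of the same pattern follows; `lookup`, `fallback`, and `describe` are made-up names for illustration, not kitchen-fridge code.

fn lookup(key: u32) -> Option<&'static str> {
    // Pretend only key 1 exists already.
    if key == 1 {
        Some("existing entry")
    } else {
        None
    }
}

fn fallback() -> &'static str {
    "created on demand"
}

fn describe(key: u32) -> &'static str {
    match lookup(key) {
        Some(value) => {
            println!("found an existing entry");
            value
        }
        // Before: `None => { fallback() },` -- braces and a trailing comma around a
        // single expression are unnecessary, so the arm becomes a plain expression.
        None => fallback(),
    }
}

fn main() {
    println!("{}", describe(1));
    println!("{}", describe(2));
}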


@@ -6,8 +6,6 @@ use std::sync::{Arc, Mutex};
#[cfg(feature = "local_calendar_mocks_remote_calendars")]
use kitchen_fridge::mock_behaviour::MockBehaviour;
/// A test that simulates a regular synchronisation between a local cache and a server.
/// Note that this uses a second cache to "mock" a server.
struct TestFlavour {
@@ -19,22 +17,54 @@ struct TestFlavour {
#[cfg(not(feature = "local_calendar_mocks_remote_calendars"))]
impl TestFlavour {
-    pub fn normal() -> Self { Self{} }
+    pub fn normal() -> Self {
+        Self {}
+    }
-    pub fn first_sync_to_local() -> Self { Self{} }
+    pub fn first_sync_to_local() -> Self {
+        Self {}
+    }
-    pub fn first_sync_to_server() -> Self { Self{} }
+    pub fn first_sync_to_server() -> Self {
+        Self {}
+    }
-    pub fn transient_task() -> Self { Self{} }
+    pub fn transient_task() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors1() -> Self { Self{} }
+    pub fn normal_with_errors1() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors2() -> Self { Self{} }
+    pub fn normal_with_errors2() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors3() -> Self { Self{} }
+    pub fn normal_with_errors3() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors4() -> Self { Self{} }
+    pub fn normal_with_errors4() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors5() -> Self { Self{} }
+    pub fn normal_with_errors5() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors6() -> Self { Self{} }
+    pub fn normal_with_errors6() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors7() -> Self { Self{} }
+    pub fn normal_with_errors7() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors8() -> Self { Self{} }
+    pub fn normal_with_errors8() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors9() -> Self { Self{} }
+    pub fn normal_with_errors9() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors10() -> Self { Self{} }
+    pub fn normal_with_errors10() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors11() -> Self { Self{} }
+    pub fn normal_with_errors11() -> Self {
+        Self {}
+    }
-    pub fn normal_with_errors12() -> Self { Self{} }
+    pub fn normal_with_errors12() -> Self {
+        Self {}
+    }
    pub async fn run(&self, _max_attempts: u32) {
        panic!("WARNING: This test requires the \"integration_tests\" Cargo feature");
@@ -199,19 +229,22 @@ impl TestFlavour {
        }
    }
    pub async fn run(&self, max_attempts: u32) {
        self.mock_behaviour.lock().unwrap().suspend();
-        let mut provider = scenarii::populate_test_provider_before_sync(&self.scenarii, Arc::clone(&self.mock_behaviour)).await;
+        let mut provider = scenarii::populate_test_provider_before_sync(
+            &self.scenarii,
+            Arc::clone(&self.mock_behaviour),
+        )
+        .await;
        print_provider(&provider, "before sync").await;
        self.mock_behaviour.lock().unwrap().resume();
        for attempt in 0..max_attempts {
            println!("\nSyncing...\n");
-            if provider.sync().await == true {
+            if provider.sync().await {
                println!("Sync complete after {} attempts (multiple attempts are due to forced errors in mocked behaviour)", attempt+1);
-                break
+                break;
            }
        }
        self.mock_behaviour.lock().unwrap().suspend();
@@ -219,24 +252,46 @@ impl TestFlavour {
print_provider(&provider, "after sync").await; print_provider(&provider, "after sync").await;
// Check the contents of both sources are the same after sync // Check the contents of both sources are the same after sync
assert!(provider.remote().has_same_observable_content_as(provider.local()).await.unwrap()); assert!(provider
.remote()
.has_same_observable_content_as(provider.local())
.await
.unwrap());
        // But also explicitly check that every item is expected
-        let expected_provider = scenarii::populate_test_provider_after_sync(&self.scenarii, Arc::clone(&self.mock_behaviour)).await;
+        let expected_provider = scenarii::populate_test_provider_after_sync(
+            &self.scenarii,
+            Arc::clone(&self.mock_behaviour),
+        )
+        .await;
-        assert!(provider.local() .has_same_observable_content_as(expected_provider.local() ).await.unwrap());
-        assert!(provider.remote().has_same_observable_content_as(expected_provider.remote()).await.unwrap());
+        assert!(provider
+            .local()
+            .has_same_observable_content_as(expected_provider.local())
+            .await
+            .unwrap());
+        assert!(provider
+            .remote()
+            .has_same_observable_content_as(expected_provider.remote())
+            .await
+            .unwrap());
        // Perform a second sync, even if no change has happened, just to check
        println!("Syncing again");
        provider.sync().await;
-        assert!(provider.local() .has_same_observable_content_as(expected_provider.local() ).await.unwrap());
-        assert!(provider.remote().has_same_observable_content_as(expected_provider.remote()).await.unwrap());
+        assert!(provider
+            .local()
+            .has_same_observable_content_as(expected_provider.local())
+            .await
+            .unwrap());
+        assert!(provider
+            .remote()
+            .has_same_observable_content_as(expected_provider.remote())
+            .await
+            .unwrap());
    }
}
async fn run_flavour(flavour: TestFlavour, max_attempts: u32) {
    let _ = env_logger::builder().is_test(true).try_init();
    flavour.run(max_attempts).await;
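One change in the `run()` hunk above is worth calling out: `if provider.sync().await == true` becomes `if provider.sync().await`, which is what clippy's `bool_comparison` lint asks for. A tiny standalone sketch of that pattern; `sync_once` is a made-up stand-in, not the kitchen-fridge API:

fn sync_once(attempt: u32) -> bool {
    // Pretend the sync only succeeds on the third attempt.
    attempt >= 2
}

fn main() {
    for attempt in 0..5 {
        // Before: `if sync_once(attempt) == true {` -- clippy flags the redundant
        // comparison against `true`; the bool can be used directly.
        if sync_once(attempt) {
            println!("Sync complete after {} attempts", attempt + 1);
            break;
        }
    }
}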
@@ -339,16 +394,18 @@ async fn test_errors_in_regular_sync12() {
}
#[cfg(feature = "integration_tests")]
-use kitchen_fridge::{traits::CalDavSource,
-    provider::Provider,
-    cache::Cache,
-    calendar::cached_calendar::CachedCalendar,
+use kitchen_fridge::{
+    cache::Cache, calendar::cached_calendar::CachedCalendar, provider::Provider,
+    traits::CalDavSource,
};
/// Print the contents of the provider. This is usually used for debugging
#[allow(dead_code)]
#[cfg(feature = "integration_tests")]
-async fn print_provider(provider: &Provider<Cache, CachedCalendar, Cache, CachedCalendar>, title: &str) {
+async fn print_provider(
+    provider: &Provider<Cache, CachedCalendar, Cache, CachedCalendar>,
+    title: &str,
+) {
    let cals_server = provider.remote().get_calendars().await.unwrap();
    println!("----Server, {}-------", title);
    kitchen_fridge::utils::print_calendar_list(&cals_server).await;