diff options
Diffstat (limited to 'src/data/content.rs')
-rw-r--r-- | src/data/content.rs | 262 |
1 file changed, 0 insertions, 262 deletions
diff --git a/src/data/content.rs b/src/data/content.rs deleted file mode 100644 index 4a39967..0000000 --- a/src/data/content.rs +++ /dev/null @@ -1,262 +0,0 @@ -#[cfg(feature="ssr")] -use std::fs::File; -use std::collections::HashMap; -#[cfg(feature="ssr")] -use std::io::{BufRead, BufReader}; -use std::path::{PathBuf, Path}; -use std::sync::Arc; -#[cfg(feature="ssr")] -use tokio::sync::RwLock; -use chrono::{DateTime, Utc}; -use leptos::prelude::StorageAccess; -use serde::Deserialize; -use uuid::Uuid; -#[cfg(feature="ssr")] -use fs2::FileExt; -#[cfg(feature="ssr")] -use tokio::runtime; -#[cfg(feature="ssr")] -use tokio_stream::wrappers::ReadDirStream; -#[cfg(feature="ssr")] -use futures::stream::StreamExt; - -#[derive(Hash, PartialEq, Eq, Clone)] -pub struct PageUuid(Uuid); -#[derive(Hash, PartialEq, Eq, Clone)] -pub struct NamespaceUuid(Uuid); -#[derive(Hash, PartialEq, Eq, Clone)] -pub struct MediaUuid(Uuid); - -pub struct ContentSnapshot { - pub pages: HashMap<PageUuid, Page>, - pub namespaces: HashMap<NamespaceUuid, Namespace>, - media: HashMap<MediaUuid, Media>, - - pub namespace_paths: HashMap<String, NamespaceUuid>, - pub page_paths: HashMap<String, PageUuid>, - media_paths: HashMap<String, MediaUuid>, - - pub render_cache: HashMap<PageUuid, String>, -} - -pub struct Page { - pub uuid: PageUuid, - pub namespace: NamespaceUuid, - pub author: Uuid, - pub title: String, - pub slug: String, - pub current_version: DateTime<Utc>, - pub prev_versions: Vec<DateTime<Utc>>, - content_offset: usize, -} - -pub struct Namespace { - pub uuid: NamespaceUuid, - pub path: String, - pub pages: Vec<PageUuid>, -} - -struct Media { - uuid: MediaUuid, - filename: String, - mime_type: String, - uploaded_by: Uuid, - uploaded_on: Uuid, - used_on: Vec<PageUuid>, -} - -#[cfg(feature="ssr")] -#[derive(Clone)] -pub struct ContentController { - snapshot: Arc<RwLock<Box<Arc<ContentSnapshot>>>>, - lock: Arc<File>, -} - -#[cfg(feature = "ssr")] -impl ContentController { - pub async fn 
init(data_dir: PathBuf) -> Result<Self, String> { - let lock_path = Path::join(&data_dir, ".lock"); - let lockfile = std::fs::OpenOptions::new() - .read(true).write(true).create(true) - .open(&lock_path) - .map_err(|_| "Could not open data directory".to_string())?; - - lockfile.try_lock_exclusive() - .map_err(|_| "Could not lock data directory".to_string())?; - - // Read the things - let snapshot = Self::read_data(&data_dir).await?; - - Ok(Self { - lock: Arc::new(lockfile), - snapshot: Arc::new(RwLock::new(Box::new(Arc::new(snapshot)))), - }) - } - - async fn read_data(data_dir: &PathBuf) -> Result<ContentSnapshot, String> { - use tokio::fs; - - let pagedata_cache = Arc::new(tokio::sync::Mutex::new(HashMap::<PageUuid, (String, NamespaceUuid)>::new())); - - let namespace_names_dir = Path::join(&data_dir, "namespaces/names"); - let namespace_ids_dir = Path::join(&data_dir, "namespaces/id"); - let namespaces = fs::read_dir(&namespace_names_dir).await - .map_err(|_| "Could not open namespace directory".to_string()) - .map(|dir_entries| { ReadDirStream::new(dir_entries) })? - .filter_map(async |dir_entry| -> Option<Namespace> { - let link_path = dir_entry.as_ref().ok()?.path(); - let target_path = dir_entry.as_ref().ok()? - .metadata().await.ok()? - .is_symlink() - .then_some( - fs::read_link(link_path).await.ok() - )??; - - let last_segment = target_path.file_name()?; - target_path.parent()? - .eq(&namespace_ids_dir).then_some(())?; - - let namespace_name = dir_entry.as_ref().ok()?.file_name().to_str()?.to_string(); - let namespace_uuid = NamespaceUuid(Uuid::try_parse(last_segment.to_str()?).ok()?); - - let namespace_pages = fs::read_dir(Path::join(&namespace_ids_dir, last_segment).join("pages")).await.ok()?; - let namespace_page_uuids = ReadDirStream::new(namespace_pages) - .filter_map(async |dir_entry| -> Option<PageUuid> { - let page_path = dir_entry.as_ref().ok()?.path(); - let page_uuid = dir_entry.as_ref().ok()? - .metadata().await.ok()? 
- .is_symlink() - .then_some( - fs::read_link(&page_path).await.ok() - )??; - - let page_uuid = PageUuid(Uuid::try_parse(&page_uuid.to_str()?).ok()?); - let page_slug = page_path.file_name()?.to_str()?.to_string(); - - pagedata_cache.lock().await.insert(page_uuid.clone(), (page_slug, namespace_uuid.clone())); - - Some(page_uuid) - }).collect::<Vec<PageUuid>>().await; - - Some(Namespace { - uuid: namespace_uuid, - path: namespace_name, - pages: namespace_page_uuids, - }) - }).collect::<Vec<Namespace>>().await; - - let (namespaces_by_id, namespace_paths): (HashMap<_,_>, HashMap<_,_>) = - namespaces.into_iter() - .map(|namespace| { - let namespace_uuid = namespace.uuid.clone(); - let namespace_path = namespace.path.clone(); - ( - (namespace_uuid.clone(), namespace), - (namespace_path, namespace_uuid) - ) - }) - .unzip(); - - let pages_dir = Path::join(&data_dir, "pages"); - let pages = fs::read_dir(&pages_dir).await - .map_err(|_| "Could not open pages data directory".to_string()) - .map(|dir_entries| { ReadDirStream::new(dir_entries) })? - .filter_map(async |dir_entry| -> Option<Page> { - let page_dir_path = dir_entry.as_ref().ok()?.path(); - let current_path = dir_entry.as_ref().ok()? - .metadata().await.ok()? 
- .is_dir() - .then_some( - fs::read_link(Path::join(&page_dir_path, "current")).await.ok() - )??; - - Page::init_from_file(¤t_path, pagedata_cache.lock().await.as_borrowed()).await - }).collect::<Vec<Page>>().await; - - let (pages_by_id, page_paths): (HashMap<_,_>, HashMap<_,_>) = - pages.into_iter() - .filter_map(|page| { - let page_uuid = page.uuid.clone(); - let namespace_path = &namespaces_by_id.get(&page.namespace)?.path; - let page_path = page.slug.clone(); - - Some(( - (page_uuid.clone(), page), - (format!("{namespace_path}/{page_path}"), page_uuid) - )) - }) - .unzip(); - - Ok(ContentSnapshot { - pages: pages_by_id, - namespaces: namespaces_by_id, - media: HashMap::new(), - namespace_paths, - page_paths, - media_paths: HashMap::new(), - render_cache: HashMap::new(), - }) - } - - pub async fn get_snapshot(&self) -> Arc<ContentSnapshot> { - self.snapshot.read().await.as_ref().clone() - } - - pub async fn replace_state(&self, updated: ContentSnapshot) { - todo!() - } -} - -const METADATA_DIVIDER : &'static str = "<!-- trans rights ~ath&+ -->"; - -#[cfg(feature = "ssr")] -impl Page { - async fn init_from_file(path: &PathBuf, pagedata_cache: &HashMap::<PageUuid, (String, NamespaceUuid)>) -> Option<Self> { - let mut reader = BufReader::new(File::open(path).ok()?); - let page_uuid = PageUuid(Uuid::try_parse(&path.parent()?.file_name()?.to_str()?).ok()?); - let (page_slug, namespace_uuid) = pagedata_cache.get(&page_uuid)?.as_borrowed(); - - let mut metadata_string = String::new(); - let mut current_line = String::new(); - let mut content_offset = 0; - while let Ok(size) = reader.read_line(&mut current_line) { - content_offset += size; - if size == 0 { - return None - } - - if current_line == METADATA_DIVIDER { - break - } - - metadata_string.push_str(¤t_line); - current_line.truncate(0); - } - - #[derive(Deserialize)] - struct PageMetadata { - title: String, - author: String, - prev_versions: Option<Vec<String>>, - } - - let metadata : PageMetadata = 
toml::from_str(&metadata_string).ok()?; - let current_version = DateTime::parse_from_rfc3339(path.file_name()?.to_str()?.replace("_", ":").as_str()).ok()?.to_utc(); - let prev_versions = metadata.prev_versions - .unwrap_or(Vec::new()) - .iter() - .filter_map(|str| DateTime::parse_from_rfc3339(str.replace("_", ":").as_str()).ok().map(|timestamp| timestamp.to_utc())) - .collect::<Vec<_>>(); - - Some(Page { - uuid: page_uuid, - author: Uuid::try_parse(&metadata.author).ok()?, - title: metadata.title, - namespace: namespace_uuid.clone(), - slug: page_slug.clone(), - current_version, - prev_versions, - content_offset, - }) - } -} |