use std::fs::File;
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::path::{PathBuf, Path};
use std::sync::Arc;
use tokio::sync::RwLock;
use chrono::{DateTime, Utc};
use leptos::prelude::StorageAccess;
use serde::Deserialize;
use uuid::Uuid;
use fs2::FileExt;
use tokio::runtime;
use tokio_stream::wrappers::ReadDirStream;
use futures::stream::StreamExt;

#[derive(Hash, PartialEq, Eq, Clone)]
struct PageUuid(Uuid);

#[derive(Hash, PartialEq, Eq, Clone)]
struct NamespaceUuid(Uuid);

#[derive(Hash, PartialEq, Eq, Clone)]
struct MediaUuid(Uuid);

pub struct ContentSnapshot {
    pub pages: HashMap<PageUuid, Page>,
    pub namespaces: HashMap<NamespaceUuid, Namespace>,
    media: HashMap<MediaUuid, Media>,

    // Lookup tables from URL paths to ids.
    pub namespace_paths: HashMap<String, NamespaceUuid>,
    pub page_paths: HashMap<String, PageUuid>,
    media_paths: HashMap<String, MediaUuid>,

    // Rendered page output, keyed by page (the value type is assumed here).
    pub render_cache: HashMap<PageUuid, String>,
}

pub struct Page {
    pub uuid: PageUuid,
    pub namespace: NamespaceUuid,
    pub author: Uuid,
    pub title: String,
    pub slug: String,
    pub current_version: DateTime<Utc>,
    pub prev_versions: Vec<DateTime<Utc>>,
    content_offset: usize,
}

pub struct Namespace {
    pub uuid: NamespaceUuid,
    pub path: String,
    pub pages: Vec<PageUuid>,
}

struct Media {
    uuid: MediaUuid,
    filename: String,
    mime_type: String,
    uploaded_by: Uuid,
    uploaded_on: DateTime<Utc>,
    used_on: Vec<PageUuid>,
}

#[derive(Clone)]
pub struct ContentController {
    snapshot: Arc<RwLock<Box<Arc<ContentSnapshot>>>>,
    lock: Arc<File>,
}

#[cfg(feature = "ssr")]
impl ContentController {
    pub fn init(data_dir: PathBuf) -> Result<Self, String> {
        // Hold an exclusive advisory lock on the data directory for the
        // lifetime of the controller so two processes can't fight over it.
        let lock_path = data_dir.join(".lock");
        let lockfile = std::fs::OpenOptions::new()
            .read(true).write(true).create(true)
            .open(&lock_path)
            .map_err(|_| "Could not open data directory".to_string())?;
        lockfile.try_lock_exclusive()
            .map_err(|_| "Could not lock data directory".to_string())?;

        let runtime = runtime::Builder::new_multi_thread()
            .build()
            .map_err(|_| "Could not start async runtime".to_string())?;

        // Read the things
        let snapshot = runtime.block_on(Self::read_data(&data_dir))?;

        Ok(Self {
            lock: Arc::new(lockfile),
            snapshot: Arc::new(RwLock::new(Box::new(Arc::new(snapshot)))),
        })
    }

    async fn read_data(data_dir: &PathBuf) -> Result<ContentSnapshot, String> {
        use tokio::fs;

        // Maps each page to its slug and owning namespace; filled in while
        // walking the namespace tree, consumed when loading page files.
        let pagedata_cache = Arc::new(tokio::sync::Mutex::new(
            HashMap::<PageUuid, (String, NamespaceUuid)>::new(),
        ));

        let namespace_names_dir = data_dir.join("namespaces/names");
        let namespace_ids_dir = data_dir.join("namespaces/id");

        let namespaces = fs::read_dir(&namespace_names_dir).await
            .map_err(|_| "Could not open namespace directory".to_string())
            .map(ReadDirStream::new)?
            .filter_map(async |dir_entry| -> Option<Namespace> {
                // Each entry under names/ must be a symlink into id/.
                let link_path = dir_entry.as_ref().ok()?.path();
                let target_path = dir_entry.as_ref().ok()?
                    .metadata().await.ok()?
                    .is_symlink()
                    .then_some(fs::read_link(link_path).await.ok())??;

                let last_segment = target_path.file_name()?;
                target_path.parent()?
                    .eq(&namespace_ids_dir)
                    .then_some(())?;

                let namespace_name = dir_entry.as_ref().ok()?.file_name().to_str()?.to_string();
                let namespace_uuid = NamespaceUuid(Uuid::try_parse(last_segment.to_str()?).ok()?);

                let namespace_pages = fs::read_dir(
                    namespace_ids_dir.join(last_segment).join("pages")
                ).await.ok()?;
                let namespace_page_uuids = ReadDirStream::new(namespace_pages)
                    .filter_map(async |dir_entry| -> Option<PageUuid> {
                        // Page entries are symlinks named by slug whose
                        // target is the page's uuid.
                        let page_path = dir_entry.as_ref().ok()?.path();
                        let page_uuid = dir_entry.as_ref().ok()?
                            .metadata().await.ok()?
                            .is_symlink()
                            .then_some(fs::read_link(&page_path).await.ok())??;
                        let page_uuid = PageUuid(Uuid::try_parse(page_uuid.to_str()?).ok()?);
                        let page_slug = page_path.file_name()?.to_str()?.to_string();

                        pagedata_cache.lock().await
                            .insert(page_uuid.clone(), (page_slug, namespace_uuid.clone()));
                        Some(page_uuid)
                    })
                    .collect::<Vec<PageUuid>>().await;

                Some(Namespace {
                    uuid: namespace_uuid,
                    path: namespace_name,
                    pages: namespace_page_uuids,
                })
            })
            .collect::<Vec<Namespace>>().await;

        let (namespaces_by_id, namespace_paths): (HashMap<_, _>, HashMap<_, _>) =
            namespaces.into_iter()
                .map(|namespace| {
                    let namespace_uuid = namespace.uuid.clone();
                    let namespace_path = namespace.path.clone();
                    (
                        (namespace_uuid.clone(), namespace),
                        (namespace_path, namespace_uuid),
                    )
                })
                .unzip();

        let pages_dir = data_dir.join("pages/id");
        let pages = fs::read_dir(&pages_dir).await
            .map_err(|_| "Could not open pages data directory".to_string())
            .map(ReadDirStream::new)?
            .filter_map(async |dir_entry| -> Option<Page> {
                // Each page directory has a `current` symlink pointing at
                // the latest revision file.
                let page_dir_path = dir_entry.as_ref().ok()?.path();
                let current_path = dir_entry.as_ref().ok()?
                    .metadata().await.ok()?
                    .is_dir()
                    .then_some(fs::read_link(page_dir_path.join("current")).await.ok())??;

                Page::init_from_file(&current_path, pagedata_cache.lock().await.as_borrowed()).await
            })
            .collect::<Vec<Page>>().await;

        let (pages_by_id, page_paths): (HashMap<_, _>, HashMap<_, _>) =
            pages.into_iter()
                .filter_map(|page| {
                    let page_uuid = page.uuid.clone();
                    let namespace_path = &namespaces_by_id.get(&page.namespace)?.path;
                    let page_path = page.slug.clone();
                    Some((
                        (page_uuid.clone(), page),
                        (format!("{namespace_path}/{page_path}"), page_uuid),
                    ))
                })
                .unzip();

        Ok(ContentSnapshot {
            pages: pages_by_id,
            namespaces: namespaces_by_id,
            media: HashMap::new(),
            namespace_paths,
            page_paths,
            media_paths: HashMap::new(),
            render_cache: HashMap::new(),
        })
    }

    pub async fn get_snapshot(&self) -> Arc<ContentSnapshot> {
        self.snapshot.read().await.as_ref().clone()
    }

    pub async fn replace_state(&self, updated: ContentSnapshot) {
        todo!()
    }
}

// Separates the TOML metadata header from the page body in a revision file.
// The concrete divider literal was lost in transcription; this value is an
// assumption.
const METADATA_DIVIDER: &'static str = "<!-- content -->";

impl Page {
    async fn init_from_file(
        path: &PathBuf,
        pagedata_cache: &HashMap<PageUuid, (String, NamespaceUuid)>,
    ) -> Option<Page> {
        let mut reader = BufReader::new(File::open(path).ok()?);
        // Revision files live at .../pages/id/<page uuid>/<timestamp>, so
        // the parent directory name is the page's uuid.
        let page_uuid = PageUuid(Uuid::try_parse(path.parent()?.file_name()?.to_str()?).ok()?);
        let (page_slug, namespace_uuid) = pagedata_cache.get(&page_uuid)?.as_borrowed();

        // Accumulate the TOML header up to the divider, tracking how many
        // bytes it spans so the body can be located later.
        let mut metadata_string = String::new();
        let mut current_line = String::new();
        let mut content_offset = 0;
        while let Ok(size) = reader.read_line(&mut current_line) {
            content_offset += size;
            if size == 0 { return None }
            if current_line.trim_end() == METADATA_DIVIDER { break }
            metadata_string.push_str(&current_line);
            current_line.truncate(0);
        }

        #[derive(Deserialize)]
        struct PageMetadata {
            title: String,
            author: String,
            prev_versions: Option<Vec<String>>,
        }
        let metadata: PageMetadata = toml::from_str(&metadata_string).ok()?;

        // Timestamps are stored with `_` in place of `:` to keep filenames
        // portable; undo that before parsing them as RFC 3339.
        let current_version = DateTime::parse_from_rfc3339(
            path.file_name()?.to_str()?.replace("_", ":").as_str()
        ).ok()?.to_utc();
        let prev_versions = metadata.prev_versions
            .unwrap_or_default()
            .iter()
            .filter_map(|str| {
                DateTime::parse_from_rfc3339(str.replace("_", ":").as_str())
                    .ok()
                    .map(|timestamp| timestamp.to_utc())
            })
            .collect::<Vec<DateTime<Utc>>>();

        Some(Page {
            uuid: page_uuid,
            author: Uuid::try_parse(&metadata.author).ok()?,
            title: metadata.title,
            namespace: namespace_uuid.clone(),
            slug: page_slug.clone(),
            current_version,
            prev_versions,
            content_offset,
        })
    }
}
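
// A minimal usage sketch, not part of the original file: it assumes a data
// directory laid out the way read_data expects, i.e.
//   <data>/namespaces/names/<name>          -> symlink to <data>/namespaces/id/<uuid>
//   <data>/namespaces/id/<uuid>/pages/<slug> -> symlink whose target is the page uuid
//   <data>/pages/id/<uuid>/current          -> symlink to a timestamped revision file
// The "./test-data" path is hypothetical. Note that init builds its own
// runtime only for the initial read, so callers of the async methods need an
// async context of their own, as the test shows.
#[cfg(all(test, feature = "ssr"))]
mod tests {
    use super::*;

    #[test]
    fn snapshot_paths_resolve() {
        // Assumes ./test-data matches the layout sketched above.
        let controller = ContentController::init(PathBuf::from("./test-data"))
            .expect("data dir should open and lock");

        // get_snapshot is async; drive it with a fresh runtime.
        let runtime = tokio::runtime::Runtime::new().expect("runtime");
        let snapshot = runtime.block_on(controller.get_snapshot());

        // Every "namespace/slug" path should point at a page that exists in
        // the same snapshot.
        for (path, uuid) in &snapshot.page_paths {
            assert!(snapshot.pages.contains_key(uuid), "dangling path {path}");
        }
    }
}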