author     tempest <git@ashen.earth>  2025-04-18 00:58:28 -0600
committer  tempest <git@ashen.earth>  2025-04-18 00:58:28 -0600
commit     619373a261ad18c51cd09bc61d116f585c8295ec (patch)
tree       041a80d7b57cf221497809bd3889bff1042b842c
parent     b856f12cf422b96c37c12df3d7829e4d15ef4453 (diff)
Read correct page (HEAD, main)
-rw-r--r--  src/components/app.rs | 37
-rw-r--r--  src/data/mod.rs       | 58
-rw-r--r--  src/data/namespace.rs |  9
-rw-r--r--  src/data/page.rs      | 38
4 files changed, 110 insertions, 32 deletions
diff --git a/src/components/app.rs b/src/components/app.rs
index dd2d018..2814562 100644
--- a/src/components/app.rs
+++ b/src/components/app.rs
@@ -13,9 +13,7 @@ use leptos_router::{
 use super::editor::WikiEditor;
 use super::renderer::WikiPage;
 use crate::components::layout::Layout;
-use crate::data::Namespace;
-use crate::data::Page;
-use crate::data::PageUuid;
+use crate::data::{Namespace, Page, PageData, PageUuid};
 
 pub fn shell(options: LeptosOptions) -> impl IntoView {
     view! {
@@ -69,14 +67,21 @@ struct PageParams {
 async fn get_namespace() -> Result<Namespace, ServerFnError> {
     use crate::data::StormscribeData;
 
-    Ok(StormscribeData::get_namespace())
+    Ok(StormscribeData::get_namespace().await)
 }
 
 #[server]
 async fn get_pages() -> Result<HashMap<PageUuid, Page>, ServerFnError> {
     use crate::data::StormscribeData;
 
-    Ok(StormscribeData::get_pages())
+    Ok(StormscribeData::get_all_pages().await)
+}
+
+#[server]
+async fn lookup_page(path: String) -> Result<PageData, ServerFnError> {
+    use crate::data::StormscribeData;
+
+    Ok(StormscribeData::get_page_data(path).await)
 }
 
 // Renders a page
@@ -91,24 +96,20 @@ fn PageRender() -> impl IntoView {
         .map(|params| params.path.clone().unwrap_or("Unknown path".to_string()))
         .unwrap_or("Could not read params".to_string());
 
-    let namespace = Resource::new(move || {}, |_| get_namespace());
-    let page_resource = Resource::new(move || {}, |_| get_pages());
+    let page_resource = Resource::new(
+        move || page_path.clone(),
+        |page_path| async move { lookup_page(page_path).await },
+    );
 
     view! {
         <Suspense
             fallback=move || view! { <p>"Loading..."</p> }
         >
-            {move || Suspend::new(async move {
-                let name_data = namespace.await;
-                let page_data = page_resource.await;
-                match (name_data, page_data) {
-                    (Ok(names), Ok(pages)) => view! {
-                        <pre>{format!("{names:#?}")}</pre>
-                        <pre>{format!("{pages:#?}")}</pre>
-                    }.into_any(),
-                    _ => view! {<p>Error</p>}.into_any(),
-                }
-            })}
+            {move || page_resource.get()
+                .map(|page| view! {
+                    <pre>{format!("{page:#?}")}</pre>
+                })
+            }
         </Suspense>
     }
     .into_any()
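
Note: the PageRender change above keys the resource on the requested path, so navigating to another page re-runs lookup_page on the server instead of fetching the whole namespace tree and page map. A minimal standalone sketch of that pattern, assuming the Leptos 0.7 Resource/Suspense API used in this commit (the component name and initial path are illustrative, not part of the commit):

    // Sketch only: a resource keyed on a reactive path.
    #[component]
    fn DemoPage() -> impl IntoView {
        let path = RwSignal::new("docs/intro".to_string());

        // The first closure is the reactive source; whenever `path` changes,
        // the fetcher re-runs and <Suspense> shows the fallback again.
        let page = Resource::new(
            move || path.get(),
            |p| async move { lookup_page(p).await },
        );

        view! {
            <Suspense fallback=move || view! { <p>"Loading..."</p> }>
                {move || page.get().map(|p| view! { <pre>{format!("{p:#?}")}</pre> })}
            </Suspense>
        }
    }
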
diff --git a/src/data/mod.rs b/src/data/mod.rs
index 0c61bd7..1465fee 100644
--- a/src/data/mod.rs
+++ b/src/data/mod.rs
@@ -1,4 +1,6 @@
 use serde::{Deserialize, Serialize};
+#[cfg(feature = "ssr")]
+use tokio::sync::Mutex;
 use uuid::Uuid;
 
 #[cfg(feature = "ssr")]
@@ -8,7 +10,7 @@ use std::fs::File;
 #[cfg(feature = "ssr")]
 use std::sync::LazyLock;
 
-use std::{collections::HashMap, path::Path};
+use std::{collections::HashMap, path::Path, sync::Arc};
 
 mod config;
 mod namespace;
@@ -44,29 +46,67 @@ static DATA_LOCK: LazyLock<StormscribeData> = LazyLock::new(|| {
 
     StormscribeData {
         file_lock: lockfile,
-        namespaces: Namespaces::init(&Path::join(&config.data_dir, "namespace/")).unwrap(),
-        pages: Pages::init(&Path::join(&config.data_dir, "pages/")).unwrap(),
+        data_snapshot: Mutex::new(Arc::new(DataSnapshot {
+            namespaces: Namespaces::init(&Path::join(&config.data_dir, "namespace/")).unwrap(),
+            pages: Pages::init(&Path::join(&config.data_dir, "pages/")).unwrap(),
+        })),
     }
 });
 
 #[cfg(feature = "ssr")]
 pub struct StormscribeData {
     file_lock: File,
+    data_snapshot: Mutex<Arc<DataSnapshot>>,
+}
+
+struct DataSnapshot {
     namespaces: Namespaces,
     pages: Pages,
 }
 
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct PageData {
+    path: String,
+    metadata: Option<Page>,
+    content: String,
+}
+
 #[cfg(feature = "ssr")]
 impl StormscribeData {
-    fn get_data() -> &'static Self {
-        &DATA_LOCK
+    async fn get_snapshot() -> Arc<DataSnapshot> {
+        DATA_LOCK.data_snapshot.lock().await.clone()
     }
 
-    pub fn get_namespace() -> Namespace {
-        DATA_LOCK.namespaces.root.clone()
+    pub async fn get_namespace() -> Namespace {
+        StormscribeData::get_snapshot()
+            .await
+            .namespaces
+            .root
+            .clone()
     }
 
-    pub fn get_pages() -> HashMap<PageUuid, Page> {
-        DATA_LOCK.pages.pages.clone()
+    pub async fn get_all_pages() -> HashMap<PageUuid, Page> {
+        StormscribeData::get_snapshot().await.pages.pages.clone()
+    }
+
+    pub async fn get_page_data(page_path: String) -> PageData {
+        let data = Self::get_snapshot().await;
+        let page = data
+            .namespaces
+            .get_page_uuid(&page_path)
+            .map(|page_uuid| data.pages.get_page(&page_uuid))
+            .flatten();
+
+        let content = if let Some(page) = page.cloned() {
+            page.read_content().await.ok()
+        } else {
+            None
+        };
+
+        PageData {
+            path: page_path,
+            metadata: page.cloned(),
+            content: content.unwrap_or(String::new()),
+        }
     }
 }
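
Note: data_snapshot is a Mutex<Arc<DataSnapshot>>, so readers hold the lock only long enough to clone the Arc and then read an immutable snapshot outside the lock; a writer can later build a fresh DataSnapshot and swap it in without blocking readers mid-request. A standalone sketch of that pattern (names are illustrative, not from this commit):

    // Readers clone the Arc under a short-lived lock; a writer swaps in a new snapshot.
    use std::sync::Arc;
    use tokio::sync::Mutex;

    struct Snapshot {
        revision: u64,
    }

    struct Shared {
        current: Mutex<Arc<Snapshot>>,
    }

    impl Shared {
        async fn read(&self) -> Arc<Snapshot> {
            // The lock is released as soon as the clone completes.
            self.current.lock().await.clone()
        }

        async fn publish(&self, next: Snapshot) {
            *self.current.lock().await = Arc::new(next);
        }
    }
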
diff --git a/src/data/namespace.rs b/src/data/namespace.rs
index 714ab37..4aa0419 100644
--- a/src/data/namespace.rs
+++ b/src/data/namespace.rs
@@ -66,8 +66,13 @@ impl Namespaces {
         Ok(Self { root })
     }
 
-    pub fn get_page_uuid(&self, path: String) -> Option<PageUuid> {
-        todo!()
+    pub fn get_page_uuid(&self, path: &String) -> Option<PageUuid> {
+        let mut current_namespace = &self.root;
+        for segment in path.trim_matches('/').split('/') {
+            current_namespace = current_namespace.children.get(segment)?;
+        }
+
+        current_namespace.page.clone()
     }
 
     pub fn remove_page(&self, path: String) -> Result<(), String> {
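
Note: get_page_uuid trims leading and trailing slashes, then walks the namespace tree one segment at a time, with the `?` inside the loop short-circuiting to None at the first missing child. The same descent can be written as a fold; a sketch assuming the Namespace fields used above (children, page), not part of the commit:

    // Sketch only: equivalent lookup expressed with try_fold.
    fn lookup(root: &Namespace, path: &str) -> Option<PageUuid> {
        path.trim_matches('/')
            .split('/')
            .try_fold(root, |ns, segment| ns.children.get(segment))?
            .page
            .clone()
    }
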
diff --git a/src/data/page.rs b/src/data/page.rs
index 6d0b802..7b7d432 100644
--- a/src/data/page.rs
+++ b/src/data/page.rs
@@ -2,7 +2,7 @@ use std::{
     collections::HashMap,
     fs::{self, File},
     io::{BufRead, BufReader},
-    path::Path,
+    path::{Path, PathBuf},
 };
 
 use chrono::{DateTime, Utc};
@@ -18,6 +18,7 @@ pub struct Page {
     pub title: String,
     pub current_version: DateTime<Utc>,
     pub prev_versions: Vec<DateTime<Utc>>,
+    disk_path: PathBuf,
     content_offset: usize,
 }
 
@@ -129,12 +130,13 @@ impl Pages {
             title: metadata.title,
             current_version,
             prev_versions,
+            disk_path: current_page,
             content_offset,
         })
     }
 
-    pub fn get_page(&self, uuid: PageUuid) -> Option<Page> {
-        todo!()
+    pub fn get_page(&self, uuid: &PageUuid) -> Option<&Page> {
+        self.pages.get(uuid)
     }
 
     pub fn create_page(&self, page: Page) {
@@ -149,3 +151,33 @@ impl Pages {
         todo!()
     }
 }
+
+#[cfg(feature = "ssr")]
+impl Page {
+    pub async fn read_content(&self) -> Result<String, String> {
+        use std::io::Read;
+
+        let file_meta =
+            fs::metadata(&self.disk_path).map_err(|_| "Cannot retrieve file size".to_string())?;
+        let read_length = usize::try_from(file_meta.len())
+            .map_err(|_| "Cannot get file offset".to_string())?
+            - self.content_offset;
+
+        let mut reader = BufReader::new(
+            File::open(&self.disk_path).map_err(|_| "Could not open page file".to_string())?,
+        );
+
+        reader
+            .seek_relative(
+                i64::try_from(self.content_offset)
+                    .map_err(|_| "Invalid seek length".to_string())?,
+            )
+            .map_err(|_| "Could not seek in page file".to_string())?;
+
+        let mut contents = String::with_capacity(read_length);
+        reader
+            .read_to_string(&mut contents)
+            .map_err(|_| "Could not read file".to_string())?;
+        Ok(contents)
+    }
+}
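
Note: read_content uses the file length minus content_offset only to pre-size the output String; the actual skip happens via seek_relative, which advances past the metadata header before read_to_string pulls in the page body. The same skip-a-fixed-header pattern in isolation (path and offset are illustrative, not from this commit):

    // Sketch only: skip `offset` bytes, read the remainder as UTF-8.
    use std::fs::File;
    use std::io::{BufReader, Read};

    fn read_after_offset(path: &str, offset: usize) -> std::io::Result<String> {
        let mut reader = BufReader::new(File::open(path)?);
        // From position 0 this simply skips the header; seek_relative keeps the
        // reader's internal buffer when the target is already buffered.
        reader.seek_relative(i64::try_from(offset).expect("offset fits in i64"))?;
        let mut contents = String::new();
        reader.read_to_string(&mut contents)?;
        Ok(contents)
    }
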