summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--  src/actions/page.rs              2
-rw-r--r--  src/components/app.rs           51
-rw-r--r--  src/components/editor/mod.rs    13
-rw-r--r--  src/components/renderer/mod.rs  27
-rw-r--r--  src/data/content.rs            174
-rw-r--r--  src/lib.rs                       3
-rw-r--r--  src/main.rs                     17
7 files changed, 242 insertions, 45 deletions
diff --git a/src/actions/page.rs b/src/actions/page.rs
new file mode 100644
index 0000000..c51e6bc
--- /dev/null
+++ b/src/actions/page.rs
@@ -0,0 +1,2 @@
+#[server]
+pub async fn get_page_content()
diff --git a/src/components/app.rs b/src/components/app.rs
index d93a870..5033359 100644
--- a/src/components/app.rs
+++ b/src/components/app.rs
@@ -1,10 +1,16 @@
 use leptos::prelude::*;
+use leptos::Params;
+use leptos_router::hooks::use_params;
+use leptos_router::hooks::use_query;
+use leptos_router::params::Params;
 use leptos_meta::{provide_meta_context, MetaTags, Stylesheet, Title};
 use leptos_router::{
-    components::{ParentRoute, Route, Router, Routes}, path, StaticSegment
+    components::{ParentRoute, Route, Router, Routes}, path
 };
 
 use crate::components::layout::Layout;
+use super::renderer::WikiPage;
+use super::editor::WikiEditor;
 
 pub fn shell(options: LeptosOptions) -> impl IntoView {
     view! {
@@ -32,7 +38,7 @@ pub fn App() -> impl IntoView {
     view! {
         // injects a stylesheet into the document <head>
         // id=leptos means cargo-leptos will hot-reload this stylesheet
-        <Stylesheet id="leptos" href="/pkg/stormscribe.css"/>
+        <Stylesheet id="leptos" href="/_/stormscribe.css"/>
 
         // sets the document title
         <Title text="Welcome to Leptos"/>
@@ -41,18 +47,49 @@ pub fn App() -> impl IntoView {
         <Router>
             <Routes fallback=|| "Page not found.".into_view()>
                 <ParentRoute path=path!("/") view=Layout>
-                    <Route path=path!("/") view=HomePage/>
+                    <Route path=path!("/~/*path") view=PageRender/>
+                    <Route path=path!("/edit/*path") view=PageEdit/>
                 </ParentRoute>
             </Routes>
         </Router>
     }
 }
 
-/// Renders the home page of your application.
+#[derive(Params, PartialEq)]
+struct PageParams {
+    path: Option<String>
+}
+
+// Renders a page
+#[component]
+fn PageRender() -> impl IntoView {
+    let params = use_params::<PageParams>();
+
+    let page_path = params.read()
+        .as_ref()
+        .ok()
+        .map(|params| params.path.clone()
+             .unwrap_or("Unknown path".to_string()))
+        .unwrap_or("Could not read params".to_string());
+
+    view! {
+        <WikiPage url_path=page_path />
+    }.into_any()
+}
+
+// Renders a page
 #[component]
-fn HomePage() -> impl IntoView {
+fn PageEdit() -> impl IntoView {
+    let params = use_params::<PageParams>();
+
+    let page_path = params.read()
+        .as_ref()
+        .ok()
+        .map(|params| params.path.clone()
+             .unwrap_or("Unknown path".to_string()))
+        .unwrap_or("Could not read params".to_string());
+
     view! {
-        <h1>Article</h1>
-        <p>Article content here we guess?</p>
+        <WikiEditor url_path=page_path />
     }
 }
diff --git a/src/components/editor/mod.rs b/src/components/editor/mod.rs
index e69de29..05e45d3 100644
--- a/src/components/editor/mod.rs
+++ b/src/components/editor/mod.rs
@@ -0,0 +1,13 @@
+use leptos::prelude::*;
+use leptos::{island, view, IntoView};
+
+#[island]
+pub fn WikiEditor(
+    url_path: String,
+) -> impl IntoView {
+    view! {
+        <h1>Article (Editing)</h1>
+        <p>Page render</p>
+        <pre>{url_path}</pre>
+    }
+}
diff --git a/src/components/renderer/mod.rs b/src/components/renderer/mod.rs
index e69de29..6bc029a 100644
--- a/src/components/renderer/mod.rs
+++ b/src/components/renderer/mod.rs
@@ -0,0 +1,27 @@
+use leptos::prelude::*;
+use leptos::{component, view, IntoView};
+
+// use crate::data::content::ContentController;
+
+#[component]
+pub fn WikiPage(
+    url_path: String,
+) -> impl IntoView {
+    // let content_controller = use_context::<ContentController>().unwrap();
+
+    // let _snapshot = Resource::new(
+    //     move || (url_path.clone(), content_controller.clone()),
+    //     |url_path: String| {
+    //         let page_path = url_path.clone();
+    //
+    //         async move {
+    //             let content = content_controller.get_snapshot().await;
+    //             let page = content.page_paths.get(&page_path);
+    //         }
+    //     });
+
+    view! {
+        <h1>Article (Viewing)</h1>
+        <p>Page render</p>
+    }
+}
diff --git a/src/data/content.rs b/src/data/content.rs
index ecd985f..a628be0 100644
--- a/src/data/content.rs
+++ b/src/data/content.rs
@@ -1,9 +1,12 @@
-use std::fs;
+use std::fs::File;
 use std::collections::HashMap;
+use std::io::{BufRead, BufReader};
 use std::path::{PathBuf, Path};
-use std::sync::{Arc,RwLock};
+use std::sync::Arc;
+use tokio::sync::RwLock;
 use chrono::{DateTime, Utc};
-use futures::{FutureExt, TryStreamExt};
+use leptos::prelude::StorageAccess;
+use serde::Deserialize;
 use uuid::Uuid;
 use fs2::FileExt;
 use tokio::runtime;
@@ -17,31 +20,33 @@ struct NamespaceUuid(Uuid);
 #[derive(Hash, PartialEq, Eq, Clone)]
 struct MediaUuid(Uuid);
 
-struct ContentSnapshot {
-    pages: HashMap<PageUuid, Page>,
-    namespaces: HashMap<NamespaceUuid, Namespace>,
+pub struct ContentSnapshot {
+    pub pages: HashMap<PageUuid, Page>,
+    pub namespaces: HashMap<NamespaceUuid, Namespace>,
     media: HashMap<MediaUuid, Media>,
 
-    namespace_path: HashMap<String, NamespaceUuid>,
-    page_path: HashMap<String, PageUuid>,
-    media_path: HashMap<String, MediaUuid>,
+    pub namespace_paths: HashMap<String, NamespaceUuid>,
+    pub page_paths: HashMap<String, PageUuid>,
+    media_paths: HashMap<String, MediaUuid>,
 
-    render_cache: HashMap<PageUuid, String>,
+    pub render_cache: HashMap<PageUuid, String>,
 }
 
-struct Page {
-    uuid: PageUuid,
-    title: String,
-    namespace: NamespaceUuid,
-    slug: String,
-    current_version: DateTime<Utc>,
-    prev_version: DateTime<Utc>,
+pub struct Page {
+    pub uuid: PageUuid,
+    pub namespace: NamespaceUuid,
+    pub author: Uuid,
+    pub title: String,
+    pub slug: String,
+    pub current_version: DateTime<Utc>,
+    pub prev_versions: Vec<DateTime<Utc>>,
+    content_offset: usize,
 }
 
-struct Namespace {
-    uuid: NamespaceUuid,
-    path: String,
-    pages: Vec<PageUuid>,
+pub struct Namespace {
+    pub uuid: NamespaceUuid,
+    pub path: String,
+    pub pages: Vec<PageUuid>,
 }
 
 struct Media {
@@ -53,15 +58,17 @@ struct Media {
     used_on: Vec<PageUuid>,
 }
 
-struct ContentController {
-    snapshot: RwLock<Box<Arc<ContentSnapshot>>>,
-    lock: fs::File,
+#[derive(Clone)]
+pub struct ContentController {
+    snapshot: Arc<RwLock<Box<Arc<ContentSnapshot>>>>,
+    lock: Arc<File>,
 }
 
+#[cfg(feature = "ssr")]
 impl ContentController {
     pub fn init(data_dir: PathBuf) -> Result<Self, String> {
         let lock_path = Path::join(&data_dir, ".lock");
-        let lockfile = fs::OpenOptions::new()
+        let lockfile = std::fs::OpenOptions::new()
             .read(true).write(true).create(true)
             .open(&lock_path)
             .map_err(|_| "Could not open data directory".to_string())?;
@@ -77,19 +84,19 @@ impl ContentController {
         let snapshot = runtime.block_on(Self::read_data(&data_dir))?;
 
         Ok(Self {
-            lock: lockfile,
-            snapshot: RwLock::new(Box::new(Arc::new(snapshot))),
+            lock: Arc::new(lockfile),
+            snapshot: Arc::new(RwLock::new(Box::new(Arc::new(snapshot)))),
         })
     }
 
     async fn read_data(data_dir: &PathBuf) -> Result<ContentSnapshot, String> {
         use tokio::fs;
 
-        let page_slugs = Arc::new(tokio::sync::Mutex::new(HashMap::<PageUuid, String>::new()));
+        let pagedata_cache = Arc::new(tokio::sync::Mutex::new(HashMap::<PageUuid, (String, NamespaceUuid)>::new()));
 
         let namespace_names_dir = Path::join(&data_dir, "namespaces/names");
         let namespace_ids_dir = Path::join(&data_dir, "namespaces/id");
-        let namespace_future = fs::read_dir(&namespace_names_dir).await
+        let namespaces = fs::read_dir(&namespace_names_dir).await
             .map_err(|_| "Could not open namespace directory".to_string())
             .map(|dir_entries| { ReadDirStream::new(dir_entries) })?
             .filter_map(async |dir_entry| -> Option<Namespace> {
@@ -122,7 +129,7 @@ impl ContentController {
                         let page_uuid = PageUuid(Uuid::try_parse(&page_uuid.to_str()?).ok()?);
                         let page_slug = page_path.file_name()?.to_str()?.to_string();
 
-                        page_slugs.lock().await.insert(page_uuid.clone(), page_slug);
+                        pagedata_cache.lock().await.insert(page_uuid.clone(), (page_slug, namespace_uuid.clone()));
 
                         Some(page_uuid)
                     }).collect::<Vec<PageUuid>>().await;
@@ -134,16 +141,117 @@ impl ContentController {
                 })
             }).collect::<Vec<Namespace>>().await;
 
+        let (namespaces_by_id, namespace_paths): (HashMap<_,_>, HashMap<_,_>) =
+            namespaces.into_iter()
+            .map(|namespace| {
+                let namespace_uuid = namespace.uuid.clone();
+                let namespace_path = namespace.path.clone();
+                (
+                    (namespace_uuid.clone(), namespace),
+                    (namespace_path, namespace_uuid)
+                )
+            })
+            .unzip();
+
         let pages_dir = Path::join(&data_dir, "pages/id");
-        let page_future = fs::read_dir(&pages_dir).await
+        let pages = fs::read_dir(&pages_dir).await
             .map_err(|_| "Could not open pages data directory".to_string())
             .map(|dir_entries| { ReadDirStream::new(dir_entries) })?
             .filter_map(async |dir_entry| -> Option<Page> {
+                let page_dir_path = dir_entry.as_ref().ok()?.path();
+                let current_path = dir_entry.as_ref().ok()?
+                    .metadata().await.ok()?
+                    .is_dir()
+                    .then_some(
+                        fs::read_link(Path::join(&page_dir_path, "current")).await.ok()
+                    )??;
+                
+                Page::init_from_file(&current_path, pagedata_cache.lock().await.as_borrowed()).await
+            }).collect::<Vec<Page>>().await;
+
+        let (pages_by_id, page_paths): (HashMap<_,_>, HashMap<_,_>) =
+            pages.into_iter()
+            .filter_map(|page| {
+                let page_uuid = page.uuid.clone();
+                let namespace_path = &namespaces_by_id.get(&page.namespace)?.path;
+                let page_path = page.slug.clone();
 
-            });
+                Some((
+                    (page_uuid.clone(), page),
+                    (format!("{namespace_path}/{page_path}"), page_uuid)
+                ))
+            })
+            .unzip();
 
-        return Err("Unimplemented".to_string());
+        Ok(ContentSnapshot {
+            pages: pages_by_id,
+            namespaces: namespaces_by_id,
+            media: HashMap::new(),
+            namespace_paths,
+            page_paths,
+            media_paths: HashMap::new(),
+            render_cache: HashMap::new(),
+        })
+    }
 
+    pub async fn get_snapshot(&self) -> Arc<ContentSnapshot> {
+        self.snapshot.read().await.as_ref().clone()
     }
 
+    pub async fn replace_state(&self, updated: ContentSnapshot) {
+        todo!()
+    }
+}
+
+const METADATA_DIVIDER : &'static str = "<!-- trans rights ~ath&+ -->";
+
+impl Page {
+    async fn init_from_file(path: &PathBuf, pagedata_cache: &HashMap::<PageUuid, (String, NamespaceUuid)>) -> Option<Self> {
+        let mut reader = BufReader::new(File::open(path).ok()?);
+        let page_uuid = PageUuid(Uuid::try_parse(&path.parent()?.file_name()?.to_str()?).ok()?);
+        let (page_slug, namespace_uuid) = pagedata_cache.get(&page_uuid)?.as_borrowed();
+
+        let mut metadata_string = String::new();
+        let mut current_line = String::new();
+        let mut content_offset = 0;
+        while let Ok(size) = reader.read_line(&mut current_line) {
+            content_offset += size;
+            if size == 0 {
+                return None
+            }
+
+            if current_line == METADATA_DIVIDER {
+                break
+            }
+
+            metadata_string.push_str(&current_line);
+            current_line.truncate(0);
+        }
+
+        #[derive(Deserialize)]
+        struct PageMetadata {
+            title: String,
+            author: String,
+            prev_versions: Option<Vec<String>>,
+        }
+
+        let metadata : PageMetadata = toml::from_str(&metadata_string).ok()?;
+        let current_version = DateTime::parse_from_rfc3339(path.file_name()?.to_str()?.replace("_", ":").as_str()).ok()?.to_utc();
+        let prev_versions = metadata.prev_versions
+            .unwrap_or(Vec::new())
+            .iter()
+            .filter_map(|str| DateTime::parse_from_rfc3339(str.replace("_", ":").as_str()).ok().map(|timestamp| timestamp.to_utc()))
+            .collect::<Vec<_>>();
+
+        Some(Page {
+            uuid: page_uuid,
+            author: Uuid::try_parse(&metadata.author).ok()?,
+            title: metadata.title,
+            namespace: namespace_uuid.clone(),
+            slug: page_slug.clone(),
+            current_version,
+            prev_versions,
+            content_offset,
+        })
+    }
 }
diff --git a/src/lib.rs b/src/lib.rs
index d8d34cc..3ae130e 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,5 +1,8 @@
 pub mod components;
 
+#[cfg(feature = "ssr")]
+pub mod data;
+
 #[cfg(feature = "hydrate")]
 #[wasm_bindgen::prelude::wasm_bindgen]
 pub fn hydrate() {
diff --git a/src/main.rs b/src/main.rs
index a9e3f1d..13512c5 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,15 +1,13 @@
-pub mod data;
-
-use tokio::signal;
 
 #[cfg(feature = "ssr")]
 #[tokio::main]
 async fn main() {
-    use axum::Router;
+    use axum::{response::Redirect, routing::get, Router};
     use leptos::logging::log;
     use leptos::prelude::*;
     use leptos_axum::{generate_route_list, LeptosRoutes};
     use stormscribe::components::app::*;
+    // use stormscribe::data::content::ContentController;
 
     let conf = get_configuration(None).unwrap();
     let addr = conf.leptos_options.site_addr;
@@ -17,8 +15,16 @@ async fn main() {
     // Generate the list of routes in your Leptos App
     let routes = generate_route_list(App);
 
+    // let data_dir = std::path::Path::join(&std::env::current_dir().unwrap(), "data");
+    // let content_controller = ContentController::init(data_dir).unwrap();
+
     let app = Router::new()
-        .leptos_routes(&leptos_options, routes, {
+        .route("/", get(|| async {
+            Redirect::temporary("/~/")
+        }))
+        .leptos_routes_with_context(&leptos_options, routes, move || {
+            // provide_context(content_controller.clone());
+        }, {
             let leptos_options = leptos_options.clone();
             move || shell(leptos_options.clone())
         })
@@ -37,6 +43,7 @@ async fn main() {
 
 #[cfg(feature = "ssr")]
 async fn shutdown_signal() {
+    use tokio::signal;
     let ctrl_c = async {
         signal::ctrl_c()
             .await