From b856f12cf422b96c37c12df3d7829e4d15ef4453 Mon Sep 17 00:00:00 2001
From: tempest
Date: Tue, 15 Apr 2025 00:08:12 -0600
Subject: Can find content data
---
Cargo.lock | 2 +
Cargo.toml | 4 +-
notes/data.md | 8 +-
src/actions/mod.rs | 1 -
src/actions/page.rs | 12 --
src/components/app.rs | 66 +++++++----
src/components/renderer/mod.rs | 7 +-
src/data/config.rs | 49 +++++---
src/data/content.rs | 262 -----------------------------------------
src/data/mod.rs | 73 +++++++++++-
src/data/namespace.rs | 96 +++++++++++++++
src/data/page.rs | 151 ++++++++++++++++++++++++
src/lib.rs | 1 -
src/main.rs | 13 +-
14 files changed, 406 insertions(+), 339 deletions(-)
delete mode 100644 src/actions/mod.rs
delete mode 100644 src/actions/page.rs
delete mode 100644 src/data/content.rs
create mode 100644 src/data/namespace.rs
create mode 100644 src/data/page.rs
diff --git a/Cargo.lock b/Cargo.lock
index 2502c04..706d746 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -243,6 +243,7 @@ dependencies = [
"iana-time-zone",
"js-sys",
"num-traits",
+ "serde",
"wasm-bindgen",
"windows-link",
]
@@ -2375,6 +2376,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
dependencies = [
"getrandom 0.3.2",
+ "serde",
]
[[package]]
diff --git a/Cargo.toml b/Cargo.toml
index b0fe882..3d60e7c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -19,8 +19,8 @@ wasm-bindgen = { version = "=0.2.100", optional = true }
serde = { version = "^1.0.219", features = ["derive"] }
stylance = { version = "0.5.5" }
toml = { version = "0.8.20", features = ["parse"], optional = true }
-uuid = { version = "1.16.0" }
-chrono = { version = "0.4.40" }
+uuid = { version = "1.16.0", features = ["serde"] }
+chrono = { version = "0.4.40", features = ["serde"] }
fs2 = { version = "0.4.3", optional = true}
futures = { version = "0.3.31", optional = true}
diff --git a/notes/data.md b/notes/data.md
index c3e9874..5551133 100644
--- a/notes/data.md
+++ b/notes/data.md
@@ -16,13 +16,9 @@
│ │
│ └─ id/[uuid]
│
-├─ namespaces/
+├─ namespace/
│ │
-│ ├─ names/[name] (symlink)
-│ │
-│ └─ id/[uuid]/
-│ │
-│ └─ pages/[name] (symlink)
+│ └─ [...path]/_page (symlink)
│
├─ pages/[uuid]/
│ │
diff --git a/src/actions/mod.rs b/src/actions/mod.rs
deleted file mode 100644
index 79d2861..0000000
--- a/src/actions/mod.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod page;
diff --git a/src/actions/page.rs b/src/actions/page.rs
deleted file mode 100644
index 8d204f5..0000000
--- a/src/actions/page.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-use leptos::prelude::*;
-
-#[server]
-pub async fn get_page_content(url_path: String) -> Result<(), ServerFnError> {
- use crate::data::content::ContentController;
- let content_controller = expect_context::<ContentController>();
- let content_snapshot = content_controller.get_snapshot().await;
- let page_uuid = content_snapshot.page_paths.get(&url_path);
- let page = content_snapshot.pages.get(&page_uuid.unwrap());
-
- return Ok(())
-}
diff --git a/src/components/app.rs b/src/components/app.rs
index a7ac5bd..dd2d018 100644
--- a/src/components/app.rs
+++ b/src/components/app.rs
@@ -1,16 +1,21 @@
+use std::collections::HashMap;
+
use leptos::prelude::*;
use leptos::Params;
+use leptos_meta::{provide_meta_context, MetaTags, Stylesheet, Title};
use leptos_router::hooks::use_params;
-use leptos_router::hooks::use_query;
use leptos_router::params::Params;
-use leptos_meta::{provide_meta_context, MetaTags, Stylesheet, Title};
use leptos_router::{
- components::{ParentRoute, Route, Router, Routes}, path
+ components::{ParentRoute, Route, Router, Routes},
+ path,
};
-use crate::components::layout::Layout;
-use super::renderer::WikiPage;
use super::editor::WikiEditor;
+use super::renderer::WikiPage;
+use crate::components::layout::Layout;
+use crate::data::Namespace;
+use crate::data::Page;
+use crate::data::PageUuid;
pub fn shell(options: LeptosOptions) -> impl IntoView {
view! {
@@ -57,41 +62,56 @@ pub fn App() -> impl IntoView {
#[derive(Params, PartialEq)]
struct PageParams {
- path: Option<String>
+ path: Option<String>,
+}
+
+#[server]
+async fn get_namespace() -> Result<Namespace, ServerFnError> {
+ use crate::data::StormscribeData;
+
+ Ok(StormscribeData::get_namespace())
+}
+
+#[server]
+async fn get_pages() -> Result<HashMap<PageUuid, Page>, ServerFnError> {
+ use crate::data::StormscribeData;
+
+ Ok(StormscribeData::get_pages())
}
// Renders a page
#[component]
fn PageRender() -> impl IntoView {
- use crate::actions::page::get_page_content;
-
let params = use_params::<PageParams>();
- let page_path = params.read()
+ let page_path = params
+ .read()
.as_ref()
.ok()
- .map(|params| params.path.clone()
- .unwrap_or("Unknown path".to_string()))
+ .map(|params| params.path.clone().unwrap_or("Unknown path".to_string()))
.unwrap_or("Could not read params".to_string());
- let page_data = Resource::new(
- move || page_path.clone(),
- |page_path| get_page_content(page_path)
- );
+ let namespace = Resource::new(move || {}, |_| get_namespace());
+ let page_resource = Resource::new(move || {}, |_| get_pages());
view! {
"Loading..."
}
>
{move || Suspend::new(async move {
- let data = page_data.await;
- match data {
- Ok(_) => view! {Loaded
}.into_any(),
- Err(_) => view! {Error
}.into_any(),
+ let name_data = namespace.await;
+ let page_data = page_resource.await;
+ match (name_data, page_data) {
+ (Ok(names), Ok(pages)) => view! {
+ {format!("{names:#?}")}
+ {format!("{pages:#?}")}
+ }.into_any(),
+ _ => view! {Error
}.into_any(),
}
})}
- }.into_any()
+ }
+ .into_any()
}
// Renders a page
@@ -99,11 +119,11 @@ fn PageRender() -> impl IntoView {
fn PageEdit() -> impl IntoView {
let params = use_params::<PageParams>();
- let page_path = params.read()
+ let page_path = params
+ .read()
.as_ref()
.ok()
- .map(|params| params.path.clone()
- .unwrap_or("Unknown path".to_string()))
+ .map(|params| params.path.clone().unwrap_or("Unknown path".to_string()))
.unwrap_or("Could not read params".to_string());
view! {
diff --git a/src/components/renderer/mod.rs b/src/components/renderer/mod.rs
index 68aac9c..5a30c2e 100644
--- a/src/components/renderer/mod.rs
+++ b/src/components/renderer/mod.rs
@@ -1,13 +1,10 @@
use leptos::prelude::*;
use leptos::{component, view, IntoView};
-use crate::data::content::{Page,Namespace};
#[component]
-pub fn WikiPage(
- page_data: Page,
- parent_namespaces: Vec<Namespace>,
+pub fn WikiPage(// page_data: Page,
+ // parent_namespaces: Vec<Namespace>
) -> impl IntoView {
-
view! {
Article (Viewing)
Page render
diff --git a/src/data/config.rs b/src/data/config.rs
index 9a17077..11e10cc 100644
--- a/src/data/config.rs
+++ b/src/data/config.rs
@@ -13,34 +13,55 @@ struct ConfigFile {
}
pub struct Config {
- site_title: String,
- data_dir: PathBuf,
- external_root: String,
- listen_port: u16,
- footer_copyright: Option<String>
+ pub site_title: String,
+ pub data_dir: PathBuf,
+ pub external_root: String,
+ pub listen_port: u16,
+ pub footer_copyright: Option<String>,
}
#[cfg(feature = "ssr")]
impl Config {
- pub fn read_from_file<P: AsRef<Path>>(path: P) -> Result<Self, String> {
- let config_contents = fs::read_to_string(&path)
+ pub fn read_from_file() -> Result<Self, String> {
+ let config_path = Self::get_location()?;
+ let config_contents = fs::read_to_string(&config_path)
.map_err(|_| "Unable to open config file".to_string())?;
- let file : ConfigFile = toml::from_str(&config_contents)
- .map_err(|err| err.to_string())?;
+ let file: ConfigFile = toml::from_str(&config_contents).map_err(|err| err.to_string())?;
let port = file.listen_port.unwrap_or(3000);
Ok(Self {
- site_title: file.site_title
+ site_title: file
+ .site_title
.unwrap_or("Untitled StormScribe Site".to_string()),
- data_dir: file.data_dir.unwrap_or(path.as_ref()
- .canonicalize().map_err(|_| "Cannot resolve config file location".to_string())?
- .parent().ok_or("Cannot resolve data dir".to_string())?.to_path_buf()),
- external_root: file.external_root
+ data_dir: file.data_dir.unwrap_or(
+ PathBuf::from(&config_path)
+ .canonicalize()
+ .map_err(|_| "Cannot resolve config file location".to_string())?
+ .parent()
+ .ok_or("Cannot resolve data dir".to_string())?
+ .to_path_buf(),
+ ),
+ external_root: file
+ .external_root
.unwrap_or(format!("http://localhost:{port}/")),
listen_port: port,
footer_copyright: file.footer_copyright,
})
}
+
+ fn get_location() -> Result<String, String> {
+ Ok(
+ std::env::var("STORMSCRIBE_CONFIG_FILE").or_else(|_| -> Result<String, String> {
+ Ok(std::path::Path::join(
+ &std::env::current_dir()
+ .map_err(|_| "Could not read current directory".to_string())?,
+ "config.toml",
+ )
+ .to_string_lossy()
+ .to_string())
+ })?,
+ )
+ }
}
diff --git a/src/data/content.rs b/src/data/content.rs
deleted file mode 100644
index 4a39967..0000000
--- a/src/data/content.rs
+++ /dev/null
@@ -1,262 +0,0 @@
-#[cfg(feature="ssr")]
-use std::fs::File;
-use std::collections::HashMap;
-#[cfg(feature="ssr")]
-use std::io::{BufRead, BufReader};
-use std::path::{PathBuf, Path};
-use std::sync::Arc;
-#[cfg(feature="ssr")]
-use tokio::sync::RwLock;
-use chrono::{DateTime, Utc};
-use leptos::prelude::StorageAccess;
-use serde::Deserialize;
-use uuid::Uuid;
-#[cfg(feature="ssr")]
-use fs2::FileExt;
-#[cfg(feature="ssr")]
-use tokio::runtime;
-#[cfg(feature="ssr")]
-use tokio_stream::wrappers::ReadDirStream;
-#[cfg(feature="ssr")]
-use futures::stream::StreamExt;
-
-#[derive(Hash, PartialEq, Eq, Clone)]
-pub struct PageUuid(Uuid);
-#[derive(Hash, PartialEq, Eq, Clone)]
-pub struct NamespaceUuid(Uuid);
-#[derive(Hash, PartialEq, Eq, Clone)]
-pub struct MediaUuid(Uuid);
-
-pub struct ContentSnapshot {
- pub pages: HashMap<PageUuid, Page>,
- pub namespaces: HashMap<NamespaceUuid, Namespace>,
- media: HashMap<MediaUuid, Media>,
-
- pub namespace_paths: HashMap<String, NamespaceUuid>,
- pub page_paths: HashMap<String, PageUuid>,
- media_paths: HashMap<String, MediaUuid>,
-
- pub render_cache: HashMap<PageUuid, String>,
-}
-
-pub struct Page {
- pub uuid: PageUuid,
- pub namespace: NamespaceUuid,
- pub author: Uuid,
- pub title: String,
- pub slug: String,
- pub current_version: DateTime<Utc>,
- pub prev_versions: Vec<DateTime<Utc>>,
- content_offset: usize,
-}
-
-pub struct Namespace {
- pub uuid: NamespaceUuid,
- pub path: String,
- pub pages: Vec,
-}
-
-struct Media {
- uuid: MediaUuid,
- filename: String,
- mime_type: String,
- uploaded_by: Uuid,
- uploaded_on: Uuid,
- used_on: Vec<PageUuid>,
-}
-
-#[cfg(feature="ssr")]
-#[derive(Clone)]
-pub struct ContentController {
- snapshot: Arc<RwLock<Box<Arc<ContentSnapshot>>>>,
- lock: Arc<File>,
-}
-
-#[cfg(feature = "ssr")]
-impl ContentController {
- pub async fn init(data_dir: PathBuf) -> Result<Self, String> {
- let lock_path = Path::join(&data_dir, ".lock");
- let lockfile = std::fs::OpenOptions::new()
- .read(true).write(true).create(true)
- .open(&lock_path)
- .map_err(|_| "Could not open data directory".to_string())?;
-
- lockfile.try_lock_exclusive()
- .map_err(|_| "Could not lock data directory".to_string())?;
-
- // Read the things
- let snapshot = Self::read_data(&data_dir).await?;
-
- Ok(Self {
- lock: Arc::new(lockfile),
- snapshot: Arc::new(RwLock::new(Box::new(Arc::new(snapshot)))),
- })
- }
-
- async fn read_data(data_dir: &PathBuf) -> Result<ContentSnapshot, String> {
- use tokio::fs;
-
- let pagedata_cache = Arc::new(tokio::sync::Mutex::new(HashMap::<PageUuid, (String, NamespaceUuid)>::new()));
-
- let namespace_names_dir = Path::join(&data_dir, "namespaces/names");
- let namespace_ids_dir = Path::join(&data_dir, "namespaces/id");
- let namespaces = fs::read_dir(&namespace_names_dir).await
- .map_err(|_| "Could not open namespace directory".to_string())
- .map(|dir_entries| { ReadDirStream::new(dir_entries) })?
- .filter_map(async |dir_entry| -> Option<Namespace> {
- let link_path = dir_entry.as_ref().ok()?.path();
- let target_path = dir_entry.as_ref().ok()?
- .metadata().await.ok()?
- .is_symlink()
- .then_some(
- fs::read_link(link_path).await.ok()
- )??;
-
- let last_segment = target_path.file_name()?;
- target_path.parent()?
- .eq(&namespace_ids_dir).then_some(())?;
-
- let namespace_name = dir_entry.as_ref().ok()?.file_name().to_str()?.to_string();
- let namespace_uuid = NamespaceUuid(Uuid::try_parse(last_segment.to_str()?).ok()?);
-
- let namespace_pages = fs::read_dir(Path::join(&namespace_ids_dir, last_segment).join("pages")).await.ok()?;
- let namespace_page_uuids = ReadDirStream::new(namespace_pages)
- .filter_map(async |dir_entry| -> Option<PageUuid> {
- let page_path = dir_entry.as_ref().ok()?.path();
- let page_uuid = dir_entry.as_ref().ok()?
- .metadata().await.ok()?
- .is_symlink()
- .then_some(
- fs::read_link(&page_path).await.ok()
- )??;
-
- let page_uuid = PageUuid(Uuid::try_parse(&page_uuid.to_str()?).ok()?);
- let page_slug = page_path.file_name()?.to_str()?.to_string();
-
- pagedata_cache.lock().await.insert(page_uuid.clone(), (page_slug, namespace_uuid.clone()));
-
- Some(page_uuid)
- }).collect::<Vec<_>>().await;
-
- Some(Namespace {
- uuid: namespace_uuid,
- path: namespace_name,
- pages: namespace_page_uuids,
- })
- }).collect::<Vec<_>>().await;
-
- let (namespaces_by_id, namespace_paths): (HashMap<_,_>, HashMap<_,_>) =
- namespaces.into_iter()
- .map(|namespace| {
- let namespace_uuid = namespace.uuid.clone();
- let namespace_path = namespace.path.clone();
- (
- (namespace_uuid.clone(), namespace),
- (namespace_path, namespace_uuid)
- )
- })
- .unzip();
-
- let pages_dir = Path::join(&data_dir, "pages");
- let pages = fs::read_dir(&pages_dir).await
- .map_err(|_| "Could not open pages data directory".to_string())
- .map(|dir_entries| { ReadDirStream::new(dir_entries) })?
- .filter_map(async |dir_entry| -> Option<Page> {
- let page_dir_path = dir_entry.as_ref().ok()?.path();
- let current_path = dir_entry.as_ref().ok()?
- .metadata().await.ok()?
- .is_dir()
- .then_some(
- fs::read_link(Path::join(&page_dir_path, "current")).await.ok()
- )??;
-
- Page::init_from_file(¤t_path, pagedata_cache.lock().await.as_borrowed()).await
- }).collect::<Vec<_>>().await;
-
- let (pages_by_id, page_paths): (HashMap<_,_>, HashMap<_,_>) =
- pages.into_iter()
- .filter_map(|page| {
- let page_uuid = page.uuid.clone();
- let namespace_path = &namespaces_by_id.get(&page.namespace)?.path;
- let page_path = page.slug.clone();
-
- Some((
- (page_uuid.clone(), page),
- (format!("{namespace_path}/{page_path}"), page_uuid)
- ))
- })
- .unzip();
-
- Ok(ContentSnapshot {
- pages: pages_by_id,
- namespaces: namespaces_by_id,
- media: HashMap::new(),
- namespace_paths,
- page_paths,
- media_paths: HashMap::new(),
- render_cache: HashMap::new(),
- })
- }
-
- pub async fn get_snapshot(&self) -> Arc<ContentSnapshot> {
- self.snapshot.read().await.as_ref().clone()
- }
-
- pub async fn replace_state(&self, updated: ContentSnapshot) {
- todo!()
- }
-}
-
-const METADATA_DIVIDER : &'static str = "";
-
-#[cfg(feature = "ssr")]
-impl Page {
- async fn init_from_file(path: &PathBuf, pagedata_cache: &HashMap::<PageUuid, (String, NamespaceUuid)>) -> Option<Self> {
- let mut reader = BufReader::new(File::open(path).ok()?);
- let page_uuid = PageUuid(Uuid::try_parse(&path.parent()?.file_name()?.to_str()?).ok()?);
- let (page_slug, namespace_uuid) = pagedata_cache.get(&page_uuid)?.as_borrowed();
-
- let mut metadata_string = String::new();
- let mut current_line = String::new();
- let mut content_offset = 0;
- while let Ok(size) = reader.read_line(&mut current_line) {
- content_offset += size;
- if size == 0 {
- return None
- }
-
- if current_line == METADATA_DIVIDER {
- break
- }
-
- metadata_string.push_str(¤t_line);
- current_line.truncate(0);
- }
-
- #[derive(Deserialize)]
- struct PageMetadata {
- title: String,
- author: String,
- prev_versions: Option<Vec<String>>,
- }
-
- let metadata : PageMetadata = toml::from_str(&metadata_string).ok()?;
- let current_version = DateTime::parse_from_rfc3339(path.file_name()?.to_str()?.replace("_", ":").as_str()).ok()?.to_utc();
- let prev_versions = metadata.prev_versions
- .unwrap_or(Vec::new())
- .iter()
- .filter_map(|str| DateTime::parse_from_rfc3339(str.replace("_", ":").as_str()).ok().map(|timestamp| timestamp.to_utc()))
- .collect::<Vec<_>>();
-
- Some(Page {
- uuid: page_uuid,
- author: Uuid::try_parse(&metadata.author).ok()?,
- title: metadata.title,
- namespace: namespace_uuid.clone(),
- slug: page_slug.clone(),
- current_version,
- prev_versions,
- content_offset,
- })
- }
-}
diff --git a/src/data/mod.rs b/src/data/mod.rs
index 4767914..0c61bd7 100644
--- a/src/data/mod.rs
+++ b/src/data/mod.rs
@@ -1,3 +1,72 @@
-pub mod config;
-pub mod content;
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+#[cfg(feature = "ssr")]
+use fs2::FileExt;
+#[cfg(feature = "ssr")]
+use std::fs::File;
+#[cfg(feature = "ssr")]
+use std::sync::LazyLock;
+
+use std::{collections::HashMap, path::Path};
+
+mod config;
+mod namespace;
+mod page;
+
+use config::Config;
+pub use namespace::{Namespace, Namespaces};
+pub use page::{Page, Pages};
+
+#[derive(Hash, PartialEq, Eq, Clone, Debug, Deserialize, Serialize)]
+pub struct PageUuid(Uuid);
+
+#[cfg(feature = "ssr")]
+pub static CONFIG: LazyLock<Config> =
+ LazyLock::new(|| Config::read_from_file().expect("Could not open config file"));
+
+#[cfg(feature = "ssr")]
+static DATA_LOCK: LazyLock<StormscribeData> = LazyLock::new(|| {
+ let config = &CONFIG;
+ let lock_path = Path::join(&config.data_dir, ".lock");
+ let lockfile = std::fs::OpenOptions::new()
+ .read(true)
+ .write(true)
+ .create(true)
+ .open(&lock_path)
+ .map_err(|_| "Could not open data directory".to_string())
+ .unwrap();
+
+ lockfile
+ .try_lock_exclusive()
+ .map_err(|_| "Could not lock data directory".to_string())
+ .unwrap();
+
+ StormscribeData {
+ file_lock: lockfile,
+ namespaces: Namespaces::init(&Path::join(&config.data_dir, "namespace/")).unwrap(),
+ pages: Pages::init(&Path::join(&config.data_dir, "pages/")).unwrap(),
+ }
+});
+
+#[cfg(feature = "ssr")]
+pub struct StormscribeData {
+ file_lock: File,
+ namespaces: Namespaces,
+ pages: Pages,
+}
+
+#[cfg(feature = "ssr")]
+impl StormscribeData {
+ fn get_data() -> &'static Self {
+ &DATA_LOCK
+ }
+
+ pub fn get_namespace() -> Namespace {
+ DATA_LOCK.namespaces.root.clone()
+ }
+
+ pub fn get_pages() -> HashMap<PageUuid, Page> {
+ DATA_LOCK.pages.pages.clone()
+ }
+}
diff --git a/src/data/namespace.rs b/src/data/namespace.rs
new file mode 100644
index 0000000..714ab37
--- /dev/null
+++ b/src/data/namespace.rs
@@ -0,0 +1,96 @@
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+use uuid::Uuid;
+
+use crate::data::PageUuid;
+#[cfg(feature = "ssr")]
+use std::{
+ fs,
+ path::{Path, PathBuf},
+};
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct Namespace {
+ pub page: Option<PageUuid>,
+ pub children: HashMap<String, Namespace>,
+}
+
+pub struct Namespaces {
+ pub root: Namespace,
+}
+
+impl Namespace {
+ pub fn new() -> Self {
+ Self {
+ page: None,
+ children: HashMap::new(),
+ }
+ }
+}
+
+#[cfg(feature = "ssr")]
+impl Namespaces {
+ pub fn init(namespaces_dir: &Path) -> Result<Self, String> {
+ // Read dir recursive
+
+ let mut paths = Vec::new();
+ Self::scan_dir(namespaces_dir, &mut paths);
+ let paths = paths
+ .into_iter()
+ .map(|path| PathBuf::from(path.strip_prefix(namespaces_dir).unwrap()))
+ .collect::<Vec<_>>();
+
+ // Build lookup
+ let mut root = Namespace::new();
+
+ for path in paths {
+ let mut current_node = &mut root;
+
+ for segment in path.iter() {
+ let segment = segment.to_string_lossy().to_string();
+ if segment == "_page" {
+ let link_target = namespaces_dir.join(&path).read_link().unwrap();
+ let uuid_string = link_target.file_name().unwrap().to_str().unwrap();
+ let page_uuid = PageUuid(Uuid::try_parse(uuid_string).unwrap());
+ current_node.page = Some(page_uuid);
+ } else {
+ current_node
+ .children
+ .insert(segment.clone(), Namespace::new());
+
+ current_node = current_node.children.get_mut(&segment).unwrap();
+ }
+ }
+ }
+
+ Ok(Self { root })
+ }
+
+ pub fn get_page_uuid(&self, path: String) -> Option<PageUuid> {
+ todo!()
+ }
+
+ pub fn remove_page(&self, path: String) -> Result<(), String> {
+ todo!()
+ }
+
+ pub fn add_page(&self, path: String, uuid: PageUuid) -> Result<(), String> {
+ todo!()
+ }
+
+ fn scan_dir(current_dir: &Path, out_vec: &mut Vec<PathBuf>) {
+ if !current_dir.is_dir() {
+ return;
+ }
+
+ for entry in fs::read_dir(current_dir).unwrap() {
+ let entry_path = entry.unwrap().path();
+
+ if entry_path.is_dir() && !entry_path.is_symlink() {
+ Self::scan_dir(&entry_path, out_vec);
+ } else {
+ out_vec.push(entry_path.into());
+ }
+ }
+ }
+}
diff --git a/src/data/page.rs b/src/data/page.rs
new file mode 100644
index 0000000..6d0b802
--- /dev/null
+++ b/src/data/page.rs
@@ -0,0 +1,151 @@
+use std::{
+ collections::HashMap,
+ fs::{self, File},
+ io::{BufRead, BufReader},
+ path::Path,
+};
+
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+use super::PageUuid;
+
+#[derive(Clone, Deserialize, Serialize, Debug)]
+pub struct Page {
+ pub uuid: PageUuid,
+ pub author: Uuid,
+ pub title: String,
+ pub current_version: DateTime<Utc>,
+ pub prev_versions: Vec<DateTime<Utc>>,
+ content_offset: usize,
+}
+
+pub struct Pages {
+ pub pages: HashMap<PageUuid, Page>,
+}
+
+const METADATA_DIVIDER: &'static str = "";
+
+#[cfg(feature = "ssr")]
+impl Pages {
+ pub fn init(pages_dir: &Path) -> Result<Self, String> {
+ // Read dir
+ let page_dirs = fs::read_dir(&pages_dir)
+ .map_err(|_| "Could not open pages data directory".to_string())?;
+
+ // Parse each
+ let pages = page_dirs
+ .map(|dir_entry| -> Result<Page, String> {
+ let page_dir_path = dir_entry.as_ref().unwrap().path();
+
+ Pages::read_page(&page_dir_path)
+ })
+ .collect::<Result<Vec<Page>, String>>()?;
+
+ // Build lookup
+ Ok(Self {
+ pages: pages
+ .into_iter()
+ .map(|page| (page.uuid.clone(), page))
+ .collect::<HashMap<_, _>>(),
+ })
+ }
+
+ fn read_page(page_dir: &Path) -> Result<Page, String> {
+ let current_page = page_dir
+ .join("current")
+ .canonicalize()
+ .map_err(|_| "Could not canonicalize page location".to_string())?;
+
+ let mut reader = BufReader::new(
+ File::open(¤t_page).map_err(|_| "Could not open page file".to_string())?,
+ );
+ let page_uuid = PageUuid(
+ Uuid::try_parse(
+ &page_dir
+ .file_name()
+ .ok_or("Could not read page directory".to_string())?
+ .to_str()
+ .unwrap(),
+ )
+ .map_err(|_| "Could not parse page UUID".to_string())?,
+ );
+
+ let mut metadata_string = String::new();
+ let mut current_line = String::new();
+ let mut content_offset = 0;
+ 'readloop: while let Ok(size) = reader.read_line(&mut current_line) {
+ content_offset += size;
+ if size == 0 {
+ return Err("Page file is invalid".to_string());
+ }
+
+ if current_line.trim() == METADATA_DIVIDER {
+ break 'readloop;
+ }
+
+ metadata_string.push_str(¤t_line);
+ current_line.truncate(0);
+ }
+
+ #[derive(Deserialize)]
+ struct PageMetadata {
+ title: String,
+ author: String,
+ prev_versions: Option<Vec<String>>,
+ }
+
+ let metadata: PageMetadata = toml::from_str(&metadata_string).map_err(|err| {
+ println!("{err:?}");
+ "Page metadata is invalid".to_string()
+ })?;
+ let current_version = DateTime::parse_from_rfc3339(
+ current_page
+ .file_name()
+ .unwrap()
+ .to_str()
+ .unwrap()
+ .replace("_", ":")
+ .as_str(),
+ )
+ .map_err(|_| "Invalid date format".to_string())?
+ .to_utc();
+ let prev_versions = metadata
+ .prev_versions
+ .unwrap_or(Vec::new())
+ .iter()
+ .filter_map(|str| {
+ DateTime::parse_from_rfc3339(str.replace("_", ":").as_str())
+ .ok()
+ .map(|timestamp| timestamp.to_utc())
+ })
+ .collect::<Vec<_>>();
+
+ Ok(Page {
+ uuid: page_uuid,
+ author: Uuid::try_parse(&metadata.author)
+ .map_err(|_| "Could not parse author UUID".to_string())?,
+ title: metadata.title,
+ current_version,
+ prev_versions,
+ content_offset,
+ })
+ }
+
+ pub fn get_page(&self, uuid: PageUuid) -> Option<Page> {
+ todo!()
+ }
+
+ pub fn create_page(&self, page: Page) {
+ todo!()
+ }
+
+ pub fn update_page(&self, page: Page) {
+ todo!()
+ }
+
+ pub fn delete_page(&self, uuid: PageUuid) -> Result<(), String> {
+ todo!()
+ }
+}
diff --git a/src/lib.rs b/src/lib.rs
index 0f5c708..955e795 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,5 +1,4 @@
pub mod components;
-pub mod actions;
pub mod data;
#[cfg(feature = "hydrate")]
diff --git a/src/main.rs b/src/main.rs
index d3c4f58..8cffc27 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,4 +1,3 @@
-
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
@@ -7,7 +6,6 @@ async fn main() {
use leptos::prelude::*;
use leptos_axum::{generate_route_list, LeptosRoutes};
use stormscribe::components::app::*;
- use stormscribe::data::content::ContentController;
let conf = get_configuration(None).unwrap();
let addr = conf.leptos_options.site_addr;
@@ -15,16 +13,9 @@ async fn main() {
// Generate the list of routes in your Leptos App
let routes = generate_route_list(App);
- let data_dir = std::path::Path::join(&std::env::current_dir().unwrap(), "data");
- let content_controller = ContentController::init(data_dir).await.unwrap();
-
let app = Router::new()
- .route("/", get(|| async {
- Redirect::temporary("/~/")
- }))
- .leptos_routes_with_context(&leptos_options, routes, move || {
- provide_context(content_controller.clone());
- }, {
+ .route("/", get(|| async { Redirect::temporary("/~/") }))
+ .leptos_routes(&leptos_options, routes, {
let leptos_options = leptos_options.clone();
move || shell(leptos_options.clone())
})
--
cgit 1.4.1