Some stuff kind of working

Dawid Ciężarkiewicz 2020-05-10 00:17:03 -07:00
parent e556d733b1
commit 94d3101cd1
7 changed files with 198 additions and 40 deletions

Cargo.lock

@@ -1062,6 +1062,17 @@ version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399"
 
+[[package]]
+name = "serde_derive"
+version = "1.0.107"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10be45e22e5597d4b88afcc71f9d7bfadcd604bf0c78a3ab4582b8d2b37f39f3"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "serde_json"
 version = "1.0.52"
@@ -1188,6 +1199,8 @@ dependencies = [
  "pulldown-cmark",
  "rand 0.6.5",
  "regex",
+ "serde",
+ "serde_derive",
  "structopt",
  "tokio",
  "walkdir",


@@ -30,3 +30,5 @@ digest = "*"
 hex = "*"
 walkdir = "*"
 async-trait = "0.1.30"
+serde = "*"
+serde_derive = "*"


@@ -59,9 +59,14 @@ impl<T> Index<T> {
     pub fn find(&self, tags: &[TagRef]) -> FindResults {
         let mut matching_pages: Vec<String> = vec![];
         let mut matching_tags: Vec<String> = vec![];
+        let mut already_tried_tags = HashSet::new();
         for tag in tags {
+            if already_tried_tags.contains(tag) {
+                continue;
+            }
+            already_tried_tags.insert(tag);
             if matching_tags.is_empty() {
-                if let Some(ids) = dbg!(&self.page_ids_by_tag).get(*tag) {
+                if let Some(ids) = &self.page_ids_by_tag.get(*tag) {
                     matching_pages = ids.iter().map(|id| id.to_owned()).collect();
                     matching_tags.push(tag.to_string())
                 } else {
@@ -98,7 +103,7 @@ impl<T> Index<T> {
     }
 
     fn add_data_for_page(&mut self, page: &page::Parsed) {
-        for tag in dbg!(&page.tags) {
+        for tag in &page.tags {
             self.page_ids_by_tag
                 .entry(tag.clone())
                 .or_default()


@@ -6,6 +6,8 @@ use std::sync::Arc;
 use structopt::StructOpt;
 use warp::{path::FullPath, Filter};
 
+use serde_derive::Deserialize;
+
 use page::StoreMut;
 
 /// Command line options
@@ -34,24 +36,107 @@ fn with_state(
     warp::any().map(move || state.clone())
 }
 
-async fn handler(
-    state: Arc<State>,
-    path: FullPath,
-) -> std::result::Result<Box<dyn warp::Reply>, warp::Rejection> {
-    let tags: Vec<_> = path
-        .as_str()
+fn warp_temporary_redirect(location: &str) -> warp::http::Response<&'static str> {
+    warp::http::Response::builder()
+        .status(307)
+        .header(warp::http::header::LOCATION, location)
+        .body("")
+        .expect("correct redirect")
+}
+
+fn warp_temporary_redirect_after_post(location: &str) -> warp::http::Response<&'static str> {
+    warp::http::Response::builder()
+        .status(303)
+        .header(warp::http::header::LOCATION, location)
+        .body("")
+        .expect("correct redirect")
+}
+
+fn get_rid_of_windows_newlines(s: String) -> String {
+    s.chars().filter(|ch| *ch != '\r').collect()
+}
+
+#[derive(Deserialize, Debug)]
+struct GetPrompt {
+    edit: Option<bool>,
+}
+
+#[derive(Deserialize, Debug)]
+struct PostForm {
+    body: String,
+}
+
+fn html_for_editing_page(page: &page::Parsed) -> String {
+    format!(
+        "<form action='.' method='POST'><textarea name='body'>{}</textarea><br/><input type=submit></form>",
+        page.source_body
+    )
+}
+
+fn path_to_tags(path: &FullPath) -> Vec<&str> {
+    path.as_str()
         .split('/')
         .map(|t| t.trim())
         .filter(|t| t != &"")
-        .collect();
+        .collect()
+}
+
+async fn handle_post(
+    state: Arc<State>,
+    path: FullPath,
+    form: PostForm,
+) -> std::result::Result<Box<dyn warp::Reply>, warp::Rejection> {
+    let tags = path_to_tags(&path);
+
+    let mut write = state.page_store.write().await;
+    let results = write.find(tags.as_slice());
+
+    match results.matching_pages.len() {
+        1 => {
+            let page = write
+                .get(results.matching_pages[0].clone())
+                .await
+                .map_err(|e| warp::reject::custom(RejectAnyhow(e)))?;
+            let page = page.with_new_source_body(&get_rid_of_windows_newlines(form.body));
+            write
+                .put(&page)
+                .await
+                .map_err(|e| warp::reject::custom(RejectAnyhow(e)))?;
+            Ok(Box::new(warp_temporary_redirect_after_post(".".into())))
+        }
+        _ => {
+            // TODO: ERROR
+            Ok(Box::new(format!("Results: {:?}", results)))
+        }
+    }
+}
+
+async fn handle_get(
+    state: Arc<State>,
+    path: FullPath,
+    query: GetPrompt,
+) -> std::result::Result<Box<dyn warp::Reply>, warp::Rejection> {
+    let tags = path_to_tags(&path);
 
     let read = state.page_store.read().await;
     let results = read.find(tags.as_slice());
+
+    if results.matching_tags != tags {
+        return Ok(Box::new(warp_temporary_redirect(
+            &("/".to_string() + &results.matching_tags.join("/")),
+        )));
+    }
+
     if results.matching_pages.len() == 1 {
         let page = read
             .get(results.matching_pages[0].clone())
             .await
             .map_err(|e| warp::reject::custom(RejectAnyhow(e)))?;
-        Ok(Box::new(warp::reply::html(page.html)))
+        Ok(Box::new(warp::reply::html(if query.edit.is_none() {
+            page.html
+                + "<form action='.' method='get'><input type='hidden' name='edit' value='true' /><button type='submit'/>Edit Page</form>"
+        } else {
+            html_for_editing_page(&page)
+        })))
     } else {
         Ok(Box::new(format!("Results: {:?}", results)))
     }
@@ -66,9 +151,17 @@ async fn start(opts: &cli::Opts) -> Result<()> {
         )),
     });
     let handler = warp::any()
-        .and(with_state(state))
+        .and(with_state(state.clone()))
         .and(warp::path::full())
-        .and_then(handler);
+        .and(warp::query::<GetPrompt>())
+        .and(warp::get())
+        .and_then(handle_get)
+        .or(warp::any()
+            .and(with_state(state))
+            .and(warp::path::full())
+            .and(warp::post())
+            .and(warp::filters::body::form())
+            .and_then(handle_post));
 
     info!("Listening on port {}", opts.port);
     let _serve = warp::serve(handler).run(([127, 0, 0, 1], opts.port)).await;
@@ -81,7 +174,7 @@ fn main() -> Result<()> {
     tokio::runtime::Runtime::new()
         .unwrap()
-        .block_on(start(&opts));
+        .block_on(start(&opts))?;
 
     Ok(())
 }
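The routing change above builds the GET and POST pipelines as two separate warp filter chains and merges them with or. Below is a minimal, self-contained sketch of that composition pattern, assuming warp 0.2 and tokio 0.2 with the macros feature; the two placeholder handlers are made up for illustration and are not tagwiki's.

use warp::Filter;

#[tokio::main]
async fn main() {
    // GET chain: extract the full request path and echo it back.
    let get_route = warp::get()
        .and(warp::path::full())
        .map(|path: warp::path::FullPath| format!("GET {}", path.as_str()));

    // POST chain: decode an application/x-www-form-urlencoded body into a map.
    let post_route = warp::post()
        .and(warp::filters::body::form())
        .map(|form: std::collections::HashMap<String, String>| format!("POST {:?}", form));

    // A request is tried against the GET chain first and falls through to the POST chain.
    let routes = get_route.or(post_route);
    warp::serve(routes).run(([127, 0, 0, 1], 3030)).await;
}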


@@ -1,9 +1,10 @@
 pub mod store;
 
+#[allow(unused)]
+use anyhow::Result;
 use lazy_static::lazy_static;
 pub use store::{InMemoryStore, Store, StoreMut};
 
-use anyhow::Result;
 use digest::Digest;
 
 pub type Id = String;
@@ -18,6 +19,7 @@ pub struct Source(String);
 #[derive(Debug, Default, Clone)]
 pub struct Parsed {
     pub source: Source,
+    pub source_body: String,
     pub html: String,
     pub headers: Headers,
     pub tags: Vec<Tag>,
@@ -36,7 +38,9 @@ fn split_headers_and_body(source: &Source) -> (&str, &str) {
     if let Some(cap) = RE.captures_iter(&source.0).next() {
         (
-            cap.get(1).expect("be there").as_str(),
+            // important: trimming headers, prevent them from accumulating newlines in the output
+            // during rewrites
+            cap.get(1).expect("be there").as_str().trim(),
             cap.get(2).expect("be there").as_str(),
         )
     } else {
@@ -92,25 +96,51 @@ impl Headers {
             }
         }
     }
+
+    fn to_markdown_string(&self) -> String {
+        "<!---\n".to_string() + &self.all + "\n-->\n"
+    }
+}
+
+fn parse_tags(body: &str) -> Vec<String> {
+    lazy_static! {
+        static ref RE: regex::Regex = regex::Regex::new(r"#([a-zA-Z0-9]+)").expect("correct regex");
+    }
+
+    RE.captures_iter(&body)
+        .map(|m| m.get(1).expect("a value").as_str().to_lowercase())
+        .collect()
 }
 
 impl Parsed {
-    fn from_markdown(source: Source) -> Parsed {
+    fn from_full_source(source: Source) -> Parsed {
         let (headers, body) = split_headers_and_body(&source);
         let headers = Headers::parse(headers, &source);
-        let parser = pulldown_cmark::Parser::new(body);
+
+        Self::from_headers_and_body(headers, body.to_owned())
+    }
+
+    fn from_headers_and_body(headers: Headers, body: String) -> Parsed {
+        let source = headers.to_markdown_string() + &body;
+        let parser = pulldown_cmark::Parser::new(&body);
         let mut html_output = String::new();
         pulldown_cmark::html::push_html(&mut html_output, parser);
 
+        let tags = parse_tags(&body);
+
         Parsed {
             headers,
             html: html_output,
-            source,
-            tags: vec!["TODO".into()],
+            source_body: body,
+            source: Source(source),
+            tags,
             title: "TODO".into(),
         }
     }
+
+    pub fn with_new_source_body(&self, new_body_source: &str) -> Self {
+        Self::from_headers_and_body(self.headers.clone(), new_body_source.to_owned())
+    }
 }
 
 #[test]
@@ -137,7 +167,7 @@ c: d "#
 
 #[test]
 fn parse_markdown_metadata_test() -> Result<()> {
-    let page = Parsed::from_markdown(Source(
+    let page = Parsed::from_full_source(Source(
         r#"
 <!---
@@ -159,8 +189,3 @@ tagwiki-id: 123
     assert_eq!(page.headers.id, "xyz");
     Ok(())
 }
-
-fn add_to_store(_store: &impl Store, source: Source) -> Result<()> {
-    let _page = Parsed::from_markdown(source);
-    Ok(())
-}
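For reference, here is a tiny usage sketch of the tag extraction introduced in parse_tags above, using the same regex with the regex crate; the sample text and expected tags are made up. Hashtags such as #Rust in a page body are captured and lowercased:

fn main() {
    // Same pattern as in parse_tags: capture the word after each '#'.
    let re = regex::Regex::new(r"#([a-zA-Z0-9]+)").expect("correct regex");
    let tags: Vec<String> = re
        .captures_iter("Notes on #Rust and #TagWiki")
        .map(|m| m.get(1).expect("a value").as_str().to_lowercase())
        .collect();
    assert_eq!(tags, vec!["rust".to_string(), "tagwiki".to_string()]);
}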


@@ -127,17 +127,10 @@ pub struct InMemoryStore {
 }
 
 impl InMemoryStore {
+    #[allow(unused)]
     pub fn new() -> Self {
         Default::default()
     }
-
-    /*
-    fn inner(&self) -> Result<std::sync::MutexGuard<InMemoryStoreInner>> {
-        self.inner
-            .lock()
-            .map_err(|e| format_err!("Lock failed {}", e))
-    }
-    */
 }
 
 #[async_trait]


@@ -20,7 +20,7 @@ impl FsStore {
             ..Self::default()
         };
         for entry in walkdir::WalkDir::new(&s.root_path) {
-            match Self::try_reading_page_from_entry_res(entry) {
+            match s.try_reading_page_from_entry_res(entry) {
                 Ok(Some((page, path))) => {
                     s.id_to_path.insert(page.headers.id.clone(), path.clone());
                     s.path_to_page.insert(path, page);
@@ -64,14 +64,16 @@ impl FsStore {
     }
 
     fn try_reading_page_from_entry_res(
+        &self,
         entry: walkdir::Result<walkdir::DirEntry>,
     ) -> Result<Option<(page::Parsed, PathBuf)>> {
         let entry = entry?;
-        Self::try_reading_page_from_entry(&entry)
+        self.try_reading_page_from_entry(&entry)
             .with_context(|| format!("While reading path: {}", entry.path().display()))
     }
 
     fn try_reading_page_from_entry(
+        &self,
         entry: &walkdir::DirEntry,
     ) -> Result<Option<(page::Parsed, PathBuf)>> {
         if !entry.file_type().is_file() {
@@ -88,13 +90,38 @@ impl FsStore {
         reader.read_to_string(&mut source.0)?;
 
         Ok(Some((
-            page::Parsed::from_markdown(source),
-            entry.path().to_owned(),
+            page::Parsed::from_full_source(source),
+            entry
+                .path()
+                .strip_prefix(&self.root_path)
+                .expect("correct prefix")
+                .to_owned(),
         )))
     }
 
-    fn write_page_to_file(&self, _rel_path: &Path, _page: &page::Parsed) -> Result<()> {
-        todo!();
+    async fn write_page_to_file(&self, rel_path: &Path, page: &page::Parsed) -> Result<()> {
+        let page = page.clone();
+        use std::io::Write;
+        let path = self.root_path.join(rel_path);
+        let tmp_path = path.with_extension(format!("md.tmp.{}", crate::util::random_string(8)));
+        tokio::task::spawn_blocking(move || -> Result<()> {
+            let mut file = std::fs::File::create(&tmp_path)?;
+            file.write_all(b"<!---\n")?;
+            file.write_all(page.headers.all.as_bytes())?;
+            file.write_all(b"\n-->\n")?;
+            file.write_all(page.source_body.as_bytes())?;
+            file.flush()?;
+            file.sync_data()?;
+            drop(file);
+            std::fs::rename(tmp_path, path)?;
+            Ok(())
+        })
+        .await??;
+
+        Ok(())
     }
 }
@@ -114,7 +141,7 @@ impl page::StoreMut for FsStore {
             self.title_to_new_rel_path(&page.title)
         };
 
-        self.write_page_to_file(&path, &page)?;
+        self.write_page_to_file(&path, &page).await?;
 
         self.id_to_path
             .insert(page.headers.id.clone(), path.clone());
         self.path_to_page.insert(path, page.clone());
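
The new write_page_to_file writes each page to a temporary sibling file, syncs it, and renames it over the target, so a crash mid-write never leaves a truncated page on disk; spawn_blocking keeps the blocking file I/O off the async executor. Below is a stripped-down synchronous sketch of the same write-then-rename idea, using only the standard library (the helper name and paths are made up):

use std::io::Write;
use std::path::Path;

// Write `contents` to a temporary sibling file, flush it to disk, then rename it
// over `path`. The rename is atomic on the same filesystem, so readers observe
// either the old file or the complete new one, never a partial write.
fn atomic_write(path: &Path, contents: &[u8]) -> std::io::Result<()> {
    let tmp_path = path.with_extension("md.tmp");
    let mut file = std::fs::File::create(&tmp_path)?;
    file.write_all(contents)?;
    file.sync_data()?;
    drop(file);
    std::fs::rename(&tmp_path, path)?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    atomic_write(
        Path::new("page.md"),
        b"<!---\ntagwiki-id: example\n-->\nBody with a #tag\n",
    )
}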