search, page parsing, better cmark abstraction
This commit is contained in:
parent
3ad3b1aad0
commit
19297cb6c6
5
.gitignore
vendored
@@ -1,3 +1,6 @@
minoteaur.sqlite3
dump.sql
minoteaur
minoteaur
node_modules
*.fossil
dump.sql
135
package-lock.json
generated
Normal file
@@ -0,0 +1,135 @@
{
    "requires": true,
    "lockfileVersion": 1,
    "dependencies": {
        "anymatch": {
            "version": "3.1.1",
            "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz",
            "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==",
            "requires": {
                "normalize-path": "^3.0.0",
                "picomatch": "^2.0.4"
            }
        },
        "binary-extensions": {
            "version": "2.2.0",
            "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
            "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA=="
        },
        "braces": {
            "version": "3.0.2",
            "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
            "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
            "requires": {
                "fill-range": "^7.0.1"
            }
        },
        "chokidar": {
            "version": "3.5.1",
            "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz",
            "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==",
            "requires": {
                "anymatch": "~3.1.1",
                "braces": "~3.0.2",
                "fsevents": "~2.3.1",
                "glob-parent": "~5.1.0",
                "is-binary-path": "~2.1.0",
                "is-glob": "~4.0.1",
                "normalize-path": "~3.0.0",
                "readdirp": "~3.5.0"
            }
        },
        "esbuild": {
            "version": "0.8.39",
            "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.8.39.tgz",
            "integrity": "sha512-/do5H74a5ChyeKRWfkDh3EpICXpsz6dWTtFFbotb7BlIHvWqnRrZYDb8IBubOHdEtKzuiksilRO19aBtp3/HHQ=="
        },
        "fill-range": {
            "version": "7.0.1",
            "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
            "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
            "requires": {
                "to-regex-range": "^5.0.1"
            }
        },
        "fsevents": {
            "version": "2.3.1",
            "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.1.tgz",
            "integrity": "sha512-YR47Eg4hChJGAB1O3yEAOkGO+rlzutoICGqGo9EZ4lKWokzZRSyIW1QmTzqjtw8MJdj9srP869CuWw/hyzSiBw==",
            "optional": true
        },
        "glob-parent": {
            "version": "5.1.1",
            "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz",
            "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==",
            "requires": {
                "is-glob": "^4.0.1"
            }
        },
        "is-binary-path": {
            "version": "2.1.0",
            "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
            "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
            "requires": {
                "binary-extensions": "^2.0.0"
            }
        },
        "is-extglob": {
            "version": "2.1.1",
            "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
            "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI="
        },
        "is-glob": {
            "version": "4.0.1",
            "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz",
            "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==",
            "requires": {
                "is-extglob": "^2.1.1"
            }
        },
        "is-number": {
            "version": "7.0.0",
            "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
            "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="
        },
        "mithril": {
            "version": "2.0.4",
            "resolved": "https://registry.npmjs.org/mithril/-/mithril-2.0.4.tgz",
            "integrity": "sha512-mgw+DMZlhMS4PpprF6dl7ZoeZq5GGcAuWnrg5e12MvaGauc4jzWsDZtVGRCktsiQczOEUr2K5teKbE5k44RlOg=="
        },
        "normalize-path": {
            "version": "3.0.0",
            "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
            "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="
        },
        "picomatch": {
            "version": "2.2.2",
            "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz",
            "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg=="
        },
        "readdirp": {
            "version": "3.5.0",
            "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz",
            "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==",
            "requires": {
                "picomatch": "^2.2.1"
            }
        },
        "sass": {
            "version": "1.32.6",
            "resolved": "https://registry.npmjs.org/sass/-/sass-1.32.6.tgz",
            "integrity": "sha512-1bcDHDcSqeFtMr0JXI3xc/CXX6c4p0wHHivJdru8W7waM7a1WjKMm4m/Z5sY7CbVw4Whi2Chpcw6DFfSWwGLzQ==",
            "requires": {
                "chokidar": ">=2.0.0 <4.0.0"
            }
        },
        "to-regex-range": {
            "version": "5.0.1",
            "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
            "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
            "requires": {
                "is-number": "^7.0.0"
            }
        }
    }
}
97
src/client.js
Normal file
@@ -0,0 +1,97 @@
import m from "mithril"

const searchButton = document.querySelector("nav .search")
const mountpoint = document.createElement("div")
document.querySelector("main").insertBefore(mountpoint, document.querySelector(".header"))

const state = {
    showingSearchDialog: false,
    searchResults: [],
    searchError: null,
    searchQuery: ""
}

const lowercaseFirst = ([first, ...rest]) => first.toLowerCase() + rest.join("")
const uppercaseFirst = ([first, ...rest]) => first.toUpperCase() + rest.join("")
const pageToSlug = page => page.split(/[ _]/).map(lowercaseFirst).join("_")
const slugToPage = slug => slug.split(/[ _]/).map(uppercaseFirst).join(" ")
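// these mirror pageToSlug/slugToPage in src/util.nim, so that client and server agree on page URLs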

const urlForPage = (page, subpage) => {
    let p = `/${encodeURIComponent(pageToSlug(page))}`
    if (subpage) { p += "/" + subpage }
    return p
}

const handleHTTPError = e => {
    if (e.code === 0) { return }
    let x = `Server error ${e.code}`
    if (e.message) { x += " " + e.message }
    alert(x)
}

const onsearch = ev => {
    const query = ev.target.value
    state.searchQuery = query
    m.request({
        url: "/api/search",
        params: { q: query }
    }).then(x => {
        if (typeof x === "string") { // SQLite syntax error
            console.log("ERR", x)
            state.searchError = x
        } else {
            state.searchResults = x
            state.searchError = null
        }
    }, handleHTTPError)
}

const currentPage = slugToPage(decodeURIComponent(/^\/([^/]+)/.exec(location.pathname)[1]).replace(/\+/g, " "))

const searchKeyHandler = ev => {
    if (ev.keyCode === 13) { // enter key
        // not very useful to just navigate to the same page
        const otherResults = state.searchResults.filter(r => r.page !== currentPage)
        if (otherResults[0]) { location.href = urlForPage(otherResults[0].page) }
    }
}

const SearchDialog = {
    view: () => m(".dialog.search", [
        m("h1", "Search"),
        m("input[type=search]", { placeholder: "Query", oninput: onsearch, onkeydown: searchKeyHandler, value: state.searchQuery, oncreate: ({ dom }) => dom.focus() }),
        state.searchError && m(".error", state.searchError),
        m("ul", state.searchResults.map(x => m("li", [
            m(".flex-space", [ m("a.wikilink", { href: urlForPage(x.page) }, x.page), m("", x.rank.toFixed(3)) ]),
            m("", x.snippet.map(s => s[0] ? m("span.highlight", s[1]) : s[1]))
        ])))
    ])
}

const App = {
    view: () => m("", state.showingSearchDialog ? m(SearchDialog) : null)
}

searchButton.addEventListener("click", e => {
    state.showingSearchDialog = !state.showingSearchDialog
    e.preventDefault()
    m.redraw()
})

document.body.addEventListener("keydown", e => {
    if (e.target === document.body) { // maybe use alt instead? or right shift or something - this just detects unfocused keypresses
        if (e.key === "e") {
            location.pathname = urlForPage(currentPage, "edit")
        } else if (e.key === "v") {
            location.pathname = urlForPage(currentPage)
        } else if (e.key === "r") {
            location.pathname = urlForPage(currentPage, "revisions")
        } else if (e.key === "/") {
            state.showingSearchDialog = !state.showingSearchDialog
            e.preventDefault()
            m.redraw()
        }
    }
})

m.mount(mountpoint, App)
178
src/domain.nim
@@ -2,8 +2,8 @@ import tiny_sqlite
import logging
import options
import times
import zstd/compress
import zstd/decompress
import zstd/compress as zstd_compress
import zstd/decompress as zstd_decompress
import sequtils
import strutils except splitWhitespace
import json
@@ -11,45 +11,75 @@ import std/jsonutils
import nimlevenshtein
import sugar
import unicode
import math

func timeToTimestamp*(t: Time): int64 = toUnix(t) * 1000 + (nanosecond(t) div 1000000)
func timestampToTime*(ts: int64): Time = initTime(ts div 1000, (ts mod 1000) * 1000000)
func timestampToStr*(t: Time): string = intToStr(int(timeToTimestamp(t)))

# store time as milliseconds
proc toDbValue(t: Time): DbValue = DbValue(kind: sqliteInteger, intVal: timeToTimestamp(t))
proc fromDbValue(value: DbValue, T: typedesc[Time]): Time = timestampToTime(value.intVal)
import util
from ./md import parsePage

let migrations = @[
    """CREATE TABLE pages (
        page TEXT NOT NULL PRIMARY KEY,
        updated INTEGER NOT NULL,
        created INTEGER NOT NULL
    );
    CREATE TABLE revisions (
        page TEXT NOT NULL REFERENCES pages(page),
        timestamp INTEGER NOT NULL,
        meta TEXT NOT NULL,
        fullData BLOB
    );"""
    #[
    `pages` stores the content of all pages, as well as when they were last updated and created - this is all the information needed to render the current version of a page
    It's mildly inefficient space-wise to store the latest content here AND in the revisions table (in compressed form), but dealing with this better would probably require complex logic elsewhere
    which I don't think is worth it - I anticipate that media files will be much bigger, and probably significant amounts of old revisions (it would be worth investigating storing compact diffs).

    `revisions` stores all changes to a page, with metadata as JSON (messagepack is generally better, but SQLite can only query JSON) and optionally a separate blob storing larger associated data
    (currently, the entire page content, zstd-compressed)

    rowids (INTEGER PRIMARY KEY) are explicitly present here because FTS external-content tables require stable rowids to work; they are not otherwise meant to be used much.
    ]#
    """
    CREATE TABLE pages (
        uid INTEGER PRIMARY KEY,
        page TEXT NOT NULL UNIQUE,
        updated INTEGER NOT NULL,
        created INTEGER NOT NULL,
        content TEXT NOT NULL
    );
    CREATE TABLE revisions (
        uid INTEGER PRIMARY KEY,
        page TEXT NOT NULL REFERENCES pages(page),
        timestamp INTEGER NOT NULL,
        meta TEXT NOT NULL,
        fullData BLOB
    );
    """,
    """
    CREATE VIRTUAL TABLE pages_fts USING fts5 (
        page, content,
        tokenize='porter unicode61 remove_diacritics 2',
        content=pages, content_rowid=uid
    );
    """,
    """
    CREATE TABLE links (
        uid INTEGER PRIMARY KEY,
        "from" TEXT NOT NULL,
        "to" TEXT NOT NULL,
        linkText TEXT NOT NULL,
        context TEXT NOT NULL
    );
    """
]
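
# A minimal sketch (an assumption, not the author's actual migrate proc) of how
# a migration list like this can be applied, using SQLite's user_version pragma
# to track how many entries have already run:
proc migrateSketch(db: DbConn) =
    let version = int(fromDbValue(get db.value("PRAGMA user_version"), int64))
    for i in version ..< migrations.len:
        db.transaction:
            db.execScript(migrations[i])
            # PRAGMA takes no bound parameters, so the new version is interpolated
            db.exec("PRAGMA user_version = " & $(i + 1))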

type
    Encoding = enum
        encPlain = 0, encZstd = 1
    RevisionType = enum
        rtNewContent = 0
    RevisionMeta = object
        case typ*: RevisionType
        of rtNewContent:
    Encoding* {.pure.} = enum
        Plain = 0, Zstd = 1
    RevisionType* {.pure.} = enum
        NewContent = 0
    RevisionMeta* = object
        case kind*: RevisionType
        of NewContent:
            encoding*: Encoding
            editDistance*: Option[int]
            size*: Option[int]
            words*: Option[int]

    Revision = object
        meta*: Revisionmeta
    Revision* = object
        meta*: RevisionMeta
        time*: Time
    SearchResult* = object
        page*: string
        rank*: float
        snippet*: seq[(bool, string)]

var logger = newConsoleLogger()

@@ -67,6 +97,7 @@ type
    Page = object
        page*, content*: string
        created*, updated*: Time
        uid*: int64

proc parse*(s: string, T: typedesc): T = fromJson(result, parseJSON(s), Joptions(allowExtraKeys: true, allowMissingKeys: true))

@@ -74,20 +105,26 @@ proc processFullRevisionRow(row: ResultRow): (RevisionMeta, string) =
    let (metaJSON, full) = row.unpack((string, seq[byte]))
    let meta = parse(metaJSON, RevisionMeta)
    var content = cast[string](full)
    if meta.encoding == encZstd:
        content = cast[string](decompress(content))
    if meta.encoding == Zstd:
        content = cast[string](zstd_decompress.decompress(content))
    (meta, content)

proc fetchPage*(db: DbConn, page: string, revision: Option[Time] = none(Time)): Option[Page] =
    # retrieve row for page
    db.one("SELECT updated, created FROM pages WHERE page = ?", page).flatMap(proc(row: ResultRow): Option[Page] =
        let (updated, created) = row.unpack((Time, Time))
        let rev =
            if revision.isSome: db.one("SELECT meta, fullData FROM revisions WHERE page = ? AND json_extract(meta, '$.typ') = 0 AND timestamp = ?", page, revision)
            else: db.one("SELECT meta, fullData FROM revisions WHERE page = ? AND json_extract(meta, '$.typ') = 0 ORDER BY timestamp DESC LIMIT 1", page)
proc fetchPage*(db: DbConn, page: string): Option[Page] =
    # retrieve the current version of the page directly
    db.one("SELECT uid, updated, created, content FROM pages WHERE page = ?", page).map(proc(row: ResultRow): Page =
        let (uid, updated, created, content) = row.unpack((int64, Time, Time, string))
        Page(page: page, created: created, updated: updated, content: content, uid: uid)
    )

proc fetchPage*(db: DbConn, page: string, revision: Time): Option[Page] =
    # retrieve page row
    db.one("SELECT uid, updated, created FROM pages WHERE page = ?", page).flatMap(proc(row: ResultRow): Option[Page] =
        let (uid, updated, created) = row.unpack((int64, Time, Time))
        # retrieve the older revision
        let rev = db.one("SELECT meta, fullData FROM revisions WHERE page = ? AND json_extract(meta, '$.kind') = 0 AND timestamp = ?", page, revision)
        rev.map(proc(row: ResultRow): Page =
            let (meta, content) = processFullRevisionRow(row)
            Page(page: page, created: created, updated: updated, content: content)
            Page(page: page, created: created, updated: updated, content: content, uid: uid)
        )
    )

@@ -102,22 +139,38 @@ func wordCount(s: string): int =
            break

proc updatePage*(db: DbConn, page: string, content: string) =
    let previous = fetchPage(db, page).map(p => p.content).get("")
    echo parsePage(content)
    let previous = fetchPage(db, page)
    # if there is no previous content, empty string instead
    let previousContent = previous.map(p => p.content).get("")

    let compressed = compress(content, level=10)
    var enc = encPlain
    # use zstandard-compressed version if it is smaller
    let compressed = zstd_compress.compress(content, level=10)
    var enc = Plain
    var data = cast[seq[byte]](content)
    if len(compressed) < len(data):
        enc = encZstd
        enc = Zstd
        data = compressed

    let meta = $toJson(RevisionMeta(typ: rtNewContent, encoding: enc,
        editDistance: some distance(previous, content), size: some len(content), words: some wordCount(content)))
    # generate some useful metadata and encode to JSON
    let meta = $toJson(RevisionMeta(kind: NewContent, encoding: enc,
        editDistance: some distance(previousContent, content), size: some len(content), words: some wordCount(content)))
    let ts = getTime()

    let revisionID = snowflake()
    let pageID = previous.map(p => p.uid).get(snowflake())
    # actually write to database
    db.transaction:
        db.exec("INSERT INTO revisions VALUES (?, ?, ?, ?)", page, ts, meta, data)
        db.exec("INSERT INTO pages VALUES (?, ?, ?) ON CONFLICT (page) DO UPDATE SET updated = ?", page, ts, ts, ts)
        if isSome previous:
            # update existing data and remove FTS index entry for it
            db.exec("UPDATE pages SET content = ?, updated = ? WHERE uid = ?", content, ts, pageID)
            # pages_fts is an external content FTS table, so deletion has to be done like this
            db.exec("INSERT INTO pages_fts (pages_fts, rowid, page, content) VALUES ('delete', ?, ?, ?)", pageID, page, previousContent)
        else:
            db.exec("INSERT INTO pages VALUES (?, ?, ?, ?, ?)", pageID, page, ts, ts, content)
        # push to full text search index
        db.exec("INSERT INTO pages_fts (rowid, page, content) VALUES (?, ?, ?)", pageID, page, content)
        db.exec("INSERT INTO revisions VALUES (?, ?, ?, ?, ?)", revisionID, page, ts, meta, data)

proc fetchRevisions*(db: DbConn, page: string): seq[Revision] =
    db.all("SELECT timestamp, meta FROM revisions WHERE page = ? ORDER BY timestamp DESC", page).map(proc (row: ResultRow): Revision =
@@ -131,7 +184,30 @@ proc processRevisionRow(r: ResultRow): Revision =

proc adjacentRevisions*(db: DbConn, page: string, ts: Time): (Option[Revision], Option[Revision]) =
    # revision after given timestamp
    let next = db.one("SELECT timestamp, meta FROM revisions WHERE page = ? AND json_extract(meta, '$.typ') = 0 AND timestamp > ? ORDER BY timestamp ASC LIMIT 1", page, ts)
    let next = db.one("SELECT timestamp, meta FROM revisions WHERE page = ? AND json_extract(meta, '$.kind') = 0 AND timestamp > ? ORDER BY timestamp ASC LIMIT 1", page, ts)
    # revision before given timestamp
    let prev = db.one("SELECT timestamp, meta FROM revisions WHERE page = ? AND json_extract(meta, '$.typ') = 0 AND timestamp < ? ORDER BY timestamp DESC LIMIT 1", page, ts)
    (next.map(processRevisionRow), prev.map(processRevisionRow))
    let prev = db.one("SELECT timestamp, meta FROM revisions WHERE page = ? AND json_extract(meta, '$.kind') = 0 AND timestamp < ? ORDER BY timestamp DESC LIMIT 1", page, ts)
    (next.map(processRevisionRow), prev.map(processRevisionRow))

proc processSearchRow(row: ResultRow): SearchResult =
    let (page, rank, snippet) = row.unpack((string, float, string))
    var pos = 0
    # split snippet up into an array of highlighted/unhighlighted bits
    var snips: seq[(bool, string)] = @[]
    while true:
        let newpos = find(snippet, "<hlstart>", pos)
        if newpos == -1:
            break
        snips.add((false, snippet[pos .. newpos - 1]))
        var endpos = find(snippet, "<hlend>", newpos)
        # if no <hlend> (this *probably* shouldn't happen) then just highlight remaining rest of string
        if endpos == -1:
            endpos = len(snippet)
        snips.add((true, snippet[newpos + len("<hlstart>") .. endpos - 1]))
        pos = endpos + len("<hlend>")
    snips.add((false, snippet[pos .. len(snippet) - 1]))
    # filter out empty snippet fragments because they're not useful, rescale rank for nicer display
    SearchResult(page: page, rank: log10(-rank * 1e7), snippet: snips.filter(x => len(x[1]) > 0))
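# (e.g. the snippet "foo <hlstart>bar<hlend> baz" becomes @[(false, "foo "), (true, "bar"), (false, " baz")];
# SQLite FTS5's bm25() rank is negative - more negative means a better match - so -rank is positive and log10 rescales it for display)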

proc search*(db: DbConn, query: string): seq[SearchResult] =
    db.all("SELECT page, rank, snippet(pages_fts, 1, '<hlstart>', '<hlend>', ' ... ', 32) FROM pages_fts WHERE pages_fts MATCH ? AND rank MATCH 'bm25(5.0, 1.0)' ORDER BY rank", query).map(processSearchRow)
251
src/md.nim
@@ -1,72 +1,215 @@
import karax/[karaxdsl, vdom]
import cmark/native
import cmark/native as cmark except Node, Parser
# the builtin re library would probably be better for this - it can directly take cstrings (so better perf when dealing with the cstrings from cmark) and may be faster
# unfortunately it does not expose a findAll thing which returns the *positions* of everything for some weird reason
import regex
from strutils import join, find
import unicode
import sets

from ./util import pageToSlug, slugToPage, autoInitializedThreadvar

cmark_gfm_core_extensions_ensure_registered()

func wikilink(page, linkText: string): string =
    let vdom = buildHtml(a(href=page, class="wikilink")): text linkText
    $vdom
type
    Node = object
        raw: NodePtr
    BorrowedNode = object
        raw: NodePtr
    Parser = object
        raw: ParserPtr

proc pushNodeAfter(ty: NodeType, content: string, pushAfter: NodePtr) =
    let node = cmark_node_new(ty)
    assert cmark_node_set_literal(node, content) == 1
    assert cmark_node_insert_before(pushAfter, node) == 1
proc `=copy`(dest: var Node, source: Node) {.error.}
proc `=destroy`(x: var Node) = cmark_node_free(x.raw)
proc `=destroy`(x: var BorrowedNode) = discard
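# (ownership model: Node owns its cmark pointer, frees it on destruction and cannot be copied;
# BorrowedNode is a non-owning view, so destroying one is deliberately a no-op)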

proc renderToHtml*(input: string): string =
    let wlRegex = re"\[\[([^:\]]+):?([^\]]+)?\]\]"
    let opt = CMARK_OPT_UNSAFE or CMARK_OPT_FOOTNOTES or CMARK_OPT_STRIKETHROUGH_DOUBLE_TILDE or CMARK_OPT_TABLE_PREFER_STYLE_ATTRIBUTES
proc `=destroy`(x: var Parser) = cmark_parser_free(x.raw)

    let
        str: cstring = input
        len: csize_t = len(input).csize_t
        parser: ParserPtr = cmark_parser_new(opt.cint)
proc borrow(n: Node): BorrowedNode = BorrowedNode(raw: n.raw)

proc newParser(options: int64, extensions: seq[string]): Parser =
    let parser: ParserPtr = cmark_parser_new(options.cint)
    if parser == nil: raise newException(CatchableError, "failed to initialize parser")
    defer: cmark_parser_free(parser)

    for ext in @["table", "strikethrough"]:
    # load and enable desired syntax extensions
    # these are freed with the parser (probably)
    for ext in extensions:
        let e: cstring = ext
        let eptr = cmark_find_syntax_extension(e)
        if eptr == nil: raise newException(LibraryError, "failed to find extension " & ext)
        if cmark_parser_attach_syntax_extension(parser, eptr) == 0: raise newException(CatchableError, "failed to attach extension " & ext)
        if eptr == nil:
            cmark_parser_free(parser)
            raise newException(LibraryError, "failed to find extension " & ext)
        if cmark_parser_attach_syntax_extension(parser, eptr) == 0:
            cmark_parser_free(parser)
            raise newException(CatchableError, "failed to attach extension " & ext)
    Parser(raw: parser)

    cmark_parser_feed(parser, str, len)
    let doc = cmark_parser_finish(parser)
    defer: cmark_node_free(doc)
    if doc == nil: raise newException(CatchableError, "parsing failed")
proc parse(p: Parser, document: string): Node =
    let
        str: cstring = document
        length = len(document).csize_t
    cmark_parser_feed(p.raw, str, length)
    let ast = cmark_parser_finish(p.raw)
    if ast == nil: raise newException(CatchableError, "parsing failed - should not occur")
    Node(raw: ast)

    block:
        let iter = cmark_iter_new(doc)
        defer: cmark_iter_free(iter)
        while true:
            let evType = cmark_iter_next(iter)
            if evType == etDone: break
            let node: NodePtr = cmark_iter_get_node(iter)
            if cmark_node_get_type(node) == ntText:
                let ntext = $cmark_node_get_literal(node)
                # check for wikilinks in text node
                let matches = findAll(ntext, wlRegex)
                # if there are any, put in the appropriate HTML nodes
                if len(matches) > 0:
                    var lastix = 0
                    for match in matches:
                        let page = ntext[match.captures[0][0]] # I don't know why this doesn't use Option. Perhaps sometimes there are somehow > 1 ranges.
                        # if there is a separate linkText field, use this, otherwise just use the page
                        let linkText =
                            if len(match.captures[1]) > 0: ntext[match.captures[1][0]]
                            else: page
                        let html = wikilink(page, linkText)
                        # push text before this onto the tree, as well as the HTML of the wikilink
                        pushNodeAfter(ntText, ntext[lastix..<match.boundaries.a], node)
                        pushNodeAfter(ntHtmlInline, html, node)
                        lastix = match.boundaries.b + 1
                    # push final text, if relevant
                    if lastix != len(ntext) - 1: pushNodeAfter(ntText, ntext[lastix..<len(ntext)], node)
                    cmark_node_free(node)
proc nodeType(n: BorrowedNode): NodeType = cmark_node_get_type(n.raw)
proc nodeContent(n: BorrowedNode): string = $cmark_node_get_literal(n.raw)

    let html: cstring = cmark_render_html(doc, opt.cint, cmark_parser_get_syntax_extensions(parser))
proc newNode(ty: NodeType, content: string): Node =
    let raw = cmark_node_new(ty)
    if raw == nil: raise newException(CatchableError, "node creation failed")
    if cmark_node_set_literal(raw, content) != 1:
        cmark_node_free(raw)
        raise newException(CatchableError, "node content setting failed")
    Node(raw: raw)

proc parentNode(parentOf: BorrowedNode): BorrowedNode = BorrowedNode(raw: cmark_node_parent(parentOf.raw))
proc pushNodeAfter(after: BorrowedNode, node: sink Node) {.nodestroy.} = assert cmark_node_insert_before(after.raw, node.raw) == 1
proc unlinkNode(node: sink BorrowedNode): Node {.nodestroy.} =
    cmark_node_unlink(node.raw)
    Node(raw: node.raw)

proc render(ast: Node, options: int64, parser: Parser): string =
    let html: cstring = cmark_render_html(ast.raw, options.cint, cmark_parser_get_syntax_extensions(parser.raw))
    defer: free(html)
    result = $html

    result = $html
iterator cmarkTree(root: BorrowedNode): (EventType, BorrowedNode) {.inline.} =
    var iter = cmark_iter_new(root.raw)
    if iter == nil: raise newException(CatchableError, "iterator initialization failed")
    defer: cmark_iter_free(iter)
    while true:
        let ev = cmark_iter_next(iter)
        if ev == etDone: break
        let node: NodePtr = cmark_iter_get_node(iter)
        yield (ev, BorrowedNode(raw: node))

func wikilink(page, linkText: string): string =
    let vdom = buildHtml(a(href=pageToSlug(page), class="wikilink")): text linkText
    $vdom

autoInitializedThreadvar(wlRegex, Regex, re"\[\[([^:\]]+):?([^\]]+)?\]\]")
autoInitializedThreadvar(newlinesRegex, Regex, re"\n{2,}")

proc renderToHtml*(input: string): string =
    let wlRegex = wlRegex()
    let opt = CMARK_OPT_UNSAFE or CMARK_OPT_FOOTNOTES or CMARK_OPT_STRIKETHROUGH_DOUBLE_TILDE or CMARK_OPT_TABLE_PREFER_STYLE_ATTRIBUTES

    # initialize parser with the extensions in use, parse things
    let parser = newParser(opt, @["table", "strikethrough"])
    let doc = parse(parser, input)

    # iterate over AST using built-in cmark-gfm AST iteration thing
    for (evType, node) in cmarkTree(borrow(doc)):
        # if it is a text node
        if nodeType(node) == ntText:
            let ntext = nodeContent(node)
            # check for wikilinks in text node
            let matches = findAll(ntext, wlRegex)
            # if there are any, put in the appropriate HTML nodes
            if len(matches) > 0:
                var lastpos = 0
                # I think this does similar things to the snippet highlight code, perhaps it could be factored out somehow
                for match in matches:
                    let page = ntext[match.captures[0][0]] # I don't know why this doesn't use Option. Perhaps sometimes there are somehow > 1 ranges.
                    # if there is a separate linkText field, use this, otherwise just use the page
                    let linkText =
                        if len(match.captures[1]) > 0: ntext[match.captures[1][0]]
                        else: page
                    let html = wikilink(page, linkText)
                    # push text before this onto the tree, as well as the HTML of the wikilink
                    pushNodeAfter(node, newNode(ntText, ntext[lastpos..<match.boundaries.a]))
                    pushNodeAfter(node, newNode(ntHtmlInline, html))
                    lastpos = match.boundaries.b + 1
                # push final text, if extant
                if lastpos != len(ntext): pushNodeAfter(node, newNode(ntText, ntext[lastpos..<len(ntext)]))
                # remove original text node
                discard unlinkNode(node)

    render(doc, opt, parser)

proc textContent(node: BorrowedNode): string =
    let newlinesRegex = newlinesRegex()
    for (evType, node) in cmarkTree(node):
        let ntype = nodeType(node)
        if ntype == ntText or ntype == ntCode:
            result &= nodeContent(node)
        elif int64(ntype) < CMARK_NODE_TYPE_INLINE and evType == etExit and ntype != ntItem:
            result &= "\n"
        elif ntype == ntSoftBreak:
            result &= " "
        elif ntype == ntLineBreak:
            result &= "\n"
    replace(strip(result), newlinesRegex, "\n")

proc findParagraphParent(node: BorrowedNode): BorrowedNode =
    result = node
    while nodeType(result) != ntParagraph: result = parentNode(result)
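# (this assumes the node always has a paragraph ancestor, which holds for ordinary text since cmark wraps it in paragraphs)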

type
    Link* = object
        page*, text*, context*: string
    ParsedPage* = object
        links*: seq[Link]
        fullText: string

# Generates context for a link given the surrounding string and its position in it
# Takes a given quantity of space-separated words from both sides
# If not enough exist on one side, takes more from the other
# TODO: treat a wikilink as one token
proc linkContext(str: string, startPos: int, endPos: int, lookaround: int): string =
    var earlierToks = splitWhitespace(str[0..<startPos])
    var linkText = str[startPos..endPos]
    var laterToks = splitWhitespace(str[endPos + 1..^1])
    let bdlook = lookaround * 2
    result =
        # both are longer than necessary so take tokens symmetrically
        if earlierToks.len >= lookaround and laterToks.len >= lookaround:
            earlierToks[^lookaround..^1].join(" ") & linkText & laterToks[0..<lookaround].join(" ")
        # later is shorter than wanted, take more from earlier
        elif earlierToks.len >= lookaround and laterToks.len < lookaround:
            earlierToks[^(bdlook - laterToks.len)..^1].join(" ") & linkText & laterToks.join(" ")
        # mirrored version of previous case
        elif earlierToks.len < lookaround and laterToks.len >= lookaround:
            earlierToks.join(" ") & linkText & laterToks[0..<(bdlook - earlierToks.len)].join(" ")
        # both too short, use all of both
        else: earlierToks.join(" ") & linkText & laterToks.join(" ")
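# Rough illustration (hypothetical values): with lookaround = 2 and
# str = "alpha beta gamma [[X]] delta", only one word follows the link, so the
# earlier side contributes up to 2 * lookaround - 1 = 3 words and the context
# covers roughly the whole string, "alpha beta gamma [[X]] delta" (modulo the
# exact whitespace kept around the link text).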

proc parsePage*(input: string): ParsedPage =
    let wlRegex = wlRegex()
    let opt = CMARK_OPT_UNSAFE or CMARK_OPT_FOOTNOTES or CMARK_OPT_STRIKETHROUGH_DOUBLE_TILDE or CMARK_OPT_TABLE_PREFER_STYLE_ATTRIBUTES

    let parser = newParser(opt, @["table", "strikethrough"])
    let doc = parse(parser, input)

    var wikilinks: seq[Link] = @[]
    var seenPages: HashSet[string]

    for (evType, node) in cmarkTree(borrow(doc)):
        if nodeType(node) == ntText:
            let ntext = nodeContent(node)
            let matches = findAll(ntext, wlRegex)
            if len(matches) > 0:
                let paragraph = textContent(findParagraphParent(node))
                var matchEnd = 0
                for match in matches:
                    echo $match
                    let page = ntext[match.captures[0][0]]
                    let linkText =
                        if len(match.captures[1]) > 0: ntext[match.captures[1][0]]
                        else: page

                    let canonicalPage = slugToPage(page)
                    if not (canonicalPage in seenPages):
                        # matches in this text node will not necessarily line up with ones in the surrounding textual content, so look up the wikilink's source in the paragraph
                        # kind of hacky but should work in any scenario which isn't deliberately constructed pathologically, especially since it will only return stuff after the last link
                        let fullLink = ntext[match.boundaries]
                        let matchInParagraph = find(paragraph, fullLink, matchEnd)
                        matchEnd = matchInParagraph + fullLink.len
                        let context = linkContext(paragraph, matchInParagraph, matchEnd, 12)

                        # add to wikilinks list, and deduplicate
                        wikilinks.add(Link(page: canonicalPage, text: linkText, context: context))
                        seenPages.incl(canonicalPage)

    ParsedPage(links: wikilinks, fullText: textContent(borrow(doc)))
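
# A rough usage sketch (an illustration, not part of the commit): given
# parsePage("compare [[Other Page]] with [[Some Page:this]]"), links should
# contain Link(page: "Other Page", text: "Other Page", ...) and
# Link(page: "Some Page", text: "this", ...), each with up to ~12 words of
# surrounding context, while fullText holds the document's plain text.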

@@ -7,10 +7,13 @@ import tiny_sqlite
import options
import times
import sugar
import std/jsonutils
import strutils
import prologue/middlewares/csrf

from ./domain import nil
from ./md import nil
import util

let
    env = loadPrologueEnv(".env")
@@ -27,19 +30,24 @@ func base(title: string, navItems: seq[VNode], bodyItems: VNode): string =
    let vnode = buildHtml(html):
        head:
            link(rel="stylesheet", href="/static/style.css")
            script(src="/static/client.js", `defer`="true")
            meta(charset="utf8")
            meta(name="viewport", content="width=device-width,initial-scale=1.0")
            title: text title
        body:
            main:
                nav:
                    a(class="link-button search", href=""): text "Search"
                    for n in navItems: n
                tdiv(class="header"):
                    h1: text title
                bodyItems
    $vnode

domain.migrate(openDatabase("./minoteaur.sqlite3"))
block:
    let db = openDatabase("./minoteaur.sqlite3")
    domain.migrate(db)
    close(db)

type
    AppContext = ref object of Context
@@ -49,34 +57,46 @@ type
var db {.threadvar.}: Option[DbConn]

proc dbMiddleware(): HandlerAsync =
    # horrible accursed hack to make exitproc work
    result = proc(ctx: AppContext) {.async.} =
        # open new DB connection for thread if there isn't one
        if db.isNone:
            db = some openDatabase("./minoteaur.sqlite3")
            # close DB connection on thread exit
            onThreadDestruction(proc() =
                try: db.get().close()
                except: discard)
            echo "Opening database connection"
            var conn = openDatabase("./minoteaur.sqlite3")
            conn.exec("PRAGMA foreign_keys = ON")
            db = some conn
        ctx.db = get db
        await switch(ctx)

proc headersMiddleware(): HandlerAsync =
    result = proc(ctx: AppContext) {.async.} =
        await switch(ctx)
        ctx.response.setHeader("X-Content-Type-Options", "nosniff")
        # user-controlled inline JS/CSS is explicitly turned on
        # this does partly defeat the point of a CSP, but this is still able to prevent connecting to other sites unwantedly
        ctx.response.setHeader("Content-Security-Policy", "default-src 'self' 'unsafe-inline'; img-src * data:; media-src * data:; form-action 'self'; frame-ancestors 'self'")
        ctx.response.setHeader("Referrer-Policy", "origin-when-cross-origin")

proc displayTime(t: Time): string = t.format("uuuu-MM-dd HH:mm:ss", utc())

func pageUrlFor(ctx: AppContext, route: string, page: string, query: openArray[(string, string)] = @[]): string = ctx.urlFor(route, { "page": encodeUrl(page) }, query)
func pageUrlFor(ctx: AppContext, route: string, page: string, query: openArray[(string, string)] = @[]): string = ctx.urlFor(route, { "page": encodeUrl(pageToSlug(page)) }, query)
func pageButton(ctx: AppContext, route: string, page: string, label: string, query: openArray[(string, string)] = @[]): VNode = navButton(label, pageUrlFor(ctx, route, page, query), route)

proc formCsrfToken(ctx: AppContext): VNode = verbatim csrfToken(ctx)

proc edit(ctx: AppContext) {.async.} =
    let page = decodeUrl(ctx.getPathParams("page"))
    let page = slugToPage(decodeUrl(ctx.getPathParams("page")))
    let pageData = domain.fetchPage(ctx.db, page)
    let html =
        buildHtml(form(`method`="post", class="edit-form")):
            textarea(name="content"): text pageData.map(p => p.content).get("")
            input(`type`="submit", value="Save", name="action", class="save")
            textarea(name="content"): text pageData.map(p => p.content).get("")
            formCsrfToken(ctx)
    let verb = if pageData.isSome: "Editing " else: "Creating "
    resp base(verb & page, @[pageButton(ctx, "view-page", page, "View"), pageButton(ctx, "page-revisions", page, "Revisions")], html)

proc revisions(ctx: AppContext) {.async.} =
    let page = decodeUrl(ctx.getPathParams("page"))
    let page = slugToPage(decodeUrl(ctx.getPathParams("page")))
    let revs = domain.fetchRevisions(ctx.db, page)
    let html =
        buildHtml(table(class="rev-table")):
@@ -88,7 +108,7 @@ proc revisions(ctx: AppContext) {.async.} =
            for rev in revs:
                tr:
                    td(class="ts"):
                        a(href=ctx.urlFor("view-page", { "page": encodeUrl(page) }, { "ts": domain.timestampToStr(rev.time) })):
                        a(href=ctx.urlFor("view-page", { "page": encodeUrl(pageToSlug(page)) }, { "ts": timestampToStr(rev.time) })):
                            text displayTime(rev.time)
                    td: text rev.meta.editDistance.map(x => $x).get("")
                    td: text rev.meta.size.map(x => formatSize(x)).get("")
@@ -96,20 +116,20 @@ proc revisions(ctx: AppContext) {.async.} =
    resp base("Revisions of " & page, @[pageButton(ctx, "view-page", page, "View"), pageButton(ctx, "edit-page", page, "Edit")], html)

proc handleEdit(ctx: AppContext) {.async.} =
    let page = decodeUrl(ctx.getPathParams("page"))
    let page = slugToPage(decodeUrl(ctx.getPathParams("page")))
    domain.updatePage(ctx.db, page, ctx.getFormParams("content"))
    resp redirect(pageUrlFor(ctx, "view-page", page))
    resp redirect(pageUrlFor(ctx, "view-page", page), Http303)

proc view(ctx: AppContext) {.async.} =
    let page = decodeUrl(ctx.getPathParams("page"))
    let page = slugToPage(decodeUrl(ctx.getPathParams("page")))
    let rawRevision = ctx.getQueryParams("ts")
    let viewSource = ctx.getQueryParams("source") != ""
    let revisionTs = if rawRevision == "": none(Time) else: some domain.timestampToTime(parseInt rawRevision)
    let revisionTs = if rawRevision == "": none(Time) else: some timestampToTime(parseInt rawRevision)
    let viewingOldRevision = revisionTs.isSome

    let pageData = domain.fetchPage(ctx.db, page, revisionTs)
    let pageData = if viewingOldRevision: domain.fetchPage(ctx.db, page, get revisionTs) else: domain.fetchPage(ctx.db, page)
    if pageData.isNone:
        resp redirect(pageUrlFor(ctx, "edit-page", page))
        resp redirect(pageUrlFor(ctx, "edit-page", page), Http302)
    else:
        let pageData = get pageData
        let mainBody = if viewSource: buildHtml(pre): text pageData.content else: verbatim md.renderToHtml(pageData.content)
@@ -134,18 +154,28 @@ proc view(ctx: AppContext) {.async.} =
                    text displayTime(rts)
            tdiv(class="md"): mainBody
        var buttons = @[pageButton(ctx, "edit-page", page, "Edit"), pageButton(ctx, "page-revisions", page, "Revisions"), pageButton(ctx, "view-page", page, "Latest")]
        if next.isSome: buttons.add(pageButton(ctx, "next-page", page, "Next", { "ts": domain.timestampToStr (get next).time }))
        if prev.isSome: buttons.add(pageButton(ctx, "prev-page", page, "Previous", { "ts": domain.timestampToStr (get prev).time }))
        if next.isSome: buttons.add(pageButton(ctx, "next-page", page, "Next", { "ts": timestampToStr (get next).time }))
        if prev.isSome: buttons.add(pageButton(ctx, "prev-page", page, "Previous", { "ts": timestampToStr (get prev).time }))
        resp base(page, buttons, html)

proc favicon(ctx: Context) {.async.} = resp "bee"
proc search(ctx: AppContext) {.async.} =
    let query = ctx.getQueryParams("q")
    var results: seq[domain.SearchResult] = @[]
    try:
        if query != "": results = domain.search(ctx.db, query)
    except SqliteError as e: # SQLite apparently treats FTS queries containing some things outside of quotes as syntax errors. These should probably be shown to the user.
        resp jsonResponse toJson($e.msg)
        return
    resp jsonResponse toJson(results)

proc favicon(ctx: Context) {.async.} = resp error404()
proc index(ctx: Context) {.async.} = resp "bee(s)"

var app = newApp(settings = settings)
app.use(@[staticFileMiddleware("static"), sessionMiddleware(settings), extendContextMiddleware(AppContext), dbMiddleware()])
app.use(@[staticFileMiddleware("static"), sessionMiddleware(settings), extendContextMiddleware(AppContext), dbMiddleware(), headersMiddleware(), csrfMiddleware()])
app.get("/", index)
app.get("/favicon.ico", favicon)
app.get("/api/search", search, name="search")
app.get("/{page}/edit", edit, name="edit-page")
app.get("/{page}/revisions", revisions, name="page-revisions")
app.post("/{page}/edit", handleEdit, name="handle-edit")
@@ -11,10 +11,11 @@ body
    min-height: 100vh

main
    width: 50em
    max-width: 50em
    padding: 0 1em 1em 1em
    margin-left: auto
    margin-right: auto
    position: relative

strong
    font-weight: 600
@@ -22,7 +23,7 @@ strong
h1, h2, h3, h4, h5, h6
    &:first-of-type
        border-bottom: 1px solid gray
    margin: 0
    margin: 0 0 0.5em 0
    font-weight: 500
    a
        color: inherit
@@ -58,9 +59,6 @@ table
        &.ts
            white-space: nowrap

.header
    margin-bottom: 0.5em

.md
    margin-top: 0.5em
    > *, p
@@ -74,7 +72,7 @@ nav

a.wikilink
    text-decoration: none
    color: #01a049
    color: #0165fc
    font-style: italic
    &:hover
        text-decoration: underline
@@ -104,10 +102,36 @@ a.wikilink
        background-color: #5170d7
    &.prev-page
        background-color: #bc13fe
    &.search
        background-color: #fac205

input[type=search], input[type=text]
    border: 1px solid gray
    padding: 0.75em
    width: 100%

.edit-form
    textarea
        resize: vertical
        width: 100%
        height: 70vh
        border: 1px solid gray
        border: 1px solid gray

.highlight
    background-color: yellow

.dialog
    width: 100%
    background: white
    padding: 1em
    border: 1px solid gray

.flex-space
    display: flex
    justify-content: space-between

.error
    color: red

img
    max-width: 100%
63
src/util.nim
Normal file
@@ -0,0 +1,63 @@
import times
import unicode
import strutils except splitWhitespace
import sequtils
import tiny_sqlite
import random
import math
import times
import options

func lowercaseFirstLetter(s: string): string =
    if len(s) == 0:
        return ""
    var
        rune: Rune
        i = 0
    fastRuneAt(s, i, rune, doInc = true)
    result = $toLower(rune) & substr(s, i)
func pageToSlug*(page: string): string = page.split({'_', ' '}).map(lowercaseFirstLetter).join("_")
func slugToPage*(slug: string): string = slug.split({'_', ' '}).map(capitalize).join(" ")
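# e.g. pageToSlug("Some Page") == "some_page" and slugToPage("some_page") == "Some Page"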

func timeToTimestamp*(t: Time): int64 = toUnix(t) * 1000 + (nanosecond(t) div 1000000)
func timestampToTime*(ts: int64): Time = initTime(ts div 1000, (ts mod 1000) * 1000000)
func timestampToStr*(t: Time): string = intToStr(int(timeToTimestamp(t)))

# store time as milliseconds
proc toDbValue*(t: Time): DbValue = DbValue(kind: sqliteInteger, intVal: timeToTimestamp(t))
proc fromDbValue*(value: DbValue, T: typedesc[Time]): Time = timestampToTime(value.intVal)

# count words, defined as things separated by whitespace which are not purely punctuation characters
# alternative definitions may include dropping number-only words, and/or splitting at full stops too
func wordCount(s: string): int =
    for word in splitWhitespace(s):
        if len(word) == 0: continue
        for bytechar in word:
            if not (bytechar in {'[', ']', ':', '.', '!'}):
                inc result
                break
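# e.g. wordCount("see [[Some Page]]!") == 3 - "see", "[[Some" and "Page]]!" each contain a non-punctuation byte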

template autoInitializedThreadvar*(name: untyped, typ: typedesc, initialize: typed): untyped =
    var data* {.threadvar.}: Option[typ]
    proc `name`(): typ =
        if isSome(data): result = get data
        else:
            result = initialize
            data = some result

# https://github.com/aisk/simpleflake.nim/blob/master/src/simpleflake.nim - unique 64-bit timestamped ID generation
# not actually identical to that as this has 2 bits less randomness to avoid timestamp overflow issues in 2034 (the application is likely to be replaced by 2139 so the new time is probably fine)
# This is a signed integer for SQLite compatibility
const SIMPLEFLAKE_EPOCH = 946702800
const SIMPLEFLAKE_RANDOM_LENGTH = 21

let now = times.getTime()
autoInitializedThreadvar(threadRNG, Rand, random.initRand(now.toUnix * 1_000_000_000 + now.nanosecond))

proc snowflake*(): int64 =
    var rng = threadRNG()
    let now = times.getTime().toUnixFloat()
    var ts = int64((now - SIMPLEFLAKE_EPOCH) * 1000)
    let randomBits = int64(rng.rand(2 ^ SIMPLEFLAKE_RANDOM_LENGTH))

    return ts shl SIMPLEFLAKE_RANDOM_LENGTH or randomBits
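
# Layout: the bits above the low 21 hold milliseconds since the 2000-01-01 epoch,
# the low 21 bits are random. A hypothetical decoder (an illustration, not part
# of the commit) recovers the creation time like so:
proc snowflakeTime(id: int64): Time =
    timestampToTime((id shr SIMPLEFLAKE_RANDOM_LENGTH) + SIMPLEFLAKE_EPOCH * 1000)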
2
static/client.js
Normal file
File diff suppressed because one or more lines are too long
7
static/client.js.map
Normal file
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
html{scrollbar-color:#000 #d3d3d3}*{box-sizing:border-box}body{font-family:"Fira Sans","Noto Sans","Segoe UI",Verdana,sans-serif;font-weight:300;margin:0;min-height:100vh}main{width:50em;padding:0 1em 1em 1em;margin-left:auto;margin-right:auto}strong{font-weight:600}h1,h2,h3,h4,h5,h6{margin:0;font-weight:500}h1:first-of-type,h2:first-of-type,h3:first-of-type,h4:first-of-type,h5:first-of-type,h6:first-of-type{border-bottom:1px solid gray}h1 a,h2 a,h3 a,h4 a,h5 a,h6 a{color:inherit}:not(pre)>code{background:#000;color:#fff;padding:.3em}ul,ol{padding-left:1em}ul{list-style-type:square}blockquote{border-left:.3em solid #000;padding-left:.3em}table{border-collapse:collapse}table th{background:#000;color:#fff;font-weight:normal}table td,table th{padding:.2em .5em}table td{border:1px solid gray}.rev-table{width:100%}.rev-table td{border:none}.rev-table td.ts{white-space:nowrap}.header{margin-bottom:.5em}.md{margin-top:.5em}.md>*,.md p{margin:0 0 .5em 0}nav{margin-bottom:.5em}.timestamp{color:gray}a.wikilink{text-decoration:none;color:#01a049;font-style:italic}a.wikilink:hover{text-decoration:underline}.link-button,button,input[type=submit]{border:none;padding:.75em;background:gray;text-align:center;text-decoration:none;color:#000;display:inline-block;font-size:1rem}.link-button:hover,button:hover,input[type=submit]:hover{background-image:linear-gradient(rgba(0, 0, 0, 0.2), rgba(0, 0, 0, 0.2))}.link-button.view-page,button.view-page,input[type=submit].view-page{background-color:#76cd26}.link-button.edit-page,button.edit-page,input[type=submit].edit-page{background-color:#75bbfd}.link-button.page-revisions,button.page-revisions,input[type=submit].page-revisions{background-color:#f97306}.link-button.save,button.save,input[type=submit].save{background-color:#06c2ac}.link-button.next-page,button.next-page,input[type=submit].next-page{background-color:#5170d7}.link-button.prev-page,button.prev-page,input[type=submit].prev-page{background-color:#bc13fe}.edit-form textarea{resize:vertical;width:100%;height:70vh;border:1px solid gray}/*# sourceMappingURL=style.css.map */
html{scrollbar-color:#000 #d3d3d3}*{box-sizing:border-box}body{font-family:"Fira Sans","Noto Sans","Segoe UI",Verdana,sans-serif;font-weight:300;margin:0;min-height:100vh}main{max-width:50em;padding:0 1em 1em 1em;margin-left:auto;margin-right:auto;position:relative}strong{font-weight:600}h1,h2,h3,h4,h5,h6{margin:0 0 .5em 0;font-weight:500}h1:first-of-type,h2:first-of-type,h3:first-of-type,h4:first-of-type,h5:first-of-type,h6:first-of-type{border-bottom:1px solid gray}h1 a,h2 a,h3 a,h4 a,h5 a,h6 a{color:inherit}:not(pre)>code{background:#000;color:#fff;padding:.3em}ul,ol{padding-left:1em}ul{list-style-type:square}blockquote{border-left:.3em solid #000;padding-left:.3em}table{border-collapse:collapse}table th{background:#000;color:#fff;font-weight:normal}table td,table th{padding:.2em .5em}table td{border:1px solid gray}.rev-table{width:100%}.rev-table td{border:none}.rev-table td.ts{white-space:nowrap}.md{margin-top:.5em}.md>*,.md p{margin:0 0 .5em 0}nav{margin-bottom:.5em}.timestamp{color:gray}a.wikilink{text-decoration:none;color:#0165fc;font-style:italic}a.wikilink:hover{text-decoration:underline}.link-button,button,input[type=submit]{border:none;padding:.75em;background:gray;text-align:center;text-decoration:none;color:#000;display:inline-block;font-size:1rem}.link-button:hover,button:hover,input[type=submit]:hover{background-image:linear-gradient(rgba(0, 0, 0, 0.2), rgba(0, 0, 0, 0.2))}.link-button.view-page,button.view-page,input[type=submit].view-page{background-color:#76cd26}.link-button.edit-page,button.edit-page,input[type=submit].edit-page{background-color:#75bbfd}.link-button.page-revisions,button.page-revisions,input[type=submit].page-revisions{background-color:#f97306}.link-button.save,button.save,input[type=submit].save{background-color:#06c2ac}.link-button.next-page,button.next-page,input[type=submit].next-page{background-color:#5170d7}.link-button.prev-page,button.prev-page,input[type=submit].prev-page{background-color:#bc13fe}.link-button.search,button.search,input[type=submit].search{background-color:#fac205}input[type=search],input[type=text]{border:1px solid gray;padding:.75em;width:100%}.edit-form textarea{resize:vertical;width:100%;height:70vh;border:1px solid gray}.highlight{background-color:#ff0}.dialog{width:100%;background:#fff;padding:1em;border:1px solid gray}.flex-space{display:flex;justify-content:space-between}.error{color:red}img{max-width:100%}/*# sourceMappingURL=style.css.map */
@@ -1 +1 @@
{"version":3,"sourceRoot":"","sources":["../src/style.sass"],"names":[],"mappings":"AAAA,KACI,6BAEJ,EACI,sBAEJ,KACI,kEACA,gBACA,SACA,iBAEJ,KACI,WACA,sBACA,iBACA,kBAEJ,OACI,gBAEJ,kBAGI,SACA,gBAHA,sGACI,6BAGJ,8BACI,cAER,eACI,gBACA,WACA,aAEJ,MACI,iBACJ,GACI,uBACJ,WACI,4BACA,kBACJ,MACI,yBAEA,SACI,gBACA,WACA,mBACJ,kBACI,kBACJ,SACI,sBAER,WACI,WACA,cACI,YACA,iBACI,mBAEZ,QACI,mBAEJ,IACI,gBACA,YACI,kBAER,IACI,mBAEJ,WACI,WAEJ,WACI,qBACA,cACA,kBACA,iBACI,0BAER,uCACI,YACA,cACA,gBACA,kBACA,qBACA,WACA,qBACA,eACA,yDAEI,yEAEJ,qEACI,yBACJ,qEACI,yBACJ,oFACI,yBACJ,sDACI,yBACJ,qEACI,yBACJ,qEACI,yBAGJ,oBACI,gBACA,WACA,YACA","file":"style.css"}
{"version":3,"sourceRoot":"","sources":["../src/style.sass"],"names":[],"mappings":"AAAA,KACI,6BAEJ,EACI,sBAEJ,KACI,kEACA,gBACA,SACA,iBAEJ,KACI,eACA,sBACA,iBACA,kBACA,kBAEJ,OACI,gBAEJ,kBAGI,kBACA,gBAHA,sGACI,6BAGJ,8BACI,cAER,eACI,gBACA,WACA,aAEJ,MACI,iBACJ,GACI,uBACJ,WACI,4BACA,kBACJ,MACI,yBAEA,SACI,gBACA,WACA,mBACJ,kBACI,kBACJ,SACI,sBAER,WACI,WACA,cACI,YACA,iBACI,mBAEZ,IACI,gBACA,YACI,kBAER,IACI,mBAEJ,WACI,WAEJ,WACI,qBACA,cACA,kBACA,iBACI,0BAER,uCACI,YACA,cACA,gBACA,kBACA,qBACA,WACA,qBACA,eACA,yDAEI,yEAEJ,qEACI,yBACJ,qEACI,yBACJ,oFACI,yBACJ,sDACI,yBACJ,qEACI,yBACJ,qEACI,yBACJ,4DACI,yBAER,oCACI,sBACA,cACA,WAGA,oBACI,gBACA,WACA,YACA,sBAER,WACI,sBAEJ,QACI,WACA,gBACA,YACA,sBAEJ,YACI,aACA,8BAEJ,OACI,UAEJ,IACI","file":"style.css"}
@@ -1,2 +1,2 @@
#!/bin/sh
npx sass --watch -s compressed src/style.sass:static/style.css
npx -p sass sass --watch -s compressed src/style.sass:static/style.css & npx esbuild --bundle src/client.js --outfile=static/client.js --sourcemap --minify --watch