feat: dynamic page regeneration (#24)

the asset system is reworked to support "dynamic" entries, where each
entry is a separate file on disk containing the latest generation's
headers+raw+gzip+zstd. calling view.regenerate looks up pages that
declared "export const regenerate" during generation, re-renders them
through the view system, and stores the results as assets instead of
sending them as a response.
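
for reference, a dynamic entry on disk is packed by packDynamicBuffer
(see the diff below) as three little-endian u32 offsets, the header
text, and the body in all three encodings; a sketch of the layout:

    [headersEnd][rawEnd][gzipEnd]    u32 x 3, offsets from start of file
    name:value\n...                  response headers, utf-8
    <raw><gzip><zstd>                zstd runs to the end of the file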

pages configured as regenerable are also bundled as views, using the
non-aliasing key "page:${page.id}". this key cannot alias a real view
because file paths may not contain a colon.
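
a sketch of what a regenerable page can export (the bundler reads
regenerate.tags and regenerate.seconds; the tag name and timing below
are made up for illustration):

    export const regenerate = { seconds: 300, tags: ["stats"] };
    export const meta = { title: "example" };
    export default function ExamplePage() {
      return <p>rendered at {new Date().toISOString()}</p>;
    }
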
clover caruso 2025-08-11 22:43:27 -07:00
parent ff4c861ce1
commit f1d4be2553
41 changed files with 1143 additions and 640 deletions

View file

@@ -3,14 +3,8 @@
     nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
     utils.url = "github:numtide/flake-utils";
   };
-  outputs =
-    { nixpkgs, utils, ... }:
-    utils.lib.eachDefaultSystem (
-      system:
-      let
-        pkgs = nixpkgs.legacyPackages.${system};
-      in
-      {
+  outputs = inputs: inputs.utils.lib.eachDefaultSystem (system:
+    with inputs.nixpkgs.legacyPackages.${system}; {
       devShells.default = pkgs.mkShell {
         buildInputs = [
           pkgs.nodejs_24 # runtime
@@ -24,6 +18,12 @@
           pkgs.rsync
         ];
       };
-      }
-    );
+      devShells.min = pkgs.mkShell {
+        buildInputs = [
+          pkgs.nodejs_24 # runtime
+          pkgs.deno # formatter
+          pkgs.rsync
+        ];
+      };
+    });
 }

View file

@@ -88,19 +88,19 @@ export async function bundleClientJavaScript(
   for (const file of outputFiles) {
     const { text } = file;
     let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
-    const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
-    const sources = Object.keys(inputs).filter((x) => !isIgnoredSource(x));
+    const { entryPoint } = UNWRAP(metafile.outputs["out!" + route]);
     // Register non-chunks as script entries.
     const chunk = route.startsWith("/js/c.");
     if (!chunk) {
-      const key = hot.getScriptId(path.resolve(sources[sources.length - 1]));
+      const key = hot.getScriptId(toAbs(UNWRAP(entryPoint)));
+      console.log(route, key);
       route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
       scripts[key] = text;
     }
     // Register chunks and public scripts as assets.
     if (chunk || publicScriptRoutes.includes(route)) {
-      p.push(io.writeAsset(route, text));
+      p.push(io.writeAsset({ pathname: route, buffer: text }));
     }
   }
   await Promise.all(p);
@@ -111,7 +111,7 @@ export type ServerPlatform = "node" | "passthru";
 export interface ServerSideOptions {
   entries: string[];
   viewItems: sg.FileItem[];
-  viewRefs: incr.Ref<PreparedView>[];
+  viewRefs: incr.Ref<PageOrView>[];
   styleMap: Map<string, incr.Ref<string>>;
   scriptMap: incr.Ref<Record<string, string>>;
   platform: ServerPlatform;
@@ -124,13 +124,39 @@ export async function bundleServerJavaScript({
   entries,
   platform,
 }: ServerSideOptions) {
-  const wViewSource = incr.work(async (_, viewItems: sg.FileItem[]) => {
+  const regenKeys: Record<string, string[]> = {};
+  const regenTtls: view.Ttl[] = [];
+  for (const ref of viewRefs) {
+    const value = UNWRAP(ref.value);
+    if (value.type === "page" && (value.regenerate?.tags?.length ?? 0) > 0) {
+      for (const tag of value.regenerate!.tags!) {
+        (regenKeys[tag] ??= []).push(`page:${value.id}`);
+      }
+    }
+    if (value.type === "page" && (value.regenerate?.seconds ?? 0) > 0) {
+      regenTtls.push({
+        key: `page:${value.id}` as view.Key,
+        seconds: value.regenerate!.seconds!,
+      });
+    }
+  }
+  const wViewSource = incr.work(
+    async (
+      _,
+      { viewItems, regenKeys, regenTtls }: {
+        viewItems: sg.FileItem[];
+        regenKeys: Record<string, string[]>;
+        regenTtls: view.Ttl[];
+      },
+    ) => {
       const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
       return {
         magicWord,
         file: [
           ...viewItems.map(
-            (view, i) => `import * as view${i} from ${JSON.stringify(view.file)}`,
+            (view, i) =>
+              `import * as view${i} from ${JSON.stringify(view.file)}`,
           ),
           `const styles = ${magicWord}[-2]`,
           `export const scripts = ${magicWord}[-1]`,
@@ -143,17 +169,43 @@ export async function bundleServerJavaScript({
             `  layout: view${i}.layout?.default ?? null,`,
             `  inlineCss: styles[${magicWord}[${i}]]`,
             `  },`,
-          ].join("\n"),
+          ].join("\n")
         ),
         "}",
+        `export const regenTags = ${JSON.stringify(regenKeys)};`,
+        `export const regenTtls = ${JSON.stringify(regenTtls)};`,
       ].join("\n"),
     };
-  }, viewItems);
+    },
+    { viewItems, regenKeys, regenTtls },
+  );
+  await incr.work(
+    async (io, { regenKeys, viewItems }) => {
+      io.writeFile(
+        "../ts/view.d.ts",
+        [
+          "export interface RegisteredViews {",
+          ...viewItems
+            .filter((view) => !view.id.startsWith("page:"))
+            .map(
+              (view) =>
+                `  ${JSON.stringify(view.id)}: ` +
+                `typeof import(${
+                  JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))
+                }),`,
+            ),
+          "}",
+          "export type RegenKey = " +
+            (regenKeys.map((key) => JSON.stringify(key)).join(" | ") ||
+              "never"),
+        ].join("\n"),
+      );
+    },
+    { regenKeys: Object.keys(regenKeys), viewItems },
+  );
-  const wBundles = entries.map(
-    (entry) =>
-      [
-        entry,
+  const wBundles = entries.map((entry) =>
     incr.work(async (io, entry) => {
       const pkg = await io.readJson<{
         dependencies: Record<string, string>;
@@ -203,10 +255,7 @@
       chunkNames: "c.[hash]",
       entryNames: path.basename(entry, path.extname(entry)),
       entryPoints: [
-        path.join(
-          import.meta.dirname,
-          "backend/entry-" + platform + ".ts",
-        ),
+        path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
       ],
       platform: "node",
       format: "esm",
@@ -244,7 +293,7 @@
       // file in more than one chunk.
       if (
         magicWord &&
-        metafile.outputs[key].inputs["framework/lib/view.ts"]
+        UNWRAP(metafile.outputs[key]).inputs["framework/lib/view.ts"]
       ) {
         ASSERT(!fileWithMagicWord);
         fileWithMagicWord = {
@@ -257,11 +306,10 @@
       }
     }
     return fileWithMagicWord;
-    }, entry),
-      ] as const,
+    }, entry)
   );
-  const wProcessed = wBundles.map(async ([entry, wBundle]) => {
+  const wProcessed = wBundles.map(async (wBundle) => {
     if (!(await wBundle)) return;
     await incr.work(async (io) => {
       // Only the reachable resources need to be read and inserted into the bundle.
@@ -278,7 +326,7 @@
     const viewStyleKeys = views.map((view) => view.styleKey);
     const viewCssBundles = await Promise.all(
       viewStyleKeys.map((key) =>
-        io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key)),
+        io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key))
       ),
     );
     const styleList = Array.from(new Set(viewCssBundles));
@@ -297,7 +345,7 @@
         return JSON.stringify(Object.fromEntries(neededScripts));
       }
       // Reference an index into `styleList`
-      return `${styleList.indexOf(viewCssBundles[i])}`;
+      return `${styleList.indexOf(UNWRAP(viewCssBundles[i]))}`;
     });
     io.writeFile(basename, text);
@@ -322,3 +370,5 @@ import * as fs from "#sitegen/fs";
 import * as mime from "#sitegen/mime";
 import * as incr from "./incremental.ts";
 import * as sg from "#sitegen";
+import type { PageOrView } from "./generate.ts";
+import type * as view from "#sitegen/view";

View file

@@ -57,12 +57,14 @@ export function styleKey(
 export async function bundleCssFiles(
   io: Io,
   { cssImports, theme, dev }: {
-    cssImports: string[],
-    theme: Theme,
-    dev: boolean,
-  }
+    cssImports: string[];
+    theme: Theme;
+    dev: boolean;
+  },
 ) {
-  cssImports = await Promise.all(cssImports.map((file) => io.trackFile('src/' + file)));
+  cssImports = await Promise.all(
+    cssImports.map((file) => io.trackFile("src/" + file)),
+  );
   const plugin = {
     name: "clover css",
     setup(b) {
@@ -111,4 +113,5 @@ import * as esbuild from "esbuild";
 import * as fs from "#sitegen/fs";
 import * as hot from "./hot.ts";
 import * as path from "node:path";
-import { virtualFiles } from "./esbuild-support.ts";import type { Io } from "./incremental.ts";
+import { virtualFiles } from "./esbuild-support.ts";
+import type { Io } from "./incremental.ts";

View file

@@ -9,4 +9,4 @@ globalThis.UNWRAP = (t, ...args) => {
 globalThis.ASSERT = assert.ok;

 import * as util from "node:util";
-import * as assert from 'node:assert'
+import * as assert from "node:assert";

View file

@@ -51,4 +51,4 @@ declare global {
   }
 }

-import * as render from "./render.ts";
+import * as render from "#engine/render";

View file

@@ -18,6 +18,7 @@ export const createTemplate = (
   const r = render.current;
   // Support using Marko outside of Clover SSR
   if (!r) return renderer(props, n);
+  render.setCurrent(null);
   const markoResult = renderFn.call(renderer, {
     ...props,
     $global: { clover: r, cloverAsyncMarker },

View file

@@ -1,5 +1,5 @@
 import { test } from "node:test";
-import * as render from "./render.ts";
+import * as render from "#engine/render";

 test("sanity", (t) => t.assert.equal(render.sync("gm <3").text, "gm &lt;3"));
 test("simple tree", (t) =>

View file

@@ -118,8 +118,11 @@ export function resolveNode(r: State, node: unknown): ResolvedNode {
   if (!node && node !== 0) return ""; // falsy, non numeric
   if (typeof node !== "object") {
     if (node === true) return ""; // booleans are ignored
-    if (typeof node === "string") return escapeHtml(node);
+    if (typeof node === "string") return escapeHtmlContent(node);
     if (typeof node === "number") return String(node); // no escaping ever
+    if (typeof node === "symbol" && node.toString() === kElement.toString()) {
+      throw new Error(`There are two instances of Clover SSR loaded!`);
+    }
     throw new Error(`Cannot render ${inspect(node)} to HTML`);
   }
   if (node instanceof Promise) {
@@ -217,12 +220,14 @@ function stringifyElement(element: ResolvedElement) {
     let attr;
     switch (prop) {
       default:
-        attr = `${prop}=${quoteIfNeeded(escapeHtml(String(value)))}`;
+        attr = `${prop}=${quoteIfNeeded(escapeAttribute(String(value)))}`;
         break;
       case "className":
         // Legacy React Compat
       case "class":
-        attr = `class=${quoteIfNeeded(escapeHtml(clsx(value as ClsxInput)))}`;
+        attr = `class=${
+          quoteIfNeeded(escapeAttribute(clsx(value as ClsxInput)))
+        }`;
         break;
       case "htmlFor":
         throw new Error("Do not use the `htmlFor` attribute. Use `for`");
@@ -233,7 +238,7 @@ function stringifyElement(element: ResolvedElement) {
       case "key":
         continue;
     }
-    if (needSpace) (out += " "), (needSpace = !attr.endsWith('"'));
+    if (needSpace) ((out += " "), (needSpace = !attr.endsWith('"')));
     out += attr;
   }
   out += ">";
@@ -254,14 +259,16 @@ export function stringifyStyleAttribute(style: Record<string, string>) {
   let out = ``;
   for (const styleName in style) {
     if (out) out += ";";
-    out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${escapeHtml(
-      String(style[styleName]),
-    )}`;
+    out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${
+      escapeAttribute(
+        String(style[styleName]),
+      )
+    }`;
   }
   return "style=" + quoteIfNeeded(out);
 }

 export function quoteIfNeeded(text: string) {
-  if (text.includes(" ")) return '"' + text + '"';
+  if (text.match(/["/>]/)) return '"' + text + '"';
   return text;
 }
@@ -303,6 +310,21 @@ export function clsx(mix: ClsxInput) {
   return str;
 }

+export const escapeHtmlContent = (unsafeText: string) =>
+  String(unsafeText)
+    .replace(/&/g, "&amp;")
+    .replace(/</g, "&lt;")
+    .replace(/>/g, "&gt;")
+    .replace(/"/g, "&quot;");
+// TODO: combine into one function which decides if an attribute needs quotes
+// and escapes it correctly depending on the context.
+const escapeAttribute = (unsafeText: string) =>
+  String(unsafeText)
+    .replace(/&/g, "&amp;")
+    .replace(/</g, "&lt;")
+    .replace(/>/g, "&gt;")
+    .replace(/"/g, "&quot;");
+/** @deprecated */
 export const escapeHtml = (unsafeText: string) =>
   String(unsafeText)
     .replace(/&/g, "&amp;")

View file

@@ -42,7 +42,7 @@ export function Suspense({ children, fallback }: SuspenseProps): render.Node {
   r.asyncDone = () => {
     const rejections = r.rejections;
     if (rejections && rejections.length > 0) throw new Error("TODO");
-    state.pushChunk?.(name, (ip[0] = resolved));
+    state.pushChunk?.(name, ip[0] = resolved);
   };
   return render.raw(ip);
 }
@@ -99,4 +99,4 @@ export async function* renderStreaming<
   return addonOutput as unknown as T;
 }

-import * as render from "./render.ts";
+import * as render from "#engine/render";

View file

@@ -1,7 +1,12 @@
 type Awaitable<T> = T | Promise<T>;

 export function virtualFiles(
-  map: Record<string, string | esbuild.OnLoadResult | (() => Awaitable<string | esbuild.OnLoadResult>)>,
+  map: Record<
+    string,
+    | string
+    | esbuild.OnLoadResult
+    | (() => Awaitable<string | esbuild.OnLoadResult>)
+  >,
 ) {
   return {
     name: "clover vfs",
@@ -22,7 +27,7 @@ export function virtualFiles(
       { filter: /./, namespace: "vfs" },
       async ({ path }) => {
         let entry = map[path];
-        if (typeof entry === 'function') entry = await entry();
+        if (typeof entry === "function") entry = await entry();
         return ({
           resolveDir: ".",
           loader: "ts",
@@ -88,7 +93,6 @@ export function markoViaBuildCache(): esbuild.Plugin {
           if (!fs.existsSync(file)) {
             console.warn(`File does not exist: ${file}`);
           }
-          console.log(markoCache.keys());
           throw new Error("Marko file not in cache: " + file);
         }
         return ({
@@ -106,7 +110,7 @@ export function isIgnoredSource(source: string) {
   return source.includes("<define:") ||
     source.startsWith("vfs:") ||
     source.startsWith("dropped:") ||
-    source.includes("node_modules")
+    source.includes("node_modules");
 }

 import * as esbuild from "esbuild";
@@ -114,4 +118,5 @@ import * as string from "#sitegen/string";
 import * as path from "node:path";
 import * as fs from "#sitegen/fs";
 import * as incr from "./incremental.ts";
-import * as hot from "./hot.ts";import { markoCache } from "./marko.ts";
+import * as hot from "./hot.ts";
+import { markoCache } from "./marko.ts";

View file

@@ -7,15 +7,16 @@ const { toRel, toAbs } = incr;
 const globalCssPath = toAbs("src/global.css");

 export async function main() {
-  await incr.restore();
+  if (!process.argv.includes("-f")) await incr.restore();
   await incr.compile(generate);
 }

 export async function generate() {
   // -- read config and discover files --
   const siteConfig = await incr.work(readManifest);
-  const { staticFiles, scripts, views, pages } =
-    await discoverAllFiles(siteConfig);
+  const { staticFiles, scripts, views, pages } = await discoverAllFiles(
+    siteConfig,
+  );

   // TODO: make sure that `static` and `pages` does not overlap
@@ -28,13 +29,21 @@ export async function generate() {
     staticFiles.map((item) =>
       incr.work(
         async (io, { id, file }) =>
-          void (await io.writeAsset(id, await io.readFile(file))),
+          void (await io.writeAsset({
+            pathname: id,
+            buffer: await io.readFile(file),
+          })),
         item,
-      ),
+      )
     ),
   );
   const routes = await Promise.all([...builtViews, ...builtPages]);
+  const viewsAndDynPages: incr.Ref<PageOrView>[] = [
+    ...builtViews,
+    ...builtPages.filter((page) => UNWRAP(page.value).regenerate),
+  ];

   // -- page resources --
   const scriptMap = incr.work(bundle.bundleClientJavaScript, {
     clientRefs: routes.flatMap((x) => x.clientRefs),
@@ -49,13 +58,16 @@ export async function generate() {
     platform: "node",
     styleMap,
     scriptMap,
-    viewItems: views,
-    viewRefs: builtViews,
+    viewItems: viewsAndDynPages.map((ref) => {
+      const { id, file, type } = UNWRAP(ref.value);
+      return { id: type === "page" ? `page:${id}` : id, file };
+    }),
+    viewRefs: viewsAndDynPages,
   });

   // -- assemble page assets --
   const pAssemblePages = builtPages.map((page) =>
-    assembleAndWritePage(page, styleMap, scriptMap),
+    assembleAndWritePage(page, styleMap, scriptMap)
   );
   await Promise.all([builtBackend, builtStaticFiles, ...pAssemblePages]);
@@ -77,7 +89,7 @@ export async function discoverAllFiles(
   return (
     await Promise.all(
       siteConfig.siteSections.map(({ root: sectionRoot }) =>
-        incr.work(scanSiteSection, toAbs(sectionRoot)),
+        incr.work(scanSiteSection, toAbs(sectionRoot))
       ),
     )
   ).reduce((acc, next) => ({
@@ -105,8 +117,7 @@ export async function scanSiteSection(io: Io, sectionRoot: string) {
   let scripts: FileItem[] = [];

   const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
-  const rootPrefix =
-    hot.projectSrc === sectionRoot
+  const rootPrefix = hot.projectSrc === sectionRoot
     ? ""
     : path.relative(hot.projectSrc, sectionRoot) + "/";
   const kinds = [
@@ -181,7 +192,8 @@ export async function preparePage(io: Io, item: sg.FileItem) {
     meta: metadata,
     theme: pageTheme,
     layout,
-  } = await io.import<any>(item.file);
+    regenerate,
+  } = await io.import<sg.PageExports>(item.file);
   if (!Page) throw new Error("Page is missing a 'default' export.");
   if (!metadata) throw new Error("Page is missing 'meta' export with a title.");
@@ -219,16 +231,20 @@ export async function preparePage(io: Io, item: sg.FileItem) {
       "Page is missing 'meta.title'. " + "All pages need a title tag.",
     );
   }

   const styleKey = css.styleKey(cssImports, theme);
   return {
+    type: "page",
     id: item.id,
+    file: item.file,
+    regenerate,
     html: text,
     meta: renderedMeta,
     cssImports,
     theme: theme ?? null,
     styleKey,
     clientRefs: Array.from(addon[sg.userData.key].scripts),
-  };
+  } as const;
 }

 export async function prepareView(io: Io, item: sg.FileItem) {
@@ -243,13 +259,15 @@
   );
   const styleKey = css.styleKey(cssImports, theme);
   return {
-    file: path.relative(hot.projectRoot, item.file),
+    type: "view",
+    id: item.id,
+    file: item.file,
     cssImports,
     theme,
     clientRefs: hot.getClientScriptRefs(item.file),
     hasLayout: !!module.layout?.default,
     styleKey,
-  };
+  } as const;
 }

 export type PreparedView = Awaited<ReturnType<typeof prepareView>>;
@@ -274,7 +292,8 @@ export async function assembleAndWritePage(
   scriptWork: incr.Ref<Record<string, string>>,
 ) {
   const page = await pageWork;
-  return incr.work(async (io, { id, html, meta, styleKey, clientRefs }) => {
+  return incr.work(
+    async (io, { id, html, meta, styleKey, clientRefs, regenerate }) => {
       const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));

       const scriptIds = clientRefs.map(hot.getScriptId);
@@ -284,18 +303,27 @@
         .map((x) => `{${x}}`)
         .join("\n");

-      const doc = wrapDocument({
+      const buffer = sg.wrapDocument({
         body: html,
         head: meta,
         inlineCss,
         scripts,
       });
-      await io.writeAsset(id, doc, {
-        "Content-Type": "text/html",
-      });
-  }, page);
+      await io.writeAsset({
+        pathname: id,
+        buffer,
+        headers: {
+          "Content-Type": "text/html",
+        },
+        regenerative: !!regenerate,
+      });
+    },
+    page,
+  );
 }

+export type PageOrView = PreparedPage | PreparedView;
+
 import * as sg from "#sitegen";
 import * as incr from "./incremental.ts";
 import { Io } from "./incremental.ts";
@@ -307,4 +335,3 @@ import * as fs from "#sitegen/fs";
 import type { FileItem } from "#sitegen";
 import * as path from "node:path";
 import * as meta from "#sitegen/meta";
-import { wrapDocument } from "./lib/view.ts";

View file

@@ -78,8 +78,7 @@ Module.prototype._compile = function (
       }
     }
     fileStats.set(filename, {
-      cssImportsRecursive:
-        cssImportsMaybe.length > 0
+      cssImportsRecursive: cssImportsMaybe.length > 0
         ? Array.from(new Set(cssImportsMaybe))
         : null,
       imports,
@@ -137,8 +136,7 @@ export function loadEsbuildCode(
     src = code;
   }
   if (src.includes("import.meta")) {
-    src =
-      `
+    src = `
 import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())};
 import.meta.dirname = ${JSON.stringify(path.dirname(filepath))};
 import.meta.filename = ${JSON.stringify(filepath)};

View file

@@ -11,7 +11,7 @@ test("trivial case", async () => {
       await setTimeout(1000);
       const contents = await io.readFile(file1.path);
       return [contents, Math.random()] as const;
-    }
+    },
   });
   const second = incr.work({
     label: "second compute",
@@ -19,7 +19,7 @@
     async run(io) {
       await setTimeout(1000);
       return io.readWork(first)[0].toUpperCase();
-    }
+    },
   });
   const third = incr.work({
     label: "third compute",
@@ -27,7 +27,7 @@
     async run(io) {
       await setTimeout(1000);
       return io.readWork(first)[1] * 1000;
-    }
+    },
   });
   return incr.work({
     label: "last compute",
@@ -37,8 +37,8 @@
       return {
         second: io.readWork(second),
         third: io.readWork(third),
-      }
-    }
+      };
+    },
   });
 }
 const { value: first } = await incr.compile(compilation);
@@ -52,5 +52,5 @@
 import * as incr from "./incremental2.ts";
 import { beforeEach, test } from "node:test";
-import { tmpFile } from "#sitegen/testing";import { setTimeout } from "node:timers/promises";
+import { tmpFile } from "#sitegen/testing";
+import { setTimeout } from "node:timers/promises";

View file

@@ -2,8 +2,7 @@
 // See `work()`, `compile()`, and `invalidate()` for details.
 //
 // All state is serializable to allow recovering state across sessions.
-// This framework special-cases the asset map, but is otherwise
-// agnostic of what it is a compiler for.
+// This library special-cases the asset map, but is otherwise agnostic.
 let running = false;
 let jobs = 0;
 let newKeys = 0;
@@ -15,27 +14,25 @@ let writes = new Map<string, FileWrite>();
 let assets = new Map<string, Asset>(); // keyed by hash

 export interface Ref<T> {
-  /** This method is compatible with `await` syntax */
-  then(
-    onFulfilled: (value: T) => void,
-    onRejected: (error: unknown) => void,
-  ): void;
   key: string;
+  /** This method is compatible with `await` syntax */
+  then(resolve: (value: T) => void, reject: (error: unknown) => void): void;
+  get value(): T | null;
 }

 type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;

 /**
- * Declare and a unit of work. Return value is memoized and
- * only rebuilt when inputs (declared via `Io`) change. Outputs
- * are written at the end of a compilation (see `compile`).
+ * Declare a unit of work. Return value is memoized and only rebuilt when
+ * inputs change. Inputs are tracked via the `io` interface, as well as a hash
+ * of the `input` param and caller source code. Outputs are written at the end
+ * of a compilation (see `compile`).
  *
- * If the returned `Ref` is not awaited or read
- * via io.readWork, the job is never started.
+ * Work items are lazy, only started when `Ref` is awaited or `io.readWork`ed.
  */
 export function work<O>(job: Job<void, O>): Ref<O>;
 export function work<I, O>(job: Job<I, O>, input: I): Ref<O>;
 export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
-  const source = JSON.stringify(util.getCallSites(2)[1]);
+  const source = JSON.stringify(UNWRAP(util.getCallSites(2)[1]));
   const keySource = [source, util.inspect(input)].join(":");
   const key = crypto.createHash("sha1").update(keySource).digest("base64url");
   ASSERT(running);
@@ -48,7 +45,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
   const prev = works.get(key) as Work<O> | null;
   if (prev) {
-    return { key, then: (done) => done(prev.value) };
+    return { key, then: (done) => done(prev.value), value: prev.value };
   }

   async function perform() {
@@ -59,13 +56,7 @@
     const value = await job(io, input);
     validateSerializable(value, "");
     const { reads, writes } = io;
-    works.set(key, {
-      value,
-      affects: [],
-      reads,
-      writes,
-      debug: source,
-    });
+    works.set(key, { value, affects: [], reads, writes });
     for (const add of reads.files) {
       const { affects } = UNWRAP(files.get(add));
       ASSERT(!affects.includes(key));
@@ -85,7 +76,12 @@
   let cached: Promise<O>;
   return {
     key,
-    then: (fufill, reject) => void (cached ??= perform()).then(fufill, reject),
+    then(fufill, reject) {
+      (cached ??= perform()).then(fufill, reject);
+    },
+    get value() {
+      return (works.get(this.key)?.value as O) ?? null;
+    },
   };
 }
@@ -97,11 +93,7 @@ export async function compile<T>(compiler: () => Promise<T>) {
   const start = performance.now();
   const timerSpinner = new Spinner({
     text: () =>
-      `sitegen! [${
-        ((performance.now() - start) / 1000).toFixed(
-          1,
-        )
-      }s]`,
+      `sitegen! [${((performance.now() - start) / 1000).toFixed(1)}s]`,
     fps: 10,
   });
   using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@@ -114,8 +106,10 @@
     return {
       value,
       watchFiles: new Set(files.keys()),
-      newOutputs: Array.from(seenWrites).filter(x => x.startsWith('f:')).map(x => x.slice(2)),
-      newAssets: !Array.from(seenWrites).some(x => x.startsWith('a:')),
+      newOutputs: Array.from(seenWrites)
+        .filter((x) => x.startsWith("f:"))
+        .map((x) => x.slice(2)),
+      newAssets: !Array.from(seenWrites).some((x) => x.startsWith("a:")),
     };
   } finally {
     running = false;
@@ -127,9 +121,10 @@
 export async function flush(start: number) {
   // Trim
-  const detachedFiles = new Set<string>;
-  const referencedAssets = new Set<string>;
-  for (const [k, { writes: { assets } }] of works) {
+  const detachedFiles = new Set<string>();
+  const referencedAssets = new Set<string>();
+  for (const [k, v] of works) {
+    const assets = v.writes.assets;
     if (seenWorks.has(k)) {
       for (const asset of assets.values()) referencedAssets.add(asset.hash);
       continue;
@@ -141,12 +136,9 @@
     files.delete(k);
     detachedFiles.add(k);
   }
-  for (const k of assets.keys()) {
-    if (!referencedAssets.has(k))
-      assets.delete(k);
-  }
+  for (const k of assets.keys()) if (!referencedAssets.has(k)) assets.delete(k);

-  const p = [];
+  const p: Promise<void>[] = [];
   // File writes
   let dist = 0;
   for (const [key, { buffer, size }] of writes) {
@@ -155,10 +147,14 @@
   }
   // Asset map
   {
-    const { json, blob } = getAssetManifest();
+    const { json, blob, dynamic, dts } = getAssetManifest();
     const jsonString = Buffer.from(JSON.stringify(json));
-    p.push(fs.writeMkdir(".clover/o/static.json", jsonString));
-    p.push(fs.writeMkdir(".clover/o/static.blob", blob));
+    p.push(fs.writeMkdir(".clover/o/asset.json", jsonString));
+    p.push(fs.writeMkdir(".clover/o/asset.blob", blob));
+    p.push(fs.writeMkdir(".clover/ts/asset.d.ts", dts));
+    for (const [k, v] of dynamic) {
+      p.push(fs.writeMkdir(`.clover/o/dynamic/${k}`, v));
+    }
     dist += blob.byteLength + jsonString.byteLength;
   }
   await Promise.all(p);
@@ -171,9 +167,8 @@
   console.writeLine(`  - ${works.size} keys (${works.size - newKeys} cached)`);
   console.writeLine(`  - ${assets.size} static assets`);
   console.writeLine(
-    `  - dist: ${formatSize(dist)}, incremental: ${
-      formatSize(serialized.byteLength)
-    }`,
+    `  - dist: ${formatSize(dist)},` +
+      ` incremental: ${formatSize(serialized.byteLength)}`,
   );
 }
@@ -212,13 +207,14 @@ function deleteWork(key: string) {
     affects.splice(affects.indexOf(key), 1);
   }
   for (const remove of affects) {
-    const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
+    const {
+      reads: { works: list },
+    } = UNWRAP(works.get(remove), remove);
     ASSERT(list.has(key));
     list.delete(key);
   }
   for (const file of w.files) {
-    if (UNWRAP(writes.get(file)).work === key)
-      writes.delete(file);
+    if (UNWRAP(writes.get(file)).work === key) writes.delete(file);
   }
   // Assets are temporarily kept, trimmed via manual GC after compilation.
@@ -234,23 +230,32 @@ export function reset() {
 }

 export function serialize() {
-  const fileEntries = Array.from(files, ([k, v]) =>
-    [
-      k,
-      v.type,
-      v.type === 'f' ? v.lastModified : v.type === 'd' ? v.contentHash : null,
-      ...v.affects,
-    ] as const);
-  const workEntries = Array.from(works, ([k, v]) =>
-    [
-      k,
-      v.value,
-      Array.from(v.reads.files),
-      Array.from(v.reads.works),
-      Array.from(v.writes.files),
-      Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const),
-      v.affects,
-    ] as const);
+  const fileEntries = Array.from(
+    files,
+    ([k, v]) =>
+      [
+        k,
+        v.type,
+        v.type === "f" ? v.lastModified : v.type === "d" ? v.contentHash : null,
+        ...v.affects,
+      ] as const,
+  );
+  const workEntries = Array.from(
+    works,
+    ([k, v]) =>
+      [
+        k,
+        v.value,
+        Array.from(v.reads.files),
+        Array.from(v.reads.works),
+        Array.from(v.writes.files),
+        Array.from(
+          v.writes.assets,
+          ([k, { headers }]) => [k, headers] as const,
+        ),
+        v.affects,
+      ] as const,
+  );
   const expectedFilesOnDisk = Array.from(
     writes,
     ([k, { size, work }]) => [k, size, work] as const,
@@ -280,7 +285,7 @@ async function deserialize(buffer: Buffer) {
     if (type === "f") {
       ASSERT(typeof content === "number");
       files.set(k, { type, affects, lastModified: content });
-    } else if (type === 'd') {
+    } else if (type === "d") {
       ASSERT(typeof content === "string");
       files.set(k, { type, affects, contentHash: content, contents: [] });
     } else {
@@ -288,15 +293,8 @@
     }
   }
   for (const entry of workEntries) {
-    const [
-      k,
-      value,
-      readFiles,
-      readWorks,
-      writeFiles,
-      writeAssets,
-      affects,
-    ] = entry;
+    const [k, value, readFiles, readWorks, writeFiles, writeAssets, affects] =
+      entry;
     works.set(k, {
       value,
       reads: {
@@ -305,23 +303,30 @@
       },
       writes: {
         files: new Set(writeFiles),
-        assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, {
-          hash: JSON.parse(UNWRAP(headers.etag)),
-          headers,
-        }])),
+        assets: new Map(
+          Array.from(writeAssets, ([k, headers]) => [
+            k,
+            {
+              hash: JSON.parse(UNWRAP(headers.etag)),
+              headers,
+            },
+          ]),
+        ),
       },
       affects,
     });
   }
-  const statFiles = await Promise.all(expectedFilesOnDisk
-    .map(([k, size, work]) =>
-      fs.stat(path.join(".clover/o", k))
+  const statFiles = await Promise.all(
+    expectedFilesOnDisk.map(([k, size, work]) =>
+      fs
+        .stat(path.join(".clover/o", k))
         .catch((err) => {
           if (err.code === "ENOENT") return null;
           throw err;
         })
         .then((stat) => ({ k, size, work, stat }))
-    ));
+    ),
+  );
   for (const { k, stat, work, size } of statFiles) {
     if (stat?.size === size) {
       writes.set(k, {
@@ -337,19 +342,28 @@
     assets.set(hash, { raw, gzip, zstd });
   }

-  await Promise.all(Array.from(files, ([key, file]) => invalidateEntry(key, file)));
+  await Promise.all(
+    Array.from(files, ([key, file]) => invalidateEntry(key, file, false)),
+  );
 }

-export async function invalidate(filePath: string): Promise<boolean> {
+export async function invalidate(
+  filePath: string,
+  unloadModule: boolean = true,
+): Promise<boolean> {
   const key = toRel(toAbs(filePath));
-  const file = UNWRAP(files.get(key), `Untracked file '${key}'`)
-  return invalidateEntry(key, file)
+  const file = UNWRAP(files.get(key), `Untracked file '${key}'`);
+  return invalidateEntry(key, file, unloadModule);
 }

-export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
+export async function invalidateEntry(
+  key: string,
+  file: TrackedFile,
+  unloadModule: boolean,
+): Promise<boolean> {
   try {
     if (file.type === "d") {
-      const contents = file.contents = await fs.readdir(key);
+      const contents = (file.contents = await fs.readdir(key));
       contents.sort();
       const contentHash = crypto
         .createHash("sha1")
@@ -359,48 +373,79 @@ export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
         file.contentHash = contentHash;
         throw new Error();
       }
-    } else if (file.type === 'f') {
-      const lastModified = await fs.stat(key)
-        .then(x => Math.floor(x.mtimeMs), () => 0);
+    } else if (file.type === "f") {
+      const lastModified = await fs.stat(key).then(
+        (x) => Math.floor(x.mtimeMs),
+        () => 0,
+      );
       if (file.lastModified !== lastModified) {
         file.lastModified = lastModified;
         throw new Error();
       }
     } else {
-      file.type satisfies 'null';
+      file.type satisfies "null";
      const stat = await fs.stat(key).catch(() => null);
       if (stat) throw new Error();
     }
     return false;
   } catch (e) {
     forceInvalidate(file);
-    hot.unload(toAbs(key));
-    if (file.type === 'null') files.delete(key);
+    if (unloadModule) {
+      // TODO: handle when this triggers unloading of `generate.ts`
+      hot.unload(toAbs(key));
+    }
+    if (file.type === "null") files.delete(key);
     return true;
   }
 }

 export function getAssetManifest() {
+  const dynamic = new Map<string, Buffer>();
   const writer = new BufferWriter();
-  const asset = Object.fromEntries(
+  const assetMap = Object.fromEntries(
     Array.from(works, (work) => work[1].writes.assets)
       .filter((map) => map.size > 0)
       .flatMap((map) =>
-        Array.from(map, ([key, { hash, headers }]) => {
+        Array.from(map, ([key, { hash, headers, regenerative }]) => {
           const { raw, gzip, zstd } = UNWRAP(
             assets.get(hash),
             `Asset ${key} (${hash})`,
           );
-          return [key, {
-            raw: writer.write(raw, "raw:" + hash),
-            gzip: writer.write(gzip, "gzip:" + hash),
-            zstd: writer.write(zstd, "zstd:" + hash),
-            headers,
-          }] as const;
+          if (regenerative) {
+            const id = crypto
+              .createHash("sha1")
+              .update(key)
+              .digest("hex")
+              .slice(0, 16); /* TODO */
+            dynamic.set(
+              id,
+              manifest.packDynamicBuffer(raw, gzip, zstd, headers),
+            );
+            return [key, { type: 1, id }] as const;
+          }
+          return [
+            key,
+            {
+              type: 0,
+              raw: writer.write(raw, "raw:" + hash),
+              gzip: writer.write(gzip, "gzip:" + hash),
+              zstd: writer.write(zstd, "zstd:" + hash),
+              headers,
+            },
+          ] as const;
         })
       ),
-  ) satisfies BuiltAssetMap;
-  return { json: asset, blob: writer.get() };
+  ) satisfies manifest.Manifest;
+  return {
+    json: assetMap,
+    blob: writer.get(),
+    dynamic,
+    dts: "export type AssetKey = " +
+      Object.keys(assetMap)
+        .map((key) => JSON.stringify(key))
+        .join(" | ") +
+      "\n",
+  };
 }

 /* Input/Output with automatic tracking.
@@ -446,7 +491,7 @@ export class Io {
     const { key, resolved } = this.#trackFs(dir);
     const existing = files.get(key);
     try {
-      if (existing?.type === 'd') return existing.contents;
+      if (existing?.type === "d") return existing.contents;
       const contents = await fs.readdir(resolved);
       contents.sort();
       const contentHash = crypto
@@ -512,26 +557,27 @@ export class Io {
       }
     }
   }
-  async writeAsset(
-    pathname: string,
-    blob: string | Buffer,
-    headersOption?: HeadersInit,
-  ) {
-    ASSERT(pathname.startsWith("/"));
-    ASSERT(!seenWrites.has("a:" + pathname));
+  async writeAsset(asset: {
+    pathname: string;
+    buffer: string | Buffer;
+    regenerative?: boolean;
+    headers?: HeadersInit;
+  }) {
+    ASSERT(asset.pathname.startsWith("/"));
+    ASSERT(!seenWrites.has("a:" + asset.pathname));

-    const buffer = typeof blob === "string" ? Buffer.from(blob) : blob;
-
-    const headers = new Headers(headersOption ?? {});
+    const buffer = typeof asset.buffer === "string"
+      ? Buffer.from(asset.buffer)
+      : asset.buffer;
+    const headers = new Headers(asset.headers ?? {});
     const hash = crypto.createHash("sha1").update(buffer).digest("hex");
     if (!headers.has("Content-Type")) {
-      headers.set("Content-Type", mime.contentTypeFor(pathname));
+      headers.set("Content-Type", mime.contentTypeFor(asset.pathname));
     }
-    headers.set("ETag", JSON.stringify(hash));
-    this.writes.assets.set(pathname, {
+    headers.set("etag", JSON.stringify(hash));
+    this.writes.assets.set(asset.pathname, {
       hash,
+      // @ts-expect-error TODO
       headers: Object.fromEntries(headers),
+      regenerative: !!asset.regenerative,
     });
     if (!assets.has(hash)) {
       jobs += 1;
@@ -593,7 +639,10 @@ export function validateSerializable(value: unknown, key: string) {
   } else if (value && typeof value === "object") {
     if (Array.isArray(value)) {
       value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
-    } else if (Object.getPrototypeOf(value) === Object.prototype || Buffer.isBuffer(value)) {
+    } else if (
+      Object.getPrototypeOf(value) === Object.prototype ||
+      Buffer.isBuffer(value)
+    ) {
       Object.entries(value).forEach(([k, v]) =>
         validateSerializable(v, `${key}.${k}`)
       );
@@ -631,53 +680,48 @@ interface FileWrite {
 }
 interface Writes {
   files: Set<string>;
-  assets: Map<string, {
-    hash: string;
-    headers: Record<string, string>;
-  }>;
+  assets: Map<string, AssetWrite>;
 }
 interface Asset {
   raw: Buffer;
   gzip: Buffer;
   zstd: Buffer;
 }
+interface AssetWrite {
+  hash: string;
+  headers: Record<string, string>;
+  regenerative: boolean;
+}
 interface Work<T = unknown> {
-  debug?: string;
   value: T;
   reads: Reads;
   writes: Writes;
   affects: string[];
 }
 type TrackedFile =
-  & {
-    affects: string[];
-  }
+  & { affects: string[] }
   & (
     | { type: "f"; lastModified: number }
     | { type: "d"; contentHash: string; contents: string[] }
-    | { type: "null"; }
+    | { type: "null" }
   );
-export interface BuiltAssetMap {
-  [route: string]: BuiltAsset;
-}
-export interface BuiltAsset {
-  raw: BufferView;
-  gzip: BufferView;
-  zstd: BufferView;
-  headers: Record<string, string>;
-}

 const gzip = util.promisify(zlib.gzip);
 const zstdCompress = util.promisify(zlib.zstdCompress);

-import * as fs from "#sitegen/fs";
-import * as path from "node:path";
 import * as hot from "./hot.ts";
+import * as fs from "#sitegen/fs";
+import * as mime from "#sitegen/mime";
+import * as manifest from "#sitegen/assets";
+import * as path from "node:path";
 import * as util from "node:util";
 import * as crypto from "node:crypto";
-import * as mime from "#sitegen/mime";
 import * as zlib from "node:zlib";
 import * as console from "@paperclover/console";
 import { Spinner } from "@paperclover/console/Spinner";
 import { formatSize } from "@/file-viewer/format.ts";
 import * as msgpackr from "msgpackr";

View file

@@ -1,73 +1,114 @@
-interface Loaded {
-  map: BuiltAssetMap;
-  buf: Buffer;
-}
-let assets: Loaded | null = null;
-
-export type StaticPageId = string;
+// Static and dynamic assets are built alongside the server binary.
+// This module implements decoding and serving of the asset blobs,
+// but also implements patching of dynamic assets. The `Manifest`
+// is generated by `incremental.ts`
+const root = import.meta.dirname;
+let current: Loaded | null = null;
+
+// TODO: rename all these types
+type DynamicId = string;
+export type { Key };
+export type Manifest =
+  & {
+    [K in Key]: StaticAsset | DynamicAsset;
+  }
+  & {
+    [string: string]: StaticAsset | DynamicAsset;
+  };
+export interface StaticAsset extends AssetBase {
+  type: 0;
+}
+interface AssetBase {
+  headers: Record<string, string> & { etag: string };
+  raw: BufferView;
+  gzip: BufferView;
+  zstd: BufferView;
+}
+export interface DynamicAsset {
+  type: 1;
+  id: DynamicId;
+}
+interface Loaded {
+  map: Manifest;
+  static: Buffer;
+  dynamic: Map<DynamicId, DynamicEntry>;
+}
+export interface DynamicEntry extends AssetBase {
+  buffer: Buffer;
+}

 export async function reload() {
-  const [map, buf] = await Promise.all([
-    fs.readFile(path.join(import.meta.dirname, "static.json"), "utf8"),
-    fs.readFile(path.join(import.meta.dirname, "static.blob")),
-  ]);
-  return (assets = { map: JSON.parse(map), buf });
-}
-
-export function reloadSync() {
-  const map = fs.readFileSync(
-    path.join(import.meta.dirname, "static.json"),
-    "utf8",
-  );
-  const buf = fs.readFileSync(path.join(import.meta.dirname, "static.blob"));
-  return (assets = { map: JSON.parse(map), buf });
+  const map = await fs.readJson<Manifest>(path.join(root, "asset.json"));
+  const statics = await fs.readFile(path.join(root, "asset.blob"));
+  const dynamic = new Map(
+    await Promise.all(
+      Object.entries(map)
+        .filter((entry): entry is [string, DynamicAsset] => entry[1].type === 1)
+        .map(async ([k, v]) =>
+          [
+            v.id,
+            await fs.readFile(path.join(root, "dynamic", v.id))
+              .then(loadRegenerative),
+          ] as const
+        ),
+    ),
+  );
+  return (current = { map, static: statics, dynamic });
 }

 export async function middleware(c: Context, next: Next) {
-  if (!assets) await reload();
-  const asset = assets!.map[c.req.path];
+  if (!current) current = await reload();
+  const asset = current.map[c.req.path];
   if (asset) return assetInner(c, asset, 200);
   return next();
 }

 export async function notFound(c: Context) {
-  if (!assets) await reload();
+  if (!current) current = await reload();
   let pathname = c.req.path;
   do {
-    const asset = assets!.map[pathname + "/404"];
+    const asset = current.map[pathname + "/404"];
     if (asset) return assetInner(c, asset, 404);
     pathname = pathname.slice(0, pathname.lastIndexOf("/"));
   } while (pathname);
-  const asset = assets!.map["/404"];
+  const asset = current.map["/404"];
   if (asset) return assetInner(c, asset, 404);
   return c.text("the 'Not Found' page was not found", 404);
 }

-export async function serveAsset(
-  c: Context,
-  id: StaticPageId,
-  status: StatusCode,
-) {
-  return assetInner(c, (assets ?? (await reload())).map[id], status);
+export async function serveAsset(c: Context, id: Key, status: StatusCode) {
+  return assetInner(c, (current ?? (await reload())).map[id], status);
 }

+/** @deprecated */
 export function hasAsset(id: string) {
-  return (assets ?? reloadSync()).map[id] !== undefined;
+  return UNWRAP(current).map[id] !== undefined;
 }

 export function etagMatches(etag: string, ifNoneMatch: string) {
   return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1;
 }

-function subarrayAsset([start, end]: BufferView) {
-  return assets!.buf.subarray(start, end);
-}
-
-function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
-  const ifnonematch = c.req.header("If-None-Match");
-  if (ifnonematch) {
-    const etag = asset.headers.ETag;
-    if (etagMatches(etag, ifnonematch)) {
+function assetInner(c: Context, asset: Manifest[Key], status: StatusCode) {
+  ASSERT(current);
+  if (asset.type === 0) {
+    return respondWithBufferAndViews(c, current.static, asset, status);
+  } else {
+    const entry = UNWRAP(current.dynamic.get(asset.id));
+    return respondWithBufferAndViews(c, entry.buffer, entry, status);
+  }
+}
+
+function respondWithBufferAndViews(
+  c: Context,
+  buffer: Buffer,
+  asset: AssetBase,
+  status: StatusCode,
+) {
+  const ifNoneMatch = c.req.header("If-None-Match");
+  if (ifNoneMatch) {
+    const etag = asset.headers.etag;
+    if (etagMatches(etag, ifNoneMatch)) {
       return (c.res = new Response(null, {
         status: 304,
         statusText: "Not Modified",
@@ -80,24 +121,103 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
   const acceptEncoding = c.req.header("Accept-Encoding") ?? "";
   let body;
   let headers = asset.headers;
-  if (acceptEncoding.includes("zstd") && asset.zstd) {
-    body = subarrayAsset(asset.zstd);
+  if (acceptEncoding.includes("zstd")) {
+    body = buffer.subarray(...asset.zstd);
     headers = {
       ...asset.headers,
       "Content-Encoding": "zstd",
     };
-  } else if (acceptEncoding.includes("gzip") && asset.gzip) {
-    body = subarrayAsset(asset.gzip);
+  } else if (acceptEncoding.includes("gzip")) {
+    body = buffer.subarray(...asset.gzip);
     headers = {
       ...asset.headers,
       "Content-Encoding": "gzip",
     };
   } else {
-    body = subarrayAsset(asset.raw);
+    body = buffer.subarray(...asset.raw);
   }
   return (c.res = new Response(body, { headers, status }));
 }

+export function packDynamicBuffer(
+  raw: Buffer,
+  gzip: Buffer,
+  zstd: Buffer,
+  headers: Record<string, string>,
+) {
+  const headersBuffer = Buffer.from(
+    Object.entries(headers)
+      .map((entry) => entry.join(":"))
+      .join("\n"),
+    "utf-8",
+  );
+  const header = new Uint32Array(3);
+  header[0] = headersBuffer.byteLength + header.byteLength;
+  header[1] = header[0] + raw.byteLength;
+  header[2] = header[1] + gzip.byteLength;
+  return Buffer.concat([
+    Buffer.from(header.buffer),
+    headersBuffer,
+    raw,
+    gzip,
+    zstd,
+  ]);
+}
+
+function loadRegenerative(buffer: Buffer): DynamicEntry {
+  const headersEnd = buffer.readUInt32LE(0);
+  const headers = Object.fromEntries(
+    buffer
+      .subarray(3 * 4, headersEnd)
+      .toString("utf-8")
+      .split("\n")
+      .map((line) => {
+        const i = line.indexOf(":");
+        return [line.slice(0, i), line.slice(i + 1)];
+      }),
+  );
+  const raw = buffer.readUInt32LE(4);
+  const gzip = buffer.readUInt32LE(8);
+  const hasEtag = (v: object): v is typeof v & { etag: string } =>
+    "etag" in v && typeof v.etag === "string";
+  ASSERT(hasEtag(headers));
+  return {
+    headers,
+    buffer,
+    raw: [headersEnd, raw],
+    gzip: [raw, gzip],
+    zstd: [gzip, buffer.byteLength],
+  };
+}
+
+const gzip = util.promisify(zlib.gzip);
+const zstdCompress = util.promisify(zlib.zstdCompress);
+
+export async function overwriteDynamic(
+  key: Key,
+  value: string | Buffer,
+  headers: Record<string, string>,
+) {
+  if (!current) current = await reload();
+  const asset = UNWRAP(current.map[key]);
+  ASSERT(asset.type === 1);
+  UNWRAP(current.dynamic.has(asset.id));
+  const buffer = Buffer.from(value);
+  const etag = JSON.stringify(
+    crypto.createHash("sha1").update(buffer).digest("hex"),
+  );
+  const [gzipBuffer, zstdBuffer] = await Promise.all([
+    gzip(buffer),
+    zstdCompress(buffer),
+  ]);
+  const packed = packDynamicBuffer(buffer, gzipBuffer, zstdBuffer, {
+    ...headers,
+    etag,
+  });
+  current.dynamic.set(asset.id, loadRegenerative(packed));
+  await fs.writeFile(path.join(root, "dynamic", asset.id), packed);
+}
+
 process.on("message", (msg: any) => {
   if (msg?.type === "clover.assets.reload") reload();
 });
@@ -105,6 +225,10 @@ process.on("message", (msg: any) => {
 import * as fs from "#sitegen/fs";
 import type { Context, Next } from "hono";
 import type { StatusCode } from "hono/utils/http-status";
-import type { BuiltAsset, BuiltAssetMap, BufferView } from "../incremental.ts";
+import type { BufferView } from "../incremental.ts";
 import { Buffer } from "node:buffer";
 import * as path from "node:path";
+import type { AssetKey as Key } from "../../.clover/ts/asset.d.ts";
+import * as crypto from "node:crypto";
+import * as zlib from "node:zlib";
+import * as util from "node:util";

View file

@ -51,8 +51,8 @@ export class Queue<T, R> {
let n = 0; let n = 0;
for (const item of active) { for (const item of active) {
let itemText = "- " + item.format(now); let itemText = "- " + item.format(now);
text += text += `\n` +
`\n` + itemText.slice(0, Math.max(0, process.stdout.columns - 1)); itemText.slice(0, Math.max(0, process.stdout.columns - 1));
if (n > 10) { if (n > 10) {
text += `\n ... + ${active.length - n} more`; text += `\n ... + ${active.length - n} more`;
break; break;

View file

@ -4,6 +4,7 @@ export {
createReadStream, createReadStream,
createWriteStream, createWriteStream,
existsSync, existsSync,
type FileHandle,
open, open,
readdir, readdir,
readdirSync, readdirSync,
@ -15,7 +16,6 @@ export {
statSync, statSync,
writeFile, writeFile,
writeFileSync, writeFileSync,
type FileHandle,
}; };
export function mkdir(dir: string) { export function mkdir(dir: string) {
@ -98,6 +98,7 @@ import {
writeFileSync, writeFileSync,
} from "node:fs"; } from "node:fs";
import { import {
type FileHandle,
mkdir as nodeMkdir, mkdir as nodeMkdir,
open, open,
readdir, readdir,
@ -106,6 +107,5 @@ import {
rmdir, rmdir,
stat, stat,
writeFile, writeFile,
type FileHandle,
} from "node:fs/promises"; } from "node:fs/promises";
export { Stats } from "node:fs"; export { Stats } from "node:fs";

View file

@ -1,9 +1,11 @@
/* Impementation of CommonMark specification for markdown with support /* Implementation of [CommonMark] specification for markdown with support
* for custom syntax extensions via the parser options. Instead of * for custom syntax extensions via the parser options. Instead of
* returning an AST that has a second conversion pass to JSX, the * returning an AST that has a second conversion pass to JSX, the
* returned value of 'parse' is 'engine.Node' which can be stringified * returned value of 'parse' is 'engine.Node' which can be stringified
* via clover's SSR engine. This way, generation optimizations, async * via Clover's SSR engine. This way, generation optimizations, async
* components, and other features are gained for free here. * components, and other features are gained for free here.
*
* [CommonMark]: https://spec.commonmark.org/0.31.2/
*/ */
function parse(src: string, options: Partial<ParseOpts> = {}) {} function parse(src: string, options: Partial<ParseOpts> = {}) {}
@ -16,6 +18,9 @@ export function Markdown({
return parse(src, options); return parse(src, options);
} }
// TODO: This implementation is flawed because it is impossible to sanely handle
// emphasis and strong emphasis, and all their edge cases, through the
// extensions interface; those two should be special-cased instead.
function parseInline(src: string, options: Partial<InlineOpts> = {}) { function parseInline(src: string, options: Partial<InlineOpts> = {}) {
const { rules = inlineRules, links = new Map() } = options; const { rules = inlineRules, links = new Map() } = options;
const opts: InlineOpts = { rules, links }; const opts: InlineOpts = { rules, links };
@ -110,12 +115,11 @@ export const inlineRules: Record<string, InlineRule> = {
} else if (afterText[0] === "[") { } else if (afterText[0] === "[") {
const splitTarget = splitFirst(afterText.slice(1), /]/); const splitTarget = splitFirst(afterText.slice(1), /]/);
if (!splitTarget) return null; if (!splitTarget) return null;
const name = const name = splitTarget.first.trim().length === 0
splitTarget.first.trim().length === 0 // Collapsed reference link
? // Collapsed reference link ? textSrc.trim()
textSrc.trim() // Full Reference Link
: // Full Reference Link : splitTarget.first.trim();
splitTarget.first.trim();
const target = opts.links.get(name); const target = opts.links.get(name);
if (!target) return null; if (!target) return null;
({ href, title } = target); ({ href, title } = target);

View file

@ -1,6 +1,25 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string; export type ScriptId = string;
export interface PageExports extends ViewExports {
regenerate?: PageRegenerateOptions;
}
export interface ViewExports {
default: render.Component;
meta: meta.Meta | ((props: { ssr: true }) => Promise<meta.Meta> | meta.Meta);
theme?: css.Theme;
layout?: Layout;
}
export interface Layout {
default: render.Component;
theme?: css.Theme;
// TODO: nested layout
}
export interface PageRegenerateOptions {
tags?: string[];
seconds?: number;
debounce?: number;
}
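// Example: a page exporting `regenerate = { tags: ["q+a"] }` is re-rendered
// whenever `view.regenerate("q+a")` is called. `seconds` re-renders on a
// timer instead (assumed semantics, via the regenTtls table in `bundle.ts`).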
/** /**
* A filesystem object associated with some ID, * A filesystem object associated with some ID,
* such as a page's route to its source file. * such as a page's route to its source file.
@ -30,4 +49,24 @@ export function addScript(id: ScriptId | { value: ScriptId }) {
userData.get().scripts.add(typeof id === "string" ? id : id.value); userData.get().scripts.add(typeof id === "string" ? id : id.value);
} }
export function wrapDocument({
body,
head,
inlineCss,
scripts,
}: {
head: string;
body: string;
inlineCss: string;
scripts: string;
}) {
return `<!doctype html><html lang=en><head>${head}${
inlineCss ? `<style>${inlineCss}</style>` : ""
}</head><body>${body}${
scripts ? `<script>${scripts}</script>` : ""
}</body></html>`;
}
import * as render from "#engine/render"; import * as render from "#engine/render";
import type * as meta from "./meta.ts";
import type * as css from "../css.ts";

View file

@ -10,7 +10,9 @@ export function getDb(file: string) {
if (db) return db; if (db) return db;
const fileWithExt = file.includes(".") ? file : file + ".sqlite"; const fileWithExt = file.includes(".") ? file : file + ".sqlite";
db = new WrappedDatabase( db = new WrappedDatabase(
new DatabaseSync(path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt)), new DatabaseSync(
path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt),
),
); );
map.set(file, db); map.set(file, db);
return db; return db;

View file

@ -1,5 +1,9 @@
export function tmpFile(basename: string) { export function tmpFile(basename: string) {
const file = path.join(import.meta.dirname, '../../.clover/testing', basename); const file = path.join(
import.meta.dirname,
"../../.clover/testing",
basename,
);
return { return {
path: file, path: file,
read: fs.readFile.bind(fs, file), read: fs.readFile.bind(fs, file),
@ -7,5 +11,5 @@ export function tmpFile(basename: string) {
}; };
} }
import * as path from 'node:path'; import * as path from "node:path";
import * as fs from './fs.ts'; import * as fs from "./fs.ts";

View file

@ -1,38 +1,64 @@
export interface View { // The "view" system allows rendering dynamic pages within backends.
// This is done by scanning all `views` dirs, bundling their client
// resources, and then providing `serve` which renders a page.
//
// This system also implements page regeneration.
let codegen: Codegen;
try {
codegen = require("$views");
} catch {
throw new Error("Can only import '#sitegen/view' in backends.");
}
// Generated in `bundle.ts`
export interface Codegen {
views: { [K in Key]: View<PropsFromModule<ViewMap[K]>> };
scripts: Record<string, string>;
regenTtls: Ttl[];
regenTags: Record<RegenKey, Key[]>;
}
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
export interface View<Props extends Record<string, unknown>> {
component: render.Component; component: render.Component;
meta: meta: meta.Meta | ((props: Props) => Promise<meta.Meta> | meta.Meta);
| meta.Meta
| ((props: { context?: hono.Context }) => Promise<meta.Meta> | meta.Meta);
layout?: render.Component; layout?: render.Component;
inlineCss: string; inlineCss: string;
scripts: Record<string, string>; scripts: Record<string, string>;
} }
export interface Ttl {
seconds: number;
key: Key;
}
export type Key = keyof ViewMap;
let views: Record<string, View> = null!; export async function serve<K extends Key>(
let scripts: Record<string, string> = null!;
export async function renderView(
context: hono.Context, context: hono.Context,
id: string, id: K,
props: Record<string, unknown>, props: PropsFromModule<ViewMap[K]>,
) { ) {
return context.html(await renderViewToString(id, { context, ...props })); return context.html(await renderToString(id, { context, ...props }));
} }
export async function renderViewToString( type PropsFromModule<M extends any> = M extends {
id: string, default: (props: infer T) => render.Node;
props: Record<string, unknown>, } ? T
: never;
export async function renderToString<K extends Key>(
id: K,
props: PropsFromModule<ViewMap[K]>,
) { ) {
views ?? ({ views, scripts } = require("$views"));
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
const { const {
component, component,
inlineCss, inlineCss,
layout, layout,
meta: metadata, meta: metadata,
}: View = UNWRAP(views[id], `Missing view ${id}`); }: View<PropsFromModule<ViewMap[K]>> = UNWRAP(
codegen.views[id],
`Missing view ${id}`,
);
// -- metadata -- // -- metadata --
const renderedMetaPromise = Promise.resolve( const renderedMetaPromise = Promise.resolve(
@ -48,48 +74,48 @@ export async function renderViewToString(
} = await render.async(page, { [sg.userData.key]: sg.initRender() }); } = await render.async(page, { [sg.userData.key]: sg.initRender() });
// -- join document and send -- // -- join document and send --
return wrapDocument({ return sg.wrapDocument({
body, body,
head: await renderedMetaPromise, head: await renderedMetaPromise,
inlineCss, inlineCss,
scripts: joinScripts( scripts: joinScripts(
Array.from(sitegen.scripts, (id) => Array.from(
UNWRAP(scripts[id], `Missing script ${id}`), sitegen!.scripts,
(id) => UNWRAP(codegen.scripts[id], `Missing script ${id}`),
), ),
), ),
}); });
} }
export function provideViewData(v: typeof views, s: typeof scripts) { export function regenerate(tag: RegenKey) {
(views = v), (scripts = s); for (const view of codegen.regenTags[tag]) {
const key = view.slice("page:".length);
renderToString(view, {})
.then((result) => {
console.info(`regenerate ${key}`);
asset.overwriteDynamic(key as asset.Key, result, {
"content-type": "text/html",
});
})
.catch((e) => {
console.error(`Failed regenerating ${view} from tag ${tag}`, e);
});
}
} }
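// joinScripts(["let a=1", "let a=2"]) === "{let a=1};{let a=2}"; the braces
// keep each script's top-level let/const declarations from colliding.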
export function joinScripts(scriptSources: string[]) { function joinScripts(scriptSources: string[]) {
const { length } = scriptSources; const { length } = scriptSources;
if (length === 0) return ""; if (length === 0) return "";
if (length === 1) return scriptSources[0]; if (length === 1 && 0 in scriptSources) return scriptSources[0];
return scriptSources.map((source) => `{${source}}`).join(";"); return scriptSources.map((source) => `{${source}}`).join(";");
} }
export function wrapDocument({
body,
head,
inlineCss,
scripts,
}: {
head: string;
body: string;
inlineCss: string;
scripts: string;
}) {
return `<!doctype html><html lang=en><head>${head}${
inlineCss ? `<style>${inlineCss}</style>` : ""
}</head><body>${body}${
scripts ? `<script>${scripts}</script>` : ""
}</body></html>`;
}
import * as meta from "./meta.ts"; import * as meta from "./meta.ts";
import type * as hono from "#hono"; import type * as hono from "#hono";
import * as render from "#engine/render"; import * as render from "#engine/render";
import * as sg from "./sitegen.ts"; import * as sg from "./sitegen.ts";
import * as asset from "./assets.ts";
import type {
RegenKey,
RegisteredViews as ViewMap,
} from "../../.clover/ts/view.d.ts";

View file

@ -14,16 +14,17 @@ export function loadMarko(module: NodeJS.Module, filepath: string) {
// bare client import statements to it's own usage. // bare client import statements to it's own usage.
const scannedClientRefs = new Set<string>(); const scannedClientRefs = new Set<string>();
if (src.match(/^\s*client\s+import\s+["']/m)) { if (src.match(/^\s*client\s+import\s+["']/m)) {
src = src = src.replace(
src.replace(
/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m, /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
(_, src) => { (_, src) => {
const ref = JSON.parse(`"${src.slice(1, -1)}"`); const ref = JSON.parse(`"${src.slice(1, -1)}"`);
const resolved = hot.resolveClientRef(filepath, ref); const resolved = hot.resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved); scannedClientRefs.add(resolved);
return `<CloverScriptInclude=${JSON.stringify( return `<CloverScriptInclude=${
JSON.stringify(
hot.getScriptId(resolved), hot.getScriptId(resolved),
)} />`; )
} />`;
}, },
) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n'; ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
} }

View file

@ -11,7 +11,7 @@ let watch: Watch;
export async function main() { export async function main() {
// Catch up state by running a main build. // Catch up state by running a main build.
await incr.restore(); if (!process.argv.includes("-f")) await incr.restore();
watch = new Watch(rebuild); watch = new Watch(rebuild);
rebuild([]); rebuild([]);
} }
@ -36,15 +36,16 @@ function onSubprocessClose(code: number | null, signal: string | null) {
} }
async function rebuild(files: string[]) { async function rebuild(files: string[]) {
const hasInvalidated = files.length === 0 const hasInvalidated = files.length === 0 ||
|| (await Promise.all(files.map(incr.invalidate))).some(Boolean); (await Promise.all(files.map((file) => incr.invalidate(file))))
.some(Boolean);
if (!hasInvalidated) return; if (!hasInvalidated) return;
incr.compile(generate.generate).then(({ incr.compile(generate.generate).then(({
watchFiles, watchFiles,
newOutputs, newOutputs,
newAssets newAssets,
}) => { }) => {
const removeWatch = [...watch.files].filter(x => !watchFiles.has(x)) const removeWatch = [...watch.files].filter((x) => !watchFiles.has(x));
for (const file of removeWatch) watch.remove(file); for (const file of removeWatch) watch.remove(file);
watch.add(...watchFiles); watch.add(...watchFiles);
// Restart the server if it was changed or not running. // Restart the server if it was changed or not running.
@ -60,8 +61,8 @@ async function rebuild(files: string[]) {
function statusLine() { function statusLine() {
console.info( console.info(
`Watching ${watch.files.size} files ` `Watching ${watch.files.size} files ` +
+ `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`, `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`,
); );
} }
@ -142,7 +143,7 @@ class Watch {
#getFiles(absPath: string, event: fs.WatchEventType) { #getFiles(absPath: string, event: fs.WatchEventType) {
const files = []; const files = [];
if (this.files.has(absPath)) files.push(absPath); if (this.files.has(absPath)) files.push(absPath);
if (event === 'rename') { if (event === "rename") {
const dir = path.dirname(absPath); const dir = path.dirname(absPath);
if (this.files.has(dir)) files.push(dir); if (this.files.has(dir)) files.push(dir);
} }

View file

@ -1,6 +1,6 @@
// This is the main file for the backend // This is the main file for paperclover.net's server.
const app = new Hono(); const app = new Hono();
const logHttp = scoped("http", { color: "magenta" }); const logHttp = console.scoped("http", { color: "magenta" });
// Middleware // Middleware
app.use(trimTrailingSlash()); app.use(trimTrailingSlash());
@ -38,4 +38,4 @@ import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash"; import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets"; import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts"; import * as admin from "./admin.ts";
import { scoped } from "@paperclover/console"; import * as console from "@paperclover/console";

View file

@ -1,10 +0,0 @@
export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language",
desc: "...todo...",
date: "2025-07-08",
draft: true,
};
export const meta = formatBlogMeta(blob);
export * as layout from "@/blog/layout.tsx";

View file

@ -0,0 +1,87 @@
- imports at the bottom
- order your file by importance.
- 'G' to jump to imports, etc
- prefer namespace imports
- easier to type and refactor. easier to read.
- large files are okay
- all files are their own library
- split files up by making components modular, not by "oh it's too big"
- engine/render.ts is a standalone library, in order to split JSX, Suspense,
and Marko out, the main file was made modular.
- lowercase
- name objects ultra-concisely
- filenames are often one word describing what they contain
- avoid useless descriptors like "utils", "helpers", and "data"
- examples
- async.ts contains all the async library functions.
- watch.ts contains the file watcher and watch-reload mode.
- render.*, Io
- be ultra-concise in comments
- no "discarded" variables, embrace `void x`
- makes code more readable
- note how i want to write a lint for this
- note the one proposal i want about void
- push the ts inference engine (as const, ReturnType, etc)
- reduces how much you repeat yourself making it easier to refactor things
- use the code as the source of truth
- push the ts inference engine (generics)
- do not implement crazy things with the TS engine, instead use generic input
types, and then use regular control to narrow and transform the return type.
source of truth is your code.
- UNWRAP, ASSERT utility globals are amazing
- ban postfix '!'
- stripped for production frontend builds
- destructure often
- use the one example from work lol
- package.json "imports" are amazing
- remapping
- implementation switching
- testing
- embrace the web and node.js APIs
- sitegen relies on so many node features that bun and deno fail to run it.
- overlay modules are great
- avoid dependencies
- once you build your own mini standard library you win
- talk about regrets with mdx
## imports at the bottom
Here is an abridged version of my website's `backend.ts`. When reading it from
top to bottom it is immediately obvious that it is a Hono web server.
```ts
// This is the main file for paperclover.net's server.
const app = new Hono();
const logHttp = console.scoped("http", { color: "magenta" });
// Middleware
app.use(...);
...
// Backends
app.route("", require("./q+a/backend.ts").app);
...
export default app;
...
import { type Context, Hono, type Next } from "#hono";
import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts";
import * as console from "@paperclover/console";
```
Since `import`/`export` statements are hoisted like `var` and `function`, the
position of these statements within the file does not matter. The imported
modules have to be loaded before this file can start executing. With that in
mind, I've found it nicer to sort the file by _importance_ rather than by the
arbitrary rules dictated by how C-style `#include`s worked.
Start with a documentation comment, then the most important
functions/variables/types, and continue in decreasing order of importance.
Imports rank lowest since you very quickly get to know where common namespaces
come from.
And since they're at the bottom, you can just press `G` in Vim or `CMD+Down` on
the Mac to scroll to the end of the file.
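As a minimal sketch of the hoisting behavior (the `greet.ts` module here is
hypothetical, not from this codebase):

```ts
// This call runs fine even though its import is written below it:
// import bindings are initialized before any module code executes.
console.info(hello("clover"));

import { hello } from "./greet.ts";
```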

View file

@ -1,17 +1,20 @@
export const blog: BlogMeta = { export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language", title: "Marko is the coziest HTML templating language",
desc: "...todo...", desc: "...todo...",
date: "2025-06-13", created: "2025-06-13",
draft: true, draft: true,
}; };
export const meta = formatBlogMeta(blob); export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx"; export * as layout from "@/blog/layout.tsx";
I've been recently playing around [Marko][1], and after adding limited support I've recently been playing around with [Marko], and after adding limited support
for it in my website generator, [sitegen][2], I instantly fell in love with how for it in my website generator, [sitegen], I instantly fell in love with how
minimalistic it is in comparison to JSX, Astro components, and Svelte. minimalistic it is in comparison to JSX, Astro components, and Svelte.
## Introduction [Marko]: https://next.markojs.com
[sitegen]: https://paperclover.dev/clo/sitegen
## Introduction to Marko
If JSX was taking HTML and shoving its syntax into JavaScript, Marko is shoving If JSX was taking HTML and shoving its syntax into JavaScript, Marko is shoving
JavaScript into HTML. Attributes are JavaScript expressions. JavaScript into HTML. Attributes are JavaScript expressions.
@ -168,8 +171,8 @@ just like a regular function call, with '('.
<Something(item, index) /> <Something(item, index) />
``` ```
In fact, attributes can just be sugar over this syntax; _this technically isn't In fact, attributes can just be sugar over this syntax. (This technically isn't
true but it's close enough for the example_ true, but it's close enough for the example.)
``` ```
<SpecialButton type="submit" class="red" /> <SpecialButton type="submit" class="red" />
@ -237,15 +240,58 @@ used it. A brief example of it, modified from their documentation.
</form> </form>
``` ```
## Usage on `paperclover.net` <SectionHeader updated="2025-08-11">Usage on `paperclover.net`</SectionHeader>
TODO: document a lot of feedback, how i embedded Marko Using Marko for HTML generation is quite easy. `.marko` files can be compiled
into `.js` using the `@marko/compiler` library.
My website uses statically generated HTML. That is why I have not needed to use ```ts
reactive variables. My generator doesn't even try compiling components const src = fs.readFileSync("page.marko", "utf8");
client-side. const compile = marko.compileSync(src, "page.marko");
fs.writeFileSync("page.js", compile.code);
Here is the actual component used to render [questions on the clover q+a][/q+a]. const page = require("./page.js");
console.info(page);
import * as fs from "node:fs";
import * as marko from "@marko/compiler";
```
To get client-side JavaScript, an option can be passed to the Marko compiler to
generate the client-side code. While it is a big selling point of Marko, I do
not use any of their client-side features, instead deferring to manually-written
frontend scripts. That is how my website has been built for years: statically
generated. And for content-focused websites like mine, this is the correct
approach.
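For reference, a minimal sketch of asking the compiler for the browser build
instead (the `output` option comes from `@marko/compiler`'s config; treat the
exact values as an assumption and verify against the version in use):

```ts
const src = fs.readFileSync("page.marko", "utf8");
// "dom" selects the client-side output; the server/HTML output is the default.
const compiled = marko.compileSync(src, "page.marko", { output: "dom" });
fs.writeFileSync("page.client.js", compiled.code);

import * as fs from "node:fs";
import * as marko from "@marko/compiler";
```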
Since I have a custom HTML generation library (built on JSX and some React-like
patterns), I have written a simple integration that lets it use Marko
components. It works by replacing the generated import to `marko/html` with my
own module, which overrides functions like `createTemplate` (to change the
signature of a component), `dynamicTag` (to allow Marko to render non-Marko
components), and `fork` (to enable async integration with the rendering
framework). On top of this, a Node.js loader hook allows importing these files
directly.
```tsx
function Page() {
const q = Question.getByDate(new Date("2025-06-07 12:12 EST"));
return <div>
<h1>example question</h1>
<QuestionRender question={q} />
</div>;
}
// The synchronous render can be used because `Page` and `question.marko`
// do not await any promises (SQLite runs synchronously)
console.info(render.sync(<Page />).text);
import * as render from "#engine/render";
import QuestionRender from "@/q+a/tags/question.marko";
import { Question } from "@/q+a/models/Question.ts";
```
Here is the `question.marko` tag used to render [questions on the clover q+a](/q+a).
```marko ```marko
// Renders a `Question` entry including its markdown body. // Renders a `Question` entry including its markdown body.
@ -287,4 +333,7 @@ import { formatQuestionTimestamp, formatQuestionISOTimestamp } from "@/q+a/forma
import { CloverMarkdown } from "@/q+a/clover-markdown.tsx"; import { CloverMarkdown } from "@/q+a/clover-markdown.tsx";
``` ```
The integration is great, `client import` is quite a magical concept, and I've
tuned it to do the expected thing in my framework.
import { type BlogMeta, formatBlogMeta } from '@/blog/helpers.ts'; import { type BlogMeta, formatBlogMeta } from '@/blog/helpers.ts';

View file

@ -93,7 +93,7 @@ app.get("/file/*", async (c, next) => {
} satisfies APIDirectoryList; } satisfies APIDirectoryList;
return c.json(json); return c.json(json);
} }
c.res = await renderView(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, { c.res = await view.serve(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
file, file,
hasCotyledonCookie, hasCotyledonCookie,
}); });
@ -111,7 +111,7 @@ app.get("/file/*", async (c, next) => {
!lofi !lofi
) { ) {
prefetchFile(file.path); prefetchFile(file.path);
c.res = await renderView(c, "file-viewer/clofi", { c.res = await view.serve(c, "file-viewer/clofi", {
file, file,
hasCotyledonCookie, hasCotyledonCookie,
}); });
@ -125,11 +125,10 @@ app.get("/file/*", async (c, next) => {
let encoding = decideEncoding(c.req.header("Accept-Encoding")); let encoding = decideEncoding(c.req.header("Accept-Encoding"));
let sizeHeader = let sizeHeader = encoding === "raw"
encoding === "raw"
? expectedSize ? expectedSize
: // Size cannot be known because of compression modes // Size cannot be known because of compression modes
undefined; : undefined;
// Etag // Etag
{ {
@ -217,7 +216,7 @@ app.get("/canvas/:script", async (c, next) => {
if (!hasAsset(`/js/canvas/${script}.js`)) { if (!hasAsset(`/js/canvas/${script}.js`)) {
return next(); return next();
} }
return renderView(c, "file-viewer/canvas", { return view.serve(c, "file-viewer/canvas", {
script, script,
}); });
}); });
@ -295,8 +294,7 @@ function handleRanges(
): Response { ): Response {
// TODO: multiple ranges // TODO: multiple ranges
const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0); const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0);
const rangeBody = const rangeBody = streamOrBuffer instanceof ReadableStream
streamOrBuffer instanceof ReadableStream
? applySingleRangeToStream(streamOrBuffer, ranges) ? applySingleRangeToStream(streamOrBuffer, ranges)
: applyRangesToBuffer(streamOrBuffer, ranges, rangeSize); : applyRangesToBuffer(streamOrBuffer, ranges, rangeSize);
return new Response(rangeBody, { return new Response(rangeBody, {
@ -420,7 +418,7 @@ import { type Context, Hono } from "hono";
import * as render from "#engine/render"; import * as render from "#engine/render";
import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets"; import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets";
import { renderView } from "#sitegen/view"; import * as view from "#sitegen/view";
import { contentTypeFor } from "#sitegen/mime"; import { contentTypeFor } from "#sitegen/mime";
import { requireFriendAuth } from "@/friend-auth.ts"; import { requireFriendAuth } from "@/friend-auth.ts";

View file

@ -17,9 +17,11 @@ export async function main() {
const start = performance.now(); const start = performance.now();
const timerSpinner = new Spinner({ const timerSpinner = new Spinner({
text: () => text: () =>
`paper clover's scan3 [${((performance.now() - start) / 1000).toFixed( `paper clover's scan3 [${
((performance.now() - start) / 1000).toFixed(
1, 1,
)}s]`, )
}s]`,
fps: 10, fps: 10,
}); });
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() }; using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@ -44,7 +46,7 @@ export async function main() {
.flatMap((child) => .flatMap((child) =>
child.kind === MediaFileKind.directory child.kind === MediaFileKind.directory
? child.getRecursiveFileChildren() ? child.getRecursiveFileChildren()
: child, : child
); );
qMeta.addMany( qMeta.addMany(
@ -103,9 +105,11 @@ export async function main() {
) { ) {
date = mediaFile.date; date = mediaFile.date;
console.warn( console.warn(
`M-time on ${publicPath} was likely corrupted. ${formatDate( `M-time on ${publicPath} was likely corrupted. ${
formatDate(
mediaFile.date, mediaFile.date,
)} -> ${formatDate(stat.mtime)}`, )
} -> ${formatDate(stat.mtime)}`,
); );
} }
mediaFile = MediaFile.createFile({ mediaFile = MediaFile.createFile({
@ -162,7 +166,7 @@ export async function main() {
}: Omit<ProcessFileArgs, "spin">) { }: Omit<ProcessFileArgs, "spin">) {
const ext = mediaFile.extensionNonEmpty.toLowerCase(); const ext = mediaFile.extensionNonEmpty.toLowerCase();
let possible = processors.filter((p) => let possible = processors.filter((p) =>
p.include ? p.include.has(ext) : !p.exclude?.has(ext), p.include ? p.include.has(ext) : !p.exclude?.has(ext)
); );
if (possible.length === 0) return; if (possible.length === 0) return;
@ -196,7 +200,7 @@ export async function main() {
); );
} else { } else {
possible = order.map(({ id }) => possible = order.map(({ id }) =>
UNWRAP(possible.find((p) => p.id === id)), UNWRAP(possible.find((p) => p.id === id))
); );
} }
@ -260,8 +264,9 @@ export async function main() {
const children = dir.getChildren(); const children = dir.getChildren();
// readme.txt // readme.txt
const readmeContent = const readmeContent = children.find((x) =>
children.find((x) => x.basename === "readme.txt")?.contents ?? ""; x.basename === "readme.txt"
)?.contents ?? "";
// dirsort // dirsort
let dirsort: string[] | null = null; let dirsort: string[] | null = null;
@ -354,7 +359,9 @@ export async function main() {
console.info( console.info(
"Global Stats:\n" + "Global Stats:\n" +
`- File Count: \x1b[1m${count}\x1b[0m\n` + `- File Count: \x1b[1m${count}\x1b[0m\n` +
`- Canonical Size: \x1b[1m${formatSize(MediaFile.getByPath("/")!.size)}\x1b[0m\n` + `- Canonical Size: \x1b[1m${
formatSize(MediaFile.getByPath("/")!.size)
}\x1b[0m\n` +
`- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`, `- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`,
); );
} }
@ -459,7 +466,6 @@ const procLoadTextContents: Process = {
async run({ absPath, mediaFile, stat }) { async run({ absPath, mediaFile, stat }) {
if (stat.size > 1_000_000) return; if (stat.size > 1_000_000) return;
const text = await fs.readFile(absPath, "utf-8"); const text = await fs.readFile(absPath, "utf-8");
console.log({ text });
mediaFile.setContents(text); mediaFile.setContents(text);
}, },
}; };
@ -696,7 +702,7 @@ async function unproduceAsset(key: string) {
const ref = AssetRef.get(key); const ref = AssetRef.get(key);
if (ref) { if (ref) {
ref.unref(); ref.unref();
console.log(`unref ${key}`); console.warn(`TODO: unref ${key}`);
// TODO: remove associated files from target // TODO: remove associated files from target
} }
} }

View file

@ -29,7 +29,9 @@ export function formatDurationLong(seconds: number) {
const hours = Math.floor(seconds / 3600); const hours = Math.floor(seconds / 3600);
const minutes = Math.floor((seconds % 3600) / 60); const minutes = Math.floor((seconds % 3600) / 60);
const remainingSeconds = seconds % 60; const remainingSeconds = seconds % 60;
return `${hours}:${minutes.toString().padStart(2, "0")}:${remainingSeconds.toString().padStart(2, "0")}`; return `${hours}:${minutes.toString().padStart(2, "0")}:${
remainingSeconds.toString().padStart(2, "0")
}`;
} }
export function escapeUri(uri: string) { export function escapeUri(uri: string) {
@ -100,21 +102,27 @@ export function highlightLinksInTextView(
// Case 1: https:// or http:// URLs // Case 1: https:// or http:// URLs
if (match.startsWith("http")) { if (match.startsWith("http")) {
if (match.includes(findDomain)) { if (match.includes(findDomain)) {
return `<a href="${match return `<a href="${
match
.replace(/https?:\/\/paperclover\.net\/+/, "/") .replace(/https?:\/\/paperclover\.net\/+/, "/")
.replace(/\/\/+/g, "/")}">${match}</a>`; .replace(/\/\/+/g, "/")
}">${match}</a>`;
} }
return `<a href="${match.replace( return `<a href="${
match.replace(
/\/\/+/g, /\/\/+/g,
"/", "/",
)}" target="_blank" rel="noopener noreferrer">${match}</a>`; )
}" target="_blank" rel="noopener noreferrer">${match}</a>`;
} }
// Case 2: domain URLs without protocol // Case 2: domain URLs without protocol
if (match.startsWith(findDomain)) { if (match.startsWith(findDomain)) {
return `<a href="${match return `<a href="${
match
.replace(findDomain + "/", "/") .replace(findDomain + "/", "/")
.replace(/\/\/+/g, "/")}">${match}</a>`; .replace(/\/\/+/g, "/")
}">${match}</a>`;
} }
// Case 3: /file/ URLs // Case 3: /file/ URLs
@ -145,7 +153,7 @@ export function highlightLinksInTextView(
// Match sibling file names (only if they're not already part of a link) // Match sibling file names (only if they're not already part of a link)
if (siblingFiles.length > 0) { if (siblingFiles.length > 0) {
const escapedBasenames = siblingFiles.map((f) => const escapedBasenames = siblingFiles.map((f) =>
f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
); );
const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g"); const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g");
const parts = processedText.split(/(<[^>]*>)/); const parts = processedText.split(/(<[^>]*>)/);
@ -155,9 +163,11 @@ export function highlightLinksInTextView(
parts[i] = parts[i].replace(pattern, (match: string) => { parts[i] = parts[i].replace(pattern, (match: string) => {
const file = siblingLookup[match]; const file = siblingLookup[match];
if (file) { if (file) {
return `<a href="/file/${file.path return `<a href="/file/${
file.path
.replace(/^\//, "") .replace(/^\//, "")
.replace(/\/\/+/g, "/")}">${match}</a>`; .replace(/\/\/+/g, "/")
}">${match}</a>`;
} }
return match; return match;
}); });
@ -240,9 +250,11 @@ export function highlightConvo(text: string) {
return paras return paras
.map(({ speaker, lines }) => { .map(({ speaker, lines }) => {
return `<div class="s-${speaker}">${lines return `<div class="s-${speaker}">${
lines
.map((line) => `<div class="line">${line}</div>`) .map((line) => `<div class="line">${line}</div>`)
.join("\n")}</div>`; .join("\n")
}</div>`;
}) })
.join("\n"); .join("\n");
} }
@ -267,10 +279,12 @@ export function formatDate(dateTime: Date) {
? dateTime < unknownDate ? dateTime < unknownDate
? "??.??.??" ? "??.??.??"
: `xx.xx.${21 + Math.floor(dateTime.getTime() / 86400000)}` : `xx.xx.${21 + Math.floor(dateTime.getTime() / 86400000)}`
: `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${dateTime : `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${
dateTime
.getDate() .getDate()
.toString() .toString()
.padStart(2, "0")}.${dateTime.getFullYear().toString().slice(2)}`; .padStart(2, "0")
}.${dateTime.getFullYear().toString().slice(2)}`;
} }
import type { MediaFile } from "@/file-viewer/models/MediaFile.ts"; import type { MediaFile } from "@/file-viewer/models/MediaFile.ts";

View file

@ -131,8 +131,12 @@ export class Parse {
percentage: Number(percentageStr), percentage: Number(percentageStr),
timeElapsed, timeElapsed,
transferNumber: this.currentTransfer, transferNumber: this.currentTransfer,
filesToCheck: toCheckStr ? this.toCheck = Number(toCheckStr) : this.toCheck, filesToCheck: toCheckStr
totalFiles: totalStr ? this.totalFiles = Number(totalStr) : this.totalFiles, ? this.toCheck = Number(toCheckStr)
: this.toCheck,
totalFiles: totalStr
? this.totalFiles = Number(totalStr)
: this.totalFiles,
speed: speed || null, speed: speed || null,
}; };
} }

View file

@ -4,9 +4,7 @@
font-weight: 400 750; font-weight: 400 750;
font-style: normal; font-style: normal;
font-display: swap; font-display: swap;
font-variation-settings: font-variation-settings: "CASL" 0.25, "MONO" 0;
"CASL" 0.25,
"MONO" 0;
font-style: oblique -15deg 0deg; font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E; unicode-range: U+0020-007E;
} }
@ -16,9 +14,7 @@
font-weight: 400 800; font-weight: 400 800;
font-style: normal; font-style: normal;
font-display: swap; font-display: swap;
font-variation-settings: font-variation-settings: "CASL" 0.25, "MONO" 1;
"CASL" 0.25,
"MONO" 1;
font-style: oblique -15deg 0deg; font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E; unicode-range: U+0020-007E;
} }
@ -28,13 +24,21 @@
font-weight: 400 800; font-weight: 400 800;
font-style: normal; font-style: normal;
font-display: swap; font-display: swap;
font-variation-settings: font-variation-settings: "CASL" 0.25, "MONO" 1;
"CASL" 0.25,
"MONO" 1;
font-style: oblique -15deg 0deg; font-style: oblique -15deg 0deg;
unicode-range: unicode-range:
U+00C0-00FF, U+00A9, U+2190-2193, U+2018, U+2019, U+201C, U+201D, U+2022, U+00C0-00FF,
U+00A0-00A8, U+00AA-00BF, U+2194-2199, U+0100-017F; U+00A9,
U+2190-2193,
U+2018,
U+2019,
U+201C,
U+201D,
U+2022,
U+00A0-00A8,
U+00AA-00BF,
U+2194-2199,
U+0100-017F;
} }
*, *,

View file

@ -45,7 +45,7 @@ app.post("/q+a", async (c) => {
return sendSuccess(c, new Date()); return sendSuccess(c, new Date());
} }
const ipAddr = c.req.header("cf-connecting-ip"); const ipAddr = c.req.header("x-forwarded-for");
if (ipAddr) { if (ipAddr) {
input.sourceName = uniqueNamesGenerator({ input.sourceName = uniqueNamesGenerator({
dictionaries: [adjectives, colors, animals], dictionaries: [adjectives, colors, animals],
@ -54,11 +54,6 @@ app.post("/q+a", async (c) => {
}); });
} }
const cfIPCountry = c.req.header("cf-ipcountry");
if (cfIPCountry) {
input.sourceLocation = cfIPCountry;
}
if (ipAddr && PROXYCHECK_API_KEY) { if (ipAddr && PROXYCHECK_API_KEY) {
const proxyCheck = await fetch( const proxyCheck = await fetch(
`https://proxycheck.io/v2/?key=${PROXYCHECK_API_KEY}&risk=1&vpn=1`, `https://proxycheck.io/v2/?key=${PROXYCHECK_API_KEY}&risk=1&vpn=1`,
@ -77,17 +72,19 @@ app.post("/q+a", async (c) => {
proxyCheck[ipAddr].organisation ?? proxyCheck[ipAddr].organisation ??
proxyCheck[ipAddr].provider ?? "unknown"; proxyCheck[ipAddr].provider ?? "unknown";
} }
if (Number(proxyCheck[ipAddr].risk) > 72) { if (Number(proxyCheck[ipAddr].risk) > 78) {
return questionFailure( return questionFailure(
c, c,
403, 403,
"This IP address has been flagged as a high risk IP address. If you are using a VPN/Proxy, please disable it and try again.", "This IP address has been flagged as a high risk IP address. If " +
"you are using a VPN/Proxy, please disable it and try again.",
text, text,
); );
} }
} }
} }
view.regenerate("q+a inbox");
const date = Question.create( const date = Question.create(
QuestionType.pending, QuestionType.pending,
JSON.stringify(input), JSON.stringify(input),
@ -104,7 +101,7 @@ async function sendSuccess(c: Context, date: Date) {
id: formatQuestionId(date), id: formatQuestionId(date),
}, { status: 200 }); }, { status: 200 });
} }
c.res = await renderView(c, "q+a/success", { c.res = await view.serve(c, "q+a/success", {
permalink: `https://paperclover.net/q+a/${formatQuestionId(date)}`, permalink: `https://paperclover.net/q+a/${formatQuestionId(date)}`,
}); });
} }
@ -130,7 +127,7 @@ app.get("/q+a/:id", async (c, next) => {
if (image) { if (image) {
return getQuestionImage(question, c.req.method === "HEAD"); return getQuestionImage(question, c.req.method === "HEAD");
} }
return renderView(c, "q+a/permalink", { question }); return view.serve(c, "q+a/permalink", { question });
}); });
// Admin // Admin
@ -138,7 +135,7 @@ app.get("/admin/q+a", async (c) => {
return serveAsset(c, "/admin/q+a", 200); return serveAsset(c, "/admin/q+a", 200);
}); });
app.get("/admin/q+a/inbox", async (c) => { app.get("/admin/q+a/inbox", async (c) => {
return renderView(c, "q+a/backend-inbox", {}); return view.serve(c, "q+a/backend-inbox", {});
}); });
app.delete("/admin/q+a/:id", async (c, next) => { app.delete("/admin/q+a/:id", async (c, next) => {
const id = c.req.param("id"); const id = c.req.param("id");
@ -152,6 +149,7 @@ app.delete("/admin/q+a/:id", async (c, next) => {
} else { } else {
Question.rejectByQmid(question.qmid); Question.rejectByQmid(question.qmid);
} }
view.regenerate("q+a");
return c.json({ success: true, message: "ok" }); return c.json({ success: true, message: "ok" });
}); });
app.patch("/admin/q+a/:id", async (c, next) => { app.patch("/admin/q+a/:id", async (c, next) => {
@ -165,6 +163,7 @@ app.patch("/admin/q+a/:id", async (c, next) => {
return questionFailure(c, 400, "Bad Request"); return questionFailure(c, 400, "Bad Request");
} }
Question.updateByQmid(question.qmid, form.text, form.type); Question.updateByQmid(question.qmid, form.text, form.type);
view.regenerate("q+a");
return c.json({ success: true, message: "ok" }); return c.json({ success: true, message: "ok" });
}); });
app.get("/admin/q+a/:id", async (c, next) => { app.get("/admin/q+a/:id", async (c, next) => {
@ -177,20 +176,22 @@ app.get("/admin/q+a/:id", async (c, next) => {
let pendingInfo: null | PendingQuestionData = null; let pendingInfo: null | PendingQuestionData = null;
if (question.type === QuestionType.pending) { if (question.type === QuestionType.pending) {
pendingInfo = JSON.parse(question.text) as PendingQuestionData; pendingInfo = JSON.parse(question.text) as PendingQuestionData;
question.text = pendingInfo.prompt.trim().split("\n").map((line) => question.text = pendingInfo.prompt
line.trim().length === 0 ? "" : `q: ${line.trim()}` .trim()
).join("\n") + "\n\n"; .split("\n")
.map((line) => (line.trim().length === 0 ? "" : `q: ${line.trim()}`))
.join("\n") + "\n\n";
question.type = QuestionType.normal; question.type = QuestionType.normal;
} }
return renderView(c, "q+a/editor", { return view.serve(c, "q+a/editor", {
pendingInfo, pendingInfo,
question, question,
}); });
}); });
app.get("/q+a/things/random", async (c) => { app.get("/q+a/things/random", async (c) => {
c.res = await renderView(c, "q+a/things-random", {}); c.res = await view.serve(c, "q+a/things-random", {});
}); });
async function questionFailure( async function questionFailure(
@ -202,7 +203,7 @@ async function questionFailure(
if (c.req.header("Accept")?.includes("application/json")) { if (c.req.header("Accept")?.includes("application/json")) {
return c.json({ success: false, message, id: null }, { status }); return c.json({ success: false, message, id: null }, { status });
} }
return await renderView(c, "q+a/fail", { return await view.serve(c, "q+a/fail", {
error: message, error: message,
content, content,
}); });
@ -218,11 +219,8 @@ import {
} from "unique-names-generator"; } from "unique-names-generator";
import { hasAdminToken } from "../admin.ts"; import { hasAdminToken } from "../admin.ts";
import { serveAsset } from "#sitegen/assets"; import { serveAsset } from "#sitegen/assets";
import { import type { PendingQuestionData } from "./models/PendingQuestion.ts";
PendingQuestion,
PendingQuestionData,
} from "./models/PendingQuestion.ts";
import { Question, QuestionType } from "./models/Question.ts"; import { Question, QuestionType } from "./models/Question.ts";
import { renderView } from "#sitegen/view"; import * as view from "#sitegen/view";
import { getQuestionImage } from "./image.tsx"; import { getQuestionImage } from "./image.tsx";
import { formatQuestionId, questionIdToTimestamp } from "./format.ts"; import { formatQuestionId, questionIdToTimestamp } from "./format.ts";

View file

@ -144,9 +144,7 @@ function ListRenderer(node: ASTNode, children: any[]) {
const T = node.ordered ? "ol" : "ul"; const T = node.ordered ? "ol" : "ul";
return ( return (
<T> <T>
{children.map((child) => ( {children.map((child) => <li>{child}</li>)}
<li>{child}</li>
))}
</T> </T>
); );
} }

View file

@ -11,7 +11,7 @@ const getBrowser = RefCountedExpirable(
); );
export async function renderQuestionImage(question: Question) { export async function renderQuestionImage(question: Question) {
const html = await renderViewToString("q+a/image-embed", { question }); const html = await view.renderToString("q+a/image-embed", { question });
// this browser session will be reused if multiple images are generated // this browser session will be reused if multiple images are generated
// either at the same time or within a 5-minute time span. the dispose // either at the same time or within a 5-minute time span. the dispose
@ -44,14 +44,15 @@ export async function getQuestionImage(
question: Question, question: Question,
headOnly: boolean, headOnly: boolean,
): Promise<Response> { ): Promise<Response> {
const hash = crypto.createHash("sha1") const hash = crypto
.createHash("sha1")
.update(question.qmid + question.type + question.text) .update(question.qmid + question.type + question.text)
.digest("hex"); .digest("hex");
const headers = { const headers = {
"Content-Type": "image/png", "Content-Type": "image/png",
"Cache-Control": "public, max-age=31536000", "Cache-Control": "public, max-age=31536000",
"ETag": `"${hash}"`, ETag: `"${hash}"`,
"Last-Modified": question.date.toUTCString(), "Last-Modified": question.date.toUTCString(),
}; };
@ -78,4 +79,4 @@ import * as path from "node:path";
import * as puppeteer from "puppeteer"; import * as puppeteer from "puppeteer";
import { Question } from "@/q+a/models/Question.ts"; import { Question } from "@/q+a/models/Question.ts";
import { RefCountedExpirable } from "#sitegen/async"; import { RefCountedExpirable } from "#sitegen/async";
import { renderViewToString } from "#sitegen/view"; import * as view from "#sitegen/view";

View file

@ -7,7 +7,7 @@ export const meta: Metadata = {
description: "ask clover a question", description: "ask clover a question",
}; };
export const regenerate = { export const regenerate = {
manual: true, tags: ["q+a", "q+a inbox"],
}; };
<const/inboxSize = PendingQuestion.getAll().length /> <const/inboxSize = PendingQuestion.getAll().length />

View file

@ -1,11 +1,9 @@
export * as layout from "../layout.tsx";
export const regenerate = {
manual: true,
};
export interface Input { export interface Input {
admin?: boolean; admin?: boolean;
} }
export * as layout from "../layout.tsx";
export const regenerate = { tags: ["q+a"] };
export const meta: Metadata = { export const meta: Metadata = {
title: "paper clover q+a", title: "paper clover q+a",
description: "ask clover a question", description: "ask clover a question",
@ -14,7 +12,7 @@ export const meta: Metadata = {
<const/{ admin = false } = input /> <const/{ admin = false } = input />
<const/questions = [...Question.getAll()] /> <const/questions = [...Question.getAll()] />
<if=true> <if=!admin>
<question-form /> <question-form />
</> </>
<for|question| of=questions> <for|question| of=questions>

View file

@ -19,8 +19,8 @@ export const meta = { title: 'question answer inbox' };
</time> </time>
<div style="color: dodgerblue; margin-bottom: 0.25rem"> <div style="color: dodgerblue; margin-bottom: 0.25rem">
${q.sourceName} ${q.sourceName}
${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : null} ${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : ''}
${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : null} ${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : ''}
</div> </div>
<p style="white-space: pre-wrap">${q.prompt}</p> <p style="white-space: pre-wrap">${q.prompt}</p>
<p> <p>

View file

@ -33,8 +33,12 @@ const rawFileRoot = process.env.CLOVER_FILE_RAW ??
const derivedFileRoot = process.env.CLOVER_FILE_DERIVED ?? const derivedFileRoot = process.env.CLOVER_FILE_DERIVED ??
path.join(nasRoot, "Documents/Config/paperclover/derived"); path.join(nasRoot, "Documents/Config/paperclover/derived");
if (!fs.existsSync(rawFileRoot)) throw new Error(`${rawFileRoot} does not exist`); if (!fs.existsSync(rawFileRoot)) {
if (!fs.existsSync(derivedFileRoot)) throw new Error(`${derivedFileRoot} does not exist`); throw new Error(`${rawFileRoot} does not exist`);
}
if (!fs.existsSync(derivedFileRoot)) {
throw new Error(`${derivedFileRoot} does not exist`);
}
type Awaitable<T> = T | Promise<T>; type Awaitable<T> = T | Promise<T>;

View file

@ -5,7 +5,7 @@
"incremental": true, "incremental": true,
"jsx": "react-jsxdev", "jsx": "react-jsxdev",
"jsxImportSource": "#engine", "jsxImportSource": "#engine",
"lib": ["dom", "esnext", "esnext.iterator"], "lib": ["esnext", "dom", "dom.iterable"],
"module": "nodenext", "module": "nodenext",
"noEmit": true, "noEmit": true,
"outDir": ".clover/ts", "outDir": ".clover/ts",
@ -14,7 +14,8 @@
"skipLibCheck": true, "skipLibCheck": true,
"strict": true, "strict": true,
"verbatimModuleSyntax": true, "verbatimModuleSyntax": true,
"target": "es2022" "target": "es2022",
"noUncheckedIndexedAccess": true
}, },
"include": ["framework/**/*", "src/**/*"] "include": ["framework/**/*", "src/**/*"]
} }