feat: dynamic page regeneration (#24)

the asset system is reworked to support "dynamic" entries, where each
entry is a separate file on disk containing the latest generation's
headers+raw+gzip+zstd. when view.regenerate is called, it looks for
pages that declared "export const regenerate" during generation,
renders them through the view system, and stores the results as
assets instead of sending them as a response.

pages configured as regenerable are also bundled as views, under the
non-aliasing key "page:${page.id}". this key cannot collide with a
real view id because file paths may not contain a colon.
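
for illustration, a regenerable page might look like this (a sketch
only; the file name and tag are hypothetical, but the exported shape
follows the PageRegenerateOptions interface added in this commit):

// src/pages/questions.tsx (hypothetical)
export const regenerate = {
  tags: ["questions"], // rebuilt on demand via view.regenerate("questions")
  seconds: 3600,       // also collected into regenTtls for timed rebuilds
};
export const meta = { title: "questions" };
export default function Questions() {
  return <main>{/* ... */}</main>;
}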
clover caruso 2025-08-11 22:43:27 -07:00
parent 568fd83c34
commit c5ac450f21
41 changed files with 1143 additions and 640 deletions

View file

@ -3,27 +3,27 @@
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
utils.url = "github:numtide/flake-utils";
};
outputs =
{ nixpkgs, utils, ... }:
utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};
in
{
devShells.default = pkgs.mkShell {
buildInputs = [
pkgs.nodejs_24 # runtime
pkgs.deno # formatter
(pkgs.ffmpeg.override {
withOpus = true;
withSvtav1 = true;
withJxl = true;
withWebp = true;
})
pkgs.rsync
];
};
}
);
outputs = inputs: inputs.utils.lib.eachDefaultSystem (system:
with inputs.nixpkgs.legacyPackages.${system}; {
devShells.default = pkgs.mkShell {
buildInputs = [
pkgs.nodejs_24 # runtime
pkgs.deno # formatter
(pkgs.ffmpeg.override {
withOpus = true;
withSvtav1 = true;
withJxl = true;
withWebp = true;
})
pkgs.rsync
];
};
devShells.min = pkgs.mkShell {
buildInputs = [
pkgs.nodejs_24 # runtime
pkgs.deno # formatter
pkgs.rsync
];
};
});
}

View file

@ -88,19 +88,19 @@ export async function bundleClientJavaScript(
for (const file of outputFiles) {
const { text } = file;
let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
const sources = Object.keys(inputs).filter((x) => !isIgnoredSource(x));
const { entryPoint } = UNWRAP(metafile.outputs["out!" + route]);
// Register non-chunks as script entries.
const chunk = route.startsWith("/js/c.");
if (!chunk) {
const key = hot.getScriptId(path.resolve(sources[sources.length - 1]));
const key = hot.getScriptId(toAbs(UNWRAP(entryPoint)));
console.log(route, key);
route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
scripts[key] = text;
}
// Register chunks and public scripts as assets.
if (chunk || publicScriptRoutes.includes(route)) {
p.push(io.writeAsset(route, text));
p.push(io.writeAsset({ pathname: route, buffer: text }));
}
}
await Promise.all(p);
@ -111,7 +111,7 @@ export type ServerPlatform = "node" | "passthru";
export interface ServerSideOptions {
entries: string[];
viewItems: sg.FileItem[];
viewRefs: incr.Ref<PreparedView>[];
viewRefs: incr.Ref<PageOrView>[];
styleMap: Map<string, incr.Ref<string>>;
scriptMap: incr.Ref<Record<string, string>>;
platform: ServerPlatform;
@ -124,144 +124,192 @@ export async function bundleServerJavaScript({
entries,
platform,
}: ServerSideOptions) {
const wViewSource = incr.work(async (_, viewItems: sg.FileItem[]) => {
const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
return {
magicWord,
file: [
...viewItems.map(
(view, i) => `import * as view${i} from ${JSON.stringify(view.file)}`,
),
`const styles = ${magicWord}[-2]`,
`export const scripts = ${magicWord}[-1]`,
"export const views = {",
...viewItems.map((view, i) =>
[
` ${JSON.stringify(view.id)}: {`,
` component: view${i}.default,`,
` meta: view${i}.meta,`,
` layout: view${i}.layout?.default ?? null,`,
` inlineCss: styles[${magicWord}[${i}]]`,
` },`,
].join("\n"),
),
"}",
].join("\n"),
};
}, viewItems);
const regenKeys: Record<string, string[]> = {};
const regenTtls: view.Ttl[] = [];
for (const ref of viewRefs) {
const value = UNWRAP(ref.value);
if (value.type === "page" && (value.regenerate?.tags?.length ?? 0) > 0) {
for (const tag of value.regenerate!.tags!) {
(regenKeys[tag] ??= []).push(`page:${value.id}`);
}
}
if (value.type === "page" && (value.regenerate?.seconds ?? 0) > 0) {
regenTtls.push({
key: `page:${value.id}` as view.Key,
seconds: value.regenerate!.seconds!,
});
}
}
const wBundles = entries.map(
(entry) =>
[
entry,
incr.work(async (io, entry) => {
const pkg = await io.readJson<{
dependencies: Record<string, string>;
}>("package.json");
let magicWord = null as string | null;
// -- plugins --
const serverPlugins: esbuild.Plugin[] = [
virtualFiles({
// only add dependency when imported.
$views: async () => {
const view = await io.readWork(wViewSource);
({ magicWord } = view);
return view.file;
},
}),
projectRelativeResolution(),
markoViaBuildCache(),
{
name: "replace client references",
setup(b) {
b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
contents: hot.resolveClientRefs(
await fs.readFile(file, "utf-8"),
file,
).code,
loader: path.extname(file).slice(1) as esbuild.Loader,
}));
},
},
{
name: "mark css external",
setup(b) {
b.onResolve({ filter: /\.css$/ }, () => ({
path: ".",
namespace: "dropped",
}));
b.onLoad({ filter: /./, namespace: "dropped" }, () => ({
contents: "",
}));
},
},
];
const { metafile, outputFiles } = await esbuild.build({
bundle: true,
chunkNames: "c.[hash]",
entryNames: path.basename(entry, path.extname(entry)),
entryPoints: [
path.join(
import.meta.dirname,
"backend/entry-" + platform + ".ts",
),
],
platform: "node",
format: "esm",
minify: false,
outdir: "out!",
plugins: serverPlugins,
splitting: true,
logLevel: "silent",
write: false,
metafile: true,
jsx: "automatic",
jsxImportSource: "#engine",
jsxDev: false,
define: {
MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
"globalThis.CLOVER_SERVER_ENTRY": JSON.stringify(entry),
},
external: Object.keys(pkg.dependencies).filter(
(x) => !x.startsWith("@paperclover"),
),
});
await trackEsbuild(io, metafile);
let fileWithMagicWord: {
bytes: Buffer;
basename: string;
magicWord: string;
} | null = null;
for (const output of outputFiles) {
const basename = output.path.replace(/^.*?!(?:\/|\\)/, "");
const key = "out!/" + basename.replaceAll("\\", "/");
// If this contains the generated "$views" file, then
// mark this file as the one for replacement. Because
// `splitting` is `true`, esbuild will not emit this
// file in more than one chunk.
if (
magicWord &&
metafile.outputs[key].inputs["framework/lib/view.ts"]
) {
ASSERT(!fileWithMagicWord);
fileWithMagicWord = {
basename,
bytes: Buffer.from(output.contents),
magicWord,
};
} else {
io.writeFile(basename, Buffer.from(output.contents));
}
}
return fileWithMagicWord;
}, entry),
] as const,
const wViewSource = incr.work(
async (
_,
{ viewItems, regenKeys, regenTtls }: {
viewItems: sg.FileItem[];
regenKeys: Record<string, string[]>;
regenTtls: view.Ttl[];
},
) => {
const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
return {
magicWord,
file: [
...viewItems.map(
(view, i) =>
`import * as view${i} from ${JSON.stringify(view.file)}`,
),
`const styles = ${magicWord}[-2]`,
`export const scripts = ${magicWord}[-1]`,
"export const views = {",
...viewItems.map((view, i) =>
[
` ${JSON.stringify(view.id)}: {`,
` component: view${i}.default,`,
` meta: view${i}.meta,`,
` layout: view${i}.layout?.default ?? null,`,
` inlineCss: styles[${magicWord}[${i}]]`,
` },`,
].join("\n")
),
"}",
`export const regenTags = ${JSON.stringify(regenKeys)};`,
`export const regenTtls = ${JSON.stringify(regenTtls)};`,
].join("\n"),
};
},
{ viewItems, regenKeys, regenTtls },
);
const wProcessed = wBundles.map(async ([entry, wBundle]) => {
await incr.work(
async (io, { regenKeys, viewItems }) => {
io.writeFile(
"../ts/view.d.ts",
[
"export interface RegisteredViews {",
...viewItems
.filter((view) => !view.id.startsWith("page:"))
.map(
(view) =>
` ${JSON.stringify(view.id)}: ` +
`typeof import(${
JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))
}),`,
),
"}",
"export type RegenKey = " +
(regenKeys.map((key) => JSON.stringify(key)).join(" | ") ||
"never"),
].join("\n"),
);
},
{ regenKeys: Object.keys(regenKeys), viewItems },
);
const wBundles = entries.map((entry) =>
incr.work(async (io, entry) => {
const pkg = await io.readJson<{
dependencies: Record<string, string>;
}>("package.json");
let magicWord = null as string | null;
// -- plugins --
const serverPlugins: esbuild.Plugin[] = [
virtualFiles({
// only add dependency when imported.
$views: async () => {
const view = await io.readWork(wViewSource);
({ magicWord } = view);
return view.file;
},
}),
projectRelativeResolution(),
markoViaBuildCache(),
{
name: "replace client references",
setup(b) {
b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
contents: hot.resolveClientRefs(
await fs.readFile(file, "utf-8"),
file,
).code,
loader: path.extname(file).slice(1) as esbuild.Loader,
}));
},
},
{
name: "mark css external",
setup(b) {
b.onResolve({ filter: /\.css$/ }, () => ({
path: ".",
namespace: "dropped",
}));
b.onLoad({ filter: /./, namespace: "dropped" }, () => ({
contents: "",
}));
},
},
];
const { metafile, outputFiles } = await esbuild.build({
bundle: true,
chunkNames: "c.[hash]",
entryNames: path.basename(entry, path.extname(entry)),
entryPoints: [
path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
],
platform: "node",
format: "esm",
minify: false,
outdir: "out!",
plugins: serverPlugins,
splitting: true,
logLevel: "silent",
write: false,
metafile: true,
jsx: "automatic",
jsxImportSource: "#engine",
jsxDev: false,
define: {
MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
"globalThis.CLOVER_SERVER_ENTRY": JSON.stringify(entry),
},
external: Object.keys(pkg.dependencies).filter(
(x) => !x.startsWith("@paperclover"),
),
});
await trackEsbuild(io, metafile);
let fileWithMagicWord: {
bytes: Buffer;
basename: string;
magicWord: string;
} | null = null;
for (const output of outputFiles) {
const basename = output.path.replace(/^.*?!(?:\/|\\)/, "");
const key = "out!/" + basename.replaceAll("\\", "/");
// If this contains the generated "$views" file, then
// mark this file as the one for replacement. Because
// `splitting` is `true`, esbuild will not emit this
// file in more than one chunk.
if (
magicWord &&
UNWRAP(metafile.outputs[key]).inputs["framework/lib/view.ts"]
) {
ASSERT(!fileWithMagicWord);
fileWithMagicWord = {
basename,
bytes: Buffer.from(output.contents),
magicWord,
};
} else {
io.writeFile(basename, Buffer.from(output.contents));
}
}
return fileWithMagicWord;
}, entry)
);
const wProcessed = wBundles.map(async (wBundle) => {
if (!(await wBundle)) return;
await incr.work(async (io) => {
// Only the reachable resources need to be read and inserted into the bundle.
@ -278,7 +326,7 @@ export async function bundleServerJavaScript({
const viewStyleKeys = views.map((view) => view.styleKey);
const viewCssBundles = await Promise.all(
viewStyleKeys.map((key) =>
io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key)),
io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key))
),
);
const styleList = Array.from(new Set(viewCssBundles));
@ -297,7 +345,7 @@ export async function bundleServerJavaScript({
return JSON.stringify(Object.fromEntries(neededScripts));
}
// Reference an index into `styleList`
return `${styleList.indexOf(viewCssBundles[i])}`;
return `${styleList.indexOf(UNWRAP(viewCssBundles[i]))}`;
});
io.writeFile(basename, text);
@ -322,3 +370,5 @@ import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";
import * as incr from "./incremental.ts";
import * as sg from "#sitegen";
import type { PageOrView } from "./generate.ts";
import type * as view from "#sitegen/view";

View file

@ -57,12 +57,14 @@ export function styleKey(
export async function bundleCssFiles(
io: Io,
{ cssImports, theme, dev }: {
cssImports: string[],
theme: Theme,
dev: boolean,
}
cssImports: string[];
theme: Theme;
dev: boolean;
},
) {
cssImports = await Promise.all(cssImports.map((file) => io.trackFile('src/' + file)));
cssImports = await Promise.all(
cssImports.map((file) => io.trackFile("src/" + file)),
);
const plugin = {
name: "clover css",
setup(b) {
@ -111,4 +113,5 @@ import * as esbuild from "esbuild";
import * as fs from "#sitegen/fs";
import * as hot from "./hot.ts";
import * as path from "node:path";
import { virtualFiles } from "./esbuild-support.ts";import type { Io } from "./incremental.ts";
import { virtualFiles } from "./esbuild-support.ts";
import type { Io } from "./incremental.ts";

View file

@ -9,4 +9,4 @@ globalThis.UNWRAP = (t, ...args) => {
globalThis.ASSERT = assert.ok;
import * as util from "node:util";
import * as assert from 'node:assert'
import * as assert from "node:assert";

View file

@ -51,4 +51,4 @@ declare global {
}
}
import * as render from "./render.ts";
import * as render from "#engine/render";

View file

@ -18,6 +18,7 @@ export const createTemplate = (
const r = render.current;
// Support using Marko outside of Clover SSR
if (!r) return renderer(props, n);
render.setCurrent(null);
const markoResult = renderFn.call(renderer, {
...props,
$global: { clover: r, cloverAsyncMarker },

View file

@ -1,5 +1,5 @@
import { test } from "node:test";
import * as render from "./render.ts";
import * as render from "#engine/render";
test("sanity", (t) => t.assert.equal(render.sync("gm <3").text, "gm &lt;3"));
test("simple tree", (t) =>

View file

@ -118,8 +118,11 @@ export function resolveNode(r: State, node: unknown): ResolvedNode {
if (!node && node !== 0) return ""; // falsy, non numeric
if (typeof node !== "object") {
if (node === true) return ""; // booleans are ignored
if (typeof node === "string") return escapeHtml(node);
if (typeof node === "string") return escapeHtmlContent(node);
if (typeof node === "number") return String(node); // no escaping ever
if (typeof node === "symbol" && node.toString() === kElement.toString()) {
throw new Error(`There are two instances of Clover SSR loaded!`);
}
throw new Error(`Cannot render ${inspect(node)} to HTML`);
}
if (node instanceof Promise) {
@ -217,12 +220,14 @@ function stringifyElement(element: ResolvedElement) {
let attr;
switch (prop) {
default:
attr = `${prop}=${quoteIfNeeded(escapeHtml(String(value)))}`;
attr = `${prop}=${quoteIfNeeded(escapeAttribute(String(value)))}`;
break;
case "className":
// Legacy React Compat
case "class":
attr = `class=${quoteIfNeeded(escapeHtml(clsx(value as ClsxInput)))}`;
attr = `class=${
quoteIfNeeded(escapeAttribute(clsx(value as ClsxInput)))
}`;
break;
case "htmlFor":
throw new Error("Do not use the `htmlFor` attribute. Use `for`");
@ -233,7 +238,7 @@ function stringifyElement(element: ResolvedElement) {
case "key":
continue;
}
if (needSpace) (out += " "), (needSpace = !attr.endsWith('"'));
if (needSpace) ((out += " "), (needSpace = !attr.endsWith('"')));
out += attr;
}
out += ">";
@ -254,14 +259,16 @@ export function stringifyStyleAttribute(style: Record<string, string>) {
let out = ``;
for (const styleName in style) {
if (out) out += ";";
out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${escapeHtml(
String(style[styleName]),
)}`;
out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${
escapeAttribute(
String(style[styleName]),
)
}`;
}
return "style=" + quoteIfNeeded(out);
}
export function quoteIfNeeded(text: string) {
if (text.includes(" ")) return '"' + text + '"';
if (text.match(/["/>]/)) return '"' + text + '"';
return text;
}
@ -303,6 +310,21 @@ export function clsx(mix: ClsxInput) {
return str;
}
export const escapeHtmlContent = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;");
// TODO: combine into one function which decides if an attribute needs quotes
// and escapes it correctly depending on the context.
const escapeAttribute = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;");
/** @deprecated */
export const escapeHtml = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;")

View file

@ -42,7 +42,7 @@ export function Suspense({ children, fallback }: SuspenseProps): render.Node {
r.asyncDone = () => {
const rejections = r.rejections;
if (rejections && rejections.length > 0) throw new Error("TODO");
state.pushChunk?.(name, (ip[0] = resolved));
state.pushChunk?.(name, ip[0] = resolved);
};
return render.raw(ip);
}
@ -99,4 +99,4 @@ export async function* renderStreaming<
return addonOutput as unknown as T;
}
import * as render from "./render.ts";
import * as render from "#engine/render";

View file

@ -1,7 +1,12 @@
type Awaitable<T> = T | Promise<T>;
export function virtualFiles(
map: Record<string, string | esbuild.OnLoadResult | (() => Awaitable<string | esbuild.OnLoadResult>)>,
map: Record<
string,
| string
| esbuild.OnLoadResult
| (() => Awaitable<string | esbuild.OnLoadResult>)
>,
) {
return {
name: "clover vfs",
@ -22,7 +27,7 @@ export function virtualFiles(
{ filter: /./, namespace: "vfs" },
async ({ path }) => {
let entry = map[path];
if (typeof entry === 'function') entry = await entry();
if (typeof entry === "function") entry = await entry();
return ({
resolveDir: ".",
loader: "ts",
@ -88,7 +93,6 @@ export function markoViaBuildCache(): esbuild.Plugin {
if (!fs.existsSync(file)) {
console.warn(`File does not exist: ${file}`);
}
console.log(markoCache.keys());
throw new Error("Marko file not in cache: " + file);
}
return ({
@ -106,7 +110,7 @@ export function isIgnoredSource(source: string) {
return source.includes("<define:") ||
source.startsWith("vfs:") ||
source.startsWith("dropped:") ||
source.includes("node_modules")
source.includes("node_modules");
}
import * as esbuild from "esbuild";
@ -114,4 +118,5 @@ import * as string from "#sitegen/string";
import * as path from "node:path";
import * as fs from "#sitegen/fs";
import * as incr from "./incremental.ts";
import * as hot from "./hot.ts";import { markoCache } from "./marko.ts";
import * as hot from "./hot.ts";
import { markoCache } from "./marko.ts";

View file

@ -7,15 +7,16 @@ const { toRel, toAbs } = incr;
const globalCssPath = toAbs("src/global.css");
export async function main() {
await incr.restore();
if (!process.argv.includes("-f")) await incr.restore();
await incr.compile(generate);
}
export async function generate() {
// -- read config and discover files --
const siteConfig = await incr.work(readManifest);
const { staticFiles, scripts, views, pages } =
await discoverAllFiles(siteConfig);
const { staticFiles, scripts, views, pages } = await discoverAllFiles(
siteConfig,
);
// TODO: make sure that `static` and `pages` do not overlap
@ -28,13 +29,21 @@ export async function generate() {
staticFiles.map((item) =>
incr.work(
async (io, { id, file }) =>
void (await io.writeAsset(id, await io.readFile(file))),
void (await io.writeAsset({
pathname: id,
buffer: await io.readFile(file),
})),
item,
),
)
),
);
const routes = await Promise.all([...builtViews, ...builtPages]);
const viewsAndDynPages: incr.Ref<PageOrView>[] = [
...builtViews,
...builtPages.filter((page) => UNWRAP(page.value).regenerate),
];
// -- page resources --
const scriptMap = incr.work(bundle.bundleClientJavaScript, {
clientRefs: routes.flatMap((x) => x.clientRefs),
@ -49,13 +58,16 @@ export async function generate() {
platform: "node",
styleMap,
scriptMap,
viewItems: views,
viewRefs: builtViews,
viewItems: viewsAndDynPages.map((ref) => {
const { id, file, type } = UNWRAP(ref.value);
return { id: type === "page" ? `page:${id}` : id, file };
}),
viewRefs: viewsAndDynPages,
});
// -- assemble page assets --
const pAssemblePages = builtPages.map((page) =>
assembleAndWritePage(page, styleMap, scriptMap),
assembleAndWritePage(page, styleMap, scriptMap)
);
await Promise.all([builtBackend, builtStaticFiles, ...pAssemblePages]);
@ -77,7 +89,7 @@ export async function discoverAllFiles(
return (
await Promise.all(
siteConfig.siteSections.map(({ root: sectionRoot }) =>
incr.work(scanSiteSection, toAbs(sectionRoot)),
incr.work(scanSiteSection, toAbs(sectionRoot))
),
)
).reduce((acc, next) => ({
@ -105,10 +117,9 @@ export async function scanSiteSection(io: Io, sectionRoot: string) {
let scripts: FileItem[] = [];
const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
const rootPrefix =
hot.projectSrc === sectionRoot
? ""
: path.relative(hot.projectSrc, sectionRoot) + "/";
const rootPrefix = hot.projectSrc === sectionRoot
? ""
: path.relative(hot.projectSrc, sectionRoot) + "/";
const kinds = [
{
dir: sectionPath("pages"),
@ -181,7 +192,8 @@ export async function preparePage(io: Io, item: sg.FileItem) {
meta: metadata,
theme: pageTheme,
layout,
} = await io.import<any>(item.file);
regenerate,
} = await io.import<sg.PageExports>(item.file);
if (!Page) throw new Error("Page is missing a 'default' export.");
if (!metadata) throw new Error("Page is missing 'meta' export with a title.");
@ -219,16 +231,20 @@ export async function preparePage(io: Io, item: sg.FileItem) {
"Page is missing 'meta.title'. " + "All pages need a title tag.",
);
}
const styleKey = css.styleKey(cssImports, theme);
return {
type: "page",
id: item.id,
file: item.file,
regenerate,
html: text,
meta: renderedMeta,
cssImports,
theme: theme ?? null,
styleKey,
clientRefs: Array.from(addon[sg.userData.key].scripts),
};
} as const;
}
export async function prepareView(io: Io, item: sg.FileItem) {
@ -243,13 +259,15 @@ export async function prepareView(io: Io, item: sg.FileItem) {
);
const styleKey = css.styleKey(cssImports, theme);
return {
file: path.relative(hot.projectRoot, item.file),
type: "view",
id: item.id,
file: item.file,
cssImports,
theme,
clientRefs: hot.getClientScriptRefs(item.file),
hasLayout: !!module.layout?.default,
styleKey,
};
} as const;
}
export type PreparedView = Awaited<ReturnType<typeof prepareView>>;
@ -274,28 +292,38 @@ export async function assembleAndWritePage(
scriptWork: incr.Ref<Record<string, string>>,
) {
const page = await pageWork;
return incr.work(async (io, { id, html, meta, styleKey, clientRefs }) => {
const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));
return incr.work(
async (io, { id, html, meta, styleKey, clientRefs, regenerate }) => {
const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));
const scriptIds = clientRefs.map(hot.getScriptId);
const scriptMap = await io.readWork(scriptWork);
const scripts = scriptIds
.map((ref) => UNWRAP(scriptMap[ref], `Missing script ${ref}`))
.map((x) => `{${x}}`)
.join("\n");
const scriptIds = clientRefs.map(hot.getScriptId);
const scriptMap = await io.readWork(scriptWork);
const scripts = scriptIds
.map((ref) => UNWRAP(scriptMap[ref], `Missing script ${ref}`))
.map((x) => `{${x}}`)
.join("\n");
const doc = wrapDocument({
body: html,
head: meta,
inlineCss,
scripts,
});
await io.writeAsset(id, doc, {
"Content-Type": "text/html",
});
}, page);
const buffer = sg.wrapDocument({
body: html,
head: meta,
inlineCss,
scripts,
});
await io.writeAsset({
pathname: id,
buffer,
headers: {
"Content-Type": "text/html",
},
regenerative: !!regenerate,
});
},
page,
);
}
export type PageOrView = PreparedPage | PreparedView;
import * as sg from "#sitegen";
import * as incr from "./incremental.ts";
import { Io } from "./incremental.ts";
@ -307,4 +335,3 @@ import * as fs from "#sitegen/fs";
import type { FileItem } from "#sitegen";
import * as path from "node:path";
import * as meta from "#sitegen/meta";
import { wrapDocument } from "./lib/view.ts";

View file

@ -78,10 +78,9 @@ Module.prototype._compile = function (
}
}
fileStats.set(filename, {
cssImportsRecursive:
cssImportsMaybe.length > 0
? Array.from(new Set(cssImportsMaybe))
: null,
cssImportsRecursive: cssImportsMaybe.length > 0
? Array.from(new Set(cssImportsMaybe))
: null,
imports,
lastModified: Math.floor(stat.mtimeMs),
});
@ -137,14 +136,13 @@ export function loadEsbuildCode(
src = code;
}
if (src.includes("import.meta")) {
src =
`
src = `
import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())};
import.meta.dirname = ${JSON.stringify(path.dirname(filepath))};
import.meta.filename = ${JSON.stringify(filepath)};
`
.trim()
.replace(/[\n\s]/g, "") + src;
.trim()
.replace(/[\n\s]/g, "") + src;
}
src = esbuild.transformSync(src, {
loader,

View file

@ -7,38 +7,38 @@ test("trivial case", async () => {
async function compilation() {
const first = incr.work({
label: "first compute",
async run (io) {
async run(io) {
await setTimeout(1000);
const contents = await io.readFile(file1.path);
return [contents, Math.random()] as const;
}
},
});
const second = incr.work({
label: "second compute",
wait: first,
async run (io) {
async run(io) {
await setTimeout(1000);
return io.readWork(first)[0].toUpperCase();
}
},
});
const third = incr.work({
label: "third compute",
wait: first,
async run (io) {
async run(io) {
await setTimeout(1000);
return io.readWork(first)[1] * 1000;
}
},
});
return incr.work({
label: "last compute",
wait: [second, third],
async run (io) {
async run(io) {
await setTimeout(1000);
return {
second: io.readWork(second),
third: io.readWork(third),
}
}
};
},
});
}
const { value: first } = await incr.compile(compilation);
@ -52,5 +52,5 @@ test("trivial case", async () => {
import * as incr from "./incremental2.ts";
import { beforeEach, test } from "node:test";
import { tmpFile } from "#sitegen/testing";import { setTimeout } from "node:timers/promises";
import { tmpFile } from "#sitegen/testing";
import { setTimeout } from "node:timers/promises";

View file

@ -2,8 +2,7 @@
// See `work()`, `compile()`, and `invalidate()` for details.
//
// All state is serializable to allow recovering state across sessions.
// This framework special-cases the asset map, but is otherwise
// agnostic of what it is a compiler for.
// This library special-cases the asset map, but is otherwise agnostic.
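//
// a sketch of typical usage (hypothetical job; `work` must be called
// inside a `compile` pass, and is keyed by callsite + input hash):
//
//   const result = await incr.compile(async () =>
//     incr.work(
//       async (io, file: string) =>
//         (await io.readFile(file)).split(/\s+/).length,
//       "src/index.md",
//     ),
//   );
//   // result.value is the word count; "src/index.md" joins result.watchFiles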
let running = false;
let jobs = 0;
let newKeys = 0;
@ -15,27 +14,25 @@ let writes = new Map<string, FileWrite>();
let assets = new Map<string, Asset>(); // keyed by hash
export interface Ref<T> {
/** This method is compatible with `await` syntax */
then(
onFulfilled: (value: T) => void,
onRejected: (error: unknown) => void,
): void;
key: string;
/** This method is compatible with `await` syntax */
then(resolve: (value: T) => void, reject: (error: unknown) => void): void;
get value(): T | null;
}
type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;
/**
* Declare and a unit of work. Return value is memoized and
* only rebuilt when inputs (declared via `Io`) change. Outputs
* are written at the end of a compilation (see `compile`).
* Declare a unit of work. Return value is memoized and only rebuilt when
* inputs change. Inputs are tracked via the `io` interface, as well as a hash
* of the `input` param and caller source code. Outputs are written at the end
* of a compilation (see `compile`).
*
* If the returned `Ref` is not awaited or read
* via io.readWork, the job is never started.
* Work items are lazy, only started when `Ref` is awaited or `io.readWork`ed.
*/
export function work<O>(job: Job<void, O>): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const source = JSON.stringify(util.getCallSites(2)[1]);
const source = JSON.stringify(UNWRAP(util.getCallSites(2)[1]));
const keySource = [source, util.inspect(input)].join(":");
const key = crypto.createHash("sha1").update(keySource).digest("base64url");
ASSERT(running);
@ -48,7 +45,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const prev = works.get(key) as Work<O> | null;
if (prev) {
return { key, then: (done) => done(prev.value) };
return { key, then: (done) => done(prev.value), value: prev.value };
}
async function perform() {
@ -59,13 +56,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const value = await job(io, input);
validateSerializable(value, "");
const { reads, writes } = io;
works.set(key, {
value,
affects: [],
reads,
writes,
debug: source,
});
works.set(key, { value, affects: [], reads, writes });
for (const add of reads.files) {
const { affects } = UNWRAP(files.get(add));
ASSERT(!affects.includes(key));
@ -85,7 +76,12 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
let cached: Promise<O>;
return {
key,
then: (fufill, reject) => void (cached ??= perform()).then(fufill, reject),
then(fulfill, reject) {
(cached ??= perform()).then(fulfill, reject);
},
get value() {
return (works.get(this.key)?.value as O) ?? null;
},
};
}
@ -97,11 +93,7 @@ export async function compile<T>(compiler: () => Promise<T>) {
const start = performance.now();
const timerSpinner = new Spinner({
text: () =>
`sitegen! [${
((performance.now() - start) / 1000).toFixed(
1,
)
}s]`,
`sitegen! [${((performance.now() - start) / 1000).toFixed(1)}s]`,
fps: 10,
});
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@ -114,8 +106,10 @@ export async function compile<T>(compiler: () => Promise<T>) {
return {
value,
watchFiles: new Set(files.keys()),
newOutputs: Array.from(seenWrites).filter(x => x.startsWith('f:')).map(x => x.slice(2)),
newAssets: !Array.from(seenWrites).some(x => x.startsWith('a:')),
newOutputs: Array.from(seenWrites)
.filter((x) => x.startsWith("f:"))
.map((x) => x.slice(2)),
newAssets: !Array.from(seenWrites).some((x) => x.startsWith("a:")),
};
} finally {
running = false;
@ -127,9 +121,10 @@ export async function compile<T>(compiler: () => Promise<T>) {
export async function flush(start: number) {
// Trim
const detachedFiles = new Set<string>;
const referencedAssets = new Set<string>;
for (const [k, { writes: { assets } }] of works) {
const detachedFiles = new Set<string>();
const referencedAssets = new Set<string>();
for (const [k, v] of works) {
const assets = v.writes.assets;
if (seenWorks.has(k)) {
for (const asset of assets.values()) referencedAssets.add(asset.hash);
continue;
@ -141,12 +136,9 @@ export async function flush(start: number) {
files.delete(k);
detachedFiles.add(k);
}
for (const k of assets.keys()) {
if (!referencedAssets.has(k))
assets.delete(k);
}
for (const k of assets.keys()) if (!referencedAssets.has(k)) assets.delete(k);
const p = [];
const p: Promise<void>[] = [];
// File writes
let dist = 0;
for (const [key, { buffer, size }] of writes) {
@ -155,10 +147,14 @@ export async function flush(start: number) {
}
// Asset map
{
const { json, blob } = getAssetManifest();
const { json, blob, dynamic, dts } = getAssetManifest();
const jsonString = Buffer.from(JSON.stringify(json));
p.push(fs.writeMkdir(".clover/o/static.json", jsonString));
p.push(fs.writeMkdir(".clover/o/static.blob", blob));
p.push(fs.writeMkdir(".clover/o/asset.json", jsonString));
p.push(fs.writeMkdir(".clover/o/asset.blob", blob));
p.push(fs.writeMkdir(".clover/ts/asset.d.ts", dts));
for (const [k, v] of dynamic) {
p.push(fs.writeMkdir(`.clover/o/dynamic/${k}`, v));
}
dist += blob.byteLength + jsonString.byteLength;
}
await Promise.all(p);
@ -171,9 +167,8 @@ export async function flush(start: number) {
console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
console.writeLine(` - ${assets.size} static assets`);
console.writeLine(
` - dist: ${formatSize(dist)}, incremental: ${
formatSize(serialized.byteLength)
}`,
` - dist: ${formatSize(dist)},` +
` incremental: ${formatSize(serialized.byteLength)}`,
);
}
@ -212,13 +207,14 @@ function deleteWork(key: string) {
affects.splice(affects.indexOf(key), 1);
}
for (const remove of affects) {
const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
const {
reads: { works: list },
} = UNWRAP(works.get(remove), remove);
ASSERT(list.has(key));
list.delete(key);
}
for (const file of w.files) {
if (UNWRAP(writes.get(file)).work === key)
writes.delete(file);
if (UNWRAP(writes.get(file)).work === key) writes.delete(file);
}
// Assets are temporarily kept, trimmed via manual GC after compilation.
@ -234,23 +230,32 @@ export function reset() {
}
export function serialize() {
const fileEntries = Array.from(files, ([k, v]) =>
[
k,
v.type,
v.type === 'f' ? v.lastModified : v.type === 'd' ? v.contentHash : null,
...v.affects,
] as const);
const workEntries = Array.from(works, ([k, v]) =>
[
k,
v.value,
Array.from(v.reads.files),
Array.from(v.reads.works),
Array.from(v.writes.files),
Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const),
v.affects,
] as const);
const fileEntries = Array.from(
files,
([k, v]) =>
[
k,
v.type,
v.type === "f" ? v.lastModified : v.type === "d" ? v.contentHash : null,
...v.affects,
] as const,
);
const workEntries = Array.from(
works,
([k, v]) =>
[
k,
v.value,
Array.from(v.reads.files),
Array.from(v.reads.works),
Array.from(v.writes.files),
Array.from(
v.writes.assets,
([k, { headers }]) => [k, headers] as const,
),
v.affects,
] as const,
);
const expectedFilesOnDisk = Array.from(
writes,
([k, { size, work }]) => [k, size, work] as const,
@ -280,7 +285,7 @@ async function deserialize(buffer: Buffer) {
if (type === "f") {
ASSERT(typeof content === "number");
files.set(k, { type, affects, lastModified: content });
} else if (type === 'd') {
} else if (type === "d") {
ASSERT(typeof content === "string");
files.set(k, { type, affects, contentHash: content, contents: [] });
} else {
@ -288,15 +293,8 @@ async function deserialize(buffer: Buffer) {
}
}
for (const entry of workEntries) {
const [
k,
value,
readFiles,
readWorks,
writeFiles,
writeAssets,
affects,
] = entry;
const [k, value, readFiles, readWorks, writeFiles, writeAssets, affects] =
entry;
works.set(k, {
value,
reads: {
@ -305,23 +303,30 @@ async function deserialize(buffer: Buffer) {
},
writes: {
files: new Set(writeFiles),
assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, {
hash: JSON.parse(UNWRAP(headers.etag)),
headers,
}])),
assets: new Map(
Array.from(writeAssets, ([k, headers]) => [
k,
{
hash: JSON.parse(UNWRAP(headers.etag)),
headers,
},
]),
),
},
affects,
});
}
const statFiles = await Promise.all(expectedFilesOnDisk
.map(([k, size, work]) =>
fs.stat(path.join(".clover/o", k))
const statFiles = await Promise.all(
expectedFilesOnDisk.map(([k, size, work]) =>
fs
.stat(path.join(".clover/o", k))
.catch((err) => {
if (err.code === "ENOENT") return null;
throw err;
})
.then((stat) => ({ k, size, work, stat }))
));
),
);
for (const { k, stat, work, size } of statFiles) {
if (stat?.size === size) {
writes.set(k, {
@ -337,19 +342,28 @@ async function deserialize(buffer: Buffer) {
assets.set(hash, { raw, gzip, zstd });
}
await Promise.all(Array.from(files, ([key, file]) => invalidateEntry(key, file)));
await Promise.all(
Array.from(files, ([key, file]) => invalidateEntry(key, file, false)),
);
}
export async function invalidate(filePath: string): Promise<boolean> {
export async function invalidate(
filePath: string,
unloadModule: boolean = true,
): Promise<boolean> {
const key = toRel(toAbs(filePath));
const file = UNWRAP(files.get(key), `Untracked file '${key}'`)
return invalidateEntry(key, file)
const file = UNWRAP(files.get(key), `Untracked file '${key}'`);
return invalidateEntry(key, file, unloadModule);
}
export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
export async function invalidateEntry(
key: string,
file: TrackedFile,
unloadModule: boolean,
): Promise<boolean> {
try {
if (file.type === "d") {
const contents = file.contents = await fs.readdir(key);
const contents = (file.contents = await fs.readdir(key));
contents.sort();
const contentHash = crypto
.createHash("sha1")
@ -359,48 +373,79 @@ export async function invalidateEntry(key: string, file: TrackedFile): Promise<b
file.contentHash = contentHash;
throw new Error();
}
} else if (file.type === 'f') {
const lastModified = await fs.stat(key)
.then(x => Math.floor(x.mtimeMs), () => 0);
} else if (file.type === "f") {
const lastModified = await fs.stat(key).then(
(x) => Math.floor(x.mtimeMs),
() => 0,
);
if (file.lastModified !== lastModified) {
file.lastModified = lastModified;
throw new Error();
}
} else {
file.type satisfies 'null';
file.type satisfies "null";
const stat = await fs.stat(key).catch(() => null);
if (stat) throw new Error();
if (stat) throw new Error();
}
return false;
} catch (e) {
forceInvalidate(file);
hot.unload(toAbs(key));
if (file.type === 'null') files.delete(key);
if (unloadModule) {
// TODO: handle when this triggers unloading of `generate.ts`
hot.unload(toAbs(key));
}
if (file.type === "null") files.delete(key);
return true;
}
}
export function getAssetManifest() {
const dynamic = new Map<string, Buffer>();
const writer = new BufferWriter();
const asset = Object.fromEntries(
const assetMap = Object.fromEntries(
Array.from(works, (work) => work[1].writes.assets)
.filter((map) => map.size > 0)
.flatMap((map) =>
Array.from(map, ([key, { hash, headers }]) => {
Array.from(map, ([key, { hash, headers, regenerative }]) => {
const { raw, gzip, zstd } = UNWRAP(
assets.get(hash),
`Asset ${key} (${hash})`,
);
return [key, {
raw: writer.write(raw, "raw:" + hash),
gzip: writer.write(gzip, "gzip:" + hash),
zstd: writer.write(zstd, "zstd:" + hash),
headers,
}] as const;
if (regenerative) {
const id = crypto
.createHash("sha1")
.update(key)
.digest("hex")
.slice(0, 16); /* TODO */
dynamic.set(
id,
manifest.packDynamicBuffer(raw, gzip, zstd, headers),
);
return [key, { type: 1, id }] as const;
}
return [
key,
{
type: 0,
raw: writer.write(raw, "raw:" + hash),
gzip: writer.write(gzip, "gzip:" + hash),
zstd: writer.write(zstd, "zstd:" + hash),
headers,
},
] as const;
})
),
) satisfies BuiltAssetMap;
return { json: asset, blob: writer.get() };
) satisfies manifest.Manifest;
return {
json: assetMap,
blob: writer.get(),
dynamic,
dts: "export type AssetKey = " +
Object.keys(assetMap)
.map((key) => JSON.stringify(key))
.join(" | ") +
"\n",
};
}
/* Input/Output with automatic tracking.
@ -444,9 +489,9 @@ export class Io {
}
async readDir(dir: string) {
const { key, resolved } = this.#trackFs(dir);
const existing = files.get(key);
const existing = files.get(key);
try {
if (existing?.type === 'd') return existing.contents;
if (existing?.type === "d") return existing.contents;
const contents = await fs.readdir(resolved);
contents.sort();
const contentHash = crypto
@ -512,26 +557,27 @@ export class Io {
}
}
}
async writeAsset(
pathname: string,
blob: string | Buffer,
headersOption?: HeadersInit,
) {
ASSERT(pathname.startsWith("/"));
ASSERT(!seenWrites.has("a:" + pathname));
const buffer = typeof blob === "string" ? Buffer.from(blob) : blob;
const headers = new Headers(headersOption ?? {});
async writeAsset(asset: {
pathname: string;
buffer: string | Buffer;
regenerative?: boolean;
headers?: HeadersInit;
}) {
ASSERT(asset.pathname.startsWith("/"));
ASSERT(!seenWrites.has("a:" + asset.pathname));
const buffer = typeof asset.buffer === "string"
? Buffer.from(asset.buffer)
: asset.buffer;
const headers = new Headers(asset.headers ?? {});
const hash = crypto.createHash("sha1").update(buffer).digest("hex");
if (!headers.has("Content-Type")) {
headers.set("Content-Type", mime.contentTypeFor(pathname));
headers.set("Content-Type", mime.contentTypeFor(asset.pathname));
}
headers.set("ETag", JSON.stringify(hash));
this.writes.assets.set(pathname, {
headers.set("etag", JSON.stringify(hash));
this.writes.assets.set(asset.pathname, {
hash,
// @ts-expect-error TODO
headers: Object.fromEntries(headers),
regenerative: !!asset.regenerative,
});
if (!assets.has(hash)) {
jobs += 1;
@ -593,7 +639,10 @@ export function validateSerializable(value: unknown, key: string) {
} else if (value && typeof value === "object") {
if (Array.isArray(value)) {
value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
} else if (Object.getPrototypeOf(value) === Object.prototype || Buffer.isBuffer(value)) {
} else if (
Object.getPrototypeOf(value) === Object.prototype ||
Buffer.isBuffer(value)
) {
Object.entries(value).forEach(([k, v]) =>
validateSerializable(v, `${key}.${k}`)
);
@ -631,53 +680,48 @@ interface FileWrite {
}
interface Writes {
files: Set<string>;
assets: Map<string, {
hash: string;
headers: Record<string, string>;
}>;
assets: Map<string, AssetWrite>;
}
interface Asset {
raw: Buffer;
gzip: Buffer;
zstd: Buffer;
}
interface AssetWrite {
hash: string;
headers: Record<string, string>;
regenerative: boolean;
}
interface Work<T = unknown> {
debug?: string;
value: T;
reads: Reads;
writes: Writes;
affects: string[];
}
type TrackedFile =
& {
affects: string[];
}
& { affects: string[] }
& (
| { type: "f"; lastModified: number }
| { type: "d"; contentHash: string; contents: string[] }
| { type: "null"; }
| { type: "null" }
);
export interface BuiltAssetMap {
[route: string]: BuiltAsset;
}
export interface BuiltAsset {
raw: BufferView;
gzip: BufferView;
zstd: BufferView;
headers: Record<string, string>;
}
const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress);
import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";
import * as manifest from "#sitegen/assets";
import * as path from "node:path";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as mime from "#sitegen/mime";
import * as zlib from "node:zlib";
import * as console from "@paperclover/console";
import { Spinner } from "@paperclover/console/Spinner";
import { formatSize } from "@/file-viewer/format.ts";
import * as msgpackr from "msgpackr";

View file

@ -1,73 +1,114 @@
interface Loaded {
map: BuiltAssetMap;
buf: Buffer;
}
let assets: Loaded | null = null;
// Static and dynamic assets are built alongside the server binary.
// This module implements decoding and serving of the asset blobs,
// but also implements patching of dynamic assets. The `Manifest`
// is generated by `incremental.ts`
const root = import.meta.dirname;
let current: Loaded | null = null;
export type StaticPageId = string;
// TODO: rename all these types
type DynamicId = string;
export type { Key };
export type Manifest =
& {
[K in Key]: StaticAsset | DynamicAsset;
}
& {
[string: string]: StaticAsset | DynamicAsset;
};
export interface StaticAsset extends AssetBase {
type: 0;
}
interface AssetBase {
headers: Record<string, string> & { etag: string };
raw: BufferView;
gzip: BufferView;
zstd: BufferView;
}
export interface DynamicAsset {
type: 1;
id: DynamicId;
}
interface Loaded {
map: Manifest;
static: Buffer;
dynamic: Map<DynamicId, DynamicEntry>;
}
export interface DynamicEntry extends AssetBase {
buffer: Buffer;
}
export async function reload() {
const [map, buf] = await Promise.all([
fs.readFile(path.join(import.meta.dirname, "static.json"), "utf8"),
fs.readFile(path.join(import.meta.dirname, "static.blob")),
]);
return (assets = { map: JSON.parse(map), buf });
}
export function reloadSync() {
const map = fs.readFileSync(
path.join(import.meta.dirname, "static.json"),
"utf8",
const map = await fs.readJson<Manifest>(path.join(root, "asset.json"));
const statics = await fs.readFile(path.join(root, "asset.blob"));
const dynamic = new Map(
await Promise.all(
Object.entries(map)
.filter((entry): entry is [string, DynamicAsset] => entry[1].type === 1)
.map(async ([k, v]) =>
[
v.id,
await fs.readFile(path.join(root, "dynamic", v.id))
.then(loadRegenerative),
] as const
),
),
);
const buf = fs.readFileSync(path.join(import.meta.dirname, "static.blob"));
return (assets = { map: JSON.parse(map), buf });
return (current = { map, static: statics, dynamic });
}
export async function middleware(c: Context, next: Next) {
if (!assets) await reload();
const asset = assets!.map[c.req.path];
if (!current) current = await reload();
const asset = current.map[c.req.path];
if (asset) return assetInner(c, asset, 200);
return next();
}
export async function notFound(c: Context) {
if (!assets) await reload();
if (!current) current = await reload();
let pathname = c.req.path;
do {
const asset = assets!.map[pathname + "/404"];
const asset = current.map[pathname + "/404"];
if (asset) return assetInner(c, asset, 404);
pathname = pathname.slice(0, pathname.lastIndexOf("/"));
} while (pathname);
const asset = assets!.map["/404"];
const asset = current.map["/404"];
if (asset) return assetInner(c, asset, 404);
return c.text("the 'Not Found' page was not found", 404);
}
export async function serveAsset(
c: Context,
id: StaticPageId,
status: StatusCode,
) {
return assetInner(c, (assets ?? (await reload())).map[id], status);
export async function serveAsset(c: Context, id: Key, status: StatusCode) {
return assetInner(c, (current ?? (await reload())).map[id], status);
}
/** @deprecated */
export function hasAsset(id: string) {
return (assets ?? reloadSync()).map[id] !== undefined;
return UNWRAP(current).map[id] !== undefined;
}
export function etagMatches(etag: string, ifNoneMatch: string) {
return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1;
}
function subarrayAsset([start, end]: BufferView) {
return assets!.buf.subarray(start, end);
function assetInner(c: Context, asset: Manifest[Key], status: StatusCode) {
ASSERT(current);
if (asset.type === 0) {
return respondWithBufferAndViews(c, current.static, asset, status);
} else {
const entry = UNWRAP(current.dynamic.get(asset.id));
return respondWithBufferAndViews(c, entry.buffer, entry, status);
}
}
function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
const ifnonematch = c.req.header("If-None-Match");
if (ifnonematch) {
const etag = asset.headers.ETag;
if (etagMatches(etag, ifnonematch)) {
function respondWithBufferAndViews(
c: Context,
buffer: Buffer,
asset: AssetBase,
status: StatusCode,
) {
const ifNoneMatch = c.req.header("If-None-Match");
if (ifNoneMatch) {
const etag = asset.headers.etag;
if (etagMatches(etag, ifNoneMatch)) {
return (c.res = new Response(null, {
status: 304,
statusText: "Not Modified",
@ -80,24 +121,103 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
const acceptEncoding = c.req.header("Accept-Encoding") ?? "";
let body;
let headers = asset.headers;
if (acceptEncoding.includes("zstd") && asset.zstd) {
body = subarrayAsset(asset.zstd);
if (acceptEncoding.includes("zstd")) {
body = buffer.subarray(...asset.zstd);
headers = {
...asset.headers,
"Content-Encoding": "zstd",
};
} else if (acceptEncoding.includes("gzip") && asset.gzip) {
body = subarrayAsset(asset.gzip);
} else if (acceptEncoding.includes("gzip")) {
body = buffer.subarray(...asset.gzip);
headers = {
...asset.headers,
"Content-Encoding": "gzip",
};
} else {
body = subarrayAsset(asset.raw);
body = buffer.subarray(...asset.raw);
}
return (c.res = new Response(body, { headers, status }));
}
export function packDynamicBuffer(
raw: Buffer,
gzip: Buffer,
zstd: Buffer,
headers: Record<string, string>,
) {
const headersBuffer = Buffer.from(
Object.entries(headers)
.map((entry) => entry.join(":"))
.join("\n"),
"utf-8",
);
const header = new Uint32Array(3);
header[0] = headersBuffer.byteLength + header.byteLength;
header[1] = header[0] + raw.byteLength;
header[2] = header[1] + gzip.byteLength;
return Buffer.concat([
Buffer.from(header.buffer),
headersBuffer,
raw,
gzip,
zstd,
]);
}
function loadRegenerative(buffer: Buffer): DynamicEntry {
const headersEnd = buffer.readUInt32LE(0);
const headers = Object.fromEntries(
buffer
.subarray(3 * 4, headersEnd)
.toString("utf-8")
.split("\n")
.map((line) => {
const i = line.indexOf(":");
return [line.slice(0, i), line.slice(i + 1)];
}),
);
const raw = buffer.readUInt32LE(4);
const gzip = buffer.readUInt32LE(8);
const hasEtag = (v: object): v is typeof v & { etag: string } =>
"etag" in v && typeof v.etag === "string";
ASSERT(hasEtag(headers));
return {
headers,
buffer,
raw: [headersEnd, raw],
gzip: [raw, gzip],
zstd: [gzip, buffer.byteLength],
};
}
const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress);
export async function overwriteDynamic(
key: Key,
value: string | Buffer,
headers: Record<string, string>,
) {
if (!current) current = await reload();
const asset = UNWRAP(current.map[key]);
ASSERT(asset.type === 1);
UNWRAP(current.dynamic.has(asset.id));
const buffer = Buffer.from(value);
const etag = JSON.stringify(
crypto.createHash("sha1").update(buffer).digest("hex"),
);
const [gzipBuffer, zstdBuffer] = await Promise.all([
gzip(buffer),
zstdCompress(buffer),
]);
const packed = packDynamicBuffer(buffer, gzipBuffer, zstdBuffer, {
...headers,
etag,
});
current.dynamic.set(asset.id, loadRegenerative(packed));
await fs.writeFile(path.join(root, "dynamic", asset.id), packed);
}
process.on("message", (msg: any) => {
if (msg?.type === "clover.assets.reload") reload();
});
@ -105,6 +225,10 @@ process.on("message", (msg: any) => {
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status";
import type { BuiltAsset, BuiltAssetMap, BufferView } from "../incremental.ts";
import type { BufferView } from "../incremental.ts";
import { Buffer } from "node:buffer";
import * as path from "node:path";
import type { AssetKey as Key } from "../../.clover/ts/asset.d.ts";
import * as crypto from "node:crypto";
import * as zlib from "node:zlib";
import * as util from "node:util";
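
The dynamic entry packed above is three little-endian u32 offsets (end
of the header block, end of the raw body, end of the gzip body),
followed by "key:value" header lines and the raw/gzip/zstd bodies. A
roundtrip sketch (buffer and hash names hypothetical):

const packed = packDynamicBuffer(raw, gzipBuf, zstdBuf, {
  "content-type": "text/html",
  etag: JSON.stringify(hash), // loadRegenerative asserts an etag exists
});
const entry = loadRegenerative(packed);
ASSERT(entry.buffer.subarray(...entry.raw).equals(raw));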

View file

@ -51,8 +51,8 @@ export class Queue<T, R> {
let n = 0;
for (const item of active) {
let itemText = "- " + item.format(now);
text +=
`\n` + itemText.slice(0, Math.max(0, process.stdout.columns - 1));
text += `\n` +
itemText.slice(0, Math.max(0, process.stdout.columns - 1));
if (n > 10) {
text += `\n ... + ${active.length - n} more`;
break;

View file

@ -4,6 +4,7 @@ export {
createReadStream,
createWriteStream,
existsSync,
type FileHandle,
open,
readdir,
readdirSync,
@ -15,7 +16,6 @@ export {
statSync,
writeFile,
writeFileSync,
type FileHandle,
};
export function mkdir(dir: string) {
@ -98,6 +98,7 @@ import {
writeFileSync,
} from "node:fs";
import {
type FileHandle,
mkdir as nodeMkdir,
open,
readdir,
@ -106,6 +107,5 @@ import {
rmdir,
stat,
writeFile,
type FileHandle,
} from "node:fs/promises";
export { Stats } from "node:fs";

View file

@ -1,9 +1,11 @@
/* Impementation of CommonMark specification for markdown with support
/* Implementation of [CommonMark] specification for markdown with support
* for custom syntax extensions via the parser options. Instead of
* returning an AST that has a second conversion pass to JSX, the
* returned value of 'parse' is 'engine.Node' which can be stringified
* via clover's SSR engine. This way, generation optimizations, async
* via Clover's SSR engine. This way, generation optimizations, async
* components, and other features are gained for free here.
*
* [CommonMark]: https://spec.commonmark.org/0.31.2/
*/
function parse(src: string, options: Partial<ParseOpts> = {}) {}
@ -16,6 +18,9 @@ export function Markdown({
return parse(src, options);
}
// TODO: This implementation is flawed because it is impossible to sanely handle
// emphasis and strong emphasis with all of their edge cases. Instead of building
// these on the extension interface, they should be special-cased.
function parseInline(src: string, options: Partial<InlineOpts> = {}) {
const { rules = inlineRules, links = new Map() } = options;
const opts: InlineOpts = { rules, links };
@ -110,12 +115,11 @@ export const inlineRules: Record<string, InlineRule> = {
} else if (afterText[0] === "[") {
const splitTarget = splitFirst(afterText.slice(1), /]/);
if (!splitTarget) return null;
const name =
splitTarget.first.trim().length === 0
? // Collapsed reference link
textSrc.trim()
: // Full Reference Link
splitTarget.first.trim();
const name = splitTarget.first.trim().length === 0
// Collapsed reference link
? textSrc.trim()
// Full Reference Link
: splitTarget.first.trim();
const target = opts.links.get(name);
if (!target) return null;
({ href, title } = target);

View file

@ -1,6 +1,25 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string;
export interface PageExports extends ViewExports {
regenerate?: PageRegenerateOptions;
}
export interface ViewExports {
default: render.Component;
meta: meta.Meta | ((props: { ssr: true }) => Promise<meta.Meta> | meta.Meta);
theme?: css.Theme;
layout?: Layout;
}
export interface Layout {
default: render.Component;
theme?: css.Theme;
// TODO: nested layout
}
export interface PageRegenerateOptions {
tags?: string[];
seconds?: number;
debounce?: number;
}
/**
* A filesystem object associated with some ID,
* such as a page's route to its source file.
@ -30,4 +49,24 @@ export function addScript(id: ScriptId | { value: ScriptId }) {
userData.get().scripts.add(typeof id === "string" ? id : id.value);
}
export function wrapDocument({
body,
head,
inlineCss,
scripts,
}: {
head: string;
body: string;
inlineCss: string;
scripts: string;
}) {
return `<!doctype html><html lang=en><head>${head}${
inlineCss ? `<style>${inlineCss}</style>` : ""
}</head><body>${body}${
scripts ? `<script>${scripts}</script>` : ""
}</body></html>`;
}
import * as render from "#engine/render";
import type * as meta from "./meta.ts";
import type * as css from "../css.ts";

View file

@ -10,7 +10,9 @@ export function getDb(file: string) {
if (db) return db;
const fileWithExt = file.includes(".") ? file : file + ".sqlite";
db = new WrappedDatabase(
new DatabaseSync(path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt)),
new DatabaseSync(
path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt),
),
);
map.set(file, db);
return db;

View file

@ -1,5 +1,9 @@
export function tmpFile(basename: string) {
const file = path.join(import.meta.dirname, '../../.clover/testing', basename);
const file = path.join(
import.meta.dirname,
"../../.clover/testing",
basename,
);
return {
path: file,
read: fs.readFile.bind(fs, file),
@ -7,5 +11,5 @@ export function tmpFile(basename: string) {
};
}
import * as path from 'node:path';
import * as fs from './fs.ts';
import * as path from "node:path";
import * as fs from "./fs.ts";

View file

@ -1,38 +1,64 @@
export interface View {
// The "view" system allows rendering dynamic pages within backends.
// This is done by scanning all `views` dirs, bundling their client
// resources, and then providing `serve` which renders a page.
//
// This system also implements page regeneration.
let codegen: Codegen;
try {
codegen = require("$views");
} catch {
throw new Error("Can only import '#sitegen/view' in backends.");
}
// Generated in `bundle.ts`
export interface Codegen {
views: { [K in Key]: View<PropsFromModule<ViewMap[K]>> };
scripts: Record<string, string>;
regenTtls: Ttl[];
regenTags: Record<RegenKey, Key[]>;
}
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
export interface View<Props extends Record<string, unknown>> {
component: render.Component;
meta:
| meta.Meta
| ((props: { context?: hono.Context }) => Promise<meta.Meta> | meta.Meta);
meta: meta.Meta | ((props: Props) => Promise<meta.Meta> | meta.Meta);
layout?: render.Component;
inlineCss: string;
scripts: Record<string, string>;
}
export interface Ttl {
seconds: number;
key: Key;
}
export type Key = keyof ViewMap;
let views: Record<string, View> = null!;
let scripts: Record<string, string> = null!;
export async function renderView(
export async function serve<K extends Key>(
context: hono.Context,
id: string,
props: Record<string, unknown>,
id: K,
props: PropsFromModule<ViewMap[K]>,
) {
return context.html(await renderViewToString(id, { context, ...props }));
return context.html(await renderToString(id, { context, ...props }));
}
export async function renderViewToString(
id: string,
props: Record<string, unknown>,
type PropsFromModule<M extends any> = M extends {
default: (props: infer T) => render.Node;
} ? T
: never;
export async function renderToString<K extends Key>(
id: K,
props: PropsFromModule<ViewMap[K]>,
) {
views ?? ({ views, scripts } = require("$views"));
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
const {
component,
inlineCss,
layout,
meta: metadata,
}: View = UNWRAP(views[id], `Missing view ${id}`);
}: View<PropsFromModule<ViewMap[K]>> = UNWRAP(
codegen.views[id],
`Missing view ${id}`,
);
// -- metadata --
const renderedMetaPromise = Promise.resolve(
@ -48,48 +74,48 @@ export async function renderViewToString(
} = await render.async(page, { [sg.userData.key]: sg.initRender() });
// -- join document and send --
return wrapDocument({
return sg.wrapDocument({
body,
head: await renderedMetaPromise,
inlineCss,
scripts: joinScripts(
Array.from(sitegen.scripts, (id) =>
UNWRAP(scripts[id], `Missing script ${id}`),
Array.from(
sitegen!.scripts,
(id) => UNWRAP(codegen.scripts[id], `Missing script ${id}`),
),
),
});
}
export function provideViewData(v: typeof views, s: typeof scripts) {
(views = v), (scripts = s);
export function regenerate(tag: RegenKey) {
for (const view of codegen.regenTags[tag]) {
const key = view.slice("page:".length);
renderToString(view, {})
.then((result) => {
console.info(`regenerate ${key}`);
asset.overwriteDynamic(key as asset.Key, result, {
"content-type": "text/html",
});
})
.catch((e) => {
console.error(`Failed regenerating ${view} from tag ${tag}`, e);
});
}
}
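// usage sketch: after a backend mutates q+a data it fires the tag, and every
// page generated with `export const regenerate = { tags: ["q+a"] }` is
// re-rendered, its asset overwritten in place:
//   view.regenerate("q+a");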
export function joinScripts(scriptSources: string[]) {
function joinScripts(scriptSources: string[]) {
const { length } = scriptSources;
if (length === 0) return "";
if (length === 1) return scriptSources[0];
if (length === 1) return UNWRAP(scriptSources[0]);
return scriptSources.map((source) => `{${source}}`).join(";");
}
export function wrapDocument({
body,
head,
inlineCss,
scripts,
}: {
head: string;
body: string;
inlineCss: string;
scripts: string;
}) {
return `<!doctype html><html lang=en><head>${head}${
inlineCss ? `<style>${inlineCss}</style>` : ""
}</head><body>${body}${
scripts ? `<script>${scripts}</script>` : ""
}</body></html>`;
}
import * as meta from "./meta.ts";
import type * as hono from "#hono";
import * as render from "#engine/render";
import * as sg from "./sitegen.ts";
import * as asset from "./assets.ts";
import type {
RegenKey,
RegisteredViews as ViewMap,
} from "../../.clover/ts/view.d.ts";

View file

@ -14,18 +14,19 @@ export function loadMarko(module: NodeJS.Module, filepath: string) {
// bare client import statements to its own usage.
const scannedClientRefs = new Set<string>();
if (src.match(/^\s*client\s+import\s+["']/m)) {
src =
src.replace(
/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
(_, src) => {
const ref = JSON.parse(`"${src.slice(1, -1)}"`);
const resolved = hot.resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved);
return `<CloverScriptInclude=${JSON.stringify(
src = src.replace(
/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
(_, src) => {
const ref = JSON.parse(`"${src.slice(1, -1)}"`);
const resolved = hot.resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved);
return `<CloverScriptInclude=${
JSON.stringify(
hot.getScriptId(resolved),
)} />`;
},
) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
)
} />`;
},
) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
}
src = marko.compileSync(src, filepath).code;

View file

@ -11,7 +11,7 @@ let watch: Watch;
export async function main() {
// Catch up state by running a main build.
await incr.restore();
if (!process.argv.includes("-f")) await incr.restore();
watch = new Watch(rebuild);
rebuild([]);
}
@ -36,15 +36,16 @@ function onSubprocessClose(code: number | null, signal: string | null) {
}
async function rebuild(files: string[]) {
const hasInvalidated = files.length === 0
|| (await Promise.all(files.map(incr.invalidate))).some(Boolean);
const hasInvalidated = files.length === 0 ||
(await Promise.all(files.map((file) => incr.invalidate(file))))
.some(Boolean);
if (!hasInvalidated) return;
incr.compile(generate.generate).then(({
watchFiles,
newOutputs,
newAssets
newAssets,
}) => {
const removeWatch = [...watch.files].filter(x => !watchFiles.has(x))
const removeWatch = [...watch.files].filter((x) => !watchFiles.has(x));
for (const file of removeWatch) watch.remove(file);
watch.add(...watchFiles);
// Restart the server if it was changed or not running.
@ -60,8 +61,8 @@ async function rebuild(files: string[]) {
function statusLine() {
console.info(
`Watching ${watch.files.size} files `
+ `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`,
`Watching ${watch.files.size} files ` +
`\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`,
);
}
@ -142,7 +143,7 @@ class Watch {
#getFiles(absPath: string, event: fs.WatchEventType) {
const files = [];
if (this.files.has(absPath)) files.push(absPath);
if (event === 'rename') {
if (event === "rename") {
const dir = path.dirname(absPath);
if (this.files.has(dir)) files.push(dir);
}
@ -153,7 +154,7 @@ class Watch {
if (!subPath) return;
const files = this.#getFiles(path.join(root, subPath), event);
if (files.length === 0) return;
for(const file of files) this.stale.add(file);
for (const file of files) this.stale.add(file);
const { debounce } = this;
if (debounce !== null) clearTimeout(debounce);
this.debounce = setTimeout(() => {

View file

@ -1,6 +1,6 @@
// This is the main file for the backend
// This is the main file for paperclover.net's server.
const app = new Hono();
const logHttp = scoped("http", { color: "magenta" });
const logHttp = console.scoped("http", { color: "magenta" });
// Middleware
app.use(trimTrailingSlash());
@ -38,4 +38,4 @@ import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts";
import { scoped } from "@paperclover/console";
import * as console from "@paperclover/console";

View file

@ -1,10 +0,0 @@
export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language",
desc: "...todo...",
date: "2025-07-08",
draft: true,
};
export const meta = formatBlogMeta(blob);
export * as layout from "@/blog/layout.tsx";

View file

@ -0,0 +1,87 @@
- imports at the bottom
- order your file by importance.
- 'G' to jump to imports, etc
- prefer namespace imports
- easier to type and refactor. easier to read.
- large files are okay
- all files are their own library
- split files up by making components modular, not by "oh it's too big"
- engine/render.ts is a standalone library; in order to split JSX, Suspense,
and Marko out, the main file was made modular.
- lowercase
- name objects ultra-concisely
- filenames are often one word describing what they contain
- avoid useless descriptors like "utils", "helpers", and "data"
- examples
- async.ts contains all the async library functions.
- watch.ts contains the file watcher and watch-reload mode.
- render.*, Io
- be ultra-concise in comments
- no "discarded" variables, embrace `void x`
- makes code more readable
- note how i want to write a lint for this
- note the one proposal i want about void
- push the ts inference engine (as const, ReturnType, etc)
- reduces how much you repeat yourself, making it easier to refactor things
- use the code as the source of truth
- push the ts inference engine (generics)
- do not implement crazy things with the TS engine; instead use generic input
types, then use regular control flow to narrow and transform the return type.
the source of truth is your code.
- UNWRAP, ASSERT utility globals are amazing (see the sketch after this list)
- ban postfix '!'
- stripped for production frontend builds
- destructure often
- use the one example from work lol
- package.json "imports" are amazing
- remapping
- implementation switching
- testing
- embrace the web and node.js APIs
- sitegen relies on so many node features that bun and deno fail to run it.
- overlay modules are great
- avoid dependencies
- once you build your own mini standard library you win
- talk about regrets with mdx
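a minimal sketch of what these globals could look like (my assumption of the
shape; real signatures may differ):
```ts
// sketch: turn nullable lookups into hard failures with a message
declare global {
  function UNWRAP<T>(value: T | null | undefined, message?: string): T;
  function ASSERT(condition: unknown, message?: string): asserts condition;
}
function unwrapImpl<T>(value: T | null | undefined, message?: string): T {
  if (value == null) throw new Error(message ?? "UNWRAP(null)");
  return value;
}
function assertImpl(condition: unknown, message?: string): asserts condition {
  if (!condition) throw new Error(message ?? "assertion failed");
}
globalThis.UNWRAP = unwrapImpl;
globalThis.ASSERT = assertImpl;
export {};
```
usage reads like `UNWRAP(codegen.views[id], message)`, which pairs well with
noUncheckedIndexedAccess.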
## imports at the bottom
Here is an abridged version of my website's `backend.ts`. When reading it from
top to bottom it is immediately obvious that it is a Hono web server.
```ts
// This is the main file for paperclover.net's server.
const app = new Hono();
const logHttp = console.scoped("http", { color: "magenta" });
// Middleware
app.use(...);
...
// Backends
app.route("", require("./q+a/backend.ts").app);
...
export default app;
...
import { type Context, Hono, type Next } from "#hono";
import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts";
import * as console from "@paperclover/console";
```
Since `import`/`export` statements are hoisted like `var` and `function`, the
position of these statements within the file does not matter; the imported
modules are loaded before this file starts executing either way. With this,
I've found it nicer to sort the file by _importance_ rather than by arbitrary
rules dictated by how C-style `#include`s worked.
Start with a documentation comment, then the most important
functions/variables/types, descending in importance from there. Imports are not
really important to read since you very quickly get to know where common
namespaces come from. And since they're at the bottom, you can just press `G`
in Vim or `CMD+Down` on the Mac to scroll to the end of the file.

View file

@ -1,17 +1,20 @@
export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language",
desc: "...todo...",
date: "2025-06-13",
created: "2025-06-13",
draft: true,
};
export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx";
I've been recently playing around [Marko][1], and after adding limited support
for it in my website generator, [sitegen][2], I instantly fell in love with how
I've been recently playing around with [Marko], and after adding limited
support for it in my website generator, [sitegen], I instantly fell in love
with how
minimalistic it is in comparison to JSX, Astro components, and Svelte.
## Introduction
[Marko]: https://next.markojs.com
[sitegen]: https://paperclover.dev/clo/sitegen
## Introduction to Marko
If JSX was taking HTML and shoving its syntax into JavaScript, Marko is shoving
JavaScript into HTML. Attributes are JavaScript expressions.
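For example (a sketch; `question` here is a hypothetical object in scope):
```marko
<!-- attribute values are plain JavaScript expressions -->
<img src=question.imageUrl alt=`question ${question.id}`>
<a href=`/q+a/${question.id}`>permalink</a>
```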
@ -168,8 +171,8 @@ just like a regular function call, with '('.
<Something(item, index) />
```
In fact, attributes can just be sugar over this syntax; _this technically isn't
true but it's close enough for the example_
In fact, attributes can just be sugar over this syntax. (This technically isn't
true, but it's close enough for the example.)
```
<SpecialButton type="submit" class="red" />
@ -237,15 +240,58 @@ used it. A brief example of it, modified from their documentation.
</form>
```
## Usage on `paperclover.net`
<SectionHeader updated="2025-08-11">Usage on `paperclover.net`</SectionHeader>
TODO: document a lot of feedback, how i embedded Marko
Using Marko for HTML generation is quite easy. `.marko` files can be compiled
into `.js` using the `@marko/compiler` library.
My website uses statically generated HTML. That is why I have not needed to use
reactive variables. My generator doesn't even try compiling components
client-side.
```ts
const src = fs.readFileSync("page.marko", "utf8");
const compile = marko.compileSync(src, "page.marko");
fs.writeFileSync("page.js", compile.code);
const page = require("./page.js");
console.info(page);
import * as fs from "node:fs";
import * as marko from "@marko/compiler";
```
To get client-side JavaScript, an option can be passed to the Marko compiler to
generate client-side code instead. While reactivity is a big selling point of
Marko, I do not use any of their client-side features, deferring instead to
manually-written frontend scripts. My website has been statically generated for
years, and for content-focused websites like mine, that is the correct way to
do things.
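For illustration, here is a sketch of what that would look like (assuming the
compiler's `output` option; I don't actually use this path):
```ts
const src = fs.readFileSync("page.marko", "utf8");
const { code } = marko.compileSync(src, "page.marko", { output: "dom" });
fs.writeFileSync("page.client.js", code);
import * as fs from "node:fs";
import * as marko from "@marko/compiler";
```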
Since I have a custom HTML generation library (built on JSX and some React-like
patterns), I have written a simple integration for it to utilize Marko
components. It works by replacing the compiler's generated import of
`marko/html` with my own module, which lets me overwrite functions like
`createTemplate` (to change the signature of a component), `dynamicTag` (to
allow Marko to render non-Marko components), and `fork` (to enable async
integration with the rendering framework). On top of this, a Node.js loader
hook allows importing these files directly.
```tsx
function Page() {
const q = Question.getByDate(new Date("2025-06-07 12:12 EST"));
return <div>
<h1>example question</h1>
<QuestionRender question={q} />
</div>;
}
// The synchronous render can be used because `Page` and `question.marko`
// do not await any promises (SQLite runs synchronously)
console.info(render.sync(<Page />).text);
import * as render from "#engine/render";
import QuestionRender from "@/q+a/tags/question.marko";
import { Question } from "@/q+a/models/Question.ts";
```
Here is the `question.marko` tag used to render [questions on the clover q+a](/q+a).
```marko
// Renders a `Question` entry including its markdown body.
@ -287,4 +333,7 @@ import { formatQuestionTimestamp, formatQuestionISOTimestamp } from "@/q+a/forma
import { CloverMarkdown } from "@/q+a/clover-markdown.tsx";
```
The integration is great: `client import` is quite a magical concept, and I've
tuned it to do the expected thing in my framework.
import { type BlogMeta, formatBlogMeta } from '@/blog/helpers.ts';

View file

@ -93,7 +93,7 @@ app.get("/file/*", async (c, next) => {
} satisfies APIDirectoryList;
return c.json(json);
}
c.res = await renderView(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
c.res = await view.serve(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
file,
hasCotyledonCookie,
});
@ -111,7 +111,7 @@ app.get("/file/*", async (c, next) => {
!lofi
) {
prefetchFile(file.path);
c.res = await renderView(c, "file-viewer/clofi", {
c.res = await view.serve(c, "file-viewer/clofi", {
file,
hasCotyledonCookie,
});
@ -125,11 +125,10 @@ app.get("/file/*", async (c, next) => {
let encoding = decideEncoding(c.req.header("Accept-Encoding"));
let sizeHeader =
encoding === "raw"
? expectedSize
: // Size cannot be known because of compression modes
undefined;
let sizeHeader = encoding === "raw"
? expectedSize
// Size cannot be known because of compression modes
: undefined;
// Etag
{
@ -217,7 +216,7 @@ app.get("/canvas/:script", async (c, next) => {
if (!hasAsset(`/js/canvas/${script}.js`)) {
return next();
}
return renderView(c, "file-viewer/canvas", {
return view.serve(c, "file-viewer/canvas", {
script,
});
});
@ -295,10 +294,9 @@ function handleRanges(
): Response {
// TODO: multiple ranges
const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0);
const rangeBody =
streamOrBuffer instanceof ReadableStream
? applySingleRangeToStream(streamOrBuffer, ranges)
: applyRangesToBuffer(streamOrBuffer, ranges, rangeSize);
const rangeBody = streamOrBuffer instanceof ReadableStream
? applySingleRangeToStream(streamOrBuffer, ranges)
: applyRangesToBuffer(streamOrBuffer, ranges, rangeSize);
return new Response(rangeBody, {
status: 206,
headers: {
@ -420,7 +418,7 @@ import { type Context, Hono } from "hono";
import * as render from "#engine/render";
import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets";
import { renderView } from "#sitegen/view";
import * as view from "#sitegen/view";
import { contentTypeFor } from "#sitegen/mime";
import { requireFriendAuth } from "@/friend-auth.ts";

View file

@ -17,9 +17,11 @@ export async function main() {
const start = performance.now();
const timerSpinner = new Spinner({
text: () =>
`paper clover's scan3 [${((performance.now() - start) / 1000).toFixed(
1,
)}s]`,
`paper clover's scan3 [${
((performance.now() - start) / 1000).toFixed(
1,
)
}s]`,
fps: 10,
});
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@ -44,7 +46,7 @@ export async function main() {
.flatMap((child) =>
child.kind === MediaFileKind.directory
? child.getRecursiveFileChildren()
: child,
: child
);
qMeta.addMany(
@ -103,9 +105,11 @@ export async function main() {
) {
date = mediaFile.date;
console.warn(
`M-time on ${publicPath} was likely corrupted. ${formatDate(
mediaFile.date,
)} -> ${formatDate(stat.mtime)}`,
`M-time on ${publicPath} was likely corrupted. ${
formatDate(
mediaFile.date,
)
} -> ${formatDate(stat.mtime)}`,
);
}
mediaFile = MediaFile.createFile({
@ -162,7 +166,7 @@ export async function main() {
}: Omit<ProcessFileArgs, "spin">) {
const ext = mediaFile.extensionNonEmpty.toLowerCase();
let possible = processors.filter((p) =>
p.include ? p.include.has(ext) : !p.exclude?.has(ext),
p.include ? p.include.has(ext) : !p.exclude?.has(ext)
);
if (possible.length === 0) return;
@ -196,7 +200,7 @@ export async function main() {
);
} else {
possible = order.map(({ id }) =>
UNWRAP(possible.find((p) => p.id === id)),
UNWRAP(possible.find((p) => p.id === id))
);
}
@ -260,8 +264,9 @@ export async function main() {
const children = dir.getChildren();
// readme.txt
const readmeContent =
children.find((x) => x.basename === "readme.txt")?.contents ?? "";
const readmeContent = children.find((x) =>
x.basename === "readme.txt"
)?.contents ?? "";
// dirsort
let dirsort: string[] | null = null;
@ -354,7 +359,9 @@ export async function main() {
console.info(
"Global Stats:\n" +
`- File Count: \x1b[1m${count}\x1b[0m\n` +
`- Canonical Size: \x1b[1m${formatSize(MediaFile.getByPath("/")!.size)}\x1b[0m\n` +
`- Canonical Size: \x1b[1m${
formatSize(MediaFile.getByPath("/")!.size)
}\x1b[0m\n` +
`- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`,
);
}
@ -459,7 +466,6 @@ const procLoadTextContents: Process = {
async run({ absPath, mediaFile, stat }) {
if (stat.size > 1_000_000) return;
const text = await fs.readFile(absPath, "utf-8");
console.log({ text });
mediaFile.setContents(text);
},
};
@ -696,7 +702,7 @@ async function unproduceAsset(key: string) {
const ref = AssetRef.get(key);
if (ref) {
ref.unref();
console.log(`unref ${key}`);
console.warn(`TODO: unref ${key}`);
// TODO: remove associated files from target
}
}

View file

@ -29,7 +29,9 @@ export function formatDurationLong(seconds: number) {
const hours = Math.floor(seconds / 3600);
const minutes = Math.floor((seconds % 3600) / 60);
const remainingSeconds = seconds % 60;
return `${hours}:${minutes.toString().padStart(2, "0")}:${remainingSeconds.toString().padStart(2, "0")}`;
return `${hours}:${minutes.toString().padStart(2, "0")}:${
remainingSeconds.toString().padStart(2, "0")
}`;
}
export function escapeUri(uri: string) {
@ -100,21 +102,27 @@ export function highlightLinksInTextView(
// Case 1: https:// or http:// URLs
if (match.startsWith("http")) {
if (match.includes(findDomain)) {
return `<a href="${match
.replace(/https?:\/\/paperclover\.net\/+/, "/")
.replace(/\/\/+/g, "/")}">${match}</a>`;
return `<a href="${
match
.replace(/https?:\/\/paperclover\.net\/+/, "/")
.replace(/\/\/+/g, "/")
}">${match}</a>`;
}
return `<a href="${match.replace(
/\/\/+/g,
"/",
)}" target="_blank" rel="noopener noreferrer">${match}</a>`;
return `<a href="${
match.replace(
/\/\/+/g,
"/",
)
}" target="_blank" rel="noopener noreferrer">${match}</a>`;
}
// Case 2: domain URLs without protocol
if (match.startsWith(findDomain)) {
return `<a href="${match
.replace(findDomain + "/", "/")
.replace(/\/\/+/g, "/")}">${match}</a>`;
return `<a href="${
match
.replace(findDomain + "/", "/")
.replace(/\/\/+/g, "/")
}">${match}</a>`;
}
// Case 3: /file/ URLs
@ -145,7 +153,7 @@ export function highlightLinksInTextView(
// Match sibling file names (only if they're not already part of a link)
if (siblingFiles.length > 0) {
const escapedBasenames = siblingFiles.map((f) =>
f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"),
f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
);
const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g");
const parts = processedText.split(/(<[^>]*>)/);
@ -155,9 +163,11 @@ export function highlightLinksInTextView(
parts[i] = parts[i].replace(pattern, (match: string) => {
const file = siblingLookup[match];
if (file) {
return `<a href="/file/${file.path
.replace(/^\//, "")
.replace(/\/\/+/g, "/")}">${match}</a>`;
return `<a href="/file/${
file.path
.replace(/^\//, "")
.replace(/\/\/+/g, "/")
}">${match}</a>`;
}
return match;
});
@ -240,9 +250,11 @@ export function highlightConvo(text: string) {
return paras
.map(({ speaker, lines }) => {
return `<div class="s-${speaker}">${lines
.map((line) => `<div class="line">${line}</div>`)
.join("\n")}</div>`;
return `<div class="s-${speaker}">${
lines
.map((line) => `<div class="line">${line}</div>`)
.join("\n")
}</div>`;
})
.join("\n");
}
@ -267,10 +279,12 @@ export function formatDate(dateTime: Date) {
? dateTime < unknownDate
? "??.??.??"
: `xx.xx.${21 + Math.floor(dateTime.getTime() / 86400000)}`
: `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${dateTime
: `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${
dateTime
.getDate()
.toString()
.padStart(2, "0")}.${dateTime.getFullYear().toString().slice(2)}`;
.padStart(2, "0")
}.${dateTime.getFullYear().toString().slice(2)}`;
}
import type { MediaFile } from "@/file-viewer/models/MediaFile.ts";

View file

@ -131,8 +131,12 @@ export class Parse {
percentage: Number(percentageStr),
timeElapsed,
transferNumber: this.currentTransfer,
filesToCheck: toCheckStr ? this.toCheck = Number(toCheckStr) : this.toCheck,
totalFiles: totalStr ? this.totalFiles = Number(totalStr) : this.totalFiles,
filesToCheck: toCheckStr
? this.toCheck = Number(toCheckStr)
: this.toCheck,
totalFiles: totalStr
? this.totalFiles = Number(totalStr)
: this.totalFiles,
speed: speed || null,
};
}

View file

@ -4,9 +4,7 @@
font-weight: 400 750;
font-style: normal;
font-display: swap;
font-variation-settings:
"CASL" 0.25,
"MONO" 0;
font-variation-settings: "CASL" 0.25, "MONO" 0;
font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E;
}
@ -16,9 +14,7 @@
font-weight: 400 800;
font-style: normal;
font-display: swap;
font-variation-settings:
"CASL" 0.25,
"MONO" 1;
font-variation-settings: "CASL" 0.25, "MONO" 1;
font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E;
}
@ -28,13 +24,21 @@
font-weight: 400 800;
font-style: normal;
font-display: swap;
font-variation-settings:
"CASL" 0.25,
"MONO" 1;
font-variation-settings: "CASL" 0.25, "MONO" 1;
font-style: oblique -15deg 0deg;
unicode-range:
U+00C0-00FF, U+00A9, U+2190-2193, U+2018, U+2019, U+201C, U+201D, U+2022,
U+00A0-00A8, U+00AA-00BF, U+2194-2199, U+0100-017F;
U+00C0-00FF,
U+00A9,
U+2190-2193,
U+2018,
U+2019,
U+201C,
U+201D,
U+2022,
U+00A0-00A8,
U+00AA-00BF,
U+2194-2199,
U+0100-017F;
}
*,

View file

@ -45,7 +45,7 @@ app.post("/q+a", async (c) => {
return sendSuccess(c, new Date());
}
const ipAddr = c.req.header("cf-connecting-ip");
const ipAddr = c.req.header("x-forwarded-for");
if (ipAddr) {
input.sourceName = uniqueNamesGenerator({
dictionaries: [adjectives, colors, animals],
@ -54,11 +54,6 @@ app.post("/q+a", async (c) => {
});
}
const cfIPCountry = c.req.header("cf-ipcountry");
if (cfIPCountry) {
input.sourceLocation = cfIPCountry;
}
if (ipAddr && PROXYCHECK_API_KEY) {
const proxyCheck = await fetch(
`https://proxycheck.io/v2/?key=${PROXYCHECK_API_KEY}&risk=1&vpn=1`,
@ -77,17 +72,19 @@ app.post("/q+a", async (c) => {
proxyCheck[ipAddr].organisation ??
proxyCheck[ipAddr].provider ?? "unknown";
}
if (Number(proxyCheck[ipAddr].risk) > 72) {
if (Number(proxyCheck[ipAddr].risk) > 78) {
return questionFailure(
c,
403,
"This IP address has been flagged as a high risk IP address. If you are using a VPN/Proxy, please disable it and try again.",
"This IP address has been flagged as a high risk IP address. If " +
"you are using a VPN/Proxy, please disable it and try again.",
text,
);
}
}
}
view.regenerate("q+a inbox");
const date = Question.create(
QuestionType.pending,
JSON.stringify(input),
@ -104,7 +101,7 @@ async function sendSuccess(c: Context, date: Date) {
id: formatQuestionId(date),
}, { status: 200 });
}
c.res = await renderView(c, "q+a/success", {
c.res = await view.serve(c, "q+a/success", {
permalink: `https://paperclover.net/q+a/${formatQuestionId(date)}`,
});
}
@ -130,7 +127,7 @@ app.get("/q+a/:id", async (c, next) => {
if (image) {
return getQuestionImage(question, c.req.method === "HEAD");
}
return renderView(c, "q+a/permalink", { question });
return view.serve(c, "q+a/permalink", { question });
});
// Admin
@ -138,7 +135,7 @@ app.get("/admin/q+a", async (c) => {
return serveAsset(c, "/admin/q+a", 200);
});
app.get("/admin/q+a/inbox", async (c) => {
return renderView(c, "q+a/backend-inbox", {});
return view.serve(c, "q+a/backend-inbox", {});
});
app.delete("/admin/q+a/:id", async (c, next) => {
const id = c.req.param("id");
@ -152,6 +149,7 @@ app.delete("/admin/q+a/:id", async (c, next) => {
} else {
Question.rejectByQmid(question.qmid);
}
view.regenerate("q+a");
return c.json({ success: true, message: "ok" });
});
app.patch("/admin/q+a/:id", async (c, next) => {
@ -165,6 +163,7 @@ app.patch("/admin/q+a/:id", async (c, next) => {
return questionFailure(c, 400, "Bad Request");
}
Question.updateByQmid(question.qmid, form.text, form.type);
view.regenerate("q+a");
return c.json({ success: true, message: "ok" });
});
app.get("/admin/q+a/:id", async (c, next) => {
@ -177,20 +176,22 @@ app.get("/admin/q+a/:id", async (c, next) => {
let pendingInfo: null | PendingQuestionData = null;
if (question.type === QuestionType.pending) {
pendingInfo = JSON.parse(question.text) as PendingQuestionData;
question.text = pendingInfo.prompt.trim().split("\n").map((line) =>
line.trim().length === 0 ? "" : `q: ${line.trim()}`
).join("\n") + "\n\n";
question.text = pendingInfo.prompt
.trim()
.split("\n")
.map((line) => (line.trim().length === 0 ? "" : `q: ${line.trim()}`))
.join("\n") + "\n\n";
question.type = QuestionType.normal;
}
return renderView(c, "q+a/editor", {
return view.serve(c, "q+a/editor", {
pendingInfo,
question,
});
});
app.get("/q+a/things/random", async (c) => {
c.res = await renderView(c, "q+a/things-random", {});
c.res = await view.serve(c, "q+a/things-random", {});
});
async function questionFailure(
@ -202,7 +203,7 @@ async function questionFailure(
if (c.req.header("Accept")?.includes("application/json")) {
return c.json({ success: false, message, id: null }, { status });
}
return await renderView(c, "q+a/fail", {
return await view.serve(c, "q+a/fail", {
error: message,
content,
});
@ -218,11 +219,8 @@ import {
} from "unique-names-generator";
import { hasAdminToken } from "../admin.ts";
import { serveAsset } from "#sitegen/assets";
import {
PendingQuestion,
PendingQuestionData,
} from "./models/PendingQuestion.ts";
import type { PendingQuestionData } from "./models/PendingQuestion.ts";
import { Question, QuestionType } from "./models/Question.ts";
import { renderView } from "#sitegen/view";
import * as view from "#sitegen/view";
import { getQuestionImage } from "./image.tsx";
import { formatQuestionId, questionIdToTimestamp } from "./format.ts";

View file

@ -144,9 +144,7 @@ function ListRenderer(node: ASTNode, children: any[]) {
const T = node.ordered ? "ol" : "ul";
return (
<T>
{children.map((child) => (
<li>{child}</li>
))}
{children.map((child) => <li>{child}</li>)}
</T>
);
}

View file

@ -11,7 +11,7 @@ const getBrowser = RefCountedExpirable(
);
export async function renderQuestionImage(question: Question) {
const html = await renderViewToString("q+a/image-embed", { question });
const html = await view.renderToString("q+a/image-embed", { question });
// this browser session will be reused if multiple images are generated
// either at the same time or within a 5-minute time span. the dispose
@ -44,14 +44,15 @@ export async function getQuestionImage(
question: Question,
headOnly: boolean,
): Promise<Response> {
const hash = crypto.createHash("sha1")
const hash = crypto
.createHash("sha1")
.update(question.qmid + question.type + question.text)
.digest("hex");
const headers = {
"Content-Type": "image/png",
"Cache-Control": "public, max-age=31536000",
"ETag": `"${hash}"`,
ETag: `"${hash}"`,
"Last-Modified": question.date.toUTCString(),
};
@ -78,4 +79,4 @@ import * as path from "node:path";
import * as puppeteer from "puppeteer";
import { Question } from "@/q+a/models/Question.ts";
import { RefCountedExpirable } from "#sitegen/async";
import { renderViewToString } from "#sitegen/view";
import * as view from "#sitegen/view";

View file

@ -7,7 +7,7 @@ export const meta: Metadata = {
description: "ask clover a question",
};
export const regenerate = {
manual: true,
tags: ["q+a", "q+a inbox"]
};
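<!-- sketch of intent: the backend fires view.regenerate("q+a inbox") when a
question is submitted, and view.regenerate("q+a") when one is answered or
deleted -->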
<const/inboxSize = PendingQuestion.getAll().length />

View file

@ -1,11 +1,9 @@
export * as layout from "../layout.tsx";
export const regenerate = {
manual: true,
};
export interface Input {
admin?: boolean;
}
export * as layout from "../layout.tsx";
export const regenerate = { tags: ["q+a"] };
export const meta: Metadata = {
title: "paper clover q+a",
description: "ask clover a question",
@ -14,7 +12,7 @@ export const meta: Metadata = {
<const/{ admin = false } = input />
<const/questions = [...Question.getAll()] />
<if=true>
<if=!admin>
<question-form />
</>
<for|question| of=questions>

View file

@ -19,8 +19,8 @@ export const meta = { title: 'question answer inbox' };
</time>
<div style="color: dodgerblue; margin-bottom: 0.25rem">
${q.sourceName}
${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : null}
${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : null}
${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : ''}
${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : ''}
</div>
<p style="white-space: pre-wrap">${q.prompt}</p>
<p>

View file

@ -33,8 +33,12 @@ const rawFileRoot = process.env.CLOVER_FILE_RAW ??
const derivedFileRoot = process.env.CLOVER_FILE_DERIVED ??
path.join(nasRoot, "Documents/Config/paperclover/derived");
if (!fs.existsSync(rawFileRoot)) throw new Error(`${rawFileRoot} does not exist`);
if (!fs.existsSync(derivedFileRoot)) throw new Error(`${derivedFileRoot} does not exist`);
if (!fs.existsSync(rawFileRoot)) {
throw new Error(`${rawFileRoot} does not exist`);
}
if (!fs.existsSync(derivedFileRoot)) {
throw new Error(`${derivedFileRoot} does not exist`);
}
type Awaitable<T> = T | Promise<T>;

View file

@ -5,7 +5,7 @@
"incremental": true,
"jsx": "react-jsxdev",
"jsxImportSource": "#engine",
"lib": ["dom", "esnext", "esnext.iterator"],
"lib": ["esnext", "dom", "dom.iterable"],
"module": "nodenext",
"noEmit": true,
"outDir": ".clover/ts",
@ -14,7 +14,8 @@
"skipLibCheck": true,
"strict": true,
"verbatimModuleSyntax": true,
"target": "es2022"
"target": "es2022",
"noUncheckedIndexedAccess": true
},
"include": ["framework/**/*", "src/**/*"]
}