feat: dynamic page regeneration #24

Merged
clo merged 3 commits from regen into master 2025-08-11 22:43:27 -07:00
38 changed files with 736 additions and 755 deletions
Showing only changes of commit 56f13c676c - Show all commits

View file

@ -3,14 +3,8 @@
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
utils.url = "github:numtide/flake-utils"; utils.url = "github:numtide/flake-utils";
}; };
outputs = outputs = inputs: inputs.utils.lib.eachDefaultSystem (system:
{ nixpkgs, utils, ... }: with inputs.nixpkgs.legacyPackages.${system}; {
utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};
in
{
devShells.default = pkgs.mkShell { devShells.default = pkgs.mkShell {
buildInputs = [ buildInputs = [
pkgs.nodejs_24 # runtime pkgs.nodejs_24 # runtime
@ -24,6 +18,12 @@
pkgs.rsync pkgs.rsync
]; ];
}; };
} devShells.min = pkgs.mkShell {
); buildInputs = [
pkgs.nodejs_24 # runtime
pkgs.deno # formatter
pkgs.rsync
];
};
});
} }

View file

@ -88,19 +88,19 @@ export async function bundleClientJavaScript(
for (const file of outputFiles) { for (const file of outputFiles) {
const { text } = file; const { text } = file;
let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/"); let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
const { inputs } = UNWRAP(metafile.outputs["out!" + route]); const { entryPoint } = UNWRAP(metafile.outputs["out!" + route]);
const sources = Object.keys(inputs).filter((x) => !isIgnoredSource(x));
// Register non-chunks as script entries. // Register non-chunks as script entries.
const chunk = route.startsWith("/js/c."); const chunk = route.startsWith("/js/c.");
if (!chunk) { if (!chunk) {
const key = hot.getScriptId(path.resolve(sources[sources.length - 1])); const key = hot.getScriptId(toAbs(UNWRAP(entryPoint)));
console.log(route, key);
route = "/js/" + key.replace(/\.client\.tsx?/, ".js"); route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
scripts[key] = text; scripts[key] = text;
} }
// Register chunks and public scripts as assets. // Register chunks and public scripts as assets.
if (chunk || publicScriptRoutes.includes(route)) { if (chunk || publicScriptRoutes.includes(route)) {
p.push(io.writeAsset(route, text)); p.push(io.writeAsset({ pathname: route, buffer: text }));
} }
} }
await Promise.all(p); await Promise.all(p);
@ -124,13 +124,39 @@ export async function bundleServerJavaScript({
entries, entries,
platform, platform,
}: ServerSideOptions) { }: ServerSideOptions) {
const wViewSource = incr.work(async (_, viewItems: sg.FileItem[]) => { const regenKeys: Record<string, string[]> = {};
const regenTtls: view.Ttl[] = [];
for (const ref of viewRefs) {
const value = UNWRAP(ref.value);
if (value.type === "page" && (value.regenerate?.tags?.length ?? 0) > 0) {
for (const tag of value.regenerate!.tags!) {
(regenKeys[tag] ??= []).push(`page:${value.id}`);
}
}
if (value.type === "page" && (value.regenerate?.seconds ?? 0) > 0) {
regenTtls.push({
key: `page:${value.id}` as view.Key,
seconds: value.regenerate!.seconds!,
});
}
}
const wViewSource = incr.work(
async (
_,
{ viewItems, regenKeys, regenTtls }: {
viewItems: sg.FileItem[];
regenKeys: Record<string, string[]>;
regenTtls: view.Ttl[];
},
) => {
const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_"); const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
return { return {
magicWord, magicWord,
file: [ file: [
...viewItems.map( ...viewItems.map(
(view, i) => `import * as view${i} from ${JSON.stringify(view.file)}`, (view, i) =>
`import * as view${i} from ${JSON.stringify(view.file)}`,
), ),
`const styles = ${magicWord}[-2]`, `const styles = ${magicWord}[-2]`,
`export const scripts = ${magicWord}[-1]`, `export const scripts = ${magicWord}[-1]`,
@ -143,14 +169,19 @@ export async function bundleServerJavaScript({
` layout: view${i}.layout?.default ?? null,`, ` layout: view${i}.layout?.default ?? null,`,
` inlineCss: styles[${magicWord}[${i}]]`, ` inlineCss: styles[${magicWord}[${i}]]`,
` },`, ` },`,
].join("\n"), ].join("\n")
), ),
"}", "}",
`export const regenTags = ${JSON.stringify(regenKeys)};`,
`export const regenTtls = ${JSON.stringify(regenTtls)};`,
].join("\n"), ].join("\n"),
}; };
}, viewItems); },
{ viewItems, regenKeys, regenTtls },
);
await incr.work(async (io) => { await incr.work(
async (io, { regenKeys, viewItems }) => {
io.writeFile( io.writeFile(
"../ts/view.d.ts", "../ts/view.d.ts",
[ [
@ -160,12 +191,19 @@ export async function bundleServerJavaScript({
.map( .map(
(view) => (view) =>
` ${JSON.stringify(view.id)}: ` + ` ${JSON.stringify(view.id)}: ` +
`typeof import(${JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))}),`, `typeof import(${
JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))
}),`,
), ),
"}", "}",
"export type RegenKey = " +
(regenKeys.map((key) => JSON.stringify(key)).join(" | ") ||
"never"),
].join("\n"), ].join("\n"),
); );
}); },
{ regenKeys: Object.keys(regenKeys), viewItems },
);
const wBundles = entries.map((entry) => const wBundles = entries.map((entry) =>
incr.work(async (io, entry) => { incr.work(async (io, entry) => {
@ -255,7 +293,7 @@ export async function bundleServerJavaScript({
// file in more than one chunk. // file in more than one chunk.
if ( if (
magicWord && magicWord &&
metafile.outputs[key].inputs["framework/lib/view.ts"] UNWRAP(metafile.outputs[key]).inputs["framework/lib/view.ts"]
) { ) {
ASSERT(!fileWithMagicWord); ASSERT(!fileWithMagicWord);
fileWithMagicWord = { fileWithMagicWord = {
@ -268,7 +306,7 @@ export async function bundleServerJavaScript({
} }
} }
return fileWithMagicWord; return fileWithMagicWord;
}, entry), }, entry)
); );
const wProcessed = wBundles.map(async (wBundle) => { const wProcessed = wBundles.map(async (wBundle) => {
@ -288,7 +326,7 @@ export async function bundleServerJavaScript({
const viewStyleKeys = views.map((view) => view.styleKey); const viewStyleKeys = views.map((view) => view.styleKey);
const viewCssBundles = await Promise.all( const viewCssBundles = await Promise.all(
viewStyleKeys.map((key) => viewStyleKeys.map((key) =>
io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key)), io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key))
), ),
); );
const styleList = Array.from(new Set(viewCssBundles)); const styleList = Array.from(new Set(viewCssBundles));
@ -307,7 +345,7 @@ export async function bundleServerJavaScript({
return JSON.stringify(Object.fromEntries(neededScripts)); return JSON.stringify(Object.fromEntries(neededScripts));
} }
// Reference an index into `styleList` // Reference an index into `styleList`
return `${styleList.indexOf(viewCssBundles[i])}`; return `${styleList.indexOf(UNWRAP(viewCssBundles[i]))}`;
}); });
io.writeFile(basename, text); io.writeFile(basename, text);
@ -333,3 +371,4 @@ import * as mime from "#sitegen/mime";
import * as incr from "./incremental.ts"; import * as incr from "./incremental.ts";
import * as sg from "#sitegen"; import * as sg from "#sitegen";
import type { PageOrView } from "./generate.ts"; import type { PageOrView } from "./generate.ts";
import type * as view from "#sitegen/view";

View file

@ -57,12 +57,14 @@ export function styleKey(
export async function bundleCssFiles( export async function bundleCssFiles(
io: Io, io: Io,
{ cssImports, theme, dev }: { { cssImports, theme, dev }: {
cssImports: string[], cssImports: string[];
theme: Theme, theme: Theme;
dev: boolean, dev: boolean;
} },
) { ) {
cssImports = await Promise.all(cssImports.map((file) => io.trackFile('src/' + file))); cssImports = await Promise.all(
cssImports.map((file) => io.trackFile("src/" + file)),
);
const plugin = { const plugin = {
name: "clover css", name: "clover css",
setup(b) { setup(b) {
@ -111,4 +113,5 @@ import * as esbuild from "esbuild";
import * as fs from "#sitegen/fs"; import * as fs from "#sitegen/fs";
import * as hot from "./hot.ts"; import * as hot from "./hot.ts";
import * as path from "node:path"; import * as path from "node:path";
import { virtualFiles } from "./esbuild-support.ts";import type { Io } from "./incremental.ts"; import { virtualFiles } from "./esbuild-support.ts";
import type { Io } from "./incremental.ts";

View file

@ -9,4 +9,4 @@ globalThis.UNWRAP = (t, ...args) => {
globalThis.ASSERT = assert.ok; globalThis.ASSERT = assert.ok;
import * as util from "node:util"; import * as util from "node:util";
import * as assert from 'node:assert' import * as assert from "node:assert";

View file

@ -51,4 +51,4 @@ declare global {
} }
} }
import * as render from "./render.ts"; import * as render from "#engine/render";

View file

@ -1,5 +1,5 @@
import { test } from "node:test"; import { test } from "node:test";
import * as render from "./render.ts"; import * as render from "#engine/render";
test("sanity", (t) => t.assert.equal(render.sync("gm <3").text, "gm &lt;3")); test("sanity", (t) => t.assert.equal(render.sync("gm <3").text, "gm &lt;3"));
test("simple tree", (t) => test("simple tree", (t) =>

View file

@ -118,8 +118,11 @@ export function resolveNode(r: State, node: unknown): ResolvedNode {
if (!node && node !== 0) return ""; // falsy, non numeric if (!node && node !== 0) return ""; // falsy, non numeric
if (typeof node !== "object") { if (typeof node !== "object") {
if (node === true) return ""; // booleans are ignored if (node === true) return ""; // booleans are ignored
if (typeof node === "string") return escapeHtml(node); if (typeof node === "string") return escapeHtmlContent(node);
if (typeof node === "number") return String(node); // no escaping ever if (typeof node === "number") return String(node); // no escaping ever
if (typeof node === "symbol" && node.toString() === kElement.toString()) {
throw new Error(`There are two instances of Clover SSR loaded!`);
}
throw new Error(`Cannot render ${inspect(node)} to HTML`); throw new Error(`Cannot render ${inspect(node)} to HTML`);
} }
if (node instanceof Promise) { if (node instanceof Promise) {
@ -217,12 +220,14 @@ function stringifyElement(element: ResolvedElement) {
let attr; let attr;
switch (prop) { switch (prop) {
default: default:
attr = `${prop}=${quoteIfNeeded(escapeHtml(String(value)))}`; attr = `${prop}=${quoteIfNeeded(escapeAttribute(String(value)))}`;
break; break;
case "className": case "className":
// Legacy React Compat // Legacy React Compat
case "class": case "class":
attr = `class=${quoteIfNeeded(escapeHtml(clsx(value as ClsxInput)))}`; attr = `class=${
quoteIfNeeded(escapeAttribute(clsx(value as ClsxInput)))
}`;
break; break;
case "htmlFor": case "htmlFor":
throw new Error("Do not use the `htmlFor` attribute. Use `for`"); throw new Error("Do not use the `htmlFor` attribute. Use `for`");
@ -233,7 +238,7 @@ function stringifyElement(element: ResolvedElement) {
case "key": case "key":
continue; continue;
} }
if (needSpace) (out += " "), (needSpace = !attr.endsWith('"')); if (needSpace) ((out += " "), (needSpace = !attr.endsWith('"')));
out += attr; out += attr;
} }
out += ">"; out += ">";
@ -254,14 +259,16 @@ export function stringifyStyleAttribute(style: Record<string, string>) {
let out = ``; let out = ``;
for (const styleName in style) { for (const styleName in style) {
if (out) out += ";"; if (out) out += ";";
out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${escapeHtml( out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${
escapeAttribute(
String(style[styleName]), String(style[styleName]),
)}`; )
}`;
} }
return "style=" + quoteIfNeeded(out); return "style=" + quoteIfNeeded(out);
} }
export function quoteIfNeeded(text: string) { export function quoteIfNeeded(text: string) {
if (text.includes(" ")) return '"' + text + '"'; if (text.match(/["/>]/)) return '"' + text + '"';
return text; return text;
} }
@ -303,6 +310,21 @@ export function clsx(mix: ClsxInput) {
return str; return str;
} }
export const escapeHtmlContent = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;");
// TODO: combine into one function which decides if an attribute needs quotes
// and escapes it correctly depending on the context.
const escapeAttribute = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;");
/** @deprecated */
export const escapeHtml = (unsafeText: string) => export const escapeHtml = (unsafeText: string) =>
String(unsafeText) String(unsafeText)
.replace(/&/g, "&amp;") .replace(/&/g, "&amp;")

View file

@ -42,7 +42,7 @@ export function Suspense({ children, fallback }: SuspenseProps): render.Node {
r.asyncDone = () => { r.asyncDone = () => {
const rejections = r.rejections; const rejections = r.rejections;
if (rejections && rejections.length > 0) throw new Error("TODO"); if (rejections && rejections.length > 0) throw new Error("TODO");
state.pushChunk?.(name, (ip[0] = resolved)); state.pushChunk?.(name, ip[0] = resolved);
}; };
return render.raw(ip); return render.raw(ip);
} }
@ -99,4 +99,4 @@ export async function* renderStreaming<
return addonOutput as unknown as T; return addonOutput as unknown as T;
} }
import * as render from "./render.ts"; import * as render from "#engine/render";

View file

@ -1,7 +1,12 @@
type Awaitable<T> = T | Promise<T>; type Awaitable<T> = T | Promise<T>;
export function virtualFiles( export function virtualFiles(
map: Record<string, string | esbuild.OnLoadResult | (() => Awaitable<string | esbuild.OnLoadResult>)>, map: Record<
string,
| string
| esbuild.OnLoadResult
| (() => Awaitable<string | esbuild.OnLoadResult>)
>,
) { ) {
return { return {
name: "clover vfs", name: "clover vfs",
@ -22,7 +27,7 @@ export function virtualFiles(
{ filter: /./, namespace: "vfs" }, { filter: /./, namespace: "vfs" },
async ({ path }) => { async ({ path }) => {
let entry = map[path]; let entry = map[path];
if (typeof entry === 'function') entry = await entry(); if (typeof entry === "function") entry = await entry();
return ({ return ({
resolveDir: ".", resolveDir: ".",
loader: "ts", loader: "ts",
@ -88,7 +93,6 @@ export function markoViaBuildCache(): esbuild.Plugin {
if (!fs.existsSync(file)) { if (!fs.existsSync(file)) {
console.warn(`File does not exist: ${file}`); console.warn(`File does not exist: ${file}`);
} }
console.log(markoCache.keys());
throw new Error("Marko file not in cache: " + file); throw new Error("Marko file not in cache: " + file);
} }
return ({ return ({
@ -106,7 +110,7 @@ export function isIgnoredSource(source: string) {
return source.includes("<define:") || return source.includes("<define:") ||
source.startsWith("vfs:") || source.startsWith("vfs:") ||
source.startsWith("dropped:") || source.startsWith("dropped:") ||
source.includes("node_modules") source.includes("node_modules");
} }
import * as esbuild from "esbuild"; import * as esbuild from "esbuild";
@ -114,4 +118,5 @@ import * as string from "#sitegen/string";
import * as path from "node:path"; import * as path from "node:path";
import * as fs from "#sitegen/fs"; import * as fs from "#sitegen/fs";
import * as incr from "./incremental.ts"; import * as incr from "./incremental.ts";
import * as hot from "./hot.ts";import { markoCache } from "./marko.ts"; import * as hot from "./hot.ts";
import { markoCache } from "./marko.ts";

View file

@ -7,15 +7,16 @@ const { toRel, toAbs } = incr;
const globalCssPath = toAbs("src/global.css"); const globalCssPath = toAbs("src/global.css");
export async function main() { export async function main() {
await incr.restore(); if (!process.argv.includes("-f")) await incr.restore();
await incr.compile(generate); await incr.compile(generate);
} }
export async function generate() { export async function generate() {
// -- read config and discover files -- // -- read config and discover files --
const siteConfig = await incr.work(readManifest); const siteConfig = await incr.work(readManifest);
const { staticFiles, scripts, views, pages } = const { staticFiles, scripts, views, pages } = await discoverAllFiles(
await discoverAllFiles(siteConfig); siteConfig,
);
// TODO: make sure that `static` and `pages` does not overlap // TODO: make sure that `static` and `pages` does not overlap
@ -28,9 +29,12 @@ export async function generate() {
staticFiles.map((item) => staticFiles.map((item) =>
incr.work( incr.work(
async (io, { id, file }) => async (io, { id, file }) =>
void (await io.writeAsset(id, await io.readFile(file))), void (await io.writeAsset({
pathname: id,
buffer: await io.readFile(file),
})),
item, item,
), )
), ),
); );
const routes = await Promise.all([...builtViews, ...builtPages]); const routes = await Promise.all([...builtViews, ...builtPages]);
@ -63,7 +67,7 @@ export async function generate() {
// -- assemble page assets -- // -- assemble page assets --
const pAssemblePages = builtPages.map((page) => const pAssemblePages = builtPages.map((page) =>
assembleAndWritePage(page, styleMap, scriptMap), assembleAndWritePage(page, styleMap, scriptMap)
); );
await Promise.all([builtBackend, builtStaticFiles, ...pAssemblePages]); await Promise.all([builtBackend, builtStaticFiles, ...pAssemblePages]);
@ -85,7 +89,7 @@ export async function discoverAllFiles(
return ( return (
await Promise.all( await Promise.all(
siteConfig.siteSections.map(({ root: sectionRoot }) => siteConfig.siteSections.map(({ root: sectionRoot }) =>
incr.work(scanSiteSection, toAbs(sectionRoot)), incr.work(scanSiteSection, toAbs(sectionRoot))
), ),
) )
).reduce((acc, next) => ({ ).reduce((acc, next) => ({
@ -113,8 +117,7 @@ export async function scanSiteSection(io: Io, sectionRoot: string) {
let scripts: FileItem[] = []; let scripts: FileItem[] = [];
const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub); const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
const rootPrefix = const rootPrefix = hot.projectSrc === sectionRoot
hot.projectSrc === sectionRoot
? "" ? ""
: path.relative(hot.projectSrc, sectionRoot) + "/"; : path.relative(hot.projectSrc, sectionRoot) + "/";
const kinds = [ const kinds = [
@ -190,7 +193,7 @@ export async function preparePage(io: Io, item: sg.FileItem) {
theme: pageTheme, theme: pageTheme,
layout, layout,
regenerate, regenerate,
} = await io.import<any>(item.file); } = await io.import<sg.PageExports>(item.file);
if (!Page) throw new Error("Page is missing a 'default' export."); if (!Page) throw new Error("Page is missing a 'default' export.");
if (!metadata) throw new Error("Page is missing 'meta' export with a title."); if (!metadata) throw new Error("Page is missing 'meta' export with a title.");
@ -229,23 +232,12 @@ export async function preparePage(io: Io, item: sg.FileItem) {
); );
} }
// -- regeneration --
let regeneration: Regeneration | null = null;
if (typeof regenerate?.seconds === "number") {
regeneration ??= {};
regeneration.seconds = regenerate.seconds;
}
if (regenerate?.tags) {
regeneration ??= {};
regeneration.tags = regenerate.tags;
}
const styleKey = css.styleKey(cssImports, theme); const styleKey = css.styleKey(cssImports, theme);
return { return {
type: "page", type: "page",
id: item.id, id: item.id,
file: item.file, file: item.file,
regenerate: regeneration, regenerate,
html: text, html: text,
meta: renderedMeta, meta: renderedMeta,
cssImports, cssImports,
@ -255,11 +247,6 @@ export async function preparePage(io: Io, item: sg.FileItem) {
} as const; } as const;
} }
interface Regeneration {
seconds?: number;
tags?: string[];
}
export async function prepareView(io: Io, item: sg.FileItem) { export async function prepareView(io: Io, item: sg.FileItem) {
const module = await io.import<any>(item.file); const module = await io.import<any>(item.file);
if (!module.meta) throw new Error(`View is missing 'export const meta'`); if (!module.meta) throw new Error(`View is missing 'export const meta'`);
@ -305,7 +292,8 @@ export async function assembleAndWritePage(
scriptWork: incr.Ref<Record<string, string>>, scriptWork: incr.Ref<Record<string, string>>,
) { ) {
const page = await pageWork; const page = await pageWork;
return incr.work(async (io, { id, html, meta, styleKey, clientRefs }) => { return incr.work(
async (io, { id, html, meta, styleKey, clientRefs, regenerate }) => {
const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey))); const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));
const scriptIds = clientRefs.map(hot.getScriptId); const scriptIds = clientRefs.map(hot.getScriptId);
@ -315,16 +303,23 @@ export async function assembleAndWritePage(
.map((x) => `{${x}}`) .map((x) => `{${x}}`)
.join("\n"); .join("\n");
const doc = sg.wrapDocument({ const buffer = sg.wrapDocument({
body: html, body: html,
head: meta, head: meta,
inlineCss, inlineCss,
scripts, scripts,
}); });
await io.writeAsset(id, doc, { await io.writeAsset({
pathname: id,
buffer,
headers: {
"Content-Type": "text/html", "Content-Type": "text/html",
},
regenerative: !!regenerate,
}); });
}, page); },
page,
);
} }
export type PageOrView = PreparedPage | PreparedView; export type PageOrView = PreparedPage | PreparedView;

View file

@ -78,8 +78,7 @@ Module.prototype._compile = function (
} }
} }
fileStats.set(filename, { fileStats.set(filename, {
cssImportsRecursive: cssImportsRecursive: cssImportsMaybe.length > 0
cssImportsMaybe.length > 0
? Array.from(new Set(cssImportsMaybe)) ? Array.from(new Set(cssImportsMaybe))
: null, : null,
imports, imports,
@ -137,8 +136,7 @@ export function loadEsbuildCode(
src = code; src = code;
} }
if (src.includes("import.meta")) { if (src.includes("import.meta")) {
src = src = `
`
import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())}; import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())};
import.meta.dirname = ${JSON.stringify(path.dirname(filepath))}; import.meta.dirname = ${JSON.stringify(path.dirname(filepath))};
import.meta.filename = ${JSON.stringify(filepath)}; import.meta.filename = ${JSON.stringify(filepath)};

View file

@ -11,7 +11,7 @@ test("trivial case", async () => {
await setTimeout(1000); await setTimeout(1000);
const contents = await io.readFile(file1.path); const contents = await io.readFile(file1.path);
return [contents, Math.random()] as const; return [contents, Math.random()] as const;
} },
}); });
const second = incr.work({ const second = incr.work({
label: "second compute", label: "second compute",
@ -19,7 +19,7 @@ test("trivial case", async () => {
async run(io) { async run(io) {
await setTimeout(1000); await setTimeout(1000);
return io.readWork(first)[0].toUpperCase(); return io.readWork(first)[0].toUpperCase();
} },
}); });
const third = incr.work({ const third = incr.work({
label: "third compute", label: "third compute",
@ -27,7 +27,7 @@ test("trivial case", async () => {
async run(io) { async run(io) {
await setTimeout(1000); await setTimeout(1000);
return io.readWork(first)[1] * 1000; return io.readWork(first)[1] * 1000;
} },
}); });
return incr.work({ return incr.work({
label: "last compute", label: "last compute",
@ -37,8 +37,8 @@ test("trivial case", async () => {
return { return {
second: io.readWork(second), second: io.readWork(second),
third: io.readWork(third), third: io.readWork(third),
} };
} },
}); });
} }
const { value: first } = await incr.compile(compilation); const { value: first } = await incr.compile(compilation);
@ -52,5 +52,5 @@ test("trivial case", async () => {
import * as incr from "./incremental2.ts"; import * as incr from "./incremental2.ts";
import { beforeEach, test } from "node:test"; import { beforeEach, test } from "node:test";
import { tmpFile } from "#sitegen/testing";import { setTimeout } from "node:timers/promises"; import { tmpFile } from "#sitegen/testing";
import { setTimeout } from "node:timers/promises";

View file

@ -2,8 +2,7 @@
// See `work()`, `compile()`, and `invalidate()` for details. // See `work()`, `compile()`, and `invalidate()` for details.
// //
// All state is serializable to allow recovering state across sessions. // All state is serializable to allow recovering state across sessions.
// This framework special-cases the asset map, but is otherwise // This library special-cases the asset map, but is otherwise agnostic.
// agnostic of what it is a compiler for.
let running = false; let running = false;
let jobs = 0; let jobs = 0;
let newKeys = 0; let newKeys = 0;
@ -15,28 +14,25 @@ let writes = new Map<string, FileWrite>();
let assets = new Map<string, Asset>(); // keyed by hash let assets = new Map<string, Asset>(); // keyed by hash
export interface Ref<T> { export interface Ref<T> {
/** This method is compatible with `await` syntax */
then(
onFulfilled: (value: T) => void,
onRejected: (error: unknown) => void,
): void;
key: string; key: string;
/** This method is compatible with `await` syntax */
then(resolve: (value: T) => void, reject: (error: unknown) => void): void;
get value(): T | null; get value(): T | null;
} }
type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>; type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;
/** /**
* Declare and a unit of work. Return value is memoized and * Declare and a unit of work. Return value is memoized and only rebuilt when
* only rebuilt when inputs (declared via `Io`) change. Outputs * inputs change. Inputs are tracked via the `io` interface, as well as a hash
* are written at the end of a compilation (see `compile`). * of the `input` param and caller source code. Outputs are written at the end
* of a compilation (see `compile`).
* *
* If the returned `Ref` is not awaited or read * Work items are lazy, only started when `Ref` is awaited or `io.readWork`ed.
* via io.readWork, the job is never started.
*/ */
export function work<O>(job: Job<void, O>): Ref<O>; export function work<O>(job: Job<void, O>): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I): Ref<O>; export function work<I, O>(job: Job<I, O>, input: I): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> { export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const source = JSON.stringify(util.getCallSites(2)[1]); const source = JSON.stringify(UNWRAP(util.getCallSites(2)[1]));
const keySource = [source, util.inspect(input)].join(":"); const keySource = [source, util.inspect(input)].join(":");
const key = crypto.createHash("sha1").update(keySource).digest("base64url"); const key = crypto.createHash("sha1").update(keySource).digest("base64url");
ASSERT(running); ASSERT(running);
@ -60,13 +56,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const value = await job(io, input); const value = await job(io, input);
validateSerializable(value, ""); validateSerializable(value, "");
const { reads, writes } = io; const { reads, writes } = io;
works.set(key, { works.set(key, { value, affects: [], reads, writes });
value,
affects: [],
reads,
writes,
debug: source,
});
for (const add of reads.files) { for (const add of reads.files) {
const { affects } = UNWRAP(files.get(add)); const { affects } = UNWRAP(files.get(add));
ASSERT(!affects.includes(key)); ASSERT(!affects.includes(key));
@ -133,12 +123,8 @@ export async function flush(start: number) {
// Trim // Trim
const detachedFiles = new Set<string>(); const detachedFiles = new Set<string>();
const referencedAssets = new Set<string>(); const referencedAssets = new Set<string>();
for (const [ for (const [k, v] of works) {
k, const assets = v.writes.assets;
{
writes: { assets },
},
] of works) {
if (seenWorks.has(k)) { if (seenWorks.has(k)) {
for (const asset of assets.values()) referencedAssets.add(asset.hash); for (const asset of assets.values()) referencedAssets.add(asset.hash);
continue; continue;
@ -150,11 +136,9 @@ export async function flush(start: number) {
files.delete(k); files.delete(k);
detachedFiles.add(k); detachedFiles.add(k);
} }
for (const k of assets.keys()) { for (const k of assets.keys()) if (!referencedAssets.has(k)) assets.delete(k);
if (!referencedAssets.has(k)) assets.delete(k);
}
const p = []; const p: Promise<void>[] = [];
// File writes // File writes
let dist = 0; let dist = 0;
for (const [key, { buffer, size }] of writes) { for (const [key, { buffer, size }] of writes) {
@ -163,10 +147,14 @@ export async function flush(start: number) {
} }
// Asset map // Asset map
{ {
const { json, blob } = getAssetManifest(); const { json, blob, dynamic, dts } = getAssetManifest();
const jsonString = Buffer.from(JSON.stringify(json)); const jsonString = Buffer.from(JSON.stringify(json));
p.push(fs.writeMkdir(".clover/o/static.json", jsonString)); p.push(fs.writeMkdir(".clover/o/asset.json", jsonString));
p.push(fs.writeMkdir(".clover/o/static.blob", blob)); p.push(fs.writeMkdir(".clover/o/asset.blob", blob));
p.push(fs.writeMkdir(".clover/ts/asset.d.ts", dts));
for (const [k, v] of dynamic) {
p.push(fs.writeMkdir(`.clover/o/dynamic/${k}`, v));
}
dist += blob.byteLength + jsonString.byteLength; dist += blob.byteLength + jsonString.byteLength;
} }
await Promise.all(p); await Promise.all(p);
@ -179,9 +167,8 @@ export async function flush(start: number) {
console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`); console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
console.writeLine(` - ${assets.size} static assets`); console.writeLine(` - ${assets.size} static assets`);
console.writeLine( console.writeLine(
` - dist: ${formatSize(dist)}, incremental: ${formatSize( ` - dist: ${formatSize(dist)},` +
serialized.byteLength, ` incremental: ${formatSize(serialized.byteLength)}`,
)}`,
); );
} }
@ -337,7 +324,7 @@ async function deserialize(buffer: Buffer) {
if (err.code === "ENOENT") return null; if (err.code === "ENOENT") return null;
throw err; throw err;
}) })
.then((stat) => ({ k, size, work, stat })), .then((stat) => ({ k, size, work, stat }))
), ),
); );
for (const { k, stat, work, size } of statFiles) { for (const { k, stat, work, size } of statFiles) {
@ -356,19 +343,23 @@ async function deserialize(buffer: Buffer) {
} }
await Promise.all( await Promise.all(
Array.from(files, ([key, file]) => invalidateEntry(key, file)), Array.from(files, ([key, file]) => invalidateEntry(key, file, false)),
); );
} }
export async function invalidate(filePath: string): Promise<boolean> { export async function invalidate(
filePath: string,
unloadModule: boolean = true,
): Promise<boolean> {
const key = toRel(toAbs(filePath)); const key = toRel(toAbs(filePath));
const file = UNWRAP(files.get(key), `Untracked file '${key}'`); const file = UNWRAP(files.get(key), `Untracked file '${key}'`);
return invalidateEntry(key, file); return invalidateEntry(key, file, unloadModule);
} }
export async function invalidateEntry( export async function invalidateEntry(
key: string, key: string,
file: TrackedFile, file: TrackedFile,
unloadModule: boolean,
): Promise<boolean> { ): Promise<boolean> {
try { try {
if (file.type === "d") { if (file.type === "d") {
@ -399,36 +390,62 @@ export async function invalidateEntry(
return false; return false;
} catch (e) { } catch (e) {
forceInvalidate(file); forceInvalidate(file);
if (unloadModule) {
// TODO: handle when this triggers unloading of `generate.ts`
hot.unload(toAbs(key)); hot.unload(toAbs(key));
}
if (file.type === "null") files.delete(key); if (file.type === "null") files.delete(key);
return true; return true;
} }
} }
export function getAssetManifest() { export function getAssetManifest() {
const dynamic = new Map<string, Buffer>();
const writer = new BufferWriter(); const writer = new BufferWriter();
const asset = Object.fromEntries( const assetMap = Object.fromEntries(
Array.from(works, (work) => work[1].writes.assets) Array.from(works, (work) => work[1].writes.assets)
.filter((map) => map.size > 0) .filter((map) => map.size > 0)
.flatMap((map) => .flatMap((map) =>
Array.from(map, ([key, { hash, headers }]) => { Array.from(map, ([key, { hash, headers, regenerative }]) => {
const { raw, gzip, zstd } = UNWRAP( const { raw, gzip, zstd } = UNWRAP(
assets.get(hash), assets.get(hash),
`Asset ${key} (${hash})`, `Asset ${key} (${hash})`,
); );
if (regenerative) {
const id = crypto
.createHash("sha1")
.update(key)
.digest("hex")
.slice(0, 16); /* TODO */
dynamic.set(
id,
manifest.packDynamicBuffer(raw, gzip, zstd, headers),
);
return [key, { type: 1, id }] as const;
}
return [ return [
key, key,
{ {
type: 0,
raw: writer.write(raw, "raw:" + hash), raw: writer.write(raw, "raw:" + hash),
gzip: writer.write(gzip, "gzip:" + hash), gzip: writer.write(gzip, "gzip:" + hash),
zstd: writer.write(zstd, "zstd:" + hash), zstd: writer.write(zstd, "zstd:" + hash),
headers, headers,
}, },
] as const; ] as const;
}), })
), ),
) satisfies BuiltAssetMap; ) satisfies manifest.Manifest;
return { json: asset, blob: writer.get() }; return {
json: assetMap,
blob: writer.get(),
dynamic,
dts: "export type AssetKey = " +
Object.keys(assetMap)
.map((key) => JSON.stringify(key))
.join(" | ") +
"\n",
};
} }
/* Input/Output with automatic tracking. /* Input/Output with automatic tracking.
@ -502,7 +519,7 @@ export class Io {
const stat = await fs.stat(abs); const stat = await fs.stat(abs);
if (stat.isDirectory()) { if (stat.isDirectory()) {
return (await this.readDirRecursive(abs)).map((grand) => return (await this.readDirRecursive(abs)).map((grand) =>
path.join(child, grand), path.join(child, grand)
); );
} else { } else {
return child; return child;
@ -540,26 +557,27 @@ export class Io {
} }
} }
} }
async writeAsset( async writeAsset(asset: {
pathname: string, pathname: string;
blob: string | Buffer, buffer: string | Buffer;
headersOption?: HeadersInit, regenerative?: boolean;
) { headers?: HeadersInit;
ASSERT(pathname.startsWith("/")); }) {
ASSERT(!seenWrites.has("a:" + pathname)); ASSERT(asset.pathname.startsWith("/"));
ASSERT(!seenWrites.has("a:" + asset.pathname));
const buffer = typeof blob === "string" ? Buffer.from(blob) : blob; const buffer = typeof asset.buffer === "string"
? Buffer.from(asset.buffer)
const headers = new Headers(headersOption ?? {}); : asset.buffer;
const headers = new Headers(asset.headers ?? {});
const hash = crypto.createHash("sha1").update(buffer).digest("hex"); const hash = crypto.createHash("sha1").update(buffer).digest("hex");
if (!headers.has("Content-Type")) { if (!headers.has("Content-Type")) {
headers.set("Content-Type", mime.contentTypeFor(pathname)); headers.set("Content-Type", mime.contentTypeFor(asset.pathname));
} }
headers.set("ETag", JSON.stringify(hash)); headers.set("etag", JSON.stringify(hash));
this.writes.assets.set(pathname, { this.writes.assets.set(asset.pathname, {
hash, hash,
// @ts-expect-error TODO
headers: Object.fromEntries(headers), headers: Object.fromEntries(headers),
regenerative: !!asset.regenerative,
}); });
if (!assets.has(hash)) { if (!assets.has(hash)) {
jobs += 1; jobs += 1;
@ -600,7 +618,7 @@ class BufferWriter {
write(buffer: Buffer, hash: string): BufferView { write(buffer: Buffer, hash: string): BufferView {
let view = this.seen.get(hash); let view = this.seen.get(hash);
if (view) return view; if (view) return view;
view = [this.size, (this.size += buffer.byteLength)]; view = [this.size, this.size += buffer.byteLength];
this.seen.set(hash, view); this.seen.set(hash, view);
this.buffers.push(buffer); this.buffers.push(buffer);
return view; return view;
@ -626,7 +644,7 @@ export function validateSerializable(value: unknown, key: string) {
Buffer.isBuffer(value) Buffer.isBuffer(value)
) { ) {
Object.entries(value).forEach(([k, v]) => Object.entries(value).forEach(([k, v]) =>
validateSerializable(v, `${key}.${k}`), validateSerializable(v, `${key}.${k}`)
); );
} else { } else {
throw new Error( throw new Error(
@ -662,54 +680,48 @@ interface FileWrite {
} }
interface Writes { interface Writes {
files: Set<string>; files: Set<string>;
assets: Map< assets: Map<string, AssetWrite>;
string,
{
hash: string;
headers: Record<string, string>;
}
>;
} }
interface Asset { interface Asset {
raw: Buffer; raw: Buffer;
gzip: Buffer; gzip: Buffer;
zstd: Buffer; zstd: Buffer;
} }
interface AssetWrite {
hash: string;
headers: Record<string, string>;
regenerative: boolean;
}
interface Work<T = unknown> { interface Work<T = unknown> {
debug?: string;
value: T; value: T;
reads: Reads; reads: Reads;
writes: Writes; writes: Writes;
affects: string[]; affects: string[];
} }
type TrackedFile = { type TrackedFile =
affects: string[]; & { affects: string[] }
} & ( & (
| { type: "f"; lastModified: number } | { type: "f"; lastModified: number }
| { type: "d"; contentHash: string; contents: string[] } | { type: "d"; contentHash: string; contents: string[] }
| { type: "null" } | { type: "null" }
); );
export interface BuiltAssetMap {
[route: string]: BuiltAsset;
}
export interface BuiltAsset {
raw: BufferView;
gzip: BufferView;
zstd: BufferView;
headers: Record<string, string>;
}
const gzip = util.promisify(zlib.gzip); const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress); const zstdCompress = util.promisify(zlib.zstdCompress);
import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts"; import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";
import * as manifest from "#sitegen/assets";
import * as path from "node:path";
import * as util from "node:util"; import * as util from "node:util";
import * as crypto from "node:crypto"; import * as crypto from "node:crypto";
import * as mime from "#sitegen/mime";
import * as zlib from "node:zlib"; import * as zlib from "node:zlib";
import * as console from "@paperclover/console"; import * as console from "@paperclover/console";
import { Spinner } from "@paperclover/console/Spinner"; import { Spinner } from "@paperclover/console/Spinner";
import { formatSize } from "@/file-viewer/format.ts"; import { formatSize } from "@/file-viewer/format.ts";
import * as msgpackr from "msgpackr"; import * as msgpackr from "msgpackr";

View file

@ -1,73 +1,114 @@
interface Loaded { // Static and dynamic assets are built alongside the server binary.
map: BuiltAssetMap; // This module implements decoding and serving of the asset blobs,
buf: Buffer; // but also implements patching of dynamic assets. The `Manifest`
} // is generated by `incremental.ts`
let assets: Loaded | null = null; const root = import.meta.dirname;
let current: Loaded | null = null;
export type StaticPageId = string; // TODO: rename all these types
type DynamicId = string;
export type { Key };
export type Manifest =
& {
[K in Key]: StaticAsset | DynamicAsset;
}
& {
[string: string]: StaticAsset | DynamicAsset;
};
export interface StaticAsset extends AssetBase {
type: 0;
}
interface AssetBase {
headers: Record<string, string> & { etag: string };
raw: BufferView;
gzip: BufferView;
zstd: BufferView;
}
export interface DynamicAsset {
type: 1;
id: DynamicId;
}
interface Loaded {
map: Manifest;
static: Buffer;
dynamic: Map<DynamicId, DynamicEntry>;
}
export interface DynamicEntry extends AssetBase {
buffer: Buffer;
}
export async function reload() { export async function reload() {
const [map, buf] = await Promise.all([ const map = await fs.readJson<Manifest>(path.join(root, "asset.json"));
fs.readFile(path.join(import.meta.dirname, "static.json"), "utf8"), const statics = await fs.readFile(path.join(root, "asset.blob"));
fs.readFile(path.join(import.meta.dirname, "static.blob")), const dynamic = new Map(
]); await Promise.all(
return (assets = { map: JSON.parse(map), buf }); Object.entries(map)
} .filter((entry): entry is [string, DynamicAsset] => entry[1].type === 1)
.map(async ([k, v]) =>
export function reloadSync() { [
const map = fs.readFileSync( v.id,
path.join(import.meta.dirname, "static.json"), await fs.readFile(path.join(root, "dynamic", v.id))
"utf8", .then(loadRegenerative),
] as const
),
),
); );
const buf = fs.readFileSync(path.join(import.meta.dirname, "static.blob")); return (current = { map, static: statics, dynamic });
return (assets = { map: JSON.parse(map), buf });
} }
export async function middleware(c: Context, next: Next) { export async function middleware(c: Context, next: Next) {
if (!assets) await reload(); if (!current) current = await reload();
const asset = assets!.map[c.req.path]; const asset = current.map[c.req.path];
if (asset) return assetInner(c, asset, 200); if (asset) return assetInner(c, asset, 200);
return next(); return next();
} }
export async function notFound(c: Context) { export async function notFound(c: Context) {
if (!assets) await reload(); if (!current) current = await reload();
let pathname = c.req.path; let pathname = c.req.path;
do { do {
const asset = assets!.map[pathname + "/404"]; const asset = current.map[pathname + "/404"];
if (asset) return assetInner(c, asset, 404); if (asset) return assetInner(c, asset, 404);
pathname = pathname.slice(0, pathname.lastIndexOf("/")); pathname = pathname.slice(0, pathname.lastIndexOf("/"));
} while (pathname); } while (pathname);
const asset = assets!.map["/404"]; const asset = current.map["/404"];
if (asset) return assetInner(c, asset, 404); if (asset) return assetInner(c, asset, 404);
return c.text("the 'Not Found' page was not found", 404); return c.text("the 'Not Found' page was not found", 404);
} }
export async function serveAsset( export async function serveAsset(c: Context, id: Key, status: StatusCode) {
c: Context, return assetInner(c, (current ?? (await reload())).map[id], status);
id: StaticPageId,
status: StatusCode,
) {
return assetInner(c, (assets ?? (await reload())).map[id], status);
} }
/** @deprecated */
export function hasAsset(id: string) { export function hasAsset(id: string) {
return (assets ?? reloadSync()).map[id] !== undefined; return UNWRAP(current).map[id] !== undefined;
} }
export function etagMatches(etag: string, ifNoneMatch: string) { export function etagMatches(etag: string, ifNoneMatch: string) {
return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1; return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1;
} }
function subarrayAsset([start, end]: BufferView) { function assetInner(c: Context, asset: Manifest[Key], status: StatusCode) {
return assets!.buf.subarray(start, end); ASSERT(current);
if (asset.type === 0) {
return respondWithBufferAndViews(c, current.static, asset, status);
} else {
const entry = UNWRAP(current.dynamic.get(asset.id));
return respondWithBufferAndViews(c, entry.buffer, entry, status);
}
} }
function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) { function respondWithBufferAndViews(
const ifnonematch = c.req.header("If-None-Match"); c: Context,
if (ifnonematch) { buffer: Buffer,
const etag = asset.headers.ETag; asset: AssetBase,
if (etagMatches(etag, ifnonematch)) { status: StatusCode,
) {
const ifNoneMatch = c.req.header("If-None-Match");
if (ifNoneMatch) {
const etag = asset.headers.etag;
if (etagMatches(etag, ifNoneMatch)) {
return (c.res = new Response(null, { return (c.res = new Response(null, {
status: 304, status: 304,
statusText: "Not Modified", statusText: "Not Modified",
@ -80,24 +121,103 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
const acceptEncoding = c.req.header("Accept-Encoding") ?? ""; const acceptEncoding = c.req.header("Accept-Encoding") ?? "";
let body; let body;
let headers = asset.headers; let headers = asset.headers;
if (acceptEncoding.includes("zstd") && asset.zstd) { if (acceptEncoding.includes("zstd")) {
body = subarrayAsset(asset.zstd); body = buffer.subarray(...asset.zstd);
headers = { headers = {
...asset.headers, ...asset.headers,
"Content-Encoding": "zstd", "Content-Encoding": "zstd",
}; };
} else if (acceptEncoding.includes("gzip") && asset.gzip) { } else if (acceptEncoding.includes("gzip")) {
body = subarrayAsset(asset.gzip); body = buffer.subarray(...asset.gzip);
headers = { headers = {
...asset.headers, ...asset.headers,
"Content-Encoding": "gzip", "Content-Encoding": "gzip",
}; };
} else { } else {
body = subarrayAsset(asset.raw); body = buffer.subarray(...asset.raw);
} }
return (c.res = new Response(body, { headers, status })); return (c.res = new Response(body, { headers, status }));
} }
export function packDynamicBuffer(
raw: Buffer,
gzip: Buffer,
zstd: Buffer,
headers: Record<string, string>,
) {
const headersBuffer = Buffer.from(
Object.entries(headers)
.map((entry) => entry.join(":"))
.join("\n"),
"utf-8",
);
const header = new Uint32Array(3);
header[0] = headersBuffer.byteLength + header.byteLength;
header[1] = header[0] + raw.byteLength;
header[2] = header[1] + gzip.byteLength;
return Buffer.concat([
Buffer.from(header.buffer),
headersBuffer,
raw,
gzip,
zstd,
]);
}
function loadRegenerative(buffer: Buffer): DynamicEntry {
const headersEnd = buffer.readUInt32LE(0);
const headers = Object.fromEntries(
buffer
.subarray(3 * 4, headersEnd)
.toString("utf-8")
.split("\n")
.map((line) => {
const i = line.indexOf(":");
return [line.slice(0, i), line.slice(i + 1)];
}),
);
const raw = buffer.readUInt32LE(4);
const gzip = buffer.readUInt32LE(8);
const hasEtag = (v: object): v is typeof v & { etag: string } =>
"etag" in v && typeof v.etag === "string";
ASSERT(hasEtag(headers));
return {
headers,
buffer,
raw: [headersEnd, raw],
gzip: [raw, gzip],
zstd: [gzip, buffer.byteLength],
};
}
const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress);
export async function overwriteDynamic(
key: Key,
value: string | Buffer,
headers: Record<string, string>,
) {
if (!current) current = await reload();
const asset = UNWRAP(current.map[key]);
ASSERT(asset.type === 1);
UNWRAP(current.dynamic.has(asset.id));
const buffer = Buffer.from(value);
const etag = JSON.stringify(
crypto.createHash("sha1").update(buffer).digest("hex"),
);
const [gzipBuffer, zstdBuffer] = await Promise.all([
gzip(buffer),
zstdCompress(buffer),
]);
const packed = packDynamicBuffer(buffer, gzipBuffer, zstdBuffer, {
...headers,
etag,
});
current.dynamic.set(asset.id, loadRegenerative(packed));
await fs.writeFile(path.join(root, "dynamic", asset.id), packed);
}
process.on("message", (msg: any) => { process.on("message", (msg: any) => {
if (msg?.type === "clover.assets.reload") reload(); if (msg?.type === "clover.assets.reload") reload();
}); });
@ -105,6 +225,10 @@ process.on("message", (msg: any) => {
import * as fs from "#sitegen/fs"; import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono"; import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status"; import type { StatusCode } from "hono/utils/http-status";
import type { BuiltAsset, BuiltAssetMap, BufferView } from "../incremental.ts"; import type { BufferView } from "../incremental.ts";
import { Buffer } from "node:buffer"; import { Buffer } from "node:buffer";
import * as path from "node:path"; import * as path from "node:path";
import type { AssetKey as Key } from "../../.clover/ts/asset.d.ts";
import * as crypto from "node:crypto";
import * as zlib from "node:zlib";
import * as util from "node:util";

View file

@ -51,8 +51,8 @@ export class Queue<T, R> {
let n = 0; let n = 0;
for (const item of active) { for (const item of active) {
let itemText = "- " + item.format(now); let itemText = "- " + item.format(now);
text += text += `\n` +
`\n` + itemText.slice(0, Math.max(0, process.stdout.columns - 1)); itemText.slice(0, Math.max(0, process.stdout.columns - 1));
if (n > 10) { if (n > 10) {
text += `\n ... + ${active.length - n} more`; text += `\n ... + ${active.length - n} more`;
break; break;

View file

@ -4,6 +4,7 @@ export {
createReadStream, createReadStream,
createWriteStream, createWriteStream,
existsSync, existsSync,
type FileHandle,
open, open,
readdir, readdir,
readdirSync, readdirSync,
@ -15,7 +16,6 @@ export {
statSync, statSync,
writeFile, writeFile,
writeFileSync, writeFileSync,
type FileHandle,
}; };
export function mkdir(dir: string) { export function mkdir(dir: string) {
@ -98,6 +98,7 @@ import {
writeFileSync, writeFileSync,
} from "node:fs"; } from "node:fs";
import { import {
type FileHandle,
mkdir as nodeMkdir, mkdir as nodeMkdir,
open, open,
readdir, readdir,
@ -106,6 +107,5 @@ import {
rmdir, rmdir,
stat, stat,
writeFile, writeFile,
type FileHandle,
} from "node:fs/promises"; } from "node:fs/promises";
export { Stats } from "node:fs"; export { Stats } from "node:fs";

View file

@ -1,9 +1,11 @@
/* Impementation of CommonMark specification for markdown with support /* Implementation of [CommonMark] specification for markdown with support
* for custom syntax extensions via the parser options. Instead of * for custom syntax extensions via the parser options. Instead of
* returning an AST that has a second conversion pass to JSX, the * returning an AST that has a second conversion pass to JSX, the
* returned value of 'parse' is 'engine.Node' which can be stringified * returned value of 'parse' is 'engine.Node' which can be stringified
* via clover's SSR engine. This way, generation optimizations, async * via Clover's SSR engine. This way, generation optimizations, async
* components, and other features are gained for free here. * components, and other features are gained for free here.
*
* [CommonMark]: https://spec.commonmark.org/0.31.2/
*/ */
function parse(src: string, options: Partial<ParseOpts> = {}) {} function parse(src: string, options: Partial<ParseOpts> = {}) {}
@ -16,6 +18,9 @@ export function Markdown({
return parse(src, options); return parse(src, options);
} }
// TODO: This implementation is flawed because it is impossible to sanely handle
// emphasis and strong emphasis, and all their edge cases. Instead of making these
// using extensions interface, they should be special cased.
function parseInline(src: string, options: Partial<InlineOpts> = {}) { function parseInline(src: string, options: Partial<InlineOpts> = {}) {
const { rules = inlineRules, links = new Map() } = options; const { rules = inlineRules, links = new Map() } = options;
const opts: InlineOpts = { rules, links }; const opts: InlineOpts = { rules, links };
@ -110,12 +115,11 @@ export const inlineRules: Record<string, InlineRule> = {
} else if (afterText[0] === "[") { } else if (afterText[0] === "[") {
const splitTarget = splitFirst(afterText.slice(1), /]/); const splitTarget = splitFirst(afterText.slice(1), /]/);
if (!splitTarget) return null; if (!splitTarget) return null;
const name = const name = splitTarget.first.trim().length === 0
splitTarget.first.trim().length === 0 // Collapsed reference link
? // Collapsed reference link ? textSrc.trim()
textSrc.trim() // Full Reference Link
: // Full Reference Link : splitTarget.first.trim();
splitTarget.first.trim();
const target = opts.links.get(name); const target = opts.links.get(name);
if (!target) return null; if (!target) return null;
({ href, title } = target); ({ href, title } = target);

View file

@ -1,6 +1,25 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string; export type ScriptId = string;
export interface PageExports extends ViewExports {
regenerate?: PageRegenerateOptions;
}
export interface ViewExports {
default: render.Component;
meta: meta.Meta | ((props: { ssr: true }) => Promise<meta.Meta> | meta.Meta);
theme?: css.Theme;
layout?: Layout;
}
export interface Layout {
default: render.Component;
theme?: css.Theme;
// TODO: nested layout
}
export interface PageRegenerateOptions {
tags?: string[];
seconds?: number;
debounce?: number;
}
/** /**
* A filesystem object associated with some ID, * A filesystem object associated with some ID,
* such as a page's route to it's source file. * such as a page's route to it's source file.
@ -49,3 +68,5 @@ export function wrapDocument({
} }
import * as render from "#engine/render"; import * as render from "#engine/render";
import type * as meta from "./meta.ts";
import type * as css from "../css.ts";

View file

@ -10,7 +10,9 @@ export function getDb(file: string) {
if (db) return db; if (db) return db;
const fileWithExt = file.includes(".") ? file : file + ".sqlite"; const fileWithExt = file.includes(".") ? file : file + ".sqlite";
db = new WrappedDatabase( db = new WrappedDatabase(
new DatabaseSync(path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt)), new DatabaseSync(
path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt),
),
); );
map.set(file, db); map.set(file, db);
return db; return db;

View file

@ -1,5 +1,9 @@
export function tmpFile(basename: string) { export function tmpFile(basename: string) {
const file = path.join(import.meta.dirname, '../../.clover/testing', basename); const file = path.join(
import.meta.dirname,
"../../.clover/testing",
basename,
);
return { return {
path: file, path: file,
read: fs.readFile.bind(fs, file), read: fs.readFile.bind(fs, file),
@ -7,5 +11,5 @@ export function tmpFile(basename: string) {
}; };
} }
import * as path from 'node:path'; import * as path from "node:path";
import * as fs from './fs.ts'; import * as fs from "./fs.ts";

View file

@ -1,6 +1,6 @@
// The "view" system allows rendering dynamic pages within backends. // The "view" system allows rendering dynamic pages within backends.
// This is done by scanning all `views` dirs, bundling their client // This is done by scanning all `views` dirs, bundling their client
// resources, and then providing `renderView` which renders a page. // resources, and then providing `serve` which renders a page.
// //
// This system also implements page regeneration. // This system also implements page regeneration.
let codegen: Codegen; let codegen: Codegen;
@ -12,53 +12,53 @@ try {
// Generated in `bundle.ts` // Generated in `bundle.ts`
export interface Codegen { export interface Codegen {
views: Record<ViewKey, View>; views: { [K in Key]: View<PropsFromModule<ViewMap[K]>> };
scripts: Record<string, string>; scripts: Record<string, string>;
regenTtls: Ttl[]; regenTtls: Ttl[];
regenTags: Record<string, ViewKey[]>; regenTags: Record<RegenKey, Key[]>;
} }
export interface View { // The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
export interface View<Props extends Record<string, unknown>> {
component: render.Component; component: render.Component;
meta: meta: meta.Meta | ((props: Props) => Promise<meta.Meta> | meta.Meta);
| meta.Meta
| ((props: { context?: hono.Context }) => Promise<meta.Meta> | meta.Meta);
layout?: render.Component; layout?: render.Component;
inlineCss: string; inlineCss: string;
scripts: Record<string, string>; scripts: Record<string, string>;
} }
export interface Ttl { export interface Ttl {
seconds: number; seconds: number;
key: ViewKey; key: Key;
} }
type ViewKey = keyof ViewMap; export type Key = keyof ViewMap;
export async function renderView<K extends ViewKey>( export async function serve<K extends Key>(
context: hono.Context, context: hono.Context,
id: K, id: K,
props: PropsFromModule<ViewMap[K]>, props: PropsFromModule<ViewMap[K]>,
) { ) {
return context.html(await renderViewToString(id, { context, ...props })); return context.html(await renderToString(id, { context, ...props }));
} }
type PropsFromModule<M extends any> = M extends { type PropsFromModule<M extends any> = M extends {
default: (props: infer T) => render.Node; default: (props: infer T) => render.Node;
} } ? T
? T
: never; : never;
export async function renderViewToString<K extends ViewKey>( export async function renderToString<K extends Key>(
id: K, id: K,
props: PropsFromModule<ViewMap[K]>, props: PropsFromModule<ViewMap[K]>,
) { ) {
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
const { const {
component, component,
inlineCss, inlineCss,
layout, layout,
meta: metadata, meta: metadata,
}: View = UNWRAP(codegen.views[id], `Missing view ${id}`); }: View<PropsFromModule<ViewMap[K]>> = UNWRAP(
codegen.views[id],
`Missing view ${id}`,
);
// -- metadata -- // -- metadata --
const renderedMetaPromise = Promise.resolve( const renderedMetaPromise = Promise.resolve(
@ -79,21 +79,43 @@ export async function renderViewToString<K extends ViewKey>(
head: await renderedMetaPromise, head: await renderedMetaPromise,
inlineCss, inlineCss,
scripts: joinScripts( scripts: joinScripts(
Array.from(sitegen.scripts, (id) => Array.from(
UNWRAP(codegen.scripts[id], `Missing script ${id}`), sitegen!.scripts,
(id) => UNWRAP(codegen.scripts[id], `Missing script ${id}`),
), ),
), ),
}); });
} }
export function joinScripts(scriptSources: string[]) { export function regenerate(tag: RegenKey) {
for (const view of codegen.regenTags[tag]) {
const key = view.slice("page:".length);
renderToString(view, {})
.then((result) => {
console.info(`regenerate ${key}`);
asset.overwriteDynamic(key as asset.Key, result, {
"content-type": "text/html",
});
})
.catch((e) => {
console.error(`Failed regenerating ${view} from tag ${tag}`, e);
});
}
}
function joinScripts(scriptSources: string[]) {
const { length } = scriptSources; const { length } = scriptSources;
if (length === 0) return ""; if (length === 0) return "";
if (length === 1) return scriptSources[0]; if (0 in scriptSources) return scriptSources[0];
return scriptSources.map((source) => `{${source}}`).join(";"); return scriptSources.map((source) => `{${source}}`).join(";");
} }
import * as meta from "./meta.ts"; import * as meta from "./meta.ts";
import type * as hono from "#hono"; import type * as hono from "#hono";
import * as render from "#engine/render"; import * as render from "#engine/render";
import * as sg from "./sitegen.ts"; import * as sg from "./sitegen.ts";
import type { RegisteredViews as ViewMap } from "../../.clover/ts/view.d.ts"; import * as asset from "./assets.ts";
import type {
RegenKey,
RegisteredViews as ViewMap,
} from "../../.clover/ts/view.d.ts";

View file

@ -14,16 +14,17 @@ export function loadMarko(module: NodeJS.Module, filepath: string) {
// bare client import statements to it's own usage. // bare client import statements to it's own usage.
const scannedClientRefs = new Set<string>(); const scannedClientRefs = new Set<string>();
if (src.match(/^\s*client\s+import\s+["']/m)) { if (src.match(/^\s*client\s+import\s+["']/m)) {
src = src = src.replace(
src.replace(
/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m, /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
(_, src) => { (_, src) => {
const ref = JSON.parse(`"${src.slice(1, -1)}"`); const ref = JSON.parse(`"${src.slice(1, -1)}"`);
const resolved = hot.resolveClientRef(filepath, ref); const resolved = hot.resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved); scannedClientRefs.add(resolved);
return `<CloverScriptInclude=${JSON.stringify( return `<CloverScriptInclude=${
JSON.stringify(
hot.getScriptId(resolved), hot.getScriptId(resolved),
)} />`; )
} />`;
}, },
) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n'; ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
} }

View file

@ -11,7 +11,7 @@ let watch: Watch;
export async function main() { export async function main() {
// Catch up state by running a main build. // Catch up state by running a main build.
await incr.restore(); if (!process.argv.includes("-f")) await incr.restore();
watch = new Watch(rebuild); watch = new Watch(rebuild);
rebuild([]); rebuild([]);
} }
@ -36,15 +36,16 @@ function onSubprocessClose(code: number | null, signal: string | null) {
} }
async function rebuild(files: string[]) { async function rebuild(files: string[]) {
const hasInvalidated = files.length === 0 const hasInvalidated = files.length === 0 ||
|| (await Promise.all(files.map(incr.invalidate))).some(Boolean); (await Promise.all(files.map((file) => incr.invalidate(file))))
.some(Boolean);
if (!hasInvalidated) return; if (!hasInvalidated) return;
incr.compile(generate.generate).then(({ incr.compile(generate.generate).then(({
watchFiles, watchFiles,
newOutputs, newOutputs,
newAssets newAssets,
}) => { }) => {
const removeWatch = [...watch.files].filter(x => !watchFiles.has(x)) const removeWatch = [...watch.files].filter((x) => !watchFiles.has(x));
for (const file of removeWatch) watch.remove(file); for (const file of removeWatch) watch.remove(file);
watch.add(...watchFiles); watch.add(...watchFiles);
// Restart the server if it was changed or not running. // Restart the server if it was changed or not running.
@ -60,8 +61,8 @@ async function rebuild(files: string[]) {
function statusLine() { function statusLine() {
console.info( console.info(
`Watching ${watch.files.size} files ` `Watching ${watch.files.size} files ` +
+ `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`, `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`,
); );
} }
@ -142,7 +143,7 @@ class Watch {
#getFiles(absPath: string, event: fs.WatchEventType) { #getFiles(absPath: string, event: fs.WatchEventType) {
const files = []; const files = [];
if (this.files.has(absPath)) files.push(absPath); if (this.files.has(absPath)) files.push(absPath);
if (event === 'rename') { if (event === "rename") {
const dir = path.dirname(absPath); const dir = path.dirname(absPath);
if (this.files.has(dir)) files.push(dir); if (this.files.has(dir)) files.push(dir);
} }

View file

@ -1,6 +1,6 @@
// This is the main file for the backend // This is the main file for paperclover.net's server.
const app = new Hono(); const app = new Hono();
const logHttp = scoped("http", { color: "magenta" }); const logHttp = console.scoped("http", { color: "magenta" });
// Middleware // Middleware
app.use(trimTrailingSlash()); app.use(trimTrailingSlash());
@ -38,4 +38,4 @@ import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash"; import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets"; import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts"; import * as admin from "./admin.ts";
import { scoped } from "@paperclover/console"; import * as console from "@paperclover/console";

View file

@ -1,10 +0,0 @@
export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language",
desc: "...todo...",
date: "2025-07-08",
draft: true,
};
export const meta = formatBlogMeta(blob);
export * as layout from "@/blog/layout.tsx";

View file

@ -1,290 +0,0 @@
export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language",
desc: "...todo...",
date: "2025-06-13",
draft: true,
};
export const meta = formatBlogMeta(blob);
export * as layout from "@/blog/layout.tsx";
I've been recently playing around [Marko][1], and after adding limited support
for it in my website generator, [sitegen][2], I instantly fell in love with how
minimalistic it is in comparison to JSX, Astro components, and Svelte.
## Introduction
If JSX was taking HTML and shoving its syntax into JavaScript, Marko is shoving
JavaScript into HTML. Attributes are JavaScript expressions.
```marko
<div>
// `input` is like props, but given in the top-level scope
<time datetime=input.date.toISOString()>
// Interpolation with JS template string syntax
${formatTimeNicely(input.date)}
</time>
<div>
<a href=`/users/${input.user.id}`>${input.user.name}</a>
</div>
// Capital letter variables for imported components
<MarkdownContent message=input.message />
// Components also can be auto-imported by lowercase.
// This will look upwards for a `tags/` folder containing
// "custom-footer.marko", similar to how Node.js finds
// package names in all upwards `node_modules` folders.
<custom-footer />
</div>
// ESM `import` / `export` just work as expected.
// I prefer my imports at the end, to highlight the markup.
import MarkdownContent from "./MarkdownContent.marko";
import { formatTimeNicely } from "../date-helpers.ts";
```
Tags with the `value` attribute have a shorthand, which is used by the built-in
`<if>` for conditional rendering.
```marko
// Sugar for <input value="string" />
<input="string" />
// and it composes amazingly to the 'if' built-in
<if=input.user>
<UserProfile=input.user />
</if>
```
Tags can also return values into the scope for use in the template using `/`, such as `<id>` for unique ID generation. This is available to components that `<return=output/>`.
```
<id/uniqueId />
<input id=uniqueId type="checkbox" name="allow_trans_rights" />
<label for=uniqueId>click me!</>
// ^ oh, you can also omit the
// closing tag name if you want.
```
It's important that I started with the two forms of "Tag I/O": `=` for input
and `/` for output. With those building blocks, we introduce local variables
with `const`
```
<const/rendered = markdownToHtml(input.value) />
// This is how you insert raw HTML to the document
<inline-html=rendered />
// It supports all of the cozy destructuring syntax JS has
<const/{ id, name } = user />
```
Unlike JSX, when you pass content within a tag (`input.content` instead of
JSX's `children`), instead of it being a JSX element, it is actually a
function. This means that the `for` tag can render the content multiple times.
```
<ul>
<for from=1 to=10>
// Renders a new random number for each iteration.
<li>${Math.random()}</li>
</>
</ul>
```
Since `content` is a function, it can take arguments. This is done with `|`
```
<h1>my friends</h1>
<ul>
// I tend to omit the closing tag names for the built-in control
// flow tags, but I keep them for HTML tags. It's kinda like how
// in JavaScript you just write `}` to close your `if`s and loops.
//
// Anyways <for> also has 'of'
<for|item| of=user.friends>
<li class="friend">${item.name}</li>
</>
// They support the same syntax JavaScript function params allows,
// so you can have destructuring here too, and multiple params.
<for|{ name }, index| of=user.friends>
// By the way you can also use emmet-style class and ID shorthands.
<li.friend>My #${index + 1} friend is ${name}</li>
</>
</ul>
```
Instead of named slots, Marko has attribute tags. These are more powerful than
slots since they are functions, and can also act as sugar for more complicated
attributes.
```
<Layout title="Welcome">
<@header variant="big">
<h1>the next big thing</h1>
</@header>
<p>body text...</p>
</Layout>
// The `input` variable inside of <Layout /> is:
//
// {
// title: "Welcome",
// header: {
// content: /* function rendering "<h1>the next big thing</h1>" */,
// variant: "big",
// },
// content: /* function rendering "<p>body text</p>" */
// }
```
This layout could be implemented as such:
```marko
<main>
<if=input.header />
<const/{ ...headerProps, content }=input.header />
<header ...headerProps>
// Instead of assigning to a variable with a capital letter,
// template interpolation works on tag names. This can also
// be a string to render the native HTML tag of that kind.
<${content} />
</header>
<hr />
</>
<${input.content} />
</main>
```
The last syntax feature missing is calling a tag with parameters. That is done
just like a regular function call, with '('.
```
<Something(item, index) />
```
In fact, attributes can just be sugar over this syntax; _this technically isn't
true but it's close enough for the example_
```
<SpecialButton type="submit" class="red" />
// is equal to
<SpecialButton({ type: "submit", class: "red" }) />
```
All of the above is about how Marko's syntax works, and how it performs HTML
generation with components. Marko also allows interactive components, but an
explanation of that is beyond the scope of this page, mostly since I have not
used it. Below is a brief example, modified from their documentation.
```marko
// Reactive variables with <let/> just work...
<let/basicCounter=0 />
<button onClick() { basicCounter += 1 }>${basicCounter}</button>
// ...but a counter is boring.
<let/todos=[
{ id: 0, text: "Learn Marko" },
{ id: 1, text: "Make a Website" },
]/>
// 'by' is like React JSX's "key" property, but it's optional.
<ul><for|todo, i| of=todos by=(todo => todo.id)>
<li.todo>
// this variable remains stable even if the list
// re-orders, because 'by' was specified.
<let/done=false/>
<label>
<span>${todo.text}</span>
// ':=' creates a two-way reactive binding,
// (it passes a callback for `checkedChanged`)
<input type="checkbox" checked:=done />
</label>
<button
title="delete"
disabled=!done
onClick() {
todos = todos.toSpliced(i, 1);
}
> &times; </button>
</li>
</></ul>
// Form example
<let/nextId=2/>
<form onSubmit(e) {
e.preventDefault();
todos = todos.concat({
id: nextId++,
// HTMLFormElement exposes all its named input
// elements as extra properties on the object.
text: e.target.text.value,
});
// And you can clear it with 'reset()'
e.target.reset();
}>
// We don't 'onChange' like a React loser. The form
// value can be read in the submit event like normal.
<input name="text" placeholder="Another Item">
<button type="submit">Add</button>
</form>
```
## Usage on `paperclover.net`
TODO: document a lot of feedback, how i embedded Marko
My website uses statically generated HTML. That is why I have not needed to use
reactive variables. My generator doesn't even try compiling components
client-side.
Here is the actual component used to render [questions on the clover q+a](/q+a).
```marko
// Renders a `Question` entry including its markdown body.
export interface Input {
question: Question;
admin?: boolean;
}
// 2024-12-31 05:00:00 EST
export const transitionDate = 1735639200000;
<const/{ question, admin } = input />
<const/{ id, date, text } = question/>
<${"e-"}
f=(date > transitionDate ? true : undefined)
id=admin ? `q${id}` : undefined
>
<if=admin>
<a
style="margin-right: 0.5rem"
href=`/admin/q+a/${id}`
>[EDIT]</a>
</>
<a>
<time
datetime=formatQuestionISOTimestamp(date)
>${formatQuestionTimestamp(date)}</time>
</a>
<CloverMarkdown ...{ text } />
</>
// this singleton script will make all the '<time>' tags clickable.
client import "./clickable-links.client.ts";
import type { Question } from "@/q+a/models/Question.ts";
import { formatQuestionTimestamp, formatQuestionISOTimestamp } from "@/q+a/format.ts";
import { CloverMarkdown } from "@/q+a/clover-markdown.tsx";
```
import { type BlogMeta, formatBlogMeta } from '@/blog/helpers.ts';

View file

@ -93,7 +93,7 @@ app.get("/file/*", async (c, next) => {
} satisfies APIDirectoryList; } satisfies APIDirectoryList;
return c.json(json); return c.json(json);
} }
c.res = await renderView(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, { c.res = await view.serve(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
file, file,
hasCotyledonCookie, hasCotyledonCookie,
}); });
@ -111,7 +111,7 @@ app.get("/file/*", async (c, next) => {
!lofi !lofi
) { ) {
prefetchFile(file.path); prefetchFile(file.path);
c.res = await renderView(c, "file-viewer/clofi", { c.res = await view.serve(c, "file-viewer/clofi", {
file, file,
hasCotyledonCookie, hasCotyledonCookie,
}); });
@ -125,11 +125,10 @@ app.get("/file/*", async (c, next) => {
let encoding = decideEncoding(c.req.header("Accept-Encoding")); let encoding = decideEncoding(c.req.header("Accept-Encoding"));
let sizeHeader = let sizeHeader = encoding === "raw"
encoding === "raw"
? expectedSize ? expectedSize
: // Size cannot be known because of compression modes // Size cannot be known because of compression modes
undefined; : undefined;
// Etag // Etag
{ {
@ -217,7 +216,7 @@ app.get("/canvas/:script", async (c, next) => {
if (!hasAsset(`/js/canvas/${script}.js`)) { if (!hasAsset(`/js/canvas/${script}.js`)) {
return next(); return next();
} }
return renderView(c, "file-viewer/canvas", { return view.serve(c, "file-viewer/canvas", {
script, script,
}); });
}); });
@ -295,8 +294,7 @@ function handleRanges(
): Response { ): Response {
// TODO: multiple ranges // TODO: multiple ranges
const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0); const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0);
const rangeBody = const rangeBody = streamOrBuffer instanceof ReadableStream
streamOrBuffer instanceof ReadableStream
? applySingleRangeToStream(streamOrBuffer, ranges) ? applySingleRangeToStream(streamOrBuffer, ranges)
: applyRangesToBuffer(streamOrBuffer, ranges, rangeSize); : applyRangesToBuffer(streamOrBuffer, ranges, rangeSize);
return new Response(rangeBody, { return new Response(rangeBody, {
@ -420,7 +418,7 @@ import { type Context, Hono } from "hono";
import * as render from "#engine/render"; import * as render from "#engine/render";
import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets"; import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets";
import { renderView } from "#sitegen/view"; import * as view from "#sitegen/view";
import { contentTypeFor } from "#sitegen/mime"; import { contentTypeFor } from "#sitegen/mime";
import { requireFriendAuth } from "@/friend-auth.ts"; import { requireFriendAuth } from "@/friend-auth.ts";

View file

@ -17,9 +17,11 @@ export async function main() {
const start = performance.now(); const start = performance.now();
const timerSpinner = new Spinner({ const timerSpinner = new Spinner({
text: () => text: () =>
`paper clover's scan3 [${((performance.now() - start) / 1000).toFixed( `paper clover's scan3 [${
((performance.now() - start) / 1000).toFixed(
1, 1,
)}s]`, )
}s]`,
fps: 10, fps: 10,
}); });
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() }; using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@ -44,7 +46,7 @@ export async function main() {
.flatMap((child) => .flatMap((child) =>
child.kind === MediaFileKind.directory child.kind === MediaFileKind.directory
? child.getRecursiveFileChildren() ? child.getRecursiveFileChildren()
: child, : child
); );
qMeta.addMany( qMeta.addMany(
@ -103,9 +105,11 @@ export async function main() {
) { ) {
date = mediaFile.date; date = mediaFile.date;
console.warn( console.warn(
`M-time on ${publicPath} was likely corrupted. ${formatDate( `M-time on ${publicPath} was likely corrupted. ${
formatDate(
mediaFile.date, mediaFile.date,
)} -> ${formatDate(stat.mtime)}`, )
} -> ${formatDate(stat.mtime)}`,
); );
} }
mediaFile = MediaFile.createFile({ mediaFile = MediaFile.createFile({
@ -162,7 +166,7 @@ export async function main() {
}: Omit<ProcessFileArgs, "spin">) { }: Omit<ProcessFileArgs, "spin">) {
const ext = mediaFile.extensionNonEmpty.toLowerCase(); const ext = mediaFile.extensionNonEmpty.toLowerCase();
let possible = processors.filter((p) => let possible = processors.filter((p) =>
p.include ? p.include.has(ext) : !p.exclude?.has(ext), p.include ? p.include.has(ext) : !p.exclude?.has(ext)
); );
if (possible.length === 0) return; if (possible.length === 0) return;
@ -196,7 +200,7 @@ export async function main() {
); );
} else { } else {
possible = order.map(({ id }) => possible = order.map(({ id }) =>
UNWRAP(possible.find((p) => p.id === id)), UNWRAP(possible.find((p) => p.id === id))
); );
} }
@ -260,8 +264,9 @@ export async function main() {
const children = dir.getChildren(); const children = dir.getChildren();
// readme.txt // readme.txt
const readmeContent = const readmeContent = children.find((x) =>
children.find((x) => x.basename === "readme.txt")?.contents ?? ""; x.basename === "readme.txt"
)?.contents ?? "";
// dirsort // dirsort
let dirsort: string[] | null = null; let dirsort: string[] | null = null;
@ -354,7 +359,9 @@ export async function main() {
console.info( console.info(
"Global Stats:\n" + "Global Stats:\n" +
`- File Count: \x1b[1m${count}\x1b[0m\n` + `- File Count: \x1b[1m${count}\x1b[0m\n` +
`- Canonical Size: \x1b[1m${formatSize(MediaFile.getByPath("/")!.size)}\x1b[0m\n` + `- Canonical Size: \x1b[1m${
formatSize(MediaFile.getByPath("/")!.size)
}\x1b[0m\n` +
`- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`, `- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`,
); );
} }
@ -459,7 +466,6 @@ const procLoadTextContents: Process = {
async run({ absPath, mediaFile, stat }) { async run({ absPath, mediaFile, stat }) {
if (stat.size > 1_000_000) return; if (stat.size > 1_000_000) return;
const text = await fs.readFile(absPath, "utf-8"); const text = await fs.readFile(absPath, "utf-8");
console.log({ text });
mediaFile.setContents(text); mediaFile.setContents(text);
}, },
}; };
@ -696,7 +702,7 @@ async function unproduceAsset(key: string) {
const ref = AssetRef.get(key); const ref = AssetRef.get(key);
if (ref) { if (ref) {
ref.unref(); ref.unref();
console.log(`unref ${key}`); console.warn(`TODO: unref ${key}`);
// TODO: remove associated files from target // TODO: remove associated files from target
} }
} }

View file

@ -29,7 +29,9 @@ export function formatDurationLong(seconds: number) {
const hours = Math.floor(seconds / 3600); const hours = Math.floor(seconds / 3600);
const minutes = Math.floor((seconds % 3600) / 60); const minutes = Math.floor((seconds % 3600) / 60);
const remainingSeconds = seconds % 60; const remainingSeconds = seconds % 60;
return `${hours}:${minutes.toString().padStart(2, "0")}:${remainingSeconds.toString().padStart(2, "0")}`; return `${hours}:${minutes.toString().padStart(2, "0")}:${
remainingSeconds.toString().padStart(2, "0")
}`;
} }
export function escapeUri(uri: string) { export function escapeUri(uri: string) {
@ -100,21 +102,27 @@ export function highlightLinksInTextView(
// Case 1: https:// or http:// URLs // Case 1: https:// or http:// URLs
if (match.startsWith("http")) { if (match.startsWith("http")) {
if (match.includes(findDomain)) { if (match.includes(findDomain)) {
return `<a href="${match return `<a href="${
match
.replace(/https?:\/\/paperclover\.net\/+/, "/") .replace(/https?:\/\/paperclover\.net\/+/, "/")
.replace(/\/\/+/g, "/")}">${match}</a>`; .replace(/\/\/+/g, "/")
}">${match}</a>`;
} }
return `<a href="${match.replace( return `<a href="${
match.replace(
/\/\/+/g, /\/\/+/g,
"/", "/",
)}" target="_blank" rel="noopener noreferrer">${match}</a>`; )
}" target="_blank" rel="noopener noreferrer">${match}</a>`;
} }
// Case 2: domain URLs without protocol // Case 2: domain URLs without protocol
if (match.startsWith(findDomain)) { if (match.startsWith(findDomain)) {
return `<a href="${match return `<a href="${
match
.replace(findDomain + "/", "/") .replace(findDomain + "/", "/")
.replace(/\/\/+/g, "/")}">${match}</a>`; .replace(/\/\/+/g, "/")
}">${match}</a>`;
} }
// Case 3: /file/ URLs // Case 3: /file/ URLs
@ -145,7 +153,7 @@ export function highlightLinksInTextView(
// Match sibling file names (only if they're not already part of a link) // Match sibling file names (only if they're not already part of a link)
if (siblingFiles.length > 0) { if (siblingFiles.length > 0) {
const escapedBasenames = siblingFiles.map((f) => const escapedBasenames = siblingFiles.map((f) =>
f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
); );
const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g"); const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g");
const parts = processedText.split(/(<[^>]*>)/); const parts = processedText.split(/(<[^>]*>)/);
@ -155,9 +163,11 @@ export function highlightLinksInTextView(
parts[i] = parts[i].replace(pattern, (match: string) => { parts[i] = parts[i].replace(pattern, (match: string) => {
const file = siblingLookup[match]; const file = siblingLookup[match];
if (file) { if (file) {
return `<a href="/file/${file.path return `<a href="/file/${
file.path
.replace(/^\//, "") .replace(/^\//, "")
.replace(/\/\/+/g, "/")}">${match}</a>`; .replace(/\/\/+/g, "/")
}">${match}</a>`;
} }
return match; return match;
}); });
@ -240,9 +250,11 @@ export function highlightConvo(text: string) {
return paras return paras
.map(({ speaker, lines }) => { .map(({ speaker, lines }) => {
return `<div class="s-${speaker}">${lines return `<div class="s-${speaker}">${
lines
.map((line) => `<div class="line">${line}</div>`) .map((line) => `<div class="line">${line}</div>`)
.join("\n")}</div>`; .join("\n")
}</div>`;
}) })
.join("\n"); .join("\n");
} }
@ -267,10 +279,12 @@ export function formatDate(dateTime: Date) {
? dateTime < unknownDate ? dateTime < unknownDate
? "??.??.??" ? "??.??.??"
: `xx.xx.${21 + Math.floor(dateTime.getTime() / 86400000)}` : `xx.xx.${21 + Math.floor(dateTime.getTime() / 86400000)}`
: `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${dateTime : `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${
dateTime
.getDate() .getDate()
.toString() .toString()
.padStart(2, "0")}.${dateTime.getFullYear().toString().slice(2)}`; .padStart(2, "0")
}.${dateTime.getFullYear().toString().slice(2)}`;
} }
import type { MediaFile } from "@/file-viewer/models/MediaFile.ts"; import type { MediaFile } from "@/file-viewer/models/MediaFile.ts";

View file

@ -131,8 +131,12 @@ export class Parse {
percentage: Number(percentageStr), percentage: Number(percentageStr),
timeElapsed, timeElapsed,
transferNumber: this.currentTransfer, transferNumber: this.currentTransfer,
filesToCheck: toCheckStr ? this.toCheck = Number(toCheckStr) : this.toCheck, filesToCheck: toCheckStr
totalFiles: totalStr ? this.totalFiles = Number(totalStr) : this.totalFiles, ? this.toCheck = Number(toCheckStr)
: this.toCheck,
totalFiles: totalStr
? this.totalFiles = Number(totalStr)
: this.totalFiles,
speed: speed || null, speed: speed || null,
}; };
} }

View file

@ -4,9 +4,7 @@
font-weight: 400 750; font-weight: 400 750;
font-style: normal; font-style: normal;
font-display: swap; font-display: swap;
font-variation-settings: font-variation-settings: "CASL" 0.25, "MONO" 0;
"CASL" 0.25,
"MONO" 0;
font-style: oblique -15deg 0deg; font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E; unicode-range: U+0020-007E;
} }
@ -16,9 +14,7 @@
font-weight: 400 800; font-weight: 400 800;
font-style: normal; font-style: normal;
font-display: swap; font-display: swap;
font-variation-settings: font-variation-settings: "CASL" 0.25, "MONO" 1;
"CASL" 0.25,
"MONO" 1;
font-style: oblique -15deg 0deg; font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E; unicode-range: U+0020-007E;
} }
@ -28,13 +24,21 @@
font-weight: 400 800; font-weight: 400 800;
font-style: normal; font-style: normal;
font-display: swap; font-display: swap;
font-variation-settings: font-variation-settings: "CASL" 0.25, "MONO" 1;
"CASL" 0.25,
"MONO" 1;
font-style: oblique -15deg 0deg; font-style: oblique -15deg 0deg;
unicode-range: unicode-range:
U+00C0-00FF, U+00A9, U+2190-2193, U+2018, U+2019, U+201C, U+201D, U+2022, U+00C0-00FF,
U+00A0-00A8, U+00AA-00BF, U+2194-2199, U+0100-017F; U+00A9,
U+2190-2193,
U+2018,
U+2019,
U+201C,
U+201D,
U+2022,
U+00A0-00A8,
U+00AA-00BF,
U+2194-2199,
U+0100-017F;
} }
*, *,

View file

@ -45,7 +45,7 @@ app.post("/q+a", async (c) => {
return sendSuccess(c, new Date()); return sendSuccess(c, new Date());
} }
const ipAddr = c.req.header("cf-connecting-ip"); const ipAddr = c.req.header("x-forwarded-for");
if (ipAddr) { if (ipAddr) {
input.sourceName = uniqueNamesGenerator({ input.sourceName = uniqueNamesGenerator({
dictionaries: [adjectives, colors, animals], dictionaries: [adjectives, colors, animals],
@ -54,11 +54,6 @@ app.post("/q+a", async (c) => {
}); });
} }
const cfIPCountry = c.req.header("cf-ipcountry");
if (cfIPCountry) {
input.sourceLocation = cfIPCountry;
}
if (ipAddr && PROXYCHECK_API_KEY) { if (ipAddr && PROXYCHECK_API_KEY) {
const proxyCheck = await fetch( const proxyCheck = await fetch(
`https://proxycheck.io/v2/?key=${PROXYCHECK_API_KEY}&risk=1&vpn=1`, `https://proxycheck.io/v2/?key=${PROXYCHECK_API_KEY}&risk=1&vpn=1`,
@ -77,17 +72,19 @@ app.post("/q+a", async (c) => {
proxyCheck[ipAddr].organisation ?? proxyCheck[ipAddr].organisation ??
proxyCheck[ipAddr].provider ?? "unknown"; proxyCheck[ipAddr].provider ?? "unknown";
} }
if (Number(proxyCheck[ipAddr].risk) > 72) { if (Number(proxyCheck[ipAddr].risk) > 78) {
return questionFailure( return questionFailure(
c, c,
403, 403,
"This IP address has been flagged as a high risk IP address. If you are using a VPN/Proxy, please disable it and try again.", "This IP address has been flagged as a high risk IP address. If " +
"you are using a VPN/Proxy, please disable it and try again.",
text, text,
); );
} }
} }
} }
view.regenerate("q+a inbox");
const date = Question.create( const date = Question.create(
QuestionType.pending, QuestionType.pending,
JSON.stringify(input), JSON.stringify(input),
@ -104,7 +101,7 @@ async function sendSuccess(c: Context, date: Date) {
id: formatQuestionId(date), id: formatQuestionId(date),
}, { status: 200 }); }, { status: 200 });
} }
c.res = await renderView(c, "q+a/success", { c.res = await view.serve(c, "q+a/success", {
permalink: `https://paperclover.net/q+a/${formatQuestionId(date)}`, permalink: `https://paperclover.net/q+a/${formatQuestionId(date)}`,
}); });
} }
@ -130,7 +127,7 @@ app.get("/q+a/:id", async (c, next) => {
if (image) { if (image) {
return getQuestionImage(question, c.req.method === "HEAD"); return getQuestionImage(question, c.req.method === "HEAD");
} }
return renderView(c, "q+a/permalink", { question }); return view.serve(c, "q+a/permalink", { question });
}); });
// Admin // Admin
@ -138,7 +135,7 @@ app.get("/admin/q+a", async (c) => {
return serveAsset(c, "/admin/q+a", 200); return serveAsset(c, "/admin/q+a", 200);
}); });
app.get("/admin/q+a/inbox", async (c) => { app.get("/admin/q+a/inbox", async (c) => {
return renderView(c, "q+a/backend-inbox", {}); return view.serve(c, "q+a/backend-inbox", {});
}); });
app.delete("/admin/q+a/:id", async (c, next) => { app.delete("/admin/q+a/:id", async (c, next) => {
const id = c.req.param("id"); const id = c.req.param("id");
@ -152,6 +149,7 @@ app.delete("/admin/q+a/:id", async (c, next) => {
} else { } else {
Question.rejectByQmid(question.qmid); Question.rejectByQmid(question.qmid);
} }
view.regenerate("q+a");
return c.json({ success: true, message: "ok" }); return c.json({ success: true, message: "ok" });
}); });
app.patch("/admin/q+a/:id", async (c, next) => { app.patch("/admin/q+a/:id", async (c, next) => {
@ -165,6 +163,7 @@ app.patch("/admin/q+a/:id", async (c, next) => {
return questionFailure(c, 400, "Bad Request"); return questionFailure(c, 400, "Bad Request");
} }
Question.updateByQmid(question.qmid, form.text, form.type); Question.updateByQmid(question.qmid, form.text, form.type);
view.regenerate("q+a");
return c.json({ success: true, message: "ok" }); return c.json({ success: true, message: "ok" });
}); });
app.get("/admin/q+a/:id", async (c, next) => { app.get("/admin/q+a/:id", async (c, next) => {
@ -177,20 +176,22 @@ app.get("/admin/q+a/:id", async (c, next) => {
let pendingInfo: null | PendingQuestionData = null; let pendingInfo: null | PendingQuestionData = null;
if (question.type === QuestionType.pending) { if (question.type === QuestionType.pending) {
pendingInfo = JSON.parse(question.text) as PendingQuestionData; pendingInfo = JSON.parse(question.text) as PendingQuestionData;
question.text = pendingInfo.prompt.trim().split("\n").map((line) => question.text = pendingInfo.prompt
line.trim().length === 0 ? "" : `q: ${line.trim()}` .trim()
).join("\n") + "\n\n"; .split("\n")
.map((line) => (line.trim().length === 0 ? "" : `q: ${line.trim()}`))
.join("\n") + "\n\n";
question.type = QuestionType.normal; question.type = QuestionType.normal;
} }
return renderView(c, "q+a/editor", { return view.serve(c, "q+a/editor", {
pendingInfo, pendingInfo,
question, question,
}); });
}); });
app.get("/q+a/things/random", async (c) => { app.get("/q+a/things/random", async (c) => {
c.res = await renderView(c, "q+a/things-random", {}); c.res = await view.serve(c, "q+a/things-random", {});
}); });
async function questionFailure( async function questionFailure(
@ -202,7 +203,7 @@ async function questionFailure(
if (c.req.header("Accept")?.includes("application/json")) { if (c.req.header("Accept")?.includes("application/json")) {
return c.json({ success: false, message, id: null }, { status }); return c.json({ success: false, message, id: null }, { status });
} }
return await renderView(c, "q+a/fail", { return await view.serve(c, "q+a/fail", {
error: message, error: message,
content, content,
}); });
@ -218,11 +219,8 @@ import {
} from "unique-names-generator"; } from "unique-names-generator";
import { hasAdminToken } from "../admin.ts"; import { hasAdminToken } from "../admin.ts";
import { serveAsset } from "#sitegen/assets"; import { serveAsset } from "#sitegen/assets";
import { import type { PendingQuestionData } from "./models/PendingQuestion.ts";
PendingQuestion,
PendingQuestionData,
} from "./models/PendingQuestion.ts";
import { Question, QuestionType } from "./models/Question.ts"; import { Question, QuestionType } from "./models/Question.ts";
import { renderView } from "#sitegen/view"; import * as view from "#sitegen/view";
import { getQuestionImage } from "./image.tsx"; import { getQuestionImage } from "./image.tsx";
import { formatQuestionId, questionIdToTimestamp } from "./format.ts"; import { formatQuestionId, questionIdToTimestamp } from "./format.ts";

View file

@ -144,9 +144,7 @@ function ListRenderer(node: ASTNode, children: any[]) {
const T = node.ordered ? "ol" : "ul"; const T = node.ordered ? "ol" : "ul";
return ( return (
<T> <T>
{children.map((child) => ( {children.map((child) => <li>{child}</li>)}
<li>{child}</li>
))}
</T> </T>
); );
} }

View file

@ -11,7 +11,7 @@ const getBrowser = RefCountedExpirable(
); );
export async function renderQuestionImage(question: Question) { export async function renderQuestionImage(question: Question) {
const html = await renderViewToString("q+a/image-embed", { question }); const html = await view.renderToString("q+a/image-embed", { question });
// this browser session will be reused if multiple images are generated // this browser session will be reused if multiple images are generated
// either at the same time or within a 5-minute time span. the dispose // either at the same time or within a 5-minute time span. the dispose
@ -44,14 +44,15 @@ export async function getQuestionImage(
question: Question, question: Question,
headOnly: boolean, headOnly: boolean,
): Promise<Response> { ): Promise<Response> {
const hash = crypto.createHash("sha1") const hash = crypto
.createHash("sha1")
.update(question.qmid + question.type + question.text) .update(question.qmid + question.type + question.text)
.digest("hex"); .digest("hex");
const headers = { const headers = {
"Content-Type": "image/png", "Content-Type": "image/png",
"Cache-Control": "public, max-age=31536000", "Cache-Control": "public, max-age=31536000",
"ETag": `"${hash}"`, ETag: `"${hash}"`,
"Last-Modified": question.date.toUTCString(), "Last-Modified": question.date.toUTCString(),
}; };
@ -78,4 +79,4 @@ import * as path from "node:path";
import * as puppeteer from "puppeteer"; import * as puppeteer from "puppeteer";
import { Question } from "@/q+a/models/Question.ts"; import { Question } from "@/q+a/models/Question.ts";
import { RefCountedExpirable } from "#sitegen/async"; import { RefCountedExpirable } from "#sitegen/async";
import { renderViewToString } from "#sitegen/view"; import * as view from "#sitegen/view";

View file

@ -7,7 +7,7 @@ export const meta: Metadata = {
description: "ask clover a question", description: "ask clover a question",
}; };
export const regenerate = { export const regenerate = {
manual: true, tags: ["q+a", "q+a inbox"]
}; };
<const/inboxSize = PendingQuestion.getAll().length /> <const/inboxSize = PendingQuestion.getAll().length />

View file

@ -19,8 +19,8 @@ export const meta = { title: 'question answer inbox' };
</time> </time>
<div style="color: dodgerblue; margin-bottom: 0.25rem"> <div style="color: dodgerblue; margin-bottom: 0.25rem">
${q.sourceName} ${q.sourceName}
${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : null} ${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : ''}
${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : null} ${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : ''}
</div> </div>
<p style="white-space: pre-wrap">${q.prompt}</p> <p style="white-space: pre-wrap">${q.prompt}</p>
<p> <p>

View file

@ -33,8 +33,12 @@ const rawFileRoot = process.env.CLOVER_FILE_RAW ??
const derivedFileRoot = process.env.CLOVER_FILE_DERIVED ?? const derivedFileRoot = process.env.CLOVER_FILE_DERIVED ??
path.join(nasRoot, "Documents/Config/paperclover/derived"); path.join(nasRoot, "Documents/Config/paperclover/derived");
if (!fs.existsSync(rawFileRoot)) throw new Error(`${rawFileRoot} does not exist`); if (!fs.existsSync(rawFileRoot)) {
if (!fs.existsSync(derivedFileRoot)) throw new Error(`${derivedFileRoot} does not exist`); throw new Error(`${rawFileRoot} does not exist`);
}
if (!fs.existsSync(derivedFileRoot)) {
throw new Error(`${derivedFileRoot} does not exist`);
}
type Awaitable<T> = T | Promise<T>; type Awaitable<T> = T | Promise<T>;

View file

@ -5,7 +5,7 @@
"incremental": true, "incremental": true,
"jsx": "react-jsxdev", "jsx": "react-jsxdev",
"jsxImportSource": "#engine", "jsxImportSource": "#engine",
"lib": ["dom", "esnext", "esnext.iterator"], "lib": ["esnext", "dom", "dom.iterable"],
"module": "nodenext", "module": "nodenext",
"noEmit": true, "noEmit": true,
"outDir": ".clover/ts", "outDir": ".clover/ts",
@ -14,7 +14,8 @@
"skipLibCheck": true, "skipLibCheck": true,
"strict": true, "strict": true,
"verbatimModuleSyntax": true, "verbatimModuleSyntax": true,
"target": "es2022" "target": "es2022",
"noUncheckedIndexedAccess": true
}, },
"include": ["framework/**/*", "src/**/*"] "include": ["framework/**/*", "src/**/*"]
} }