From 66da12903690539b40010466ea53c201589afc86 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Sat, 2 Aug 2025 17:31:58 -0700 Subject: [PATCH] rewrite `incremental.ts` (#21) the problems with the original implementation were mostly around error handling. sources had to be tracked manually and provided to each incremental output. the `hasArtifact` check was frequently forgotten. this has been re-abstracted through `incr.work()`, which is given an `io` object. all fs reads and module loads go through this interface, which allows the sources to be properly tracked, even if it throws. closes #12 --- framework/bundle.ts | 426 ++++++----- framework/css.ts | 26 +- framework/debug.safe.ts | 9 +- framework/definitions.d.ts | 2 +- framework/engine/ssr.ts | 4 +- framework/esbuild-support.ts | 46 +- framework/generate.ts | 707 ++++++++----------- framework/hot.ts | 74 +- framework/incremental.test.ts | 56 ++ framework/incremental.ts | 1246 +++++++++++++++++---------------- framework/lib/assets.ts | 4 +- framework/lib/testing.ts | 11 + framework/marko.ts | 46 ++ framework/watch.ts | 160 ++--- package-lock.json | 126 ++++ package.json | 2 + src/file-viewer/bin/scan3.ts | 240 +++---- src/global.css | 26 +- src/pages/index.marko | 2 +- 19 files changed, 1653 insertions(+), 1560 deletions(-) create mode 100644 framework/incremental.test.ts create mode 100644 framework/lib/testing.ts create mode 100644 framework/marko.ts diff --git a/framework/bundle.ts b/framework/bundle.ts index 16afbaf..3d8f859 100644 --- a/framework/bundle.ts +++ b/framework/bundle.ts @@ -1,17 +1,25 @@ +async function trackEsbuild(io: Io, metafile: esbuild.Metafile) { + await Promise.all(Object.keys(metafile.inputs) + .filter(file => !isIgnoredSource(file)) + .map(file => io.trackFile(file))); +} + // This file implements client-side bundling, mostly wrapping esbuild. export async function bundleClientJavaScript( - referencedScripts: string[], - extraPublicScripts: string[], - incr: Incremental, - dev: boolean = false, + io: Io, + { clientRefs, extraPublicScripts, dev = false }: { + clientRefs: string[]; + extraPublicScripts: string[]; + dev: boolean; + }, ) { const entryPoints = [ ...new Set([ - ...referencedScripts.map((file) => path.resolve(hot.projectSrc, file)), + ...clientRefs.map((x) => `src/${x}`), ...extraPublicScripts, - ]), + ].map(toAbs)), ]; - if (entryPoints.length === 0) return; + if (entryPoints.length === 0) return {}; const invalidFiles = entryPoints .filter((file) => !file.match(/\.client\.[tj]sx?/)); if (invalidFiles.length > 0) { @@ -24,7 +32,7 @@ export async function bundleClientJavaScript( const clientPlugins: esbuild.Plugin[] = [ projectRelativeResolution(), - markoViaBuildCache(incr), + markoViaBuildCache(), ]; const bundle = await esbuild.build({ @@ -65,256 +73,218 @@ export async function bundleClientJavaScript( ) ); const { metafile, outputFiles } = bundle; - const promises: Promise[] = []; + const p = [] + p.push(trackEsbuild(io, metafile)); + const scripts: Record<string, string> = {}; for (const file of outputFiles) { const { text } = file; let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/"); const { inputs } = UNWRAP(metafile.outputs["out!" + route]); - const sources = Object.keys(inputs) - .filter((x) => !x.startsWith("<define:")); + const sources = Object.keys(inputs) + .filter((x) => !isIgnoredSource(x)); // Register non-chunks as script entries. 
const chunk = route.startsWith("/js/c."); if (!chunk) { const key = hot.getScriptId(path.resolve(sources[sources.length - 1])); route = "/js/" + key.replace(/\.client\.tsx?/, ".js"); - incr.put({ - sources, - kind: "script", - key, - value: text, - }); + scripts[key] = text; } // Register chunks and public scripts as assets. if (chunk || publicScriptRoutes.includes(route)) { - promises.push(incr.putAsset({ - sources, - key: route, - body: text, - })); + p.push(io.writeAsset(route, text)); } } - await Promise.all(promises); + await Promise.all(p); + return scripts; } export type ServerPlatform = "node" | "passthru"; -export async function bundleServerJavaScript( - incr: Incremental, - platform: ServerPlatform = "node", -) { - if (incr.hasArtifact("backendBundle", platform)) return; - - // Comment - const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_"); - - const viewSource = [ - ...Array.from( - incr.out.viewMetadata, - ([, view], i) => `import * as view${i} from ${JSON.stringify(view.file)}`, - ), - `const styles = ${magicWord}[-2]`, - `export const scripts = ${magicWord}[-1]`, - "export const views = {", - ...Array.from(incr.out.viewMetadata, ([key, view], i) => - [ - ` ${JSON.stringify(key)}: {`, - ` component: view${i}.default,`, - // ` meta: ${ - // view.staticMeta ? JSON.stringify(view.staticMeta) : `view${i}.meta` - // },`, - ` meta: view${i}.meta,`, - ` layout: ${view.hasLayout ? `view${i}.layout?.default` : "null"},`, - ` inlineCss: styles[${magicWord}[${i}]]`, - ` },`, - ].join("\n")), - "}", - ].join("\n"); - - // -- plugins -- - const serverPlugins: esbuild.Plugin[] = [ - virtualFiles({ - "$views": viewSource, - }), - projectRelativeResolution(), - markoViaBuildCache(incr), - { - name: "replace client references", - setup(b) { - b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({ - contents: - hot.resolveClientRefs(await fs.readFile(file, "utf-8"), file).code, - loader: path.extname(file).slice(1) as esbuild.Loader, - })); - }, - }, - { - name: "mark css external", - setup(b) { - b.onResolve( - { filter: /\.css$/ }, - () => ({ path: ".", namespace: "dropped" }), - ); - b.onLoad( - { filter: /./, namespace: "dropped" }, - () => ({ contents: "" }), - ); - }, - }, - ]; - const pkg = await fs.readJson("package.json") as { - dependencies: Record; - }; - const { metafile, outputFiles } = await esbuild.build({ - bundle: true, - chunkNames: "c.[hash]", - entryNames: "server", - entryPoints: [ - path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"), - ], - platform: "node", - format: "esm", - minify: false, - outdir: "out!", - plugins: serverPlugins, - splitting: true, - logLevel: "silent", - write: false, - metafile: true, - jsx: "automatic", - jsxImportSource: "#ssr", - jsxDev: false, - define: { - MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText), - }, - external: Object.keys(pkg.dependencies) - .filter((x) => !x.startsWith("@paperclover")), - }); - - const files: Record = {}; - let fileWithMagicWord: string | null = null; - for (const output of outputFiles) { - const basename = output.path.replace(/^.*?!/, ""); - const key = "out!" + basename.replaceAll("\\", "/"); - // If this contains the generated "$views" file, then - // mark this file as the one for replacement. Because - // `splitting` is `true`, esbuild will not emit this - // file in more than one chunk. 
- if (metafile.outputs[key].inputs["framework/lib/view.ts"]) { - fileWithMagicWord = basename; - } - files[basename] = Buffer.from(output.contents); - } - incr.put({ - kind: "backendBundle", - key: platform, - value: { - magicWord, - files, - fileWithMagicWord, - }, - sources: Object.keys(metafile.inputs).filter((x) => - !x.includes("<define:") - ), - }); -} - -export async function finalizeServerJavaScript( - incr: Incremental, +interface ServerSideOptions { + entries: string[]; + viewItems: sg.FileItem[]; + viewRefs: incr.Ref<PreparedView>[], + styleMap: Map<string, incr.Ref<string>>; + scriptMap: incr.Ref<Record<string, string>>; platform: ServerPlatform, -) { - if (incr.hasArtifact("backendReplace", platform)) return; - const { - files, - fileWithMagicWord, - magicWord, - } = UNWRAP(incr.getArtifact("backendBundle", platform)); - - if (!fileWithMagicWord) return; - - // Only the reachable resources need to be inserted into the bundle. - const viewScriptsList = new Set( - Array.from(incr.out.viewMetadata.values()) - .flatMap((view) => view.clientRefs), - ); - const viewStyleKeys = Array.from(incr.out.viewMetadata.values()) - .map((view) => css.styleKey(view.cssImports, view.theme)); - const viewCssBundles = viewStyleKeys - .map((key) => UNWRAP(incr.out.style.get(key), "Style key: " + key)); - - // Deduplicate styles - const styleList = Array.from(new Set(viewCssBundles)); - - // Replace the magic word - let text = files[fileWithMagicWord].toString("utf-8"); - text = text.replace( - new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"), - (_, i) => { - i = Number(i); - // Inline the styling data - if (i === -2) { - return JSON.stringify(styleList.map((cssText) => cssText)); - } - // Inline the script data - if (i === -1) { - return JSON.stringify(Object.fromEntries(incr.out.script)); - } - // Reference an index into `styleList` - return `${styleList.indexOf(viewCssBundles[i])}`; - }, - ); - - incr.put({ - kind: "backendReplace", - key: platform, - sources: [ - // Backend input code (includes view code) - ...incr.sourcesFor("backendBundle", platform), - // Script - ...Array.from(viewScriptsList) - .flatMap((key) => incr.sourcesFor("script", hot.getScriptId(key))), - // Style - ...viewStyleKeys.flatMap((key) => incr.sourcesFor("style", key)), - ], - value: Buffer.from(text), - }); } +export async function bundleServerJavaScript( + { viewItems, viewRefs, styleMap, scriptMap: wScriptMap, entries, platform }: ServerSideOptions +) { + const wViewSource = incr.work(async (_, viewItems: sg.FileItem[]) => { + const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_"); + return { + magicWord, + file: [ + ...viewItems.map((view, i) => `import * as view${i} from ${JSON.stringify(view.file)}`), + `const styles = ${magicWord}[-2]`, + `export const scripts = ${magicWord}[-1]`, + "export const views = {", + ...viewItems.map((view, i) => [ + ` ${JSON.stringify(view.id)}: {`, + ` component: view${i}.default,`, + ` meta: view${i}.meta,`, + ` layout: view${i}.layout?.default ?? 
null,`, + ` inlineCss: styles[${magicWord}[${i}]]`, + ` },`, + ].join("\n")), + "}", + ].join("\n") + }; + }, viewItems) -function markoViaBuildCache(incr: Incremental): esbuild.Plugin { - return { - name: "marko via build cache", - setup(b) { - b.onLoad( - { filter: /\.marko$/ }, - async ({ path: file }) => { - const key = path.relative(hot.projectRoot, file) - .replaceAll("\\", "/"); - const cacheEntry = incr.out.serverMarko.get(key); - if (!cacheEntry) { - if (!fs.existsSync(file)) { - console.log(`File does not exist: ${file}`); - } - throw new Error("Marko file not in cache: " + file); + const wBundles = entries.map(entry => [entry, incr.work(async (io, entry) => { + const pkg = await io.readJson<{ dependencies: Record; }>("package.json"); + + let magicWord = null as string | null; + // -- plugins -- + const serverPlugins: esbuild.Plugin[] = [ + virtualFiles({ + // only add dependency when imported. + "$views": async () => { + const view = await io.readWork(wViewSource); + ({ magicWord } = view); + return view.file; + }, + }), + projectRelativeResolution(), + markoViaBuildCache(), + { + name: "replace client references", + setup(b) { + b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({ + contents: + hot.resolveClientRefs(await fs.readFile(file, "utf-8"), file).code, + loader: path.extname(file).slice(1) as esbuild.Loader, + })); + }, + }, + { + name: "mark css external", + setup(b) { + b.onResolve( + { filter: /\.css$/ }, + () => ({ path: ".", namespace: "dropped" }), + ); + b.onLoad( + { filter: /./, namespace: "dropped" }, + () => ({ contents: "" }), + ); + }, + }, + ]; + + const { metafile, outputFiles, errors, warnings } = await esbuild.build({ + bundle: true, + chunkNames: "c.[hash]", + entryNames: path.basename(entry, path.extname(entry)), + entryPoints: [ + path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"), + ], + platform: "node", + format: "esm", + minify: false, + outdir: "out!", + plugins: serverPlugins, + splitting: true, + logLevel: "silent", + write: false, + metafile: true, + jsx: "automatic", + jsxImportSource: "#ssr", + jsxDev: false, + define: { + MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText), + 'globalThis.CLOVER_SERVER_ENTRY': JSON.stringify(entry), + }, + external: Object.keys(pkg.dependencies) + .filter((x) => !x.startsWith("@paperclover")), + }); + await trackEsbuild(io, metafile) + + let fileWithMagicWord: { + bytes: Buffer; + basename: string; + magicWord: string; + } | null = null; + for (const output of outputFiles) { + const basename = output.path.replace(/^.*?!(?:\/|\\)/, ""); + const key = "out!/" + basename.replaceAll("\\", "/"); + // If this contains the generated "$views" file, then + // mark this file as the one for replacement. Because + // `splitting` is `true`, esbuild will not emit this + // file in more than one chunk. + if (magicWord && metafile.outputs[key].inputs["framework/lib/view.ts"]) { + ASSERT(!fileWithMagicWord); + fileWithMagicWord = { + basename, + bytes: Buffer.from(output.contents), + magicWord, + }; + } else { + io.writeFile(basename, Buffer.from(output.contents)) + } + } + return fileWithMagicWord; + }, entry)] as const); + + const wProcessed = wBundles.map(async([entry, wBundle]) => { + if (!await wBundle) return; + await incr.work(async (io) => { + // Only the reachable resources need to be read and inserted into the bundle. 
+ // This is what Map is for + const { basename, bytes, magicWord } = UNWRAP(await io.readWork(wBundle)); + const views = await Promise.all(viewRefs.map(ref => io.readWork(ref))); + + // Client JS + const scriptList = Object.entries(await io.readWork(wScriptMap)); + const viewScriptsList = new Set(views.flatMap(view => view.clientRefs)); + const neededScripts = scriptList.filter(([k]) => viewScriptsList.has(k)); + + // CSS + const viewStyleKeys = views.map((view) => view.styleKey); + const viewCssBundles = await Promise.all( + viewStyleKeys.map((key) => io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key)))); + const styleList = Array.from(new Set(viewCssBundles)); + + // Replace the magic word + const text = bytes.toString("utf-8").replace( + new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"), + (_, i) => { + i = Number(i); + // Inline the styling data + if (i === -2) { + return JSON.stringify(styleList.map((cssText) => cssText)); } - return ({ - loader: "ts", - contents: cacheEntry.src, - resolveDir: path.dirname(file), - }); + // Inline the script data + if (i === -1) { + return JSON.stringify(Object.fromEntries(neededScripts)); + } + // Reference an index into `styleList` + return `${styleList.indexOf(viewCssBundles[i])}`; }, ); - }, - }; + + io.writeFile(basename, text); + }); + }) + + await Promise.all(wProcessed); } + import * as esbuild from "esbuild"; import * as path from "node:path"; import process from "node:process"; import * as hot from "./hot.ts"; -import { projectRelativeResolution, virtualFiles } from "./esbuild-support.ts"; -import { Incremental } from "./incremental.ts"; +import { + isIgnoredSource, + markoViaBuildCache, + projectRelativeResolution, + virtualFiles, +} from "./esbuild-support.ts"; +import { Io, toAbs, toRel } from "./incremental.ts"; import * as css from "./css.ts"; import * as fs from "#sitegen/fs"; import * as mime from "#sitegen/mime"; +import * as incr from "./incremental.ts"; +import * as sg from "#sitegen";import type { PreparedView } from "./generate2.ts";import { meta } from "@/file-viewer/pages/file.cotyledon_speedbump.tsx"; diff --git a/framework/css.ts b/framework/css.ts index ddf7df9..8d441a9 100644 --- a/framework/css.ts +++ b/framework/css.ts @@ -40,11 +40,6 @@ export function preprocess(css: string, theme: Theme): string { ); } -export interface Output { - text: string; - sources: string[]; -} - export function styleKey( cssImports: string[], theme: Theme, @@ -60,11 +55,14 @@ export function styleKey( } export async function bundleCssFiles( - cssImports: string[], - theme: Theme, - dev: boolean = false, -): Promise { - cssImports = cssImports.map((file) => path.resolve(hot.projectSrc, file)); + io: Io, + { cssImports, theme, dev }: { + cssImports: string[], + theme: Theme, + dev: boolean, + } +) { + cssImports = await Promise.all(cssImports.map((file) => io.trackFile('src/' + file))); const plugin = { name: "clover css", setup(b) { @@ -106,15 +104,11 @@ export async function bundleCssFiles( throw new AggregateError(warnings, "CSS Build Failed"); } if (outputFiles.length > 1) throw new Error("Too many output files"); - return { - text: outputFiles[0].text, - sources: Object.keys(metafile.outputs["$input$.css"].inputs) - .filter((x) => !x.startsWith("vfs:")), - }; + return outputFiles[0].text; } import * as esbuild from "esbuild"; import * as fs from "#sitegen/fs"; import * as hot from "./hot.ts"; import * as path from "node:path"; -import { virtualFiles } from "./esbuild-support.ts"; +import { virtualFiles } from 
"./esbuild-support.ts";import type { Io } from "./incremental.ts"; diff --git a/framework/debug.safe.ts b/framework/debug.safe.ts index f959b1a..211e9eb 100644 --- a/framework/debug.safe.ts +++ b/framework/debug.safe.ts @@ -6,12 +6,7 @@ globalThis.UNWRAP = (t, ...args) => { } return t; }; -globalThis.ASSERT = (t, ...args) => { - if (!t) { - throw new Error( - args.length > 0 ? util.format(...args) : "Assertion Failed", - ); - } -}; +globalThis.ASSERT = assert.ok; import * as util from "node:util"; +import * as assert from 'node:assert' diff --git a/framework/definitions.d.ts b/framework/definitions.d.ts index 85bee3f..cb031f9 100644 --- a/framework/definitions.d.ts +++ b/framework/definitions.d.ts @@ -1,4 +1,4 @@ declare function UNWRAP(value: T | null | undefined, ...log: unknown[]): T; -declare function ASSERT(value: unknown, ...log: unknown[]): asserts value; +declare function ASSERT(value: unknown, message?: string): asserts value; type Timer = ReturnType; diff --git a/framework/engine/ssr.ts b/framework/engine/ssr.ts index 4cb43c7..48a0ea2 100644 --- a/framework/engine/ssr.ts +++ b/framework/engine/ssr.ts @@ -13,6 +13,7 @@ export function ssrSync(node: Node, addon: A = {} as A) { const resolved = resolveNode(r, node); return { text: renderNode(resolved), addon }; } +export { ssrSync as sync }; export function ssrAsync(node: Node, addon: A = {} as A) { const r = initRender(true, addon); @@ -20,7 +21,7 @@ export function ssrAsync(node: Node, addon: A = {} as A) { if (r.async === 0) { return Promise.resolve({ text: renderNode(resolved), addon }); } - const { resolve, reject, promise } = Promise.withResolvers(); + const { resolve, reject, promise } = Promise.withResolvers>(); r.asyncDone = () => { const rejections = r.rejections; if (!rejections) return resolve({ text: renderNode(resolved), addon }); @@ -29,6 +30,7 @@ export function ssrAsync(node: Node, addon: A = {} as A) { }; return promise; } +export { ssrAsync as async }; /** Inline HTML into a render without escaping it */ export function html(rawText: ResolvedNode): DirectHtml { diff --git a/framework/esbuild-support.ts b/framework/esbuild-support.ts index 3b611da..5a39fe1 100644 --- a/framework/esbuild-support.ts +++ b/framework/esbuild-support.ts @@ -1,5 +1,7 @@ +type Awaitable = T | Promise; + export function virtualFiles( - map: Record, + map: Record Awaitable)>, ) { return { name: "clover vfs", @@ -18,8 +20,9 @@ export function virtualFiles( ); b.onLoad( { filter: /./, namespace: "vfs" }, - ({ path }) => { - const entry = map[path]; + async ({ path }) => { + let entry = map[path]; + if (typeof entry === 'function') entry = await entry(); return ({ resolveDir: ".", loader: "ts", @@ -73,7 +76,42 @@ export function projectRelativeResolution(root = process.cwd() + "/src") { } satisfies esbuild.Plugin; } +export function markoViaBuildCache(): esbuild.Plugin { + return { + name: "marko via build cache", + setup(b) { + b.onLoad( + { filter: /\.marko$/ }, + async ({ path: file }) => { + const cacheEntry = markoCache.get(file); + if (!cacheEntry) { + if (!fs.existsSync(file)) { + console.warn(`File does not exist: ${file}`); + } + console.log(markoCache.keys()); + throw new Error("Marko file not in cache: " + file); + } + return ({ + loader: "ts", + contents: cacheEntry.src, + resolveDir: path.dirname(file), + }); + }, + ); + }, + }; +} + +export function isIgnoredSource(source: string) { + return source.includes(", any>({ - text: "Recovering State", - successText, - failureText: () => "sitegen FAIL", - }, async (spinner) => { - // 
const incr = Incremental.fromDisk(); - // await incr.statAllFiles(); - const incr = new Incremental(); - const result = await sitegen(spinner, incr); - incr.toDisk(); // Allows picking up this state again - return result; - }) as ReturnType; +export async function main() { + await incr.restore(); + await incr.compile(generate); } -export function successText({ - elapsed, - inserted, - referenced, - unreferenced, -}: Awaited>) { - const s = (array: unknown[]) => array.length === 1 ? "" : "s"; - const kind = inserted.length === referenced.length ? "build" : "update"; - const status = inserted.length > 0 - ? `${kind} ${inserted.length} key${s(inserted)}` - : unreferenced.length > 0 - ? `pruned ${unreferenced.length} key${s(unreferenced)}` - : `checked ${referenced.length} key${s(referenced)}`; - return `sitegen! ${status} in ${elapsed.toFixed(1)}s`; +export async function generate() { + // -- read config and discover files -- + const siteConfig = await incr.work(readManifest); + const { + staticFiles, + scripts, + views, + pages, + } = await discoverAllFiles(siteConfig); + + // TODO: make sure that `static` and `pages` does not overlap + + // TODO: loadMarkoCache + + // -- perform build-time rendering -- + const builtPages = pages.map((item) => incr.work(preparePage, item)); + const builtViews = views.map((item) => incr.work(prepareView, item)); + const builtStaticFiles = Promise.all((staticFiles.map((item) => + incr.work( + async (io, { id, file }) => void await io.writeAsset(id, await io.readFile(file)), + item, + ) + ))); + const routes = await Promise.all([...builtViews, ...builtPages]); + + // -- page resources -- + const scriptMap = incr.work(bundle.bundleClientJavaScript, { + clientRefs: routes.flatMap((x) => x.clientRefs), + extraPublicScripts: scripts.map((entry) => entry.file), + dev: false, + }); + const styleMap = prepareInlineCss(routes); + + // -- backend -- + const builtBackend = bundle.bundleServerJavaScript({ + entries: siteConfig.backends, + platform: 'node', + styleMap, + scriptMap, + viewItems: views, + viewRefs: builtViews, + }) + + // -- assemble page assets -- + const pAssemblePages = builtPages.map((page) => + assembleAndWritePage(page, styleMap, scriptMap) + ); + + await Promise.all([ + builtBackend, + builtStaticFiles, + ...pAssemblePages, + ]); } -export async function sitegen( - status: Spinner, - incr: Incremental, +export async function readManifest(io: Io) { + const cfg = await io.import("src/site.ts"); + return { + siteSections: cfg.siteSections.map((section) => ({ + root: toRel(section.root), + })), + backends: cfg.backends.map(toRel), + }; +} + +export async function discoverAllFiles( + siteConfig: Awaited>, ) { - const startTime = performance.now(); - - let root = path.resolve(import.meta.dirname, "../src"); - const join = (...sub: string[]) => path.join(root, ...sub); - - // Sitegen reviews every defined section for resources to process - const sections: sg.Section[] = - require(path.join(root, "site.ts")).siteSections; + return ( + await Promise.all( + siteConfig.siteSections.map(({ root: sectionRoot }) => + incr.work(scanSiteSection, toAbs(sectionRoot)) + ), + ) + ).reduce((acc, next) => ({ + staticFiles: acc.staticFiles.concat(next.staticFiles), + pages: acc.pages.concat(next.pages), + views: acc.views.concat(next.views), + scripts: acc.scripts.concat(next.scripts), + })); +} +export async function scanSiteSection(io: Io, sectionRoot: string) { // Static files are compressed and served as-is. 
// - "{section}/static/*.png" let staticFiles: FileItem[] = []; @@ -61,394 +109,227 @@ export async function sitegen( // - "{section}/scripts/*.client.ts" let scripts: FileItem[] = []; - // -- Scan for files -- - status.text = "Scanning Project"; - for (const section of sections) { - const { root: sectionRoot } = section; - const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub); - const rootPrefix = root === sectionRoot - ? "" - : path.relative(root, sectionRoot) + "/"; - const kinds = [ - { - dir: sectionPath("pages"), - list: pages, - prefix: "/", - include: [".tsx", ".mdx", ".marko"], - exclude: [".client.ts", ".client.tsx"], - }, - { - dir: sectionPath("static"), - list: staticFiles, - prefix: "/", - ext: true, - }, - { - dir: sectionPath("scripts"), - list: scripts, - prefix: rootPrefix, - include: [".client.ts", ".client.tsx"], - }, - { - dir: sectionPath("views"), - list: views, - prefix: rootPrefix, - include: [".tsx", ".mdx", ".marko"], - exclude: [".client.ts", ".client.tsx"], - }, - ]; - for ( - const { dir, list, prefix, include = [""], exclude = [], ext = false } - of kinds - ) { - const items = fs.readDirRecOptionalSync(dir); - for (const subPath of items) { - const file = path.join(dir, subPath); - const stat = fs.statSync(file); - if (stat.isDirectory()) continue; - if (!include.some((e) => subPath.endsWith(e))) continue; - if (exclude.some((e) => subPath.endsWith(e))) continue; - const trim = ext - ? subPath - : subPath.slice(0, -path.extname(subPath).length).replaceAll( - ".", - "/", - ); - let id = prefix + trim.replaceAll("\\", "/"); - if (prefix === "/" && id.endsWith("/index")) { - id = id.slice(0, -"/index".length) || "/"; - } - list.push({ id, file: file }); - } - } - } - const globalCssPath = join("global.css"); - - // TODO: make sure that `static` and `pages` does not overlap - - // -- inline style sheets, used and shared by pages and views -- - status.text = "Building"; - const cssOnce = new OnceMap(); - const cssQueue = new Queue({ - name: "Bundle", - async fn([, key, files, theme]: [string, string, string[], css.Theme]) { - const { text, sources } = await css.bundleCssFiles(files, theme); - incr.put({ - kind: "style", - key, - sources, - value: text, - }); + const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub); + const rootPrefix = hot.projectSrc === sectionRoot + ? "" + : path.relative(hot.projectSrc, sectionRoot) + "/"; + const kinds = [ + { + dir: sectionPath("pages"), + list: pages, + prefix: "/", + include: [".tsx", ".mdx", ".marko"], + exclude: [".client.ts", ".client.tsx"], + }, + { + dir: sectionPath("static"), + list: staticFiles, + prefix: "/", + ext: true, + }, + { + dir: sectionPath("scripts"), + list: scripts, + prefix: rootPrefix, + include: [".client.ts", ".client.tsx"], + }, + { + dir: sectionPath("views"), + list: views, + prefix: rootPrefix, + include: [".tsx", ".mdx", ".marko"], + exclude: [".client.ts", ".client.tsx"], }, - passive: true, - getItemText: ([id]) => id, - maxJobs: 2, - }); - function ensureCssGetsBuilt( - cssImports: string[], - theme: css.Theme, - referrer: string, - ) { - const key = css.styleKey(cssImports, theme); - cssOnce.get( - key, - async () => { - incr.getArtifact("style", key) ?? 
- await cssQueue.add([referrer, key, cssImports, theme]); - }, - ); - } - - // -- server side render pages -- - async function loadPageModule({ file }: FileItem) { - require(file); - } - async function renderPage(item: FileItem) { - // -- load and validate module -- - let { - default: Page, - meta: metadata, - theme: pageTheme, - layout, - } = require(item.file); - if (!Page) { - throw new Error("Page is missing a 'default' export."); - } - if (!metadata) { - throw new Error("Page is missing 'meta' export with a title."); - } - - // -- css -- - if (layout?.theme) pageTheme = layout.theme; - const theme: css.Theme = { - ...css.defaultTheme, - ...pageTheme, - }; - const cssImports = Array.from( - new Set([globalCssPath, ...hot.getCssImports(item.file)]), - (file) => path.relative(hot.projectSrc, file), - ); - ensureCssGetsBuilt(cssImports, theme, item.id); - - // -- metadata -- - const renderedMetaPromise = Promise.resolve( - typeof metadata === "function" ? metadata({ ssr: true }) : metadata, - ).then((m) => meta.renderMeta(m)); - - // -- html -- - let page = [engine.kElement, Page, {}]; - if (layout?.default) { - page = [engine.kElement, layout.default, { children: page }]; - } - const bodyPromise = engine.ssrAsync(page, { - sitegen: sg.initRender(), - }); - - const [{ text, addon }, renderedMeta] = await Promise.all([ - bodyPromise, - renderedMetaPromise, - ]); - if (!renderedMeta.includes("")) { - throw new Error( - "Page is missing 'meta.title'. " + - "All pages need a title tag.", - ); - } - incr.put({ - kind: "pageMetadata", - key: item.id, - // Incremental integrates with `hot.ts` + `require` - // to trace all the needed source files here. - sources: [item.file], - value: { - html: text, - meta: renderedMeta, - cssImports, - theme: theme ?? null, - clientRefs: Array.from(addon.sitegen.scripts), - }, - }); - } - async function prepareView(item: FileItem) { - const module = require(item.file); - if (!module.meta) { - throw new Error(`${item.file} is missing 'export const meta'`); - } - if (!module.default) { - throw new Error(`${item.file} is missing a default export.`); - } - const pageTheme = module.layout?.theme ?? module.theme; - const theme: css.Theme = { - ...css.defaultTheme, - ...pageTheme, - }; - const cssImports = Array.from( - new Set([globalCssPath, ...hot.getCssImports(item.file)]), - (file) => path.relative(hot.projectSrc, file), - ); - ensureCssGetsBuilt(cssImports, theme, item.id); - incr.put({ - kind: "viewMetadata", - key: item.id, - sources: [item.file], - value: { - file: path.relative(hot.projectRoot, item.file), - cssImports, - theme, - clientRefs: hot.getClientScriptRefs(item.file), - hasLayout: !!module.layout?.default, - }, - }); - } - - // Of the pages that are already built, a call to 'ensureCssGetsBuilt' is - // required so that it's (1) re-built if needed, (2) not pruned from build. 
- const neededPages = pages.filter((page) => { - const existing = incr.getArtifact("pageMetadata", page.id); - if (existing) { - const { cssImports, theme } = existing; - ensureCssGetsBuilt(cssImports, theme, page.id); - } - return !existing; - }); - const neededViews = views.filter((view) => { - const existing = incr.getArtifact("viewMetadata", view.id); - if (existing) { - const { cssImports, theme } = existing; - ensureCssGetsBuilt(cssImports, theme, view.id); - } - return !existing; - }); - - // Load the marko cache before render modules are loaded - incr.loadMarkoCache(); - - // This is done in two passes so that a page that throws during evaluation - // will report "Load Render Module" instead of "Render Static Page". - const spinnerFormat = status.format; - status.format = () => ""; - const moduleLoadQueue = new Queue({ - name: "Load Render Module", - fn: loadPageModule, - getItemText, - maxJobs: 1, - }); - moduleLoadQueue.addMany(neededPages); - moduleLoadQueue.addMany(neededViews); - await moduleLoadQueue.done({ method: "stop" }); - const pageQueue = new Queue({ - name: "Render Static Page", - fn: renderPage, - getItemText, - maxJobs: 2, - }); - pageQueue.addMany(neededPages); - const viewQueue = new Queue({ - name: "Build Dynamic View", - fn: prepareView, - getItemText, - maxJobs: 2, - }); - viewQueue.addMany(neededViews); - const pageAndViews = [ - pageQueue.done({ method: "stop" }), - viewQueue.done({ method: "stop" }), ]; - await Promise.allSettled(pageAndViews); - await Promise.all(pageAndViews); - status.format = spinnerFormat; - - // -- bundle server javascript (backend and views) -- - status.text = "Bundle JavaScript"; - incr.snapshotMarkoCache(); - const serverJavaScriptPromise = bundle.bundleServerJavaScript(incr, "node"); - - // -- bundle client javascript -- - const referencedScripts = Array.from( - new Set( - [ - ...pages.map((item) => - UNWRAP( - incr.getArtifact("pageMetadata", item.id), - `Missing pageMetadata ${item.id}`, - ) - ), - ...views.map((item) => - UNWRAP( - incr.getArtifact("viewMetadata", item.id), - `Missing viewMetadata ${item.id}`, - ) - ), - ].flatMap((item) => item.clientRefs), - ), - (script) => path.resolve(hot.projectSrc, script), - ).filter((file) => !incr.hasArtifact("script", hot.getScriptId(file))); - const extraPublicScripts = scripts.map((entry) => entry.file); - const clientJavaScriptPromise = bundle.bundleClientJavaScript( - referencedScripts, - extraPublicScripts, - incr, - ); - await Promise.all([ - serverJavaScriptPromise, - clientJavaScriptPromise, - cssQueue.done({ method: "stop" }), - ]); - await bundle.finalizeServerJavaScript(incr, "node"); - - // -- copy/compress static files -- - async function doStaticFile(item: FileItem) { - const body = await fs.readFile(item.file); - await incr.putAsset({ - sources: [item.file], - key: item.id, - body, - }); - } - const staticQueue = new Queue({ - name: "Load Static", - fn: doStaticFile, - getItemText, - maxJobs: 16, - }); - status.format = () => ""; - staticQueue.addMany( - staticFiles.filter((file) => !incr.hasArtifact("asset", file.id)), - ); - await staticQueue.done({ method: "stop" }); - status.format = spinnerFormat; - - // -- concatenate static rendered pages -- - status.text = `Concat Pages`; - await Promise.all(pages.map(async (page) => { - if (incr.hasArtifact("asset", page.id)) return; + for (const kind of kinds) { const { - html, - meta, - cssImports, - theme, - clientRefs, - } = UNWRAP(incr.out.pageMetadata.get(page.id)); - const scriptIds = clientRefs.map(hot.getScriptId); - 
const styleKey = css.styleKey(cssImports, theme); - const style = UNWRAP( - incr.out.style.get(styleKey), - `Missing style ${styleKey}`, + dir, + list, + prefix, + include = [""], + exclude = [], + ext = false, + } = kind; + + let items; + try { + items = await io.readDirRecursive(dir); + } catch (err: any) { + if (err.code === "ENOENT") continue; + throw err; + } + for (const subPath of items) { + const file = path.join(dir, subPath); + const stat = fs.statSync(file); + if (stat.isDirectory()) continue; + if (!include.some((e) => subPath.endsWith(e))) continue; + if (exclude.some((e) => subPath.endsWith(e))) continue; + const trim = ext + ? subPath + : subPath.slice(0, -path.extname(subPath).length).replaceAll(".", "/"); + let id = prefix + trim.replaceAll("\\", "/"); + if (prefix === "/" && id.endsWith("/index")) { + id = id.slice(0, -"/index".length) || "/"; + } + list.push({ id, file: path.relative(hot.projectRoot, file) }); + } + } + + return { staticFiles, pages, views, scripts }; +} + +export async function preparePage(io: Io, item: sg.FileItem) { + // -- load and validate module -- + let { + default: Page, + meta: metadata, + theme: pageTheme, + layout, + } = await io.import<any>(item.file); + if (!Page) { + throw new Error("Page is missing a 'default' export."); + } + if (!metadata) { + throw new Error("Page is missing 'meta' export with a title."); + } + + // -- css -- + if (layout?.theme) pageTheme = layout.theme; + const theme: css.Theme = { + ...css.defaultTheme, + ...pageTheme, + }; + const cssImports = Array.from( + new Set([globalCssPath, ...hot.getCssImports(item.file)]), + (file) => path.relative(hot.projectSrc, file), + ); + + // -- metadata -- + const renderedMetaPromise = Promise.resolve( + typeof metadata === "function" ? metadata({ ssr: true }) : metadata, + ).then((m) => meta.renderMeta(m)); + + // -- html -- + let page = [engine.kElement, Page, {}]; + if (layout?.default) { + page = [engine.kElement, layout.default, { children: page }]; + } + const bodyPromise = engine.ssrAsync(page, { + sitegen: sg.initRender(), + }); + + const [{ text, addon }, renderedMeta] = await Promise.all([ + bodyPromise, + renderedMetaPromise, + ]); + if (!renderedMeta.includes("<title>")) { + throw new Error( + "Page is missing 'meta.title'. " + "All pages need a title tag.", ); - const doc = wrapDocument({ - body: html, - head: meta, - inlineCss: style, - scripts: scriptIds.map( - (ref) => UNWRAP(incr.out.script.get(ref), `Missing script ${ref}`), - ).map((x) => `{${x}}`).join("\n"), - }); - await incr.putAsset({ - sources: [ - page.file, - ...incr.sourcesFor("style", styleKey), - ...scriptIds.flatMap((ref) => incr.sourcesFor("script", ref)), - ], - key: page.id, - body: doc, - headers: { - "Content-Type": "text/html", - }, - }); - })); - status.format = () => ""; - status.text = ``; - // This will wait for all compression jobs to finish, which up - // to this point have been left as dangling promises. - await incr.wait(); - - const { inserted, referenced, unreferenced } = incr.shake(); - - // Flush the site to disk. - status.format = spinnerFormat; - status.text = `Incremental Flush`; - incr.flush("node"); // Write outputs + } + const styleKey = css.styleKey(cssImports, theme); return { - incr, - inserted, - referenced, - unreferenced, - elapsed: (performance.now() - startTime) / 1000, + id: item.id, + html: text, + meta: renderedMeta, + cssImports, + theme: theme ?? 
null, + styleKey, + clientRefs: Array.from(addon.sitegen.scripts), }; } -function getItemText({ file }: FileItem) { - return path.relative(hot.projectSrc, file).replaceAll("\\", "/"); +export async function prepareView(io: Io, item: sg.FileItem) { + const module = await io.import<any>(item.file); + if (!module.meta) { + throw new Error(`${item.file} is missing 'export const meta'`); + } + if (!module.default) { + throw new Error(`${item.file} is missing a default export.`); + } + const pageTheme = module.layout?.theme ?? module.theme; + const theme: css.Theme = { + ...css.defaultTheme, + ...pageTheme, + }; + const cssImports = Array.from( + new Set([globalCssPath, ...hot.getCssImports(item.file)]), + (file) => path.relative(hot.projectSrc, file), + ); + const styleKey = css.styleKey(cssImports, theme); + return { + file: path.relative(hot.projectRoot, item.file), + cssImports, + theme, + clientRefs: hot.getClientScriptRefs(item.file), + hasLayout: !!module.layout?.default, + styleKey, + }; +} +export type PreparedView = Awaited<ReturnType<typeof prepareView>>; + +export function prepareInlineCss( + items: Array<{ + styleKey: string; + cssImports: string[]; + theme: css.Theme; + }>, +) { + const map = new Map<string, incr.Ref<string>>(); + for (const { styleKey, cssImports, theme } of items) { + if (map.has(styleKey)) continue; + map.set( + styleKey, + incr.work(css.bundleCssFiles, { + cssImports, + theme, + dev: false, + }), + ); + } + return map; } -import { OnceMap, Queue } from "#sitegen/async"; -import { Incremental } from "./incremental.ts"; +export type PreparedPage = Awaited<ReturnType<typeof preparePage>>; +export async function assembleAndWritePage( + pageWork: incr.Ref<PreparedPage>, + styleMap: Map<string, incr.Ref<string>>, + scriptWork: incr.Ref<Record<string, string>>, +) { + const page = await pageWork; + return incr.work( + async (io, { id, html, meta, styleKey, clientRefs }) => { + const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey))); + + const scriptIds = clientRefs.map(hot.getScriptId); + const scriptMap = await io.readWork(scriptWork); + const scripts = scriptIds.map((ref) => + UNWRAP(scriptMap[ref], `Missing script ${ref}`) + ) + .map((x) => `{${x}}`).join("\n"); + + const doc = wrapDocument({ + body: html, + head: meta, + inlineCss, + scripts, + }); + await io.writeAsset(id, doc, { + "Content-Type": "text/html", + }); + }, + page, + ); +} + +import * as sg from "#sitegen"; +import * as incr from "./incremental.ts"; +import { Io } from "./incremental.ts"; import * as bundle from "./bundle.ts"; import * as css from "./css.ts"; import * as engine from "./engine/ssr.ts"; import * as hot from "./hot.ts"; import * as fs from "#sitegen/fs"; -import * as sg from "#sitegen"; import type { FileItem } from "#sitegen"; import * as path from "node:path"; import * as meta from "#sitegen/meta"; diff --git a/framework/hot.ts b/framework/hot.ts index c66dc4d..234533a 100644 --- a/framework/hot.ts +++ b/framework/hot.ts @@ -19,6 +19,8 @@ export const load = createRequire( }; export const { cache } = load; +load<any>("source-map-support").install({hookRequire: true}); + // Register extensions by overwriting `require.extensions` const require = load; const exts = require.extensions; @@ -42,8 +44,7 @@ export function getFileStat(filepath: string) { } function shouldTrackPath(filename: string) { - return !filename.includes("node_modules") && - !filename.includes(import.meta.dirname); + return !filename.includes("node_modules"); } const Module = load<typeof 
import("node:module")>("node:module"); @@ -59,11 +60,12 @@ Module.prototype._compile = function ( filename, format, ); - const stat = fs.statSync(filename); if (shouldTrackPath(filename)) { + const stat = fs.statSync(filename); const cssImportsMaybe: string[] = []; const imports: string[] = []; - for (const { filename: file, cloverClientRefs } of this.children) { + for (const childModule of this.children) { + const { filename: file, cloverClientRefs } = childModule; if (file.endsWith(".css")) cssImportsMaybe.push(file); else { const child = fileStats.get(file); @@ -71,6 +73,7 @@ Module.prototype._compile = function ( const { cssImportsRecursive } = child; if (cssImportsRecursive) cssImportsMaybe.push(...cssImportsRecursive); imports.push(file); + (childModule.cloverImporters ??= []).push(this); if (cloverClientRefs && cloverClientRefs.length > 0) { (this.cloverClientRefs ??= []) .push(...cloverClientRefs); @@ -82,7 +85,7 @@ Module.prototype._compile = function ( ? Array.from(new Set(cssImportsMaybe)) : null, imports, - lastModified: stat.mtimeMs, + lastModified: Math.floor(stat.mtimeMs), }); } return result; @@ -113,7 +116,7 @@ function loadEsbuild(module: NodeJS.Module, filepath: string) { interface LoadOptions { scannedClientRefs?: string[]; } -function loadEsbuildCode( +export function loadEsbuildCode( module: NodeJS.Module, filepath: string, src: string, @@ -139,7 +142,7 @@ function loadEsbuildCode( import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())}; import.meta.dirname = ${JSON.stringify(path.dirname(filepath))}; import.meta.filename = ${JSON.stringify(filepath)}; - `.trim().replace(/\n/g, "") + src; + `.trim().replace(/[\n\s]/g, "") + src; } src = esbuild.transformSync(src, { loader, @@ -149,11 +152,12 @@ function loadEsbuildCode( jsxImportSource: "#ssr", jsxDev: true, sourcefile: filepath, + sourcemap: 'inline', }).code; return module._compile(src, filepath, "commonjs"); } -function resolveClientRef(sourcePath: string, ref: string) { +export function resolveClientRef(sourcePath: string, ref: string) { const filePath = resolveFrom(sourcePath, ref); if ( !filePath.endsWith(".client.ts") && @@ -164,44 +168,10 @@ function resolveClientRef(sourcePath: string, ref: string) { return path.relative(projectSrc, filePath); } -// TODO: extract the marko compilation tools out, lazy load them -export interface MarkoCacheEntry { - src: string; - scannedClientRefs: string[]; -} -export const markoCache = new Map<string, MarkoCacheEntry>(); +let lazyMarko: typeof import('./marko.ts') | null = null; function loadMarko(module: NodeJS.Module, filepath: string) { - let cache = markoCache.get(filepath); - if (!cache) { - let src = fs.readFileSync(filepath, "utf8"); - // A non-standard thing here is Clover Sitegen implements - // its own client side scripting stuff, so it overrides - // bare client import statements to it's own usage. 
- const scannedClientRefs = new Set<string>(); - if (src.match(/^\s*client\s+import\s+["']/m)) { - src = src.replace( - /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m, - (_, src) => { - const ref = JSON.parse(`"${src.slice(1, -1)}"`); - const resolved = resolveClientRef(filepath, ref); - scannedClientRefs.add(resolved); - return `<CloverScriptInclude=${ - JSON.stringify(getScriptId(resolved)) - } />`; - }, - ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n'; - } - - src = marko.compileSync(src, filepath).code; - src = src.replace("marko/debug/html", "#ssr/marko"); - cache = { src, scannedClientRefs: Array.from(scannedClientRefs) }; - markoCache.set(filepath, cache); - } - - const { src, scannedClientRefs } = cache; - return loadEsbuildCode(module, filepath, src, { - scannedClientRefs, - }); + lazyMarko ??= require<typeof import('./marko.ts')>("./framework/marko.ts"); + lazyMarko.loadMarko(module, filepath); } function loadMdx(module: NodeJS.Module, filepath: string) { @@ -224,10 +194,14 @@ export function reloadRecursive(filepath: string) { } export function unload(filepath: string) { + lazyMarko?.markoCache.delete(filepath) filepath = path.resolve(filepath); - const existing = cache[filepath]; - if (existing) delete cache[filepath]; - fileStats.delete(filepath); + const module = cache[filepath]; + if (!module) return; + delete cache[filepath]; + for (const importer of module.cloverImporters ?? []) { + unload(importer.filename); + } } function deleteRecursiveInner(id: string, module: any) { @@ -326,6 +300,7 @@ declare global { interface Module { cloverClientRefs?: string[]; cloverSourceCode?: string; + cloverImporters?: Module[], _compile( this: NodeJS.Module, @@ -343,11 +318,10 @@ declare module "node:module" { ): unknown; } -import * as fs from "./lib/fs.ts"; +import * as fs from "#sitegen/fs"; import * as path from "node:path"; import { pathToFileURL } from "node:url"; import * as esbuild from "esbuild"; -import * as marko from "@marko/compiler"; import { createRequire } from "node:module"; import * as mdx from "@mdx-js/mdx"; import * as self from "./hot.ts"; diff --git a/framework/incremental.test.ts b/framework/incremental.test.ts new file mode 100644 index 0000000..234bf6a --- /dev/null +++ b/framework/incremental.test.ts @@ -0,0 +1,56 @@ +test("trivial case", async () => { + incr.reset(); + + const file1 = tmpFile("example.txt"); + file1.write("one"); + + async function compilation() { + const first = incr.work({ + label: "first compute", + async run (io) { + await setTimeout(1000); + const contents = await io.readFile(file1.path); + return [contents, Math.random()] as const; + } + }); + const second = incr.work({ + label: "second compute", + wait: first, + async run (io) { + await setTimeout(1000); + return io.readWork(first)[0].toUpperCase(); + } + }); + const third = incr.work({ + label: "third compute", + wait: first, + async run (io) { + await setTimeout(1000); + return io.readWork(first)[1] * 1000; + } + }); + return incr.work({ + label: "last compute", + wait: [second, third], + async run (io) { + await setTimeout(1000); + return { + second: io.readWork(second), + third: io.readWork(third), + } + } + }); + } + const { value: first } = await incr.compile(compilation); + const { value: second } = await incr.compile(compilation); + ASSERT(first === second); + incr.forceInvalidate(file1.path); + const { value: third } = await incr.compile(compilation); + ASSERT(first !== third); + ASSERT(first[0] === third[0]); +}); + +import * as incr from 
"./incremental2.ts"; +import { beforeEach, test } from "node:test"; +import { tmpFile } from "#sitegen/testing";import { setTimeout } from "node:timers/promises"; + diff --git a/framework/incremental.ts b/framework/incremental.ts index ef36367..525793e 100644 --- a/framework/incremental.ts +++ b/framework/incremental.ts @@ -1,592 +1,575 @@ -// Incremental contains multiple maps for the different kinds -// of Artifact, which contain a list of source files which -// were used to produce it. When files change, Incremental sees -// that the `mtime` is newer, and purges the referenced artifacts. +// Incremental build system using singleton state. +// See `work()`, `compile()`, and `invalidate()` for details. +// +// All state is serializable to allow recovering state across sessions. +// This framework special-cases the asset map, but is otherwise +// agnostic of what it is a compiler for. +let running = false; +let jobs = 0; +let newKeys = 0; +let seenWorks = new Set<string>(); // for detecting conflict vs overwrite +let seenWrites = new Set<string>(); // for detecting conflict vs overwrite +let works = new Map<string, Work>(); +let files = new Map<string, TrackedFile>(); // keyed by `toRel` path +let writes = new Map<string, FileWrite>(); +let assets = new Map<string, Asset>(); // keyed by hash -type SourceId = string; // relative to project root, e.g. 'src/global.css' -type ArtifactId = string; // `${ArtifactType}\0${string}` -type Sha1Id = string; // Sha1 hex string - -// -- artifact types -- -interface ArtifactMap { - /* An asset (serve with "#sitegen/asset" */ - asset: Asset; - /* The bundled text of a '.client.ts' script */ - // TODO: track imports this has into `asset` - script: string; - /* The bundled style tag contents. Keyed by 'css.styleKey' */ - style: string; - /* Metadata about a static page */ - pageMetadata: PageMetadata; - /* Metadata about a dynamic view */ - viewMetadata: ViewMetadata; - /* Cached '.marko' server compilation */ - serverMarko: hot.MarkoCacheEntry; - /* Backend source code, pre-replacement. Keyed by platform type. */ - backendBundle: BackendBundle; - /* One file in the backend receives post-processing. */ - backendReplace: Buffer; -} -type ArtifactKind = keyof ArtifactMap; -/* Automatic path tracing is performed to make it so that - * specifying 'sources: [file]' refers to it and everything it imports. - * These kinds do not have that behavior - */ -const exactDependencyKinds = ["serverMarko"]; -export interface Asset { - buffer: Buffer; - headers: Record<string, string | undefined>; - hash: string; -} -/** - * This interface intentionally omits the *contents* - * of its scripts and styles for fine-grained rebuilds. - */ -export interface PageMetadata { - html: string; - meta: string; - cssImports: string[]; - theme: css.Theme; - clientRefs: string[]; -} -/** - * Like a page, this intentionally omits resources, - * but additionally omits the bundled server code. 
- */ -export interface ViewMetadata { - file: string; - // staticMeta: string | null; TODO - cssImports: string[]; - theme: css.Theme; - clientRefs: string[]; - hasLayout: boolean; -} -export interface BackendBundle { - magicWord: string; - fileWithMagicWord: string | null; - files: Record<string, Buffer>; -} - -// -- incremental support types -- -export interface PutBase { - sources: SourceId[]; +export interface Ref<T> { + /** This method is compatible with `await` syntax */ + then( + onFulfilled: (value: T) => void, + onRejected: (error: unknown) => void, + ): void; key: string; } -export interface Put<T extends ArtifactKind> extends PutBase { - kind: T; - value: ArtifactMap[T]; -} -export interface Invalidations { - lastModified: number; - outputs: Set<ArtifactId>; - files: Set<SourceId>; -} +type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>; -export class Incremental { - /** The generated artifacts */ - out: { - [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>; - } = { - asset: new Map(), - script: new Map(), - style: new Map(), - pageMetadata: new Map(), - viewMetadata: new Map(), - serverMarko: new Map(), - backendBundle: new Map(), - backendReplace: new Map(), - }; - /** Tracking filesystem entries to `srcId` */ - invals = new Map<SourceId, Invalidations>(); - /** Tracking output keys to files */ - sources = new Map<ArtifactId, SourceId[]>(); +/** + * Declare and a unit of work. Return value is memoized and + * only rebuilt when inputs (declared via `Io`) change. Outputs + * are written at the end of a compilation (see `compile`). + * + * If the returned `Ref` is not awaited or read + * via io.readWork, the job is never started. + */ +export function work<O>(job: Job<void, O>): Ref<O>; +export function work<I, O>(job: Job<I, O>, input: I): Ref<O>; +export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> { + const source = JSON.stringify(util.getCallSites(2)[1]); + const keySource = [source, util.inspect(input)].join(":"); + const key = crypto.createHash("sha1").update(keySource).digest("base64url"); + ASSERT(running); + ASSERT( + !seenWorks.has(key), + `Key '${key}' must be unique during the build. 
` + + `To fix this, provide a manual 'key' argument.`, + ); + seenWorks.add(key); - /** Compressed resources */ - compress = new Map<Sha1Id, Compressed>(); - compressQueue = new Queue<CompressJob, void>({ - name: "Compress", - maxJobs: 5, - fn: this.compressImpl.bind(this), - passive: true, - getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`, - }); - - /** Reset at the end of each update */ - round = { - inserted: new Set<ArtifactId>(), - referenced: new Set<ArtifactId>(), - }; - - getArtifact<T extends ArtifactKind>(kind: T, key: string) { - this.round.referenced.add(`${kind}\0${key}`); - return this.out[kind].get(key); + const prev = works.get(key) as Work<O> | null; + if (prev) { + return { key, then: (done) => done(prev.value) }; } - hasArtifact(kind: ArtifactKind, key: string) { - return this.getArtifact(kind, key) != null; - } - - sourcesFor(kind: ArtifactKind, key: string) { - return UNWRAP( - this.sources.get(kind + "\0" + key), - `No artifact '${kind}' '${key}'`, - ); - } - - shake() { - const toPublic = (str: string) => { - const [kind, key] = str.split("\0"); - return { kind: kind as ArtifactKind, key }; - }; - const inserted = Array.from(this.round.inserted, toPublic); - const referenced = Array.from(this.round.referenced, toPublic); - const unreferenced: { kind: ArtifactKind; key: string }[] = []; - - for (const kind in this.out) { - const map = this.out[kind as keyof typeof this.out]; - if (!map) continue; - for (const key of map.keys()) { - if (!this.round.referenced.has(`${kind}\0${key}`)) { - unreferenced.push({ kind: kind as ArtifactKind, key }); - // this.out[kind as ArtifactKind].delete(key); - } - } - } - - this.round.inserted.clear(); - this.round.referenced.clear(); - - return { inserted, referenced, unreferenced }; - } - - /* - * Put built artifacts into the incremental cache. The entry points - * used to build this must be provided. 'Incremental' will trace JS - * imports and file modification times tracked by 'hot.ts'. - */ - put<T extends ArtifactKind>({ - sources, - kind, - key, - value, - }: Put<T>) { - // These three invariants affect incremental accuracy. - if (this.round.inserted.has(`${kind}\0${key}`)) { - console.error( - `Artifact ${kind}:${key} was inserted multiple times in the same round!`, - ); - } else if (!this.round.referenced.has(`${kind}\0${key}`)) { - console.error( - `Artifact ${kind}:${key} was inserted without checking if (!hasArtifact())`, - ); - } else if (this.out[kind].has(key)) { - console.error( - `Artifact ${kind}:${key} is not stale, but overwritten.`, - ); - } - - this.out[kind].set(key, value); - - this.round.inserted.add(`${kind}\0${key}`); - - // Update sources information - ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key); - sources = sources.map((src) => path.normalize(src)); - const fullKey = `${kind}\0${key}`; - const prevSources = this.sources.get(fullKey); - const newSources = new Set( - sources.map((file) => - path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file - ), - ); - this.sources.set(fullKey, [...newSources]); - for (const source of prevSources ?? []) { - if (sources.includes(source)) { - newSources.delete(source); - continue; - } - const invals = UNWRAP(this.invals.get(source)); - ASSERT(invals.outputs.has(fullKey)); - invals.outputs.delete(fullKey); - } - // Use reflection from the plugin system to get imports. 
- for (const source of newSources) { - const invals = this.#getOrInitInvals(source); - invals.outputs.add(fullKey); - this.#followImports(source); - } - } - - // TODO: this doesnt remove stuff when it disappeary - #getOrInitInvals(source: string) { - let invals = this.invals.get(source); - if (!invals) { - const lastModified = hot.getFileStat(source)?.lastModified ?? - fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs; - this.invals.set( - source, - invals = { - lastModified, - files: new Set(), - outputs: new Set(), - }, - ); - } - return invals; - } - - #followImports(file: string) { - const stat = hot.getFileStat(file); - if (!stat) return; - for (const i of stat.imports) { - const invals = this.#getOrInitInvals(i); - invals.files.add(file); - this.#followImports(i); - } - } - - async statAllFiles() { - for (const file of this.invals.keys()) { - try { - const mtime = fs.statSync(file).mtimeMs; - this.updateStat(file, mtime); - } catch (err) { - } - } - } - - updateStat(file: string, newLastModified: number | null) { - file = path.relative(hot.projectRoot, file); - const stat = this.invals.get(file); - ASSERT(stat, "Updated stat on untracked file " + file); - const hasUpdate = !newLastModified || stat.lastModified < newLastModified; - if (hasUpdate) { - // Invalidate - console.info(file + " " + (newLastModified ? "updated" : "deleted")); - hot.unload(file); - const invalidQueue = [file]; - let currentInvalid; - while (currentInvalid = invalidQueue.pop()) { - const invalidations = this.invals.get(currentInvalid); - ASSERT( - invalidations, - "No reason to track file '" + currentInvalid + - "' if it has no invalidations", - ); - const { files, outputs } = invalidations; - for (const out of outputs) { - const [kind, artifactKey] = out.split("\0"); - this.out[kind as ArtifactKind].delete(artifactKey); - } - invalidQueue.push(...files); - } - } - if (newLastModified) { - stat.lastModified = newLastModified; - } else { - this.invals.delete(file); - } - return hasUpdate; - } - - async putAsset(info: PutAsset) { - const { body, headers, key } = info; - const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body); - const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer)) - .toString("hex"); - const value: Asset = { - buffer, - headers: { - "Content-Type": headers?.["Content-Type"] ?? mime.contentTypeFor(key), - "ETag": JSON.stringify(hash), - ...headers, - }, - hash, - }; - const a = this.put({ ...info, kind: "asset", value }); - if (!this.compress.has(hash)) { - const label = info.key; - this.compress.set(hash, { - zstd: undefined, - gzip: undefined, - }); - this.compressQueue.add({ label, buffer, algo: "zstd", hash }); - this.compressQueue.add({ label, buffer, algo: "gzip", hash }); - } - return a; - } - - async compressImpl({ algo, buffer, hash }: CompressJob) { - let out; - if (algo === "zstd") out = await zstd(buffer); - else if (algo === "gzip") out = await gzip(buffer, { level: 9 }); - else algo satisfies never; - - let entry = this.compress.get(hash); - if (!entry) { - this.compress.set( - hash, - entry = { - zstd: undefined, - gzip: undefined, - }, - ); - } - entry![algo] = out; - } - - serialize() { - const writer = new BufferWriter(); - - // -- artifact -- - const asset = Array.from( - this.out.asset, - ([key, { buffer, hash, headers }]) => { - const raw = writer.write(buffer, hash); - const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {}; - const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null; - const zstd = zstdBuf ? 
writer.write(zstdBuf, hash + ".zstd") : null; - return [key, { - raw, - gzip, - zstd, - hash, - headers: headers as Record<string, string>, - }] satisfies SerializedMeta["asset"][0]; - }, - ); - const script = Array.from(this.out.script); - const style = Array.from(this.out.style); - const pageMetadata = Array.from(this.out.pageMetadata); - const viewMetadata = Array.from(this.out.viewMetadata); - const serverMarko = Array.from(this.out.serverMarko); - const backendBundle = Array.from(this.out.backendBundle, ([k, v]) => { - return [k, { - magicWord: v.magicWord, - fileWithMagicWord: v.fileWithMagicWord, - files: Object.entries(v.files).map( - ([file, contents]) => [ - file, - writer.write(contents, "backendBundle" + k + ":" + file), - ], - ), - }] satisfies SerializedMeta["backendBundle"][0]; - }); - const backendReplace = Array.from( - this.out.backendReplace, - ([k, v]) => - [ - k, - writer.write(v, "backendReplace" + k), - ] satisfies SerializedMeta["backendReplace"][0], - ); - // -- incremental metadata -- - const invals = Array.from(this.invals, ([key, value]) => { - const { lastModified, files, outputs } = value; - return [key, { - m: lastModified, - f: [...files], - o: [...outputs], - }] satisfies SerializedMeta["invals"][0]; - }); - const sources = Array.from(this.sources, ([key, value]) => { - return [key, ...value] as [string, ...string[]]; - }); - const json = { - asset, - script, - invals, - sources, - style, - pageMetadata, - viewMetadata, - serverMarko, - backendBundle, - backendReplace, - } satisfies SerializedMeta; - const meta = Buffer.from(JSON.stringify(json), "utf-8"); - - const lengthBuffer = Buffer.alloc(4); - lengthBuffer.writeUInt32LE(meta.byteLength, 0); - - return Buffer.concat([lengthBuffer, meta, ...writer.buffers]); - } - - static fromSerialized(buffer: Buffer): Incremental { - const metaLength = buffer.readUint32LE(0); - const meta: SerializedMeta = JSON.parse( - buffer.subarray(4, 4 + metaLength).toString("utf8"), - ); - const view = ([start, end]: View) => - buffer.subarray(4 + metaLength + start, 4 + metaLength + end); - - const incr = new Incremental(); - incr.out = { - asset: new Map(meta.asset.map(([key, value]) => { - const { hash, raw, gzip, zstd, headers } = value; - if ((gzip || zstd) && !incr.compress.has(hash)) { - incr.compress.set(hash, { - gzip: gzip ? view(gzip) : undefined, - zstd: zstd ? view(zstd) : undefined, - }); - } - return [key, { - buffer: view(raw), - headers: headers, - hash: hash, - }]; - })), - script: new Map(meta.script), - style: new Map(meta.style), - pageMetadata: new Map(meta.pageMetadata), - viewMetadata: new Map(meta.viewMetadata), - serverMarko: new Map(meta.serverMarko), - backendBundle: new Map(meta.backendBundle.map(([key, value]) => { - return [key, { - magicWord: value.magicWord, - fileWithMagicWord: value.fileWithMagicWord, - files: Object.fromEntries( - value.files.map(([file, contents]) => [file, view(contents)]), - ), - }]; - })), - backendReplace: new Map( - meta.backendReplace.map(([key, contents]) => [key, view(contents)]), - ), - }; - incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => { - return [key, { - lastModified: m, - files: new Set(f), - outputs: new Set(o), - }]; - })); - incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value])); - return incr; - } - - /* - * Move the cached (server) marko transpilations from this incremental - * into the running process. 
-   */
-  loadMarkoCache() {
-    hot.markoCache.clear();
-    for (const [key, value] of this.out.serverMarko) {
-      hot.markoCache.set(path.resolve(hot.projectRoot, key), value);
-    }
-  }
-
-  /*
-   * Move the cached (server) marko transpilations from this incremental
-   * into the running process.
-   */
-  snapshotMarkoCache() {
-    for (const [file, value] of hot.markoCache) {
-      const key = path.relative(hot.projectRoot, file).replaceAll("\\", "/");
-      // Only insert if it doesn't exist. Calling 'put' when it
-      // already exists would inform the user of extra calls to put.
-      if (!this.hasArtifact("serverMarko", key)) {
-        this.put({
-          kind: "serverMarko",
-          sources: [file],
-          key,
-          value,
-        });
-      }
-    }
-  }
-
-  toDisk(file = ".clover/incr.state") {
-    const buffer = this.serialize();
-    fs.writeFileSync(file, buffer);
-  }
-
-  static fromDisk(file = ".clover/incr.state"): Incremental {
+  async function perform() {
+    const io = new Io(key);
+    jobs += 1;
+    newKeys += 1;
     try {
-      const buffer = fs.readFileSync(file);
-      return Incremental.fromSerialized(buffer);
-    } catch (err: any) {
-      if (err.code === "ENOENT") return new Incremental();
+      const value = await job(io, input);
+      validateSerializable(value, "");
+      const { reads, writes } = io;
+      works.set(key, {
+        value,
+        affects: [],
+        reads,
+        writes,
+        debug: source,
+      });
+      for (const add of reads.files) {
+        const { affects } = UNWRAP(files.get(add));
+        ASSERT(!affects.includes(key));
+        affects.push(key);
+      }
+      for (const add of reads.works) {
+        const { affects } = UNWRAP(works.get(add));
+        ASSERT(!affects.includes(key));
+        affects.push(key);
+      }
+      return value;
+    } finally {
+      jobs -= 1;
+    }
+  }
+
+  let cached: Promise<O>;
+  return {
+    key,
+    then: (fulfill, reject) => void (cached ??= perform()).then(fulfill, reject),
+  };
+}
+
+export async function compile<T>(compiler: () => Promise<T>) {
+  ASSERT(!running, `Cannot start a compile while one is already running.`);
+  try {
+    running = true;
+    ASSERT(jobs === 0);
+    const start = performance.now();
+    const timerSpinner = new Spinner({
+      text: () =>
+        `sitegen! 
[${
+          ((performance.now() - start) / 1000).toFixed(
+            1,
+          )
+        }s]`,
+      fps: 10,
+    });
+    using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
+
+    const value = await compiler();
+    ASSERT(jobs === 0);
+    timerSpinner.text = "incremental flush";
+    await flush(start);
+    timerSpinner.stop();
+    return {
+      value,
+      watchFiles: new Set(files.keys()),
+      newOutputs: Array.from(seenWrites).filter(x => x.startsWith('f:')).map(x => x.slice(2)),
+      newAssets: Array.from(seenWrites).some(x => x.startsWith('a:')),
+    };
+  } finally {
+    running = false;
+    newKeys = 0;
+    seenWrites.clear();
+    seenWorks.clear();
+  }
+}
+
+export async function flush(start: number) {
+  // Trim
+  const detachedFiles = new Set<string>();
+  const referencedAssets = new Set<string>();
+  for (const [k, { writes: { assets } }] of works) {
+    if (seenWorks.has(k)) {
+      for (const asset of assets.values()) referencedAssets.add(asset.hash);
+      continue;
+    }
+    deleteWork(k);
+  }
+  for (const [k, file] of files) {
+    if (file.affects.length > 0) continue;
+    files.delete(k);
+    detachedFiles.add(k);
+  }
+  for (const k of assets.keys()) {
+    if (!referencedAssets.has(k))
+      assets.delete(k);
+  }
+
+  const p = [];
+  // File writes
+  let dist = 0;
+  for (const [key, { buffer, size }] of writes) {
+    if (buffer) p.push(fs.writeMkdir(path.join(`.clover/o/${key}`), buffer));
+    dist += size;
+  }
+  // Asset map
+  {
+    const { json, blob } = getAssetManifest();
+    const jsonString = Buffer.from(JSON.stringify(json));
+    p.push(fs.writeMkdir(".clover/o/static.json", jsonString));
+    p.push(fs.writeMkdir(".clover/o/static.blob", blob));
+    dist += blob.byteLength + jsonString.byteLength;
+  }
+  await Promise.all(p);
+
+  // Incremental state
+  const serialized = msgpackr.pack(serialize());
+  await fs.writeMkdir(".clover/incr.state", serialized);
+  const time = (performance.now() - start).toFixed(0);
+  console.success(`sitegen! in ${time} ms`);
+  console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
+  console.writeLine(` - ${assets.size} static assets`);
+  console.writeLine(
+    ` - dist: ${formatSize(dist)}, incremental: ${
+      formatSize(serialized.byteLength)
+    }`,
+  );
+}
+
+export async function restore() {
+  let buffer;
+  try {
+    buffer = await fs.readFile(".clover/incr.state");
+  } catch (err: any) {
+    if (err.code !== "ENOENT") throw err;
+  }
+  if (!buffer) return;
+  await deserialize(buffer);
+}
+
+function forceInvalidate(entry: { affects: string[] }) {
+  const queue = [...entry.affects];
+  let key;
+  while ((key = queue.shift())) {
+    const affects = deleteWork(key);
+    queue.push(...affects);
+  }
+}
+
+function deleteWork(key: string) {
+  const work = works.get(key);
+  if (!work) return [];
+  const { reads, affects, writes: w } = work;
+  for (const remove of reads.files) {
+    const { affects } = UNWRAP(files.get(remove));
+    ASSERT(affects.includes(key));
+    affects.splice(affects.indexOf(key), 1);
+  }
+  for (const remove of reads.works) {
+    const { affects } = UNWRAP(works.get(remove), remove);
+    ASSERT(affects.includes(key));
+    affects.splice(affects.indexOf(key), 1);
+  }
+  for (const remove of affects) {
+    const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
+    ASSERT(list.has(key));
+    list.delete(key);
+  }
+  for (const file of w.files) {
+    if (UNWRAP(writes.get(file)).work === key)
+      writes.delete(file);
+  }
+  // Assets are temporarily kept, trimmed via manual GC after compilation.
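+  // (Asset blobs are stored by content hash and can be shared by several
+  // works, so a blob must stay alive as long as any surviving work still
+  // references its hash; see the referencedAssets sweep in flush().)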
+ + works.delete(key); + return affects; +} + +export function reset() { + ASSERT(!running); + works.clear(); + files.clear(); + assets.clear(); +} + +export function serialize() { + const fileEntries = Array.from(files, ([k, v]) => + [ + k, + v.type, + v.type === 'f' ? v.lastModified : v.type === 'd' ? v.contentHash : null, + ...v.affects, + ] as const); + const workEntries = Array.from(works, ([k, v]) => + [ + k, + v.value, + Array.from(v.reads.files), + Array.from(v.reads.works), + Array.from(v.writes.files), + Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const), + v.affects, + ] as const); + const expectedFilesOnDisk = Array.from( + writes, + ([k, { size, work }]) => [k, size, work] as const, + ); + const assetEntries = Array.from( + assets, + ([k, asset]) => [k, asset.raw, asset.gzip, asset.zstd] as const, + ); + return [ + 1, + fileEntries, + workEntries, + expectedFilesOnDisk, + assetEntries, + ] as const; +} +type SerializedState = ReturnType<typeof serialize>; + +/* No-op on failure */ +async function deserialize(buffer: Buffer) { + const decoded = msgpackr.decode(buffer) as SerializedState; + if (!Array.isArray(decoded)) return false; + if (decoded[0] !== 1) return false; + const [, fileEntries, workEntries, expectedFilesOnDisk, assetEntries] = + decoded; + for (const [k, type, content, ...affects] of fileEntries) { + if (type === "f") { + ASSERT(typeof content === "number"); + files.set(k, { type, affects, lastModified: content }); + } else if (type === 'd') { + ASSERT(typeof content === "string"); + files.set(k, { type, affects, contentHash: content, contents: [] }); + } else { + files.set(k, { type, affects }); + } + } + for (const entry of workEntries) { + const [ + k, + value, + readFiles, + readWorks, + writeFiles, + writeAssets, + affects, + ] = entry; + works.set(k, { + value, + reads: { + files: new Set(readFiles), + works: new Set(readWorks), + }, + writes: { + files: new Set(writeFiles), + assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, { + hash: JSON.parse(UNWRAP(headers.etag)), + headers, + }])), + }, + affects, + }); + } + const statFiles = await Promise.all(expectedFilesOnDisk + .map(([k, size, work]) => + fs.stat(path.join(".clover/o", k)) + .catch((err) => { + if (err.code === "ENOENT") return null; + throw err; + }) + .then((stat) => ({ k, size, work, stat })) + )); + for (const { k, stat, work, size } of statFiles) { + if (stat?.size === size) { + writes.set(k, { + size: size, + buffer: null, + work, + }); + } else { + forceInvalidate({ affects: [work] }); + } + } + for (const [hash, raw, gzip, zstd] of assetEntries) { + assets.set(hash, { raw, gzip, zstd }); + } + + await Promise.all(Array.from(files, ([key, file]) => invalidateEntry(key, file))); +} + +export async function invalidate(filePath: string): Promise<boolean> { + const key = toRel(toAbs(filePath)); + const file = UNWRAP(files.get(key), `Untracked file '${key}'`) + return invalidateEntry(key, file) +} + +export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> { + try { + if (file.type === "d") { + const contents = file.contents = await fs.readdir(key); + contents.sort(); + const contentHash = crypto + .createHash("sha1") + .update(contents.join("\0")) + .digest("base64url"); + if (file.contentHash !== contentHash) { + file.contentHash = contentHash; + throw new Error(); + } + } else if (file.type === 'f') { + const lastModified = await fs.stat(key) + .then(x => Math.floor(x.mtimeMs), () => 0); + if (file.lastModified !== 
lastModified) { + file.lastModified = lastModified; + throw new Error(); + } + } else { + file.type satisfies 'null'; + const stat = await fs.stat(key).catch(() => null); + if (stat) throw new Error(); + } + return false; + } catch (e) { + forceInvalidate(file); + hot.unload(toAbs(key)); + if (file.type === 'null') files.delete(key); + return true; + } +} + +export function getAssetManifest() { + const writer = new BufferWriter(); + const asset = Object.fromEntries( + Array.from(works, (work) => work[1].writes.assets) + .filter((map) => map.size > 0) + .flatMap((map) => + Array.from(map, ([key, { hash, headers }]) => { + const { raw, gzip, zstd } = UNWRAP( + assets.get(hash), + `Asset ${key} (${hash})`, + ); + return [key, { + raw: writer.write(raw, "raw:" + hash), + gzip: writer.write(gzip, "gzip:" + hash), + zstd: writer.write(zstd, "zstd:" + hash), + headers, + }] as const; + }) + ), + ) satisfies BuiltAssetMap; + return { json: asset, blob: writer.get() }; +} + +/* Input/Output with automatic tracking. + * - Inputs read with Io are tracked to know when to rebuild + * - Outputs written with Io are deleted when abandoned. + */ +export class Io { + constructor(public key: string) {} + reads: Reads = { files: new Set(), works: new Set() }; + writes: Writes = { files: new Set(), assets: new Map() }; + + #trackFs(file: string) { + const resolved = toAbs(file); + const key = toRel(resolved); + this.reads.files.add(key); + return { resolved, key }; + } + async readWork<T>(ref: Ref<T>): Promise<T> { + this.reads.works.add(ref.key); + return await ref; + } + /** Track a file in the compilation without reading it. */ + async trackFile(file: string) { + const { key, resolved } = this.#trackFs(file); + if (!files.get(key)) { + let lastModified: number = 0; + try { + lastModified = Math.floor((await fs.stat(file)).mtimeMs); + files.set(key, { type: "f", lastModified, affects: [] }); + } catch { + files.set(key, { type: "null", affects: [] }); + } + } + return resolved; + } + async readFile(file: string) { + return fs.readFile(await this.trackFile(file), "utf-8"); + } + async readJson<T>(file: string) { + return JSON.parse(await this.readFile(file)) as T; + } + async readDir(dir: string) { + const { key, resolved } = this.#trackFs(dir); + const existing = files.get(key); + try { + if (existing?.type === 'd') return existing.contents; + const contents = await fs.readdir(resolved); + contents.sort(); + const contentHash = crypto + .createHash("sha1") + .update(contents.join("\0")) + .digest("base64url"); + files.set(key, { + type: "d", + affects: [], + contentHash, + contents, + }); + return contents; + } catch (err) { + if (!existing) files.set(key, { type: "null", affects: [] }); throw err; } } - - async wait() { - await this.compressQueue.done({ method: "success" }); + async readDirRecursive(dir: string): Promise<string[]> { + const dirs = await this.readDir(dir); + return ( + await Promise.all( + dirs.map(async (child) => { + const abs = path.join(dir, child); + const stat = await fs.stat(abs); + if (stat.isDirectory()) { + return (await this.readDirRecursive(abs)).map((grand) => + path.join(child, grand) + ); + } else { + return child; + } + }), + ) + ).flat(); } - - async flush( - platform: bundle.ServerPlatform, - dir = path.resolve(".clover/out"), + /* Track all dependencies of a module. 
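Imports are walked transitively through the module graph that hot.ts records, so a change to any imported file re-runs this work.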
*/
+  async import<T>(file: string): Promise<T> {
+    const { resolved } = this.#trackFs(file);
+    try {
+      return require(resolved) as T;
+    } finally {
+      const queue = [resolved];
+      const seen = new Set<string>();
+      let current;
+      while ((current = queue.shift())) {
+        const stat = hot.getFileStat(current);
+        if (!stat) continue;
+        const { key } = this.#trackFs(current);
+        if (!files.get(key)) {
+          files.set(key, {
+            type: "f",
+            affects: [],
+            lastModified: stat?.lastModified ?? 0,
+          });
+        }
+        for (const imp of stat.imports) {
+          if (!seen.has(imp)) {
+            seen.add(imp);
+            queue.push(imp);
+          }
+        }
+      }
+    }
+  }
+  async writeAsset(
+    pathname: string,
+    blob: string | Buffer,
+    headersOption?: HeadersInit,
   ) {
-    ASSERT(!this.compressQueue.active);
-    const join = (...args: string[]) => path.join(dir, ...args);
-    const writer = new BufferWriter();
+    ASSERT(pathname.startsWith("/"));
+    ASSERT(!seenWrites.has("a:" + pathname));
+    seenWrites.add("a:" + pathname);
 
-    // TODO: ensure all compressed got compressed
+    const buffer = typeof blob === "string" ? Buffer.from(blob) : blob;
 
-    const asset = Object.fromEntries(
-      Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
-        const raw = writer.write(buffer, hash);
-        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
-        const gzip = writer.write(UNWRAP(gzipBuf), hash + ".gz");
-        const zstd = writer.write(UNWRAP(zstdBuf), hash + ".zstd");
-        return [key, { raw, gzip, zstd, headers }];
-      }),
-    );
-    const backendBundle = UNWRAP(this.out.backendBundle.get(platform));
-
-    // Arrange output files
-    const outFiles: Array<[file: string, contents: string | Buffer]> = [
-      // Asset manifest
-      ["static.json", JSON.stringify(asset)],
-      ["static.blob", writer.get()],
-
-      // Backend
-      ...Object.entries(backendBundle.files).map(([subPath, contents]) =>
-        [
-          subPath,
-          subPath === backendBundle.fileWithMagicWord
-            ? UNWRAP(this.out.backendReplace.get(platform))
-            : contents,
-        ] as [string, Buffer]
-      ),
-    ];
-
-    // TODO: check duplicates
-
-    // Perform all i/o
-    await Promise.all(
-      outFiles.map(([subPath, contents]) =>
-        fs.writeMkdir(join(subPath), contents, { flush: true })
-      ),
-    );
+    const headers = new Headers(headersOption ?? {});
+    const hash = crypto.createHash("sha1").update(buffer).digest("hex");
+    if (!headers.has("Content-Type")) {
+      headers.set("Content-Type", mime.contentTypeFor(pathname));
+    }
+    headers.set("ETag", JSON.stringify(hash));
+    this.writes.assets.set(pathname, {
+      hash,
+      // @ts-expect-error TODO
+      headers: Object.fromEntries(headers),
+    });
+    if (!assets.has(hash)) {
+      jobs += 1;
+      assets.set(hash, undefined!);
+      const [gzipBuffer, zstdBuffer] = await Promise.all([
+        gzip(buffer),
+        zstdCompress(buffer),
+      ]);
+      assets.set(hash, {
+        raw: buffer,
+        gzip: gzipBuffer,
+        zstd: zstdBuffer,
+      });
+      jobs -= 1;
+    }
+  }
+  writeFile(subPath: string, blob: string | Buffer) {
+    ASSERT(!subPath.startsWith("/"));
+    ASSERT(
+      !seenWrites.has("f:" + subPath),
+      `File overwritten: ${JSON.stringify(subPath)}`,
+    );
+    seenWrites.add("f:" + subPath);
+    const buffer = Buffer.isBuffer(blob) ? 
blob : Buffer.from(blob); + writes.set(subPath, { + buffer, + size: buffer.byteLength, + work: this.key, + }); } -} - -export interface PutAsset extends PutBase { - body: string | Buffer; - headers?: Record<string, string | undefined>; -} - -export interface Compressed { - gzip?: Buffer; - zstd?: Buffer; -} - -export interface CompressJob { - algo: "zstd" | "gzip"; - buffer: Buffer; - label: string; - hash: string; } class BufferWriter { size = 0; - seen = new Map<string, View>(); + seen = new Map<string, BufferView>(); buffers: Buffer[] = []; - write(buffer: Buffer, hash: string): View { + write(buffer: Buffer, hash: string): BufferView { let view = this.seen.get(hash); if (view) return view; view = [this.size, this.size += buffer.byteLength]; @@ -600,58 +583,101 @@ class BufferWriter { } } -export type View = [start: number, end: number]; +export function validateSerializable(value: unknown, key: string) { + if (typeof value === "string") { + if (value.includes(hot.projectRoot)) { + throw new Error( + `Return value must not contain the CWD for portability, found at ${key}`, + ); + } + } else if (value && typeof value === "object") { + if (Array.isArray(value)) { + value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`)); + } else if (Object.getPrototypeOf(value) === Object.prototype || Buffer.isBuffer(value)) { + Object.entries(value).forEach(([k, v]) => + validateSerializable(v, `${key}.${k}`) + ); + } else { + throw new Error( + `Return value must be a plain JS object, found ${ + Object.getPrototypeOf(value).constructor.name + } at ${key}`, + ); + } + } else if (["bigint", "function", "symbol"].includes(typeof value)) { + throw new Error( + `Return value must be a plain JS object, found ${typeof value} at ${key}`, + ); + } +} +export function toAbs(absPath: string) { + return path.resolve(hot.projectRoot, absPath); +} + +export function toRel(absPath: string) { + return path.relative(hot.projectRoot, absPath).replaceAll("\\", "/"); +} + +export type BufferView = [start: number, end: number]; +interface Reads { + files: Set<string>; + works: Set<string>; +} +interface FileWrite { + buffer: Buffer | null; + size: number; + work: string; +} +interface Writes { + files: Set<string>; + assets: Map<string, { + hash: string; + headers: Record<string, string>; + }>; +} +interface Asset { + raw: Buffer; + gzip: Buffer; + zstd: Buffer; +} +interface Work<T = unknown> { + debug?: string; + value: T; + reads: Reads; + writes: Writes; + affects: string[]; +} +type TrackedFile = + & { + affects: string[]; + } + & ( + | { type: "f"; lastModified: number } + | { type: "d"; contentHash: string; contents: string[] } + | { type: "null"; } + ); export interface BuiltAssetMap { [route: string]: BuiltAsset; } - export interface BuiltAsset { - raw: View; - gzip: View; - zstd: View; + raw: BufferView; + gzip: BufferView; + zstd: BufferView; headers: Record<string, string>; } -export interface SerializedMeta { - asset: Array<[route: string, data: { - raw: View; - gzip: View | null; - zstd: View | null; - hash: string; - headers: Record<string, string>; - }]>; - script: Array<[key: string, value: string]>; - style: Array<[key: string, value: string]>; - pageMetadata: Array<[key: string, PageMetadata]>; - viewMetadata: Array<[key: string, ViewMetadata]>; - serverMarko: Array<[key: string, hot.MarkoCacheEntry]>; - backendBundle: Array<[platform: string, { - magicWord: string; - fileWithMagicWord: string | null; - files: Array<[string, View]>; - }]>; - backendReplace: Array<[key: string, View]>; - - 
invals: Array<[key: string, {
-    /** Modified */
-    m: number;
-    f: SourceId[];
-    o: ArtifactId[];
-  }]>;
-  sources: Array<[string, ...string[]]>;
-}
-
 const gzip = util.promisify(zlib.gzip);
-const zstd = util.promisify(zlib.zstdCompress);
+const zstdCompress = util.promisify(zlib.zstdCompress);
 
 import * as fs from "#sitegen/fs";
-import * as zlib from "node:zlib";
-import * as util from "node:util";
-import { Queue } from "#sitegen/async";
-import * as hot from "./hot.ts";
-import * as mime from "#sitegen/mime";
 import * as path from "node:path";
-import { Buffer } from "node:buffer";
-import * as css from "./css.ts";
-import type * as bundle from "./bundle.ts";
+import * as hot from "./hot.ts";
+import * as util from "node:util";
+import * as crypto from "node:crypto";
+import * as mime from "#sitegen/mime";
+import * as zlib from "node:zlib";
+import * as console from "@paperclover/console";
+import { Spinner } from "@paperclover/console/Spinner";
+import { formatSize } from "@/file-viewer/format.ts";
+import * as msgpackr from "msgpackr";
diff --git a/framework/lib/assets.ts b/framework/lib/assets.ts
index bc55963..090cf45 100644
--- a/framework/lib/assets.ts
+++ b/framework/lib/assets.ts
@@ -69,7 +69,7 @@ export function etagMatches(etag: string, ifNoneMatch: string) {
   return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1;
 }
 
-function subarrayAsset([start, end]: View) {
+function subarrayAsset([start, end]: BufferView) {
   return assets!.buf.subarray(start, end);
 }
 
@@ -115,6 +115,6 @@ process.on("message", (msg: any) => {
 import * as fs from "#sitegen/fs";
 import type { Context, Next } from "hono";
 import type { StatusCode } from "hono/utils/http-status";
-import type { BuiltAsset, BuiltAssetMap, View } from "../incremental.ts";
+import type { BuiltAsset, BuiltAssetMap, BufferView } from "../incremental.ts";
 import { Buffer } from "node:buffer";
 import * as path from "node:path";
diff --git a/framework/lib/testing.ts b/framework/lib/testing.ts
new file mode 100644
index 0000000..36ad50f
--- /dev/null
+++ b/framework/lib/testing.ts
@@ -0,0 +1,11 @@
+export function tmpFile(basename: string) {
+  const file = path.join(import.meta.dirname, '../../.clover/testing', basename);
+  return {
+    path: file,
+    read: fs.readFile.bind(fs, file),
+    write: fs.writeMkdir.bind(fs, file),
+  };
+}
+
+import * as path from 'node:path';
+import * as fs from './fs.ts';
diff --git a/framework/marko.ts b/framework/marko.ts
new file mode 100644
index 0000000..c865656
--- /dev/null
+++ b/framework/marko.ts
@@ -0,0 +1,47 @@
+export interface MarkoCacheEntry {
+  src: string;
+  scannedClientRefs: string[];
+}
+
+export const markoCache = new Map<string, MarkoCacheEntry>();
+
+export function loadMarko(module: NodeJS.Module, filepath: string) {
+  let cache = markoCache.get(filepath);
+  if (!cache) {
+    let src = fs.readFileSync(filepath, "utf8");
+    // Non-standard: Clover Sitegen implements its own client-side
+    // scripting system, so bare `client import` statements are
+    // rewritten to use it.
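+    // For example, a (hypothetical) `client import "./counter.client.ts"`
+    // line compiles to a <CloverScriptInclude=...> tag plus an import
+    // of addScript from "#sitegen".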
+ const scannedClientRefs = new Set<string>(); + if (src.match(/^\s*client\s+import\s+["']/m)) { + src = src.replace( + /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m, + (_, src) => { + const ref = JSON.parse(`"${src.slice(1, -1)}"`); + const resolved = hot.resolveClientRef(filepath, ref); + scannedClientRefs.add(resolved); + return `<CloverScriptInclude=${ + JSON.stringify(hot.getScriptId(resolved)) + } />`; + }, + ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n'; + } + + src = marko.compileSync(src, filepath).code; + src = src.replace("marko/debug/html", "#ssr/marko"); + cache = { src, scannedClientRefs: Array.from(scannedClientRefs) }; + markoCache.set(filepath, cache); + } + + const { src, scannedClientRefs } = cache; + return hot.loadEsbuildCode(module, filepath, src, { + scannedClientRefs, + }); +} + +import * as marko from "@marko/compiler"; +import * as hot from "./hot.ts"; +import * as fs from "#sitegen/fs"; diff --git a/framework/watch.ts b/framework/watch.ts index 14a39b4..9bc7bf7 100644 --- a/framework/watch.ts +++ b/framework/watch.ts @@ -2,102 +2,67 @@ const debounceMilliseconds = 25; +let subprocess: child_process.ChildProcess | null = null; +process.on("beforeExit", () => { + subprocess?.removeListener("close", onSubprocessClose); +}); + +let watch: Watch; + export async function main() { - let subprocess: child_process.ChildProcess | null = null; - // Catch up state by running a main build. - const { incr } = await generate.main(); - // ...and watch the files that cause invals. - const watch = new Watch(rebuild); - watch.add(...incr.invals.keys()); - statusLine(); - // ... and then serve it! - serve(); + await incr.restore(); + watch = new Watch(rebuild); + rebuild([]); +} - function serve() { - if (subprocess) { - subprocess.removeListener("close", onSubprocessClose); - subprocess.kill(); - } - subprocess = child_process.fork(".clover/out/server.js", [ - "--development", - ], { - stdio: "inherit", - }); - subprocess.on("close", onSubprocessClose); +function serve() { + if (subprocess) { + subprocess.removeListener("close", onSubprocessClose); + subprocess.kill(); } - - function onSubprocessClose(code: number | null, signal: string | null) { - subprocess = null; - const status = code != null ? `code ${code}` : `signal ${signal}`; - console.error(`Backend process exited with ${status}`); - } - - process.on("beforeExit", () => { - subprocess?.removeListener("close", onSubprocessClose); + subprocess = child_process.fork(".clover/o/backend.js", [ + "--development", + ], { + stdio: "inherit", }); + subprocess.on("close", onSubprocessClose); +} - function rebuild(files: string[]) { - files = files.map((file) => path.relative(hot.projectRoot, file)); - const changed: string[] = []; - for (const file of files) { - let mtimeMs: number | null = null; - try { - mtimeMs = fs.statSync(file).mtimeMs; - } catch (err: any) { - if (err?.code !== "ENOENT") throw err; - } - if (incr.updateStat(file, mtimeMs)) changed.push(file); - } - if (changed.length === 0) { - console.warn("Files were modified but the 'modify' time did not change."); - return; - } - withSpinner<any, Awaited<ReturnType<typeof generate.sitegen>>>({ - text: "Rebuilding", - successText: generate.successText, - failureText: () => "sitegen FAIL", - }, async (spinner) => { - console.info("---"); - console.info( - "Updated" + - (changed.length === 1 - ? 
" " + changed[0] - : changed.map((file) => "\n- " + file)), - ); - const result = await generate.sitegen(spinner, incr); - incr.toDisk(); // Allows picking up this state again - for (const file of watch.files) { - const relative = path.relative(hot.projectRoot, file); - if (!incr.invals.has(relative)) watch.remove(file); - } - return result; - }).then((result) => { - // Restart the server if it was changed or not running. - if ( - !subprocess || - result.inserted.some(({ kind }) => kind === "backendReplace") - ) { - serve(); - } else if ( - subprocess && - result.inserted.some(({ kind }) => kind === "asset") - ) { - subprocess.send({ type: "clover.assets.reload" }); - } - return result; - }).catch((err) => { - console.error(util.inspect(err)); - }).finally(statusLine); - } +function onSubprocessClose(code: number | null, signal: string | null) { + subprocess = null; + const status = code != null ? `code ${code}` : `signal ${signal}`; + console.error(`Backend process exited with ${status}`); +} - function statusLine() { - console.info( - `Watching ${incr.invals.size} files \x1b[36m[last change: ${ - new Date().toLocaleTimeString() - }]\x1b[39m`, - ); - } +async function rebuild(files: string[]) { + const hasInvalidated = files.length === 0 + || (await Promise.all(files.map(incr.invalidate))).some(Boolean); + if (!hasInvalidated) return; + incr.compile(generate.generate).then(({ + watchFiles, + newOutputs, + newAssets + }) => { + const removeWatch = [...watch.files].filter(x => !watchFiles.has(x)) + for (const file of removeWatch) watch.remove(file); + watch.add(...watchFiles); + // Restart the server if it was changed or not running. + if (!subprocess || newOutputs.includes("backend.js")) { + serve(); + } else if (subprocess && newAssets) { + subprocess.send({ type: "clover.assets.reload" }); + } + }).catch((err) => { + console.error(util.inspect(err)); + }).finally(statusLine); +} + +function statusLine() { + console.info( + `Watching ${watch.files.size} files ` + + `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`, + ); } class Watch { @@ -174,11 +139,21 @@ class Watch { for (const w of this.watchers) w.close(); } + #getFiles(absPath: string, event: fs.WatchEventType) { + const files = []; + if (this.files.has(absPath)) files.push(absPath); + if (event === 'rename') { + const dir = path.dirname(absPath); + if (this.files.has(dir)) files.push(dir); + } + return files; + } + #handleEvent(root: string, event: fs.WatchEventType, subPath: string | null) { if (!subPath) return; - const file = path.join(root, subPath); - if (!this.files.has(file)) return; - this.stale.add(file); + const files = this.#getFiles(path.join(root, subPath), event); + if (files.length === 0) return; + for(const file of files) this.stale.add(file); const { debounce } = this; if (debounce !== null) clearTimeout(debounce); this.debounce = setTimeout(() => { @@ -192,6 +167,7 @@ class Watch { import * as fs from "node:fs"; import { withSpinner } from "@paperclover/console/Spinner"; import * as generate from "./generate.ts"; +import * as incr from "./incremental.ts"; import * as path from "node:path"; import * as util from "node:util"; import * as hot from "./hot.ts"; diff --git a/package-lock.json b/package-lock.json index f83bc1a..643365b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,8 +15,10 @@ "hls.js": "^1.6.5", "hono": "^4.7.11", "marko": "^6.0.20", + "msgpackr": "^1.11.5", "puppeteer": "^24.10.1", "sharp": "^0.34.2", + "source-map-support": "^0.5.21", "unique-names-generator": 
"^4.7.1", "vscode-oniguruma": "^2.0.1", "vscode-textmate": "^9.2.0" @@ -1478,6 +1480,84 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz", + "integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz", + "integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz", + "integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz", + "integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz", + "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz", + "integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@paperclover/console": { "resolved": "git+https://git.paperclover.net/clo/console.git#1a6ac2b79fdd8a21a1c57d25723975872bc07e3e", "dependencies": { @@ -3765,6 +3845,37 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "license": "MIT" }, + "node_modules/msgpackr": { + "version": "1.11.5", + "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.5.tgz", + "integrity": "sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==", + "license": "MIT", + "optionalDependencies": { + "msgpackr-extract": "^3.0.2" + } + }, + "node_modules/msgpackr-extract": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz", + "integrity": 
"sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "dependencies": { + "node-gyp-build-optional-packages": "5.2.2" + }, + "bin": { + "download-msgpackr-prebuilds": "bin/download-prebuilds.js" + }, + "optionalDependencies": { + "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" + } + }, "node_modules/netmask": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", @@ -3774,6 +3885,21 @@ "node": ">= 0.4.0" } }, + "node_modules/node-gyp-build-optional-packages": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz", + "integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==", + "license": "MIT", + "optional": true, + "dependencies": { + "detect-libc": "^2.0.1" + }, + "bin": { + "node-gyp-build-optional-packages": "bin.js", + "node-gyp-build-optional-packages-optional": "optional.js", + "node-gyp-build-optional-packages-test": "build-test.js" + } + }, "node_modules/node-releases": { "version": "2.0.19", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", diff --git a/package.json b/package.json index 6f7c941..8701176 100644 --- a/package.json +++ b/package.json @@ -11,8 +11,10 @@ "hls.js": "^1.6.5", "hono": "^4.7.11", "marko": "^6.0.20", + "msgpackr": "^1.11.5", "puppeteer": "^24.10.1", "sharp": "^0.34.2", + "source-map-support": "^0.5.21", "unique-names-generator": "^4.7.1", "vscode-oniguruma": "^2.0.1", "vscode-textmate": "^9.2.0" diff --git a/src/file-viewer/bin/scan3.ts b/src/file-viewer/bin/scan3.ts index 71d8c7f..3c69c8c 100644 --- a/src/file-viewer/bin/scan3.ts +++ b/src/file-viewer/bin/scan3.ts @@ -17,9 +17,9 @@ export async function main() { const start = performance.now(); const timerSpinner = new Spinner({ text: () => - `paper clover's scan3 [${ - ((performance.now() - start) / 1000).toFixed(1) - }s]`, + `paper clover's scan3 [${((performance.now() - start) / 1000).toFixed( + 1, + )}s]`, fps: 10, }); using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() }; @@ -38,20 +38,23 @@ export async function main() { qList.addMany(items.map((subPath) => path.join(absPath, subPath))); if (mediaFile) { - const deleted = mediaFile.getChildren() + const deleted = mediaFile + .getChildren() .filter((child) => !items.includes(child.basename)) .flatMap((child) => child.kind === MediaFileKind.directory ? 
child.getRecursiveFileChildren() - : child + : child, ); - qMeta.addMany(deleted.map((mediaFile) => ({ - absPath: path.join(root, mediaFile.path), - publicPath: mediaFile.path, - stat: null, - mediaFile, - }))); + qMeta.addMany( + deleted.map((mediaFile) => ({ + absPath: path.join(root, mediaFile.path), + publicPath: mediaFile.path, + stat: null, + mediaFile, + })), + ); } return; @@ -96,13 +99,13 @@ export async function main() { if ( mediaFile && mediaFile.date.getTime() < stat.mtime.getTime() && - (Date.now() - stat.mtime.getTime()) < monthMilliseconds + Date.now() - stat.mtime.getTime() < monthMilliseconds ) { date = mediaFile.date; console.warn( - `M-time on ${publicPath} was likely corrupted. ${ - formatDate(mediaFile.date) - } -> ${formatDate(stat.mtime)}`, + `M-time on ${publicPath} was likely corrupted. ${formatDate( + mediaFile.date, + )} -> ${formatDate(stat.mtime)}`, ); } mediaFile = MediaFile.createFile({ @@ -129,7 +132,10 @@ export async function main() { await processor.run({ absPath, stat, mediaFile, spin }); mediaFile.setProcessed(mediaFile.processed | (1 << (16 + index))); for (const dependantJob of after) { - ASSERT(dependantJob.needs > 0, `dependantJob.needs > 0, ${dependantJob.needs}`); + ASSERT( + dependantJob.needs > 0, + `dependantJob.needs > 0, ${dependantJob.needs}`, + ); dependantJob.needs -= 1; if (dependantJob.needs == 0) qProcess.add(dependantJob); } @@ -149,25 +155,27 @@ export async function main() { })); } - async function queueProcessors( - { absPath, stat, mediaFile }: Omit<ProcessFileArgs, "spin">, - ) { + async function queueProcessors({ + absPath, + stat, + mediaFile, + }: Omit<ProcessFileArgs, "spin">) { const ext = mediaFile.extensionNonEmpty.toLowerCase(); let possible = processors.filter((p) => - p.include ? p.include.has(ext) : !p.exclude?.has(ext) + p.include ? p.include.has(ext) : !p.exclude?.has(ext), ); if (possible.length === 0) return; const hash = possible.reduce((a, b) => a ^ b.hash, 0) | 1; - ASSERT(hash <= 0xFFFF, `${hash.toString(16)} has no bits above 16 set`); + ASSERT(hash <= 0xffff, `${hash.toString(16)} has no bits above 16 set`); let processed = mediaFile.processed; // If the hash has changed, migrate the bitfield over. // This also runs when the processor hash is in it's initial 0 state. 
const order = decodeProcessors(mediaFile.processors); - if ((processed & 0xFFFF) !== hash) { - const previous = order.filter((_, i) => - (processed & (1 << (16 + i))) !== 0 + if ((processed & 0xffff) !== hash) { + const previous = order.filter( + (_, i) => (processed & (1 << (16 + i))) !== 0, ); processed = hash; for (const { id, hash } of previous) { @@ -182,13 +190,13 @@ export async function main() { } mediaFile.setProcessors( processed, - possible.map((p) => - p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xFF) - ).join(";"), + possible + .map((p) => p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xff)) + .join(";"), ); } else { possible = order.map(({ id }) => - UNWRAP(possible.find((p) => p.id === id)) + UNWRAP(possible.find((p) => p.id === id)), ); } @@ -225,8 +233,9 @@ export async function main() { async function runUndoProcessors(mediaFile: MediaFile) { const { processed } = mediaFile; - const previous = decodeProcessors(mediaFile.processors) - .filter((_, i) => (processed & (1 << (16 + i))) !== 0); + const previous = decodeProcessors(mediaFile.processors).filter( + (_, i) => (processed & (1 << (16 + i))) !== 0, + ); for (const { id } of previous) { const p = processors.find((p) => p.id === id); if (!p) continue; @@ -244,22 +253,23 @@ export async function main() { await qProcess.done(); // Update directory metadata - const dirs = MediaFile.getDirectoriesToReindex() - .sort((a, b) => b.path.length - a.path.length); + const dirs = MediaFile.getDirectoriesToReindex().sort( + (a, b) => b.path.length - a.path.length, + ); for (const dir of dirs) { const children = dir.getChildren(); // readme.txt - const readmeContent = children.find((x) => - x.basename === "readme.txt" - )?.contents ?? ""; + const readmeContent = + children.find((x) => x.basename === "readme.txt")?.contents ?? ""; // dirsort let dirsort: string[] | null = null; const dirSortRaw = children.find((x) => x.basename === ".dirsort")?.contents ?? ""; if (dirSortRaw) { - dirsort = dirSortRaw.split("\n") + dirsort = dirSortRaw + .split("\n") .map((x) => x.trim()) .filter(Boolean); } @@ -284,7 +294,8 @@ export async function main() { } } - const dirHash = crypto.createHash("sha1") + const dirHash = crypto + .createHash("sha1") .update(dir.path + allHashes) .digest("hex"); @@ -323,19 +334,21 @@ export async function main() { console.info( "Updated file viewer index in \x1b[1m" + - ((performance.now() - start) / 1000).toFixed(1) + "s\x1b[0m", + ((performance.now() - start) / 1000).toFixed(1) + + "s\x1b[0m", ); MediaFile.db.prepare("VACUUM").run(); - const { duration, count } = MediaFile.db.prepare< - [], - { count: number; duration: number } - >(` + const { duration, count } = MediaFile.db + .prepare<[], { count: number; duration: number }>( + ` select count(*) as count, sum(duration) as duration from media_files - `).getNonNull(); + `, + ) + .getNonNull(); console.info(); console.info( @@ -365,7 +378,7 @@ const execFile: typeof execFileRaw = (( ) => execFileRaw(...args).catch((e: any) => { if (e?.message?.startsWith?.("Command failed")) { - if (e.code > (2 ** 31)) e.code |= 0; + if (e.code > 2 ** 31) e.code |= 0; const code = e.signal ? 
`signal ${e.signal}` : `code ${e.code}`; e.message = `${e.cmd.split(" ")[0]} failed with ${code}`; } @@ -374,11 +387,7 @@ const execFile: typeof execFileRaw = (( const ffprobeBin = testProgram("ffprobe", "--help"); const ffmpegBin = testProgram("ffmpeg", "--help"); -const ffmpegOptions = [ - "-hide_banner", - "-loglevel", - "warning", -]; +const ffmpegOptions = ["-hide_banner", "-loglevel", "warning"]; const procDuration: Process = { name: "calculate duration", @@ -496,27 +505,23 @@ const procImageSubsets: Process = { for (const size of targetSizes) { const { w, h } = resizeDimensions(width, height, size); for (const { ext, args } of transcodeRules.imagePresets) { - spin.text = baseStatus + - ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`; + spin.text = baseStatus + ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`; stack.use( - await produceAsset( - `${mediaFile.hash}/${size}${ext}`, - async (out) => { - await fs.mkdir(path.dirname(out)); - await fs.rm(out, { force: true }); - await execFile(ffmpegBin!, [ - ...ffmpegOptions, - "-i", - absPath, - "-vf", - `scale=${w}:${h}:force_original_aspect_ratio=increase,crop=${w}:${h}`, - ...args, - out, - ]); - return [out]; - }, - ), + await produceAsset(`${mediaFile.hash}/${size}${ext}`, async (out) => { + await fs.mkdir(path.dirname(out)); + await fs.rm(out, { force: true }); + await execFile(ffmpegBin!, [ + ...ffmpegOptions, + "-i", + absPath, + "-vf", + `scale=${w}:${h}:force_original_aspect_ratio=increase,crop=${w}:${h}`, + ...args, + out, + ]); + return [out]; + }), ); } } @@ -561,21 +566,17 @@ const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({ if (config.encoder && typeof config.encoder.videoSrc === "string") { const { videoSrc, audioSrc, rate } = config.encoder; inputArgs = [ - ...rate ? ["-r", String(rate)] : [], + ...(rate ? ["-r", String(rate)] : []), "-i", videoSrc, - ...audioSrc ? ["-i", audioSrc] : [], + ...(audioSrc ? ["-i", audioSrc] : []), ]; } } catch (err: any) { if (err?.code !== "ENOENT") throw err; } - const args = transcodeRules.getVideoArgs( - preset, - base, - inputArgs, - ); + const args = transcodeRules.getVideoArgs(preset, base, inputArgs); try { const fakeProgress = new Progress({ text: spin.text, spinner: null }); fakeProgress.stop(); @@ -612,22 +613,25 @@ const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({ const procCompression = [ { name: "gzip", fn: () => zlib.createGzip({ level: 9 }) }, { name: "zstd", fn: () => zlib.createZstdCompress() }, -].map(({ name, fn }) => ({ - name: `compress ${name}`, - exclude: rules.extsPreCompressed, - async run({ absPath, mediaFile }) { - if ((mediaFile.size ?? 0) < 10) return; - await produceAsset(`${mediaFile.hash}/${name}`, async (base) => { - fs.mkdirSync(path.dirname(base)); - await stream.promises.pipeline( - fs.createReadStream(absPath), - fn(), - fs.createWriteStream(base), - ); - return [base]; - }); - }, -} satisfies Process as Process)); +].map( + ({ name, fn }) => + ({ + name: `compress ${name}`, + exclude: rules.extsPreCompressed, + async run({ absPath, mediaFile }) { + if ((mediaFile.size ?? 
0) < 10) return; + await produceAsset(`${mediaFile.hash}/${name}`, async (base) => { + fs.mkdirSync(path.dirname(base)); + await stream.promises.pipeline( + fs.createReadStream(absPath), + fn(), + fs.createWriteStream(base), + ); + return [base]; + }); + }, + }) satisfies Process as Process, +); const processors = [ procDimensions, @@ -637,30 +641,29 @@ const processors = [ procImageSubsets, ...procVideos, ...procCompression, -] - .map((process, id, all) => { - const strIndex = (id: number) => - String.fromCharCode("a".charCodeAt(0) + id); - return { - ...process as Process, - id: strIndex(id), - // Create a unique key. - hash: new Uint16Array( - crypto.createHash("sha1") - .update( - process.run.toString() + - (process.version ? String(process.version) : ""), - ) - .digest().buffer, - ).reduce((a, b) => a ^ b), - depends: (process.depends ?? []).map((depend) => { - const index = all.findIndex((p) => p.name === depend); - if (index === -1) throw new Error(`Cannot find depend '${depend}'`); - if (index === id) throw new Error(`Cannot depend on self: '${depend}'`); - return strIndex(index); - }), - }; - }); +].map((process, id, all) => { + const strIndex = (id: number) => String.fromCharCode("a".charCodeAt(0) + id); + return { + ...(process as Process), + id: strIndex(id), + // Create a unique key. + hash: new Uint16Array( + crypto + .createHash("sha1") + .update( + process.run.toString() + + (process.version ? String(process.version) : ""), + ) + .digest().buffer, + ).reduce((a, b) => a ^ b), + depends: (process.depends ?? []).map((depend) => { + const index = all.findIndex((p) => p.name === depend); + if (index === -1) throw new Error(`Cannot find depend '${depend}'`); + if (index === id) throw new Error(`Cannot depend on self: '${depend}'`); + return strIndex(index); + }), + }; +}); function resizeDimensions(w: number, h: number, desiredWidth: number) { ASSERT(desiredWidth < w, `${desiredWidth} < ${w}`); @@ -676,10 +679,7 @@ async function produceAsset( if (asset.refs === 1) { const paths = await builder(path.join(workDir, key)); asset.addFiles( - paths.map((file) => - path.relative(workDir, file) - .replaceAll("\\", "/") - ), + paths.map((file) => path.relative(workDir, file).replaceAll("\\", "/")), ); } return { @@ -719,7 +719,7 @@ interface ProcessJob { absPath: string; stat: fs.Stats; mediaFile: MediaFile; - processor: typeof processors[0]; + processor: (typeof processors)[0]; index: number; after: ProcessJob[]; needs: number; diff --git a/src/global.css b/src/global.css index d25b9e4..0e29c27 100644 --- a/src/global.css +++ b/src/global.css @@ -4,7 +4,9 @@ font-weight: 400 750; font-style: normal; font-display: swap; - font-variation-settings: "CASL" 0.25, "MONO" 0; + font-variation-settings: + "CASL" 0.25, + "MONO" 0; font-style: oblique -15deg 0deg; unicode-range: U+0020-007E; } @@ -14,7 +16,9 @@ font-weight: 400 800; font-style: normal; font-display: swap; - font-variation-settings: "CASL" 0.25, "MONO" 1; + font-variation-settings: + "CASL" 0.25, + "MONO" 1; font-style: oblique -15deg 0deg; unicode-range: U+0020-007E; } @@ -24,21 +28,13 @@ font-weight: 400 800; font-style: normal; font-display: swap; - font-variation-settings: "CASL" 0.25, "MONO" 1; + font-variation-settings: + "CASL" 0.25, + "MONO" 1; font-style: oblique -15deg 0deg; unicode-range: - U+00C0-00FF, - U+00A9, - U+2190-2193, - U+2018, - U+2019, - U+201C, - U+201D, - U+2022, - U+00A0-00A8, - U+00AA-00BF, - U+2194-2199, - U+0100-017F; + U+00C0-00FF, U+00A9, U+2190-2193, U+2018, U+2019, U+201C, U+201D, U+2022, + 
U+00A0-00A8, U+00AA-00BF, U+2194-2199, U+0100-017F; } *, diff --git a/src/pages/index.marko b/src/pages/index.marko index 50d72b1..047da17 100644 --- a/src/pages/index.marko +++ b/src/pages/index.marko @@ -29,7 +29,7 @@ export const meta: Meta = { <main> <div> <h2>posts</h2> - <p>song: <span>in the summer</span> (coming soon, 2025-07-12)</p> + <p>song: <a href="/in-the-summer">in the summer</a> (2025-01-01)</p> <p>song: <a href="/waterfalls">waterfalls</a> (2025-01-01)</p> <h2>things</h2> <p><a href="/q+a">questions and answers</a></p>