incremental works kinda

parent 12e4bbdf5a
commit 9cdb67fcf0
11 changed files with 1109 additions and 1068 deletions
@@ -1,16 +1,21 @@
+async function trackEsbuild(io: Io, metafile: esbuild.Metafile) {
+  await Promise.all(Object.keys(metafile.inputs)
+    .filter(file => !isIgnoredSource(file))
+    .map(file => io.trackFile(file)));
+}
+
 // This file implements client-side bundling, mostly wrapping esbuild.
-bundleClientJavaScript.label = "bundle client-side javascript";
 export async function bundleClientJavaScript(
   io: Io,
   { clientRefs, extraPublicScripts, dev = false }: {
-    clientRefs: string[],
-    extraPublicScripts: string[],
-    dev: boolean
-  }
+    clientRefs: string[];
+    extraPublicScripts: string[];
+    dev: boolean;
+  },
 ) {
   const entryPoints = [
     ...new Set([
-      ...clientRefs.map(x => `src/${x}`),
+      ...clientRefs.map((x) => `src/${x}`),
       ...extraPublicScripts,
     ].map(toAbs)),
   ];
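Note: the new trackEsbuild helper leans on esbuild's metafile, which lists every input that contributed to a bundle; that list is exactly the dependency set an incremental rebuild needs. A minimal sketch of the shape being consumed (hypothetical entry point, real esbuild API):

    import * as esbuild from "esbuild";

    const result = await esbuild.build({
      entryPoints: ["src/app.client.ts"], // hypothetical entry
      bundle: true,
      metafile: true, // ask esbuild for the input/output graph
      write: false,
    });
    // metafile.inputs is keyed by source path; each key becomes a tracked file.
    for (const input of Object.keys(result.metafile!.inputs)) {
      console.log("depends on:", input);
    }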
@@ -68,13 +73,14 @@ export async function bundleClientJavaScript(
     )
   );
   const { metafile, outputFiles } = bundle;
+  const p = []
+  p.push(trackEsbuild(io, metafile));
   const scripts: Record<string, string> = {};
   for (const file of outputFiles) {
     const { text } = file;
     let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
     const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
-    const sources = Object.keys(inputs)
-      .filter((x) => !x.startsWith("<define:"));
+    const sources = Object.keys(inputs).filter((x) => !isIgnoredSource(x));

     // Register non-chunks as script entries.
     const chunk = route.startsWith("/js/c.");
@@ -85,52 +91,60 @@ export async function bundleClientJavaScript(
     }
     // Register chunks and public scripts as assets.
     if (chunk || publicScriptRoutes.includes(route)) {
-      io.writeAsset(route, text);
+      p.push(io.writeAsset(route, text));
     }
   }
+  await Promise.all(p);
   return scripts;
 }

 export type ServerPlatform = "node" | "passthru";
+export interface ServerSideOptions {
+  entries: string[],
+  viewItems: sg.FileItem[]
+  viewRefs: incr.Ref<PreparedView>[],
+  styleMap: Map<string, incr.Ref<string>>;
+  scriptMap: incr.Ref<Record<string, string>>;
+  platform: ServerPlatform,
+}
 export async function bundleServerJavaScript(
-  io: Io,
-  {
-    entry,
-    platform,
-  }: {
-    entry: string,
-    platform: ServerPlatform
-  },
+  { viewItems, viewRefs, styleMap, scriptMap: wScriptMap, entries, platform }: ServerSideOptions
 ) {
+  const wViewSource = incr.work(async (_, viewItems: sg.FileItem[]) => {
   const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
-  const viewSource = [
-    ...Array.from(
-      incr.out.viewMetadata,
-      ([, view], i) => `import * as view${i} from ${JSON.stringify(view.file)}`,
-    ),
+  return {
+    magicWord,
+    file: [
+      ...viewItems.map((view, i) => `import * as view${i} from ${JSON.stringify(view.file)}`),
     `const styles = ${magicWord}[-2]`,
     `export const scripts = ${magicWord}[-1]`,
     "export const views = {",
-    ...Array.from(incr.out.viewMetadata, ([key, view], i) =>
-      [
-        `  ${JSON.stringify(key)}: {`,
+      ...viewItems.map((view, i) => [
+        `  ${JSON.stringify(view.id)}: {`,
         `    component: view${i}.default,`,
-        // `    meta: ${
-        //   view.staticMeta ? JSON.stringify(view.staticMeta) : `view${i}.meta`
-        // },`,
         `    meta: view${i}.meta,`,
-        `    layout: ${view.hasLayout ? `view${i}.layout?.default` : "null"},`,
+        `    layout: view${i}.layout?.default ?? null,`,
        `    inlineCss: styles[${magicWord}[${i}]]`,
        `  },`,
      ].join("\n")),
    "}",
-  ].join("\n");
+    ].join("\n")
+  };
+  }, viewItems)

+  const wBundles = entries.map(entry => [entry, incr.work(async (io, entry) => {
+    const pkg = await io.readJson<{ dependencies: Record<string, string>; }>("package.json");

+    let magicWord = null as string | null;
   // -- plugins --
   const serverPlugins: esbuild.Plugin[] = [
     virtualFiles({
-      "$views": viewSource,
+      // only add dependency when imported.
+      "$views": async () => {
+        const view = await io.readWork(wViewSource);
+        ({ magicWord } = view);
+        return view.file;
+      },
     }),
     projectRelativeResolution(),
     markoViaBuildCache(),
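Note: the generated "$views" module embeds a per-build magicWord ("C_" plus a UUID) and refers to data through indexed expressions like MAGIC[-1] or MAGIC[3]. Those expressions survive bundling untouched and are string-replaced afterwards. A reduced sketch of the placeholder half of the technique, with a hypothetical view name:

    import * as crypto from "node:crypto";

    // A token that cannot collide with real identifiers in the bundle.
    const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");

    // Generated module text; the indices are resolved after bundling.
    const viewSource = [
      `export const scripts = ${magicWord}[-1]`,
      `export const views = { home: ${magicWord}[0] }`, // hypothetical view
    ].join("\n");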
@@ -158,13 +172,11 @@ platform: ServerPlatform
       },
     },
   ];
-  const pkg = await fs.readJson("package.json") as {
-    dependencies: Record<string, string>;
-  };
   const { metafile, outputFiles } = await esbuild.build({
     bundle: true,
     chunkNames: "c.[hash]",
-    entryNames: "server",
+    entryNames: path.basename(entry, path.extname(entry)),
     entryPoints: [
       path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
     ],
@@ -182,71 +194,60 @@ platform: ServerPlatform
     jsxDev: false,
     define: {
       MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
+      CLOVER_SERVER_ENTRY: JSON.stringify(entry),
     },
     external: Object.keys(pkg.dependencies)
       .filter((x) => !x.startsWith("@paperclover")),
   });
+  await trackEsbuild(io, metafile)

-  const files: Record<string, Buffer> = {};
-  let fileWithMagicWord: string | null = null;
+  let fileWithMagicWord: {
+    bytes: Buffer;
+    basename: string;
+    magicWord: string;
+  } | null = null;
   for (const output of outputFiles) {
-    const basename = output.path.replace(/^.*?!/, "");
+    const basename = output.path.replace(/^.*?!(?:\/|\\)/, "");
     const key = "out!" + basename.replaceAll("\\", "/");
     // If this contains the generated "$views" file, then
     // mark this file as the one for replacement. Because
     // `splitting` is `true`, esbuild will not emit this
     // file in more than one chunk.
-    if (metafile.outputs[key].inputs["framework/lib/view.ts"]) {
-      fileWithMagicWord = basename;
-    }
-    files[basename] = Buffer.from(output.contents);
-  }
-  incr.put({
-    kind: "backendBundle",
-    key: platform,
-    value: {
+    if (magicWord && metafile.outputs[key].inputs["framework/lib/view.ts"]) {
+      ASSERT(!fileWithMagicWord);
+      fileWithMagicWord = {
+        basename,
+        bytes: Buffer.from(output.contents),
         magicWord,
-      files,
-      fileWithMagicWord,
-    },
-    sources: Object.keys(metafile.inputs).filter((x) =>
-      !x.includes("<define:") &&
-      !x.startsWith("vfs:") &&
-      !x.startsWith("dropped:") &&
-      !x.includes("node_modules")
-    ),
-  });
+      };
+    } else {
+      io.writeFile(basename, Buffer.from(output.contents))
+    }
   }

-export async function finalizeServerJavaScript(
-  incr: Incremental,
-  platform: ServerPlatform,
-) {
-  if (incr.hasArtifact("backendReplace", platform)) return;
-  const {
-    files,
-    fileWithMagicWord,
-    magicWord,
-  } = UNWRAP(incr.getArtifact("backendBundle", platform));
-
-  if (!fileWithMagicWord) return;
+    return fileWithMagicWord;
+  }, entry)] as const);

+  const wProcessed = wBundles.map(async([entry, wBundle]) => {
+    if (!await wBundle) return;
+    await incr.work(async (io) => {
+      // Only the reachable resources need to be read and inserted into the bundle.
+      // This is what Map<string, incr.Ref> is for
+      const { basename, bytes, magicWord } = UNWRAP(await io.readWork(wBundle));
+      const views = await Promise.all(viewRefs.map(ref => io.readWork(ref)));

+      // Client JS
+      const scriptList = Object.entries(await io.readWork(wScriptMap));
+      const viewScriptsList = new Set(views.flatMap(view => view.clientRefs));
+      const neededScripts = scriptList.filter(([k]) => viewScriptsList.has(k));

-  // Only the reachable resources need to be inserted into the bundle.
-  const viewScriptsList = new Set(
-    Array.from(incr.out.viewMetadata.values())
-      .flatMap((view) => view.clientRefs),
-  );
-  const viewStyleKeys = Array.from(incr.out.viewMetadata.values())
-    .map((view) => css.styleKey(view.cssImports, view.theme));
-  const viewCssBundles = viewStyleKeys
-    .map((key) => UNWRAP(incr.out.style.get(key), "Style key: " + key));
+      // CSS
+      const viewStyleKeys = views.map((view) => view.styleKey);
+      const viewCssBundles = await Promise.all(
+        viewStyleKeys.map((key) => io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key))));

-  // Deduplicate styles
   const styleList = Array.from(new Set(viewCssBundles));

   // Replace the magic word
-  let text = files[fileWithMagicWord].toString("utf-8");
-  text = text.replace(
+      const text = bytes.toString("utf-8").replace(
     new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"),
     (_, i) => {
       i = Number(i);
@@ -256,27 +257,18 @@ export async function finalizeServerJavaScript(
       }
       // Inline the script data
       if (i === -1) {
-        return JSON.stringify(Object.fromEntries(incr.out.script));
+        return JSON.stringify(Object.fromEntries(neededScripts));
       }
       // Reference an index into `styleList`
       return `${styleList.indexOf(viewCssBundles[i])}`;
     },
   );

-  incr.put({
-    kind: "backendReplace",
-    key: platform,
-    sources: [
-      // Backend input code (includes view code)
-      ...incr.sourcesFor("backendBundle", platform),
-      // Script
-      ...Array.from(viewScriptsList)
-        .flatMap((key) => incr.sourcesFor("script", hot.getScriptId(key))),
-      // Style
-      ...viewStyleKeys.flatMap((key) => incr.sourcesFor("style", key)),
-    ],
-    value: Buffer.from(text),
-  });
+      io.writeFile(basename, text);
    });
+  })

+  await Promise.all(wProcessed);
 }
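Note: the replacement half rewrites every magicWord[i] occurrence in the emitted chunk in a single pass; the capture group carries the index, and negative indices select special payloads (script map, style data). A self-contained sketch under those assumptions:

    const magicWord = "C_example"; // stands in for the generated UUID token
    const styleList = ["body{margin:0}"];
    const scripts = { canvas: "/* client code */" };

    const bundled = `const s = ${magicWord}[-1]; const css = ${magicWord}[0];`;
    const out = bundled.replace(
      new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"),
      (_, index) => {
        const i = Number(index);
        if (i === -1) return JSON.stringify(scripts); // inline the script map
        return JSON.stringify(styleList[i]); // reference a style bundle
      },
    );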
@@ -284,9 +276,15 @@ import * as esbuild from "esbuild";
 import * as path from "node:path";
 import process from "node:process";
 import * as hot from "./hot.ts";
-import { markoViaBuildCache, projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
+import {
+  isIgnoredSource,
+  markoViaBuildCache,
+  projectRelativeResolution,
+  virtualFiles,
+} from "./esbuild-support.ts";
 import { Io, toAbs, toRel } from "./incremental.ts";
 import * as css from "./css.ts";
 import * as fs from "#sitegen/fs";
 import * as mime from "#sitegen/mime";
-import * as incr from './incremental.ts';
+import * as incr from "./incremental.ts";
+import * as sg from "#sitegen";import type { PreparedView } from "./generate2.ts";import { meta } from "@/file-viewer/pages/file.cotyledon_speedbump.tsx";
@@ -62,7 +62,7 @@ export async function bundleCssFiles(
     dev: boolean,
   }
 ) {
-  cssImports = await Promise.all(cssImports.map((file) => io.trackFile(file)));
+  cssImports = await Promise.all(cssImports.map((file) => io.trackFile('src/' + file)));
   const plugin = {
     name: "clover css",
     setup(b) {
@@ -1,5 +1,7 @@
+type Awaitable<T> = T | Promise<T>;
+
 export function virtualFiles(
-  map: Record<string, string | esbuild.OnLoadResult>,
+  map: Record<string, string | esbuild.OnLoadResult | (() => Awaitable<string | esbuild.OnLoadResult>)>,
 ) {
   return {
     name: "clover vfs",
@@ -18,8 +20,9 @@ export function virtualFiles(
       );
       b.onLoad(
         { filter: /./, namespace: "vfs" },
-        ({ path }) => {
-          const entry = map[path];
+        async ({ path }) => {
+          let entry = map[path];
+          if (typeof entry === 'function') entry = await entry();
           return ({
             resolveDir: ".",
             loader: "ts",
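Note: allowing a virtual file to be a thunk means its contents are only computed, and its dependencies only registered, when esbuild actually loads the import. A self-contained sketch of the same plugin shape (hypothetical module contents, real esbuild plugin API):

    import * as esbuild from "esbuild";

    type Awaitable<T> = T | Promise<T>;

    function lazyVirtualFiles(
      map: Record<string, string | (() => Awaitable<string>)>,
    ): esbuild.Plugin {
      return {
        name: "lazy vfs",
        setup(b) {
          b.onResolve({ filter: /^\$/ }, ({ path }) =>
            path in map ? { path, namespace: "vfs" } : null);
          b.onLoad({ filter: /./, namespace: "vfs" }, async ({ path }) => {
            let entry = map[path];
            if (typeof entry === "function") entry = await entry(); // on demand
            return { contents: entry, loader: "ts", resolveDir: "." };
          });
        },
      };
    }

    // "$views" is only generated if some module imports it.
    const plugin = lazyVirtualFiles({
      "$views": async () => "export const views = {};", // hypothetical contents
    });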
@@ -99,6 +102,13 @@ export function markoViaBuildCache(): esbuild.Plugin {
   };
 }
+
+export function isIgnoredSource(source: string) {
+  return source.includes("<define:") ||
+    source.startsWith("vfs:") ||
+    source.startsWith("dropped:") ||
+    source.includes("node_modules")
+}

 import * as esbuild from "esbuild";
 import * as string from "#sitegen/string";
 import * as path from "node:path";
@@ -1,61 +1,113 @@
-// This file contains the main site generator build process.
-// By using `Incremental`'s ability to automatically purge stale
-// assets, the `sitegen` function performs partial rebuilds.
+const { toRel, toAbs } = incr;
+const globalCssPath = toAbs("src/global.css");

-export function main() {
-  return withSpinner<Record<string, unknown>, any>({
-    text: "Recovering State",
-    successText,
-    failureText: () => "sitegen FAIL",
-  }, async (spinner) => {
-    // const incr = Incremental.fromDisk();
-    // await incr.statAllFiles();
-    const incr = new Incremental();
-    const result = await sitegen(spinner, incr);
-    incr.toDisk(); // Allows picking up this state again
-    return result;
-  }) as ReturnType<typeof sitegen>;
+export async function main() {
+  await incr.restore();
+  await incr.compile(generate);
 }

-export function successText({
-  elapsed,
-  inserted,
-  referenced,
-  unreferenced,
-}: Awaited<ReturnType<typeof sitegen>>) {
-  const s = (array: unknown[]) => array.length === 1 ? "" : "s";
-  const kind = inserted.length === referenced.length ? "build" : "update";
-  const status = inserted.length > 0
-    ? `${kind} ${inserted.length} key${s(inserted)}`
-    : unreferenced.length > 0
-    ? `pruned ${unreferenced.length} key${s(unreferenced)}`
-    : `checked ${referenced.length} key${s(referenced)}`;
-  return `sitegen! ${status} in ${elapsed.toFixed(1)}s`;
+export async function generate() {
+  // -- read config and discover files --
+  const siteConfig = await incr.work(readManifest);
+  const {
+    staticFiles,
+    scripts,
+    views,
+    pages,
+  } = await discoverAllFiles(siteConfig);
+
+  // TODO: make sure that `static` and `pages` does not overlap
+
+  // TODO: loadMarkoCache
+
+  // -- perform build-time rendering --
+  const builtPages = pages.map((item) => incr.work(preparePage, item));
+  const builtViews = views.map((item) => incr.work(prepareView, item));
+  const builtStaticFiles = Promise.all((staticFiles.map((item) =>
+    incr.work(
+      async (io, { id, file }) => void await io.writeAsset(id, await io.readFile(file)),
+      item,
+    )
+  )));
+  const routes = await Promise.all([...builtViews, ...builtPages]);
+
+  // -- page resources --
+  const scriptMap = incr.work(bundle.bundleClientJavaScript, {
+    clientRefs: routes.flatMap((x) => x.clientRefs),
+    extraPublicScripts: scripts.map((entry) => entry.file),
+    dev: false,
+  });
+  const styleMap = prepareInlineCss(routes);
+
+  // -- backend --
+  const builtBackend = bundle.bundleServerJavaScript({
+    entries: siteConfig.backends,
+    platform: 'node',
+    styleMap,
+    scriptMap,
+    viewItems: views,
+    viewRefs: builtViews,
+  })
+
+  // -- assemble page assets --
+  const pAssemblePages = builtPages.map((page) =>
+    assembleAndWritePage(page, styleMap, scriptMap)
+  );
+
+  await Promise.all([
+    builtBackend,
+    builtStaticFiles,
+    ...pAssemblePages,
+  ]);
 }

-export async function sitegen(
-  status: Spinner,
-  incr: Incremental,
+export async function readManifest(io: Io) {
+  const cfg = await io.import<typeof import("../src/site.ts")>("src/site.ts");
+  return {
+    siteSections: cfg.siteSections.map((section) => ({
+      root: toRel(section.root),
+    })),
+    backends: cfg.backends.map(toRel),
+  };
+}
+
+export async function discoverAllFiles(
+  siteConfig: Awaited<ReturnType<typeof readManifest>>,
 ) {
-  const startTime = performance.now();
+  return (
+    await Promise.all(
+      siteConfig.siteSections.map(({ root: sectionRoot }) =>
+        incr.work(scanSiteSection, toAbs(sectionRoot))
+      ),
+    )
+  ).reduce((acc, next) => ({
+    staticFiles: acc.staticFiles.concat(next.staticFiles),
+    pages: acc.pages.concat(next.pages),
+    views: acc.views.concat(next.views),
+    scripts: acc.scripts.concat(next.scripts),
+  }));
+}

-let root = path.resolve(import.meta.dirname, "../src");
-const join = (...sub: string[]) => path.join(root, ...sub);
+export async function scanSiteSection(io: Io, sectionRoot: string) {
+  // Static files are compressed and served as-is.
+  // - "{section}/static/*.png"
+  let staticFiles: FileItem[] = [];
+  // Pages are rendered then served as static files.
+  // - "{section}/pages/*.marko"
+  let pages: FileItem[] = [];
+  // Views are dynamically rendered pages called via backend code.
+  // - "{section}/views/*.tsx"
+  let views: FileItem[] = [];
+  // Public scripts are bundled for the client as static assets under "/js/[...]"
+  // This is used for the file viewer's canvases.
+  // Note that '.client.ts' can be placed anywhere in the file structure.
+  // - "{section}/scripts/*.client.ts"
+  let scripts: FileItem[] = [];

-// Sitegen reviews every defined section for resources to process
-const sections: sg.Section[] =
-  require(path.join(root, "site.ts")).siteSections;
-
-// -- Scan for files --
-status.text = "Scanning Project";
-for (const section of sections) {
-  const { root: sectionRoot } = section;
   const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
-  const rootPrefix = root === sectionRoot
+  const rootPrefix = hot.projectSrc === sectionRoot
     ? ""
-    : path.relative(root, sectionRoot) + "/";
+    : path.relative(hot.projectSrc, sectionRoot) + "/";
   const kinds = [
     {
       dir: sectionPath("pages"),
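Note: discovery fans out one incr.work job per site section and merges the results with a reduce, so sections scan in parallel while downstream code sees one flat set of lists. A reduced sketch of the merge step (trimmed shape, hypothetical names):

    type Scan = { pages: string[]; staticFiles: string[] };

    // Fan out one job per section, then concatenate the per-section lists.
    async function mergeScans(jobs: Promise<Scan>[]): Promise<Scan> {
      return (await Promise.all(jobs)).reduce((acc, next) => ({
        pages: acc.pages.concat(next.pages),
        staticFiles: acc.staticFiles.concat(next.staticFiles),
      }));
    }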
@@ -84,11 +136,23 @@ export async function sitegen(
       exclude: [".client.ts", ".client.tsx"],
     },
   ];
-  for (
-    const { dir, list, prefix, include = [""], exclude = [], ext = false }
-      of kinds
-  ) {
-    const items = fs.readDirRecOptionalSync(dir);
+  for (const kind of kinds) {
+    const {
+      dir,
+      list,
+      prefix,
+      include = [""],
+      exclude = [],
+      ext = false,
+    } = kind;
+
+    let items;
+    try {
+      items = await io.readDirRecursive(dir);
+    } catch (err: any) {
+      if (err.code === "ENOENT") continue;
+      throw err;
+    }
     for (const subPath of items) {
       const file = path.join(dir, subPath);
       const stat = fs.statSync(file);
@@ -97,67 +161,26 @@ export async function sitegen(
       if (exclude.some((e) => subPath.endsWith(e))) continue;
       const trim = ext
         ? subPath
-        : subPath.slice(0, -path.extname(subPath).length).replaceAll(
-          ".",
-          "/",
-        );
+        : subPath.slice(0, -path.extname(subPath).length).replaceAll(".", "/");
       let id = prefix + trim.replaceAll("\\", "/");
       if (prefix === "/" && id.endsWith("/index")) {
         id = id.slice(0, -"/index".length) || "/";
       }
-      list.push({ id, file: file });
+      list.push({ id, file: path.relative(hot.projectRoot, file) });
     }
   }
-}
-const globalCssPath = join("global.css");
-
-// TODO: make sure that `static` and `pages` does not overlap
-
-// -- inline style sheets, used and shared by pages and views --
-status.text = "Building";
-const cssOnce = new OnceMap();
-const cssQueue = new Queue({
-  name: "Bundle",
-  async fn([, key, files, theme]: [string, string, string[], css.Theme]) {
-    const { text, sources } = await css.bundleCssFiles(files, theme);
-    incr.put({
-      kind: "style",
-      key,
-      sources,
-      value: text,
-    });
-  },
-  passive: true,
-  getItemText: ([id]) => id,
-  maxJobs: 2,
-});
-function ensureCssGetsBuilt(
-  cssImports: string[],
-  theme: css.Theme,
-  referrer: string,
-) {
-  const key = css.styleKey(cssImports, theme);
-  cssOnce.get(
-    key,
-    async () => {
-      incr.getArtifact("style", key) ??
-        await cssQueue.add([referrer, key, cssImports, theme]);
-    },
-  );
-}
-
-// -- server side render pages --
-async function loadPageModule({ file }: FileItem) {
-  require(file);
+  return { staticFiles, pages, views, scripts };
 }

-async function renderPage(item: FileItem) {
+export async function preparePage(io: Io, item: sg.FileItem) {
   // -- load and validate module --
   let {
     default: Page,
     meta: metadata,
     theme: pageTheme,
     layout,
-  } = require(item.file);
+  } = await io.import<any>(item.file);
   if (!Page) {
     throw new Error("Page is missing a 'default' export.");
   }
@@ -175,7 +198,6 @@ export async function sitegen(
     new Set([globalCssPath, ...hot.getCssImports(item.file)]),
     (file) => path.relative(hot.projectSrc, file),
   );
-  ensureCssGetsBuilt(cssImports, theme, item.id);

   // -- metadata --
   const renderedMetaPromise = Promise.resolve(
@@ -197,27 +219,23 @@ export async function sitegen(
   ]);
   if (!renderedMeta.includes("<title>")) {
     throw new Error(
-      "Page is missing 'meta.title'. " +
-        "All pages need a title tag.",
+      "Page is missing 'meta.title'. " + "All pages need a title tag.",
     );
   }
-  incr.put({
-    kind: "pageMetadata",
-    key: item.id,
-    // Incremental integrates with `hot.ts` + `require`
-    // to trace all the needed source files here.
-    sources: [item.file],
-    value: {
+  const styleKey = css.styleKey(cssImports, theme);
+  return {
+    id: item.id,
     html: text,
     meta: renderedMeta,
     cssImports,
     theme: theme ?? null,
+    styleKey,
     clientRefs: Array.from(addon.sitegen.scripts),
-    },
-  });
+  };
 }

-async function prepareView(item: FileItem) {
-  const module = require(item.file);
+export async function prepareView(io: Io, item: sg.FileItem) {
+  const module = await io.import<any>(item.file);
   if (!module.meta) {
     throw new Error(`${item.file} is missing 'export const meta'`);
   }
@@ -233,175 +251,80 @@ export async function sitegen(
     new Set([globalCssPath, ...hot.getCssImports(item.file)]),
     (file) => path.relative(hot.projectSrc, file),
   );
-  ensureCssGetsBuilt(cssImports, theme, item.id);
-  incr.put({
-    kind: "viewMetadata",
-    key: item.id,
-    sources: [item.file],
-    value: {
+  const styleKey = css.styleKey(cssImports, theme);
+  return {
     file: path.relative(hot.projectRoot, item.file),
     cssImports,
     theme,
     clientRefs: hot.getClientScriptRefs(item.file),
     hasLayout: !!module.layout?.default,
-    },
-  });
-}
-
-  // Of the pages that are already built, a call to 'ensureCssGetsBuilt' is
-  // required so that it's (1) re-built if needed, (2) not pruned from build.
-  const neededPages = pages.filter((page) => {
-    const existing = incr.getArtifact("pageMetadata", page.id);
-    if (existing) {
-      const { cssImports, theme } = existing;
-      ensureCssGetsBuilt(cssImports, theme, page.id);
-    }
-    return !existing;
-  });
-  const neededViews = views.filter((view) => {
-    const existing = incr.getArtifact("viewMetadata", view.id);
-    if (existing) {
-      const { cssImports, theme } = existing;
-      ensureCssGetsBuilt(cssImports, theme, view.id);
-    }
-    return !existing;
-  });
-
-  // Load the marko cache before render modules are loaded
-  incr.loadMarkoCache();
-
-  // This is done in two passes so that a page that throws during evaluation
-  // will report "Load Render Module" instead of "Render Static Page".
-  const spinnerFormat = status.format;
-  status.format = () => "";
-  const moduleLoadQueue = new Queue({
-    name: "Load Render Module",
-    fn: loadPageModule,
-    getItemText,
-    maxJobs: 1,
-  });
-  moduleLoadQueue.addMany(neededPages);
-  moduleLoadQueue.addMany(neededViews);
-  await moduleLoadQueue.done({ method: "stop" });
-  const pageQueue = new Queue({
-    name: "Render Static Page",
-    fn: renderPage,
-    getItemText,
-    maxJobs: 2,
-  });
-  pageQueue.addMany(neededPages);
-  const viewQueue = new Queue({
-    name: "Build Dynamic View",
-    fn: prepareView,
-    getItemText,
-    maxJobs: 2,
-  });
-  viewQueue.addMany(neededViews);
-  const pageAndViews = [
-    pageQueue.done({ method: "stop" }),
-    viewQueue.done({ method: "stop" }),
-  ];
-  await Promise.allSettled(pageAndViews);
-  await Promise.all(pageAndViews);
-  status.format = spinnerFormat;
-
-  // -- bundle server javascript (backend and views) --
-  status.text = "Bundle JavaScript";
-  incr.snapshotMarkoCache();
-  const serverJavaScriptPromise = bundle.bundleServerJavaScript(incr, "node");
-
-  // -- bundle client javascript --
-  const referencedScripts = Array.from(
-    new Set(
-      [
-        ...pages.map((item) =>
-          UNWRAP(
-            incr.getArtifact("pageMetadata", item.id),
-            `Missing pageMetadata ${item.id}`,
-          )
-        ),
-        ...views.map((item) =>
-          UNWRAP(
-            incr.getArtifact("viewMetadata", item.id),
-            `Missing viewMetadata ${item.id}`,
-          )
-        ),
-      ].flatMap((item) => item.clientRefs),
-    ),
-    (script) => path.resolve(hot.projectSrc, script),
-  ).filter((file) => !incr.hasArtifact("script", hot.getScriptId(file)));
-  const extraPublicScripts = scripts.map((entry) => entry.file);
-  const clientJavaScriptPromise = bundle.bundleClientJavaScript(
-    referencedScripts,
-    extraPublicScripts,
-    incr,
-  );
-  await Promise.all([
-    serverJavaScriptPromise,
-    clientJavaScriptPromise,
-    cssQueue.done({ method: "stop" }),
-  ]);
-  await bundle.finalizeServerJavaScript(incr, "node");
-
-  // -- copy/compress static files --
-  async function doStaticFile(item: FileItem) {
-    const body = await fs.readFile(item.file);
-    await incr.putAsset({
-      sources: [item.file],
-      key: item.id,
-      body,
-    });
-  }
-  const staticQueue = new Queue({
-    name: "Load Static",
-    fn: doStaticFile,
-    getItemText,
-    maxJobs: 16,
-  });
-  status.format = () => "";
-  staticQueue.addMany(
-    staticFiles.filter((file) => !incr.hasArtifact("asset", file.id)),
-  );
-  await staticQueue.done({ method: "stop" });
-  status.format = spinnerFormat;
-
-  // -- concatenate static rendered pages --
-  status.text = `Concat Pages`;
-  await Promise.all(pages.map(async (page) => {
-  }));
-  status.format = () => "";
-  status.text = ``;
-  // This will wait for all compression jobs to finish, which up
-  // to this point have been left as dangling promises.
-  await incr.wait();
-
-  const { inserted, referenced, unreferenced } = incr.shake();
-
-  // Flush the site to disk.
-  status.format = spinnerFormat;
-  status.text = `Incremental Flush`;
-  incr.flush("node"); // Write outputs
-  return {
-    incr,
-    inserted,
-    referenced,
-    unreferenced,
-    elapsed: (performance.now() - startTime) / 1000,
+    styleKey,
   };
 }
+export type PreparedView = Awaited<ReturnType<typeof prepareView>>;

-function getItemText({ file }: FileItem) {
-  return path.relative(hot.projectSrc, file).replaceAll("\\", "/");
+export function prepareInlineCss(
+  items: Array<{
+    styleKey: string;
+    cssImports: string[];
+    theme: css.Theme;
+  }>,
+) {
+  const map = new Map<string, incr.Ref<string>>();
+  for (const { styleKey, cssImports, theme } of items) {
+    if (map.has(styleKey)) continue;
+    map.set(
+      styleKey,
+      incr.work(css.bundleCssFiles, {
+        cssImports,
+        theme,
+        dev: false,
+      }),
+    );
+  }
+  return map;
 }

-import { OnceMap, Queue } from "#sitegen/async";
-import { Incremental } from "./incremental.ts";
+export type PreparedPage = Awaited<ReturnType<typeof preparePage>>;
+export async function assembleAndWritePage(
+  pageWork: incr.Ref<PreparedPage>,
+  styleMap: Map<string, incr.Ref<string>>,
+  scriptWork: incr.Ref<Record<string, string>>,
+) {
+  const page = await pageWork;
+  return incr.work(
+    async (io, { id, html, meta, styleKey, clientRefs }) => {
+      const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));
+
+      const scriptIds = clientRefs.map(hot.getScriptId);
+      const scriptMap = await io.readWork(scriptWork);
+      const scripts = scriptIds.map((ref) =>
+        UNWRAP(scriptMap[ref], `Missing script ${ref}`)
+      )
+        .map((x) => `{${x}}`).join("\n");
+
+      const doc = wrapDocument({
+        body: html,
+        head: meta,
+        inlineCss,
+        scripts,
+      });
+      await io.writeAsset(id, doc, {
+        "Content-Type": "text/html",
+      });
+    },
+    page,
+  );
+}
+
+import * as sg from "#sitegen";
+import * as incr from "./incremental.ts";
+import { Io } from "./incremental.ts";
 import * as bundle from "./bundle.ts";
 import * as css from "./css.ts";
 import * as engine from "./engine/ssr.ts";
 import * as hot from "./hot.ts";
 import * as fs from "#sitegen/fs";
-import * as sg from "#sitegen";
 import type { FileItem } from "#sitegen";
 import * as path from "node:path";
 import * as meta from "#sitegen/meta";
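Note: prepareInlineCss deduplicates by styleKey (derived from a page's cssImports plus theme), so any number of routes sharing a stylesheet set share exactly one CSS bundling job. A generic sketch of the pattern (hypothetical Ref shape):

    type Ref<T> = PromiseLike<T> & { key: string };

    function dedupeByKey<T>(
      items: { styleKey: string }[],
      start: (styleKey: string) => Ref<T>,
    ): Map<string, Ref<T>> {
      const map = new Map<string, Ref<T>>();
      for (const { styleKey } of items) {
        if (!map.has(styleKey)) map.set(styleKey, start(styleKey)); // one job per key
      }
      return map; // consumers look their bundle up by styleKey
    }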
@@ -1,324 +0,0 @@
-const { toRel, toAbs } = incr;
-const globalCssPath = toAbs("src/global.css");
-
-export async function main() {
-  const startTime = performance.now();
-
-  // -- read config and discover files --
-  const siteConfig = await incr.work(readManifest);
-  const {
-    staticFiles,
-    scripts,
-    views,
-    pages,
-  } = await discoverAllFiles(siteConfig);
-
-  // TODO: make sure that `static` and `pages` does not overlap
-
-  // TODO: loadMarkoCache
-
-  // -- perform build-time rendering --
-  const builtPages = pages.map((item) => incr.work(preparePage, item));
-  const builtViews = views.map((item) => incr.work(prepareView, item));
-  const builtStaticFiles = staticFiles.map((item) =>
-    incr.work(
-      async (io, { id, file }) => io.writeAsset(id, await io.readFile(file)),
-      item,
-    )
-  );
-  const routes = await Promise.all([...builtViews, ...builtPages]);
-
-  // -- bundle server javascript (backend and views) --
-  const backends = siteConfig.backends.map((backend) => incr.work(bundle.bundleServerJavaScript, {}))
-
-  // -- page resources --
-  const scriptMap = incr.work(bundle.bundleClientJavaScript, {
-    clientRefs: routes.flatMap((x) => x.clientRefs),
-    extraPublicScripts: scripts.map((entry) => entry.file),
-    dev: false,
-  });
-  const styleMap = prepareInlineCss(routes);
-
-  // -- backend --
-
-  // -- assemble page assets --
-  const pAssemblePages = builtPages.map((page) =>
-    assembleAndWritePage(page, styleMap, scriptMap)
-  );
-
-  incr.serializeToDisk();
-}
-
-readManifest.label = "reading manifest";
-export async function readManifest(io: Io) {
-  const cfg = await io.import<typeof import("../src/site.ts")>("src/site.ts");
-  return {
-    siteSections: cfg.siteSections.map((section) => ({
-      root: toRel(section.root),
-    })),
-    backends: cfg.backends.map(toRel),
-  };
-}
-
-export async function discoverAllFiles(
-  siteConfig: Awaited<ReturnType<typeof readManifest>>,
-) {
-  return (
-    await Promise.all(
-      siteConfig.siteSections.map(({ root: sectionRoot }) =>
-        incr.work(scanSiteSection, toAbs(sectionRoot))
-      ),
-    )
-  ).reduce((acc, next) => ({
-    staticFiles: acc.staticFiles.concat(next.staticFiles),
-    pages: acc.pages.concat(next.pages),
-    views: acc.views.concat(next.views),
-    scripts: acc.scripts.concat(next.scripts),
-  }));
-}
-
-scanSiteSection.getLabel = (input: string) =>
-  "discovering files in " + toRel(input);
-export async function scanSiteSection(io: Io, sectionRoot: string) {
-  // Static files are compressed and served as-is.
-  // - "{section}/static/*.png"
-  let staticFiles: FileItem[] = [];
-  // Pages are rendered then served as static files.
-  // - "{section}/pages/*.marko"
-  let pages: FileItem[] = [];
-  // Views are dynamically rendered pages called via backend code.
-  // - "{section}/views/*.tsx"
-  let views: FileItem[] = [];
-  // Public scripts are bundled for the client as static assets under "/js/[...]"
-  // This is used for the file viewer's canvases.
-  // Note that '.client.ts' can be placed anywhere in the file structure.
-  // - "{section}/scripts/*.client.ts"
-  let scripts: FileItem[] = [];
-
-  const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
-  const rootPrefix = hot.projectSrc === sectionRoot
-    ? ""
-    : path.relative(hot.projectSrc, sectionRoot) + "/";
-  const kinds = [
-    {
-      dir: sectionPath("pages"),
-      list: pages,
-      prefix: "/",
-      include: [".tsx", ".mdx", ".marko"],
-      exclude: [".client.ts", ".client.tsx"],
-    },
-    {
-      dir: sectionPath("static"),
-      list: staticFiles,
-      prefix: "/",
-      ext: true,
-    },
-    {
-      dir: sectionPath("scripts"),
-      list: scripts,
-      prefix: rootPrefix,
-      include: [".client.ts", ".client.tsx"],
-    },
-    {
-      dir: sectionPath("views"),
-      list: views,
-      prefix: rootPrefix,
-      include: [".tsx", ".mdx", ".marko"],
-      exclude: [".client.ts", ".client.tsx"],
-    },
-  ];
-  for (const kind of kinds) {
-    const {
-      dir,
-      list,
-      prefix,
-      include = [""],
-      exclude = [],
-      ext = false,
-    } = kind;
-
-    let items;
-    try {
-      items = await io.readDirRecursive(dir);
-    } catch (err: any) {
-      if (err.code === "ENOENT") continue;
-      throw err;
-    }
-    for (const subPath of items) {
-      const file = path.join(dir, subPath);
-      const stat = fs.statSync(file);
-      if (stat.isDirectory()) continue;
-      if (!include.some((e) => subPath.endsWith(e))) continue;
-      if (exclude.some((e) => subPath.endsWith(e))) continue;
-      const trim = ext
-        ? subPath
-        : subPath.slice(0, -path.extname(subPath).length).replaceAll(".", "/");
-      let id = prefix + trim.replaceAll("\\", "/");
-      if (prefix === "/" && id.endsWith("/index")) {
-        id = id.slice(0, -"/index".length) || "/";
-      }
-      list.push({ id, file: path.relative(hot.projectRoot, file) });
-    }
-  }
-
-  return { staticFiles, pages, views, scripts };
-}
-
-export async function preparePage(io: Io, item: sg.FileItem) {
-  // -- load and validate module --
-  let {
-    default: Page,
-    meta: metadata,
-    theme: pageTheme,
-    layout,
-  } = await io.import<any>(item.file);
-  if (!Page) {
-    throw new Error("Page is missing a 'default' export.");
-  }
-  if (!metadata) {
-    throw new Error("Page is missing 'meta' export with a title.");
-  }
-
-  // -- css --
-  if (layout?.theme) pageTheme = layout.theme;
-  const theme: css.Theme = {
-    ...css.defaultTheme,
-    ...pageTheme,
-  };
-  const cssImports = Array.from(
-    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
-    (file) => path.relative(hot.projectSrc, file),
-  );
-
-  // -- metadata --
-  const renderedMetaPromise = Promise.resolve(
-    typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
-  ).then((m) => meta.renderMeta(m));
-
-  // -- html --
-  let page = [engine.kElement, Page, {}];
-  if (layout?.default) {
-    page = [engine.kElement, layout.default, { children: page }];
-  }
-  const bodyPromise = engine.ssrAsync(page, {
-    sitegen: sg.initRender(),
-  });
-
-  const [{ text, addon }, renderedMeta] = await Promise.all([
-    bodyPromise,
-    renderedMetaPromise,
-  ]);
-  if (!renderedMeta.includes("<title>")) {
-    throw new Error(
-      "Page is missing 'meta.title'. " + "All pages need a title tag.",
-    );
-  }
-  const styleKey = css.styleKey(cssImports, theme);
-  return {
-    id: item.id,
-    html: text,
-    meta: renderedMeta,
-    cssImports,
-    theme: theme ?? null,
-    styleKey,
-    clientRefs: Array.from(addon.sitegen.scripts),
-  };
-}
-
-export async function prepareView(io: Io, item: sg.FileItem) {
-  const module = await io.import<any>(item.file);
-  if (!module.meta) {
-    throw new Error(`${item.file} is missing 'export const meta'`);
-  }
-  if (!module.default) {
-    throw new Error(`${item.file} is missing a default export.`);
-  }
-  const pageTheme = module.layout?.theme ?? module.theme;
-  const theme: css.Theme = {
-    ...css.defaultTheme,
-    ...pageTheme,
-  };
-  const cssImports = Array.from(
-    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
-    (file) => path.relative(hot.projectSrc, file),
-  );
-  const styleKey = css.styleKey(cssImports, theme);
-  return {
-    file: path.relative(hot.projectRoot, item.file),
-    cssImports,
-    theme,
-    clientRefs: hot.getClientScriptRefs(item.file),
-    hasLayout: !!module.layout?.default,
-    styleKey,
-  };
-}
-
-export function prepareInlineCss(
-  items: Array<{
-    styleKey: string;
-    cssImports: string[];
-    theme: css.Theme;
-  }>,
-) {
-  const map = new Map<string, incr.Ref<string>>();
-  for (const { styleKey, cssImports, theme } of items) {
-    if (map.has(styleKey)) continue;
-    map.set(
-      styleKey,
-      incr.work(css.bundleCssFiles, {
-        cssImports,
-        theme,
-        dev: false,
-      }),
-    );
-  }
-  return map;
-}
-
-type PreparedPage = Awaited<ReturnType<typeof preparePage>>;
-export async function assembleAndWritePage(
-  pageWork: Promise<PreparedPage>,
-  styleMap: Map<string, incr.Ref<string>>,
-  scriptWork: incr.Ref<Record<string, string>>,
-) {
-  const page = await pageWork;
-  return incr.work(
-    async (io, { id, html, meta, styleKey, clientRefs }) => {
-      const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));
-
-      const scriptIds = clientRefs.map(hot.getScriptId);
-      const scriptMap = await io.readWork(scriptWork);
-      const scripts = scriptIds.map((ref) =>
-        UNWRAP(scriptMap[ref], `Missing script ${ref}`)
-      )
-        .map((x) => `{${x}}`).join("\n");
-
-      const doc = wrapDocument({
-        body: html,
-        head: meta,
-        inlineCss,
-        scripts,
-      });
-      io.writeAsset(id, doc, {
-        "Content-Type": "text/html",
-      });
-    },
-    page,
-  );
-}
-
-import * as sg from "#sitegen";
-import * as incr from "./incremental.ts";
-import { Io } from "./incremental.ts";
-import { OnceMap, Queue } from "#sitegen/async";
-import * as bundle from "./bundle.ts";
-import * as css from "./css.ts";
-import * as engine from "./engine/ssr.ts";
-import * as hot from "./hot.ts";
-import * as fs from "#sitegen/fs";
-import type { FileItem } from "#sitegen";
-import * as path from "node:path";
-import * as meta from "#sitegen/meta";
-import { Spinner, withSpinner } from "@paperclover/console/Spinner";
-import { wrapDocument } from "./lib/view.ts";
@@ -83,7 +83,7 @@ Module.prototype._compile = function (
       ? Array.from(new Set(cssImportsMaybe))
       : null,
     imports,
-    lastModified: stat.mtimeMs,
+    lastModified: Math.floor(stat.mtimeMs),
   });
 }
 return result;
@@ -1,30 +1,41 @@
 // Incremental compilation framework
 let running = false;
-let seenWorks = new Set<string>();
+let jobs = 0;
+let newKeys = 0;
+let seenWorks = new Set<string>(); // for detecting conflict vs overwrite
+let seenWrites = new Set<string>(); // for detecting conflict vs overwrite
 let works = new Map<string, Work>();
-let files = new Map<string, File>();
-let assets = new Map<string, Asset>();
+let files = new Map<string, TrackedFile>(); // keyed by `toRel` path
+let writes = new Map<string, FileWrite>();
+let assets = new Map<string, Asset>(); // keyed by hash

-export interface Ref<T> extends Promise<T> {
+export interface Ref<T> {
+  /** This method is compatible with `await` syntax */
+  then(
+    onFulfilled: (value: T) => void,
+    onRejected: (error: unknown) => void,
+  ): void;
   key: string;
 }
+type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;

 /**
- * Declare and begin a unit of work. Return value is memoized and
- * only re-run when inputs (via `Io`) change. Outputs are written
- * at the end of a compilation (see `compile`).
+ * Declare a unit of work. Return value is memoized and
+ * only rebuilt when inputs (declared via `Io`) change. Outputs
+ * are written at the end of a compilation (see `compile`).
+ *
+ * If the returned `Ref` is not awaited or read
+ * via io.readWork, the job is never started.
 */
-export function work<O>(job: (io: Io) => Promise<O>): Ref<O>;
-export function work<I, O>(job:(io: Io, input: I) => Promise<O>, input: I): Ref<O>;
-export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I = null as I): Ref<O> {
+export function work<O>(job: Job<void, O>): Ref<O>;
+export function work<I, O>(job: Job<I, O>, input: I): Ref<O>;
+export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
   const keySource = [
     JSON.stringify(util.getCallSites(2)[1]),
     util.inspect(input),
-  ];
-  const key = crypto
-    .createHash("sha1")
-    .update(keySource.join(":"))
-    .digest("base64url");
+  ].join(":");
+  const key = crypto.createHash("sha1").update(keySource).digest("base64url");
+  ASSERT(running);
   ASSERT(
     !seenWorks.has(key),
     `Key '${key}' must be unique during the build. ` +
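Note: Ref<T> no longer extends Promise<T>; it only declares then(). Any object with a then method participates in JavaScript's thenable protocol, so await still works, and the job behind a Ref can be deferred until the first consumer awaits it. A standalone sketch of the idea, independent of this codebase:

    // A lazy thenable: `await` accepts any object exposing then().
    function lazyRef<T>(key: string, run: () => Promise<T>) {
      let started: Promise<T> | undefined;
      return {
        key,
        then(onFulfilled: (v: T) => void, onRejected: (e: unknown) => void) {
          (started ??= run()).then(onFulfilled, onRejected); // start once, on first use
        },
      };
    }

    const ref = lazyRef("demo", async () => 42);
    // Nothing has run yet; awaiting starts the job exactly once.
    console.log(await ref, ref.key); // 42 demo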
@ -34,45 +45,139 @@ export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I = nul
|
||||||
|
|
||||||
const prev = works.get(key) as Work<O> | null;
|
const prev = works.get(key) as Work<O> | null;
|
||||||
if (prev) {
|
if (prev) {
|
||||||
const promise = Promise.resolve(prev.value) as Ref<O>;
|
return { key, then: (done) => done(prev.value) };
|
||||||
promise.key = key;
|
}
|
||||||
return promise;
|
|
||||||
};
|
|
||||||
|
|
||||||
const io = new Io();
|
|
||||||
const promise = job(io, input).then((value) => {
|
|
||||||
const { needs, writes } = io;
|
|
||||||
|
|
||||||
// Apply the deltas to the graph
|
|
||||||
applyDiff(key, files, [], needs.files);
|
|
||||||
applyDiff(key, works, [], needs.works);
|
|
||||||
|
|
||||||
|
async function perform() {
|
||||||
|
const io = new Io(key);
|
||||||
|
jobs += 1;
|
||||||
|
newKeys += 1;
|
||||||
|
try {
|
||||||
|
const value = await job(io, input);
|
||||||
validateSerializable(value, "");
|
validateSerializable(value, "");
|
||||||
|
const { reads, writes } = io;
|
||||||
works.set(key, {
|
works.set(key, {
|
||||||
value,
|
value,
|
||||||
affects: [],
|
affects: [],
|
||||||
needs,
|
reads,
|
||||||
writes
|
writes,
|
||||||
});
|
});
|
||||||
}) as Ref<O>;
|
for (const add of reads.files) {
|
||||||
promise.key = key;
|
const { affects } = UNWRAP(files.get(add));
|
||||||
return promise;
|
ASSERT(!affects.includes(key));
|
||||||
|
affects.push(key);
|
||||||
}
|
}
|
||||||
|
for (const add of reads.works) {
|
||||||
|
const { affects } = UNWRAP(works.get(add));
|
||||||
|
ASSERT(!affects.includes(key));
|
||||||
|
affects.push(key);
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
} finally {
|
||||||
|
jobs -= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let cached: Promise<O>;
|
||||||
|
return {
|
||||||
|
key,
|
||||||
|
then: (fufill, reject) => void (cached ??= perform()).then(fufill, reject),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
export async function compile<T>(compiler: () => Promise<T>) {
|
export async function compile<T>(compiler: () => Promise<T>) {
|
||||||
ASSERT(!running, `Cannot run twice`);
|
ASSERT(!running, `Cannot run twice`);
|
||||||
running = true;
|
|
||||||
try {
|
try {
|
||||||
|
running = true;
|
||||||
|
ASSERT(jobs === 0);
|
||||||
|
const start = performance.now();
|
||||||
|
const timerSpinner = new Spinner({
|
||||||
|
text: () =>
|
||||||
|
`sitegen! [${
|
||||||
|
((performance.now() - start) / 1000).toFixed(
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
}s]`,
|
||||||
|
fps: 10,
|
||||||
|
});
|
||||||
|
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
|
||||||
|
|
||||||
const value = await compiler();
|
const value = await compiler();
|
||||||
|
ASSERT(jobs === 0);
|
||||||
|
timerSpinner.text = "incremental flush";
|
||||||
|
await flush(start);
|
||||||
|
timerSpinner.stop();
|
||||||
seenWorks.clear();
|
seenWorks.clear();
|
||||||
ASSERT(!queue.active, `Queue was still running`);
|
newKeys = 0;
|
||||||
await queue.done();
|
|
||||||
return { value };
|
return { value };
|
||||||
} finally {
|
} finally {
|
||||||
running = false;
|
running = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function flush(start: number) {
  // Trim
  const detachedFiles = new Set<string>();
  const referencedAssets = new Set<string>();
  for (const [k, { writes: { assets } }] of works) {
    if (seenWorks.has(k)) {
      for (const asset of assets.values()) referencedAssets.add(asset.hash);
      continue;
    }
    deleteWork(k);
  }
  for (const [k, file] of files) {
    if (file.affects.length > 0) continue;
    files.delete(k);
    detachedFiles.add(k);
  }
  for (const k of assets.keys()) {
    if (!referencedAssets.has(k)) assets.delete(k);
  }

  const p = [];
  // File writes
  let dist = 0;
  for (const [key, { buffer, size }] of writes) {
    if (buffer) p.push(fs.writeMkdir(path.join(`.clover/o/${key}`), buffer));
    dist += size;
  }
  // Asset map
  {
    const { json, blob } = getAssetManifest();
    const jsonString = Buffer.from(JSON.stringify(json));
    p.push(fs.writeMkdir(".clover/o/static.json", jsonString));
    p.push(fs.writeMkdir(".clover/o/static.blob", blob));
    dist += blob.byteLength + jsonString.byteLength;
  }
  await Promise.all(p);

  // Incremental state
  const serialized = msgpackr.pack(serialize());
  await fs.writeMkdir(".clover/incr.state", serialized);
  const time = (performance.now() - start).toFixed(0);
  console.success(`sitegen! in ${time} ms`);
  console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
  console.writeLine(` - ${assets.size} static assets`);
  console.writeLine(
    ` - dist: ${formatSize(dist)}, incremental: ${
      formatSize(serialized.byteLength)
    }`,
  );
}

export async function restore() {
  let buffer;
  try {
    buffer = await fs.readFile(".clover/incr.state");
  } catch (err: any) {
    if (err.code !== "ENOENT") throw err;
  }
  if (!buffer) return;
  await deserialize(buffer);
}
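flush() is effectively a mark-and-sweep pass over the build graph: works seen this run (seenWorks) mark their assets live; unseen works, orphaned files, and unreferenced assets are swept. A toy sketch of the sweep half, with illustrative names:

// Sketch: mark-and-sweep over a key -> resource map.
// `live` marks what this run touched; everything unmarked is swept.
function sweep<R>(
  all: Map<string, R>,
  live: Set<string>,
  onDelete: (key: string, res: R) => void,
) {
  for (const [key, res] of all) {
    if (live.has(key)) continue;
    onDelete(key, res);
    all.delete(key); // safe: Map iteration tolerates deletion mid-loop
  }
}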
export function forceInvalidate(file: string) {
  const resolved = toAbs(file);
  const key = toRel(resolved);

@@ -83,14 +188,39 @@ export function forceInvalidateEntry(entry: { affects: string[] }) {
  const queue = [...entry.affects];
  let key;
  while ((key = queue.shift())) {
    const affects = deleteWork(key);
    queue.push(...affects);
  }
}
function deleteWork(key: string) {
  console.info({ key });
  const { reads, affects, writes: w } = UNWRAP(works.get(key));
  for (const remove of reads.files) {
    const { affects } = UNWRAP(files.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
  for (const remove of reads.works) {
    const { affects } = UNWRAP(works.get(remove), remove);
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
  for (const remove of affects) {
    const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
    ASSERT(list.has(key));
    list.delete(key);
  }
  for (const file of w.files) {
    if (UNWRAP(writes.get(file)).work === key) writes.delete(file);
  }
  // Assets are temporarily kept, trimmed via manual GC after compilation.

  works.delete(key);
  return affects;
}
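deleteWork() has to unlink a key from both directions of the graph — the reads it holds on one side, the affects lists pointing back at it on the other — and keeping those two sides mirrored is exactly the invariant the ASSERTs check. A toy model of that mirrored-edge bookkeeping (Node and unlink are illustrative names):

// Sketch: a doubly-indexed dependency graph where every edge is stored twice.
// node.reads lists what it depends on; node.affects lists who depends on it.
interface Node {
  reads: Set<string>;
  affects: Set<string>;
}

function unlink(nodes: Map<string, Node>, key: string) {
  const node = nodes.get(key);
  if (!node) return;
  // Removing a node must also drop it from the mirror set on each neighbor,
  // otherwise later walks would follow dangling edges.
  for (const dep of node.reads) nodes.get(dep)?.affects.delete(key);
  for (const dependent of node.affects) nodes.get(dependent)?.reads.delete(key);
  nodes.delete(key);
}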
export function reset() {
  ASSERT(!running);
  works.clear();

@@ -99,48 +229,182 @@ export function reset() {
}
export function serialize() {
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    v.type,
    v.type === "f" ? v.lastModified : v.type === "d" ? v.contentHash : null,
    ...v.affects,
  ] as const);
  const workEntries = Array.from(works, ([k, v]) =>
    [
      k,
      v.value,
      Array.from(v.reads.files),
      Array.from(v.reads.works),
      Array.from(v.writes.files),
      Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const),
      v.affects,
    ] as const);
  const expectedFilesOnDisk = Array.from(
    writes,
    ([k, { size, work }]) => [k, size, work] as const,
  );
  const assetEntries = Array.from(
    assets,
    ([k, asset]) => [k, asset.raw, asset.gzip, asset.zstd] as const,
  );
  return [
    1,
    fileEntries,
    workEntries,
    expectedFilesOnDisk,
    assetEntries,
  ] as const;
}
type SerializedState = ReturnType<typeof serialize>;
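The state is positional tuples rather than keyed objects so msgpackr can pack it compactly, and the leading 1 is a format version checked on load. A hedged round-trip sketch using msgpackr's public pack/unpack API (the example state shape is invented):

import { pack, unpack } from "msgpackr";

// Sketch: version-gated round trip of a tuple-shaped state blob.
const state = [1, [["src/a.ts", "f", 1700000000000]], [], [], []] as const;
const bytes: Buffer = pack(state); // compact binary, no field names stored

const decoded = unpack(bytes);
if (!Array.isArray(decoded) || decoded[0] !== 1) {
  // Unknown or older format: treat the cache as cold instead of erroring.
} else {
  // decoded[1] is the file table, in the same positional layout as above.
}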
/* No-op on failure */
async function deserialize(buffer: Buffer) {
  const decoded = msgpackr.decode(buffer) as SerializedState;
  if (!Array.isArray(decoded)) return false;
  if (decoded[0] !== 1) return false;
  const [, fileEntries, workEntries, expectedFilesOnDisk, assetEntries] =
    decoded;
  for (const [k, type, content, ...affects] of fileEntries) {
    if (type === "f") {
      ASSERT(typeof content === "number");
      files.set(k, { type, affects, lastModified: content });
    } else if (type === "d") {
      ASSERT(typeof content === "string");
      files.set(k, { type, affects, contentHash: content, contents: [] });
    } else {
      files.set(k, { type, affects });
    }
  }
  for (const entry of workEntries) {
    const [
      k,
      value,
      readFiles,
      readWorks,
      writeFiles,
      writeAssets,
      affects,
    ] = entry;
    works.set(k, {
      value,
      reads: {
        files: new Set(readFiles),
        works: new Set(readWorks),
      },
      writes: {
        files: new Set(writeFiles),
        assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, {
          hash: JSON.parse(UNWRAP(headers.etag)),
          headers,
        }])),
      },
      affects,
    });
  }
  const statFiles = await Promise.all(expectedFilesOnDisk
    .map(([k, size, work]) =>
      fs.stat(path.join(".clover/o", k))
        .catch((err) => {
          if (err.code === "ENOENT") return null;
          throw err;
        })
        .then((stat) => ({ k, size, work, stat }))
    ));
  for (const { k, stat, work, size } of statFiles) {
    if (stat?.size === size) {
      writes.set(k, {
        size: size,
        buffer: null,
        work,
      });
    } else {
      forceInvalidateEntry({ affects: [work] });
    }
  }
  for (const [hash, raw, gzip, zstd] of assetEntries) {
    assets.set(hash, { raw, gzip, zstd });
  }

  await Promise.all(Array.from(files, async ([k, file]) => {
    try {
      if (file.type === "d") {
        const contents = file.contents = await fs.readdir(k);
        contents.sort();
        const contentHash = crypto
          .createHash("sha1")
          .update(contents.join("\0"))
          .digest("base64url");
        if (file.contentHash !== contentHash) {
          file.contentHash = contentHash;
          throw new Error();
        }
      } else if (file.type === "f") {
        const lastModified = await fs.stat(k)
          .then((x) => Math.floor(x.mtimeMs), () => 0);
        if (file.lastModified !== lastModified) {
          file.lastModified = lastModified;
          throw new Error();
        }
      } else {
        file.type satisfies "null";
        const stat = await fs.stat(k).catch(() => null);
        if (stat) throw new Error();
      }
    } catch (e) {
      forceInvalidateEntry(file);
      if (file.type === "null") files.delete(k);
    }
  }));
}
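On restore, every tracked file is revalidated against the real filesystem: outputs by size, regular files by mtime, directories by a hash of the sorted listing, and "null" entries by continued absence; any mismatch funnels into forceInvalidateEntry. For reference, the directory fingerprint used here (and again in readDir below) boils down to:

import * as crypto from "node:crypto";
import * as fs from "node:fs/promises";

// Sketch: fingerprint a directory by its sorted entry names only.
// Renames and additions change the hash; edits to file contents do not,
// which is why files are tracked separately by mtime.
async function dirFingerprint(dir: string): Promise<string> {
  const contents = await fs.readdir(dir);
  contents.sort();
  return crypto
    .createHash("sha1")
    .update(contents.join("\0"))
    .digest("base64url");
}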
export function getAssetManifest() {
  const writer = new BufferWriter();
  const asset = Object.fromEntries(
    Array.from(works, (work) => work[1].writes.assets)
      .filter((map) => map.size > 0)
      .flatMap((map) =>
        Array.from(map, ([key, { hash, headers }]) => {
          const { raw, gzip, zstd } = UNWRAP(
            assets.get(hash),
            `Asset ${key} (${hash})`,
          );
          // Distinct dedupe keys per encoding; reusing "raw:" for all three
          // would make BufferWriter hand back the raw bytes for gzip/zstd.
          return [key, {
            raw: writer.write(raw, "raw:" + hash),
            gzip: writer.write(gzip, "gzip:" + hash),
            zstd: writer.write(zstd, "zstd:" + hash),
            headers,
          }] as const;
        })
      ),
  ) satisfies BuiltAssetMap;
  return { json: asset, blob: writer.get() };
}
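static.json maps each route to [start, end) offsets into the single static.blob, so serving an asset is one zero-copy subarray of one shared buffer. A hedged sketch of the consuming side (file paths as written by flush(); ManifestEntry is an assumed shape mirroring BuiltAsset):

import * as fs from "node:fs/promises";

type BufferView = [start: number, end: number];
interface ManifestEntry {
  raw: BufferView;
  gzip: BufferView;
  zstd: BufferView;
  headers: Record<string, string>;
}

// Sketch: load the manifest once, then answer each lookup with a zero-copy
// subarray of the shared blob.
async function loadAssets() {
  const manifest: Record<string, ManifestEntry> = JSON.parse(
    await fs.readFile(".clover/o/static.json", "utf-8"),
  );
  const blob = await fs.readFile(".clover/o/static.blob");
  return (route: string, encoding: "raw" | "gzip" | "zstd") => {
    const entry = manifest[route];
    if (!entry) return null;
    const [start, end] = entry[encoding];
    return blob.subarray(start, end);
  };
}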
/* Input/Output with automatic tracking.
 * - Inputs read with Io are tracked to know when to rebuild
 * - Outputs written with Io are deleted when abandoned.
 */
export class Io {
  constructor(public key: string) {}
  reads: Reads = { files: new Set(), works: new Set() };
  writes: Writes = { files: new Set(), assets: new Map() };
  #trackFs(file: string) {
    const resolved = toAbs(file);
    const key = toRel(resolved);
    this.reads.files.add(key);
    return { resolved, key };
  }
  async readWork<T>(ref: Ref<T>): Promise<T> {
    this.reads.works.add(ref.key);
    return await ref;
  }
  /** Track a file in the compilation without reading it. */
  async trackFile(file: string) {

@@ -148,33 +412,41 @@ export class Io {
    if (!files.get(key)) {
      let lastModified: number = 0;
      try {
        lastModified = Math.floor((await fs.stat(file)).mtimeMs);
        files.set(key, { type: "f", lastModified, affects: [] });
      } catch {
        files.set(key, { type: "null", affects: [] });
      }
    }
    return resolved;
  }
  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }
  async readJson<T>(file: string) {
    return JSON.parse(await this.readFile(file)) as T;
  }
  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    const existing = files.get(key);
    try {
      if (existing?.type === "d") return existing.contents;
      const contents = await fs.readdir(resolved);
      contents.sort();
      const contentHash = crypto
        .createHash("sha1")
        .update(contents.join("\0"))
        .digest("base64url");
      files.set(key, {
        type: "d",
        affects: [],
        contentHash,
        contents,
      });
      return contents;
    } catch (err) {
      if (!existing) files.set(key, { type: "null", affects: [] });
      throw err;
    }
  }
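Everything a job reads goes through Io, and that is what builds the dependency edges: a job re-runs exactly when one of its tracked reads changes. A hypothetical job using this API (listPages and the directory layout are invented for illustration):

// Sketch: a job whose reads are recorded on `io`, so the framework knows
// to re-run it when the directory listing or any read file changes.
async function listPages(io: Io, dir: string) {
  const entries = await io.readDir(dir); // tracked: hash of the dir listing
  const pages = [];
  for (const entry of entries ?? []) {
    if (!entry.endsWith(".md")) continue;
    pages.push({
      name: entry,
      text: await io.readFile(`${dir}/${entry}`), // tracked: file mtime
    });
  }
  return pages;
}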
  async readDirRecursive(dir: string): Promise<string[]> {

@@ -205,11 +477,12 @@ export class Io {
    const seen = new Set<string>();
    let current;
    while ((current = queue.shift())) {
      const stat = hot.getFileStat(current);
      if (!stat) continue;
      const { key } = this.#trackFs(current);
      if (!files.get(key)) {
        files.set(key, {
          type: "f",
          affects: [],
          lastModified: stat?.lastModified ?? 0,
        });

@@ -223,44 +496,74 @@ export class Io {
      }
    }
  }
  async writeAsset(
    pathname: string,
    blob: string | Buffer,
    headersOption?: HeadersInit,
  ) {
    ASSERT(pathname.startsWith("/"));
    ASSERT(!seenWrites.has("a:" + pathname));

    const buffer = typeof blob === "string" ? Buffer.from(blob) : blob;

    const headers = new Headers(headersOption ?? {});
    const hash = crypto.createHash("sha1").update(buffer).digest("hex");
    if (!headers.has("Content-Type")) {
      headers.set("Content-Type", mime.contentTypeFor(pathname));
    }
    headers.set("ETag", JSON.stringify(hash));
    this.writes.assets.set(pathname, {
      hash,
      // @ts-expect-error TODO
      headers: Object.fromEntries(headers),
    });
    if (!assets.has(hash)) {
      jobs += 1;
      assets.set(hash, undefined!);
      const [gzipBuffer, zstdBuffer] = await Promise.all([
        gzip(buffer),
        zstdCompress(buffer),
      ]);
      assets.set(hash, {
        raw: buffer,
        gzip: gzipBuffer,
        zstd: zstdBuffer,
      });
      jobs -= 1;
    }
  }
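Assets are compressed once at build time and keyed by content hash, so identical bytes shared across routes pay for gzip/zstd only once. Note that zlib's zstd support is a recent Node addition (not present on older releases); a reduced sketch of the precompression step, with a fallback this codebase does not itself need:

import * as util from "node:util";
import * as zlib from "node:zlib";

const gzipAsync = util.promisify(zlib.gzip);
// zstdCompress requires a recent Node release; the gzip-only fallback here
// is an assumption of this sketch, not something the original code does.
const zstdAsync = zlib.zstdCompress ? util.promisify(zlib.zstdCompress) : null;

async function precompress(raw: Buffer) {
  const [gz, zs] = await Promise.all([
    gzipAsync(raw),
    zstdAsync ? zstdAsync(raw) : Promise.resolve(null),
  ]);
  return { raw, gzip: gz, zstd: zs };
}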
  writeFile(subPath: string, blob: string | Buffer) {
    ASSERT(!subPath.startsWith("/"));
    ASSERT(
      !seenWrites.has("f:" + subPath),
      `File overwritten: ${JSON.stringify(subPath)}`,
    );
    seenWrites.add("f:" + subPath);
    const buffer = Buffer.isBuffer(blob) ? blob : Buffer.from(blob);
    writes.set(subPath, {
      buffer,
      size: buffer.byteLength,
      work: this.key,
    });
  }
}
|
|
||||||
function applyDiff(
|
class BufferWriter {
|
||||||
key: string,
|
size = 0;
|
||||||
list: Map<string, { affects: string[] }>,
|
seen = new Map<string, BufferView>();
|
||||||
beforeIter: Iterable<string>,
|
buffers: Buffer[] = [];
|
||||||
afterIter: Iterable<string>,
|
|
||||||
) {
|
write(buffer: Buffer, hash: string): BufferView {
|
||||||
const before = Array.from(beforeIter);
|
let view = this.seen.get(hash);
|
||||||
const after = Array.from(afterIter);
|
if (view) return view;
|
||||||
for (const add of after.filter((x) => !before.includes(x))) {
|
view = [this.size, this.size += buffer.byteLength];
|
||||||
const { affects } = UNWRAP(list.get(add));
|
this.seen.set(hash, view);
|
||||||
ASSERT(!affects.includes(key));
|
this.buffers.push(buffer);
|
||||||
affects.push(key);
|
return view;
|
||||||
}
|
}
|
||||||
for (const remove of before.filter((x) => !after.includes(x))) {
|
|
||||||
const { affects } = UNWRAP(list.get(remove));
|
get() {
|
||||||
ASSERT(affects.includes(key));
|
return Buffer.concat(this.buffers);
|
||||||
affects.splice(affects.indexOf(key), 1);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
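BufferWriter is an append-only arena with hash-keyed dedupe: writing the same key twice hands back the original [start, end) view instead of appending again. For example (hash strings invented):

// Sketch: two writes with the same key share one region of the arena.
const writer = new BufferWriter();
const body = Buffer.from("hello world");

const a = writer.write(body, "raw:abc123");
const b = writer.write(body, "raw:abc123"); // dedupe hit: no new bytes
// a === b, both [0, 11]; writer.get() is 11 bytes long, not 22.

const c = writer.write(body, "gzip:abc123"); // different key appends: [11, 22]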
@ -300,39 +603,43 @@ export function toRel(absPath: string) {
|
||||||
return path.relative(hot.projectRoot, absPath).replaceAll("\\", "/");
|
return path.relative(hot.projectRoot, absPath).replaceAll("\\", "/");
|
||||||
}
|
}
|
||||||
|
|
||||||
export type BufferView = [start: number, end: number];
interface Reads {
  files: Set<string>;
  works: Set<string>;
}
interface FileWrite {
  buffer: Buffer | null;
  size: number;
  work: string;
}
interface Writes {
  files: Set<string>;
  assets: Map<string, {
    hash: string;
    headers: Record<string, string>;
  }>;
}
interface Asset {
  raw: Buffer;
  gzip: Buffer;
  zstd: Buffer;
}
interface Work<T = unknown> {
  value: T;
  reads: Reads;
  writes: Writes;
  affects: string[];
}
type TrackedFile =
  & {
    affects: string[];
  }
  & (
    | { type: "f"; lastModified: number }
    | { type: "d"; contentHash: string; contents: string[] | null }
    | { type: "null" }
  );
export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}
@@ -343,12 +650,17 @@ export interface BuiltAsset {
  headers: Record<string, string>;
}

const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress);

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as mime from "#sitegen/mime";
import * as zlib from "node:zlib";
import * as console from "@paperclover/console";
import { Spinner } from "@paperclover/console/Spinner";
import { formatSize } from "@/file-viewer/format.ts";
import * as msgpackr from "msgpackr";
@@ -69,7 +69,7 @@ export function etagMatches(etag: string, ifNoneMatch: string) {
  return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1;
}

function subarrayAsset([start, end]: BufferView) {
  return assets!.buf.subarray(start, end);
}

@@ -115,6 +115,6 @@ process.on("message", (msg: any) => {
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status";
import type { BuiltAsset, BuiltAssetMap, BufferView } from "../incremental.ts";
import { Buffer } from "node:buffer";
import * as path from "node:path";
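etagMatches() implements the If-None-Match comparison: on a match, the client's cached copy is current and the server can answer 304 with no body. A framework-agnostic sketch of the typical call site (plain node:http here; the real handler is Hono middleware):

import * as http from "node:http";

// Sketch: conditional GET using a strong ETag. `etag` would come from the
// asset's stored headers; etagMatches is the helper shown above.
function respond(
  req: http.IncomingMessage,
  res: http.ServerResponse,
  etag: string,
  body: Buffer,
) {
  const inm = req.headers["if-none-match"];
  if (inm && etagMatches(etag, inm)) {
    res.writeHead(304, { ETag: etag });
    return res.end(); // client cache is fresh; skip the body
  }
  res.writeHead(200, { ETag: etag });
  res.end(body);
}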
package-lock.json (generated, 125 lines changed)
@@ -15,6 +15,7 @@
        "hls.js": "^1.6.5",
        "hono": "^4.7.11",
        "marko": "^6.0.20",
        "msgpackr": "^1.11.5",
        "puppeteer": "^24.10.1",
        "sharp": "^0.34.2",
        "source-map-support": "^0.5.21",

@@ -1479,6 +1480,84 @@
        "url": "https://opencollective.com/unified"
      }
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
      "integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==",
      "cpu": ["arm64"],
      "license": "MIT",
      "optional": true,
      "os": ["darwin"]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz",
      "integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==",
      "cpu": ["x64"],
      "license": "MIT",
      "optional": true,
      "os": ["darwin"]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz",
      "integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==",
      "cpu": ["arm"],
      "license": "MIT",
      "optional": true,
      "os": ["linux"]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz",
      "integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==",
      "cpu": ["arm64"],
      "license": "MIT",
      "optional": true,
      "os": ["linux"]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
      "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
      "cpu": ["x64"],
      "license": "MIT",
      "optional": true,
      "os": ["linux"]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz",
      "integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==",
      "cpu": ["x64"],
      "license": "MIT",
      "optional": true,
      "os": ["win32"]
    },
    "node_modules/@paperclover/console": {
      "resolved": "git+https://git.paperclover.net/clo/console.git#1a6ac2b79fdd8a21a1c57d25723975872bc07e3e",
      "dependencies": {
@@ -3766,6 +3845,37 @@
      "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
      "license": "MIT"
    },
    "node_modules/msgpackr": {
      "version": "1.11.5",
      "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.5.tgz",
      "integrity": "sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==",
      "license": "MIT",
      "optionalDependencies": {
        "msgpackr-extract": "^3.0.2"
      }
    },
    "node_modules/msgpackr-extract": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
      "integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
      "hasInstallScript": true,
      "license": "MIT",
      "optional": true,
      "dependencies": {
        "node-gyp-build-optional-packages": "5.2.2"
      },
      "bin": {
        "download-msgpackr-prebuilds": "bin/download-prebuilds.js"
      },
      "optionalDependencies": {
        "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
      }
    },
    "node_modules/netmask": {
      "version": "2.0.2",
      "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz",

@@ -3775,6 +3885,21 @@
        "node": ">= 0.4.0"
      }
    },
    "node_modules/node-gyp-build-optional-packages": {
      "version": "5.2.2",
      "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
      "integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
      "license": "MIT",
      "optional": true,
      "dependencies": {
        "detect-libc": "^2.0.1"
      },
      "bin": {
        "node-gyp-build-optional-packages": "bin.js",
        "node-gyp-build-optional-packages-optional": "optional.js",
        "node-gyp-build-optional-packages-test": "build-test.js"
      }
    },
    "node_modules/node-releases": {
      "version": "2.0.19",
      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",
@@ -11,6 +11,7 @@
    "hls.js": "^1.6.5",
    "hono": "^4.7.11",
    "marko": "^6.0.20",
    "msgpackr": "^1.11.5",
    "puppeteer": "^24.10.1",
    "sharp": "^0.34.2",
    "source-map-support": "^0.5.21",
@@ -4,7 +4,9 @@
  font-weight: 400 750;
  font-style: normal;
  font-display: swap;
  font-variation-settings:
    "CASL" 0.25,
    "MONO" 0;
  font-style: oblique -15deg 0deg;
  unicode-range: U+0020-007E;
}

@@ -14,7 +16,9 @@
  font-weight: 400 800;
  font-style: normal;
  font-display: swap;
  font-variation-settings:
    "CASL" 0.25,
    "MONO" 1;
  font-style: oblique -15deg 0deg;
  unicode-range: U+0020-007E;
}

@@ -24,21 +28,13 @@
  font-weight: 400 800;
  font-style: normal;
  font-display: swap;
  font-variation-settings:
    "CASL" 0.25,
    "MONO" 1;
  font-style: oblique -15deg 0deg;
  unicode-range:
    U+00C0-00FF, U+00A9, U+2190-2193, U+2018, U+2019, U+201C, U+201D, U+2022,
    U+00A0-00A8, U+00AA-00BF, U+2194-2199, U+0100-017F;
}

*,

@@ -63,7 +59,7 @@ main {
}

h1 {
  font-size: 2.5em;
}

h1,