incremental works kinda

parent 12e4bbdf5a
commit 9cdb67fcf0
11 changed files with 1109 additions and 1068 deletions
@@ -1,16 +1,21 @@
async function trackEsbuild(io: Io, metafile: esbuild.Metafile) {
  await Promise.all(Object.keys(metafile.inputs)
    .filter(file => !isIgnoredSource(file))
    .map(file => io.trackFile(file)));
}

// This file implements client-side bundling, mostly wrapping esbuild.
bundleClientJavaScript.label = "bundle client-side javascript";
export async function bundleClientJavaScript(
  io: Io,
  { clientRefs, extraPublicScripts, dev = false }: {
    clientRefs: string[],
    extraPublicScripts: string[],
    dev: boolean
  }
    clientRefs: string[];
    extraPublicScripts: string[];
    dev: boolean;
  },
) {
  const entryPoints = [
    ...new Set([
      ...clientRefs.map(x => `src/${x}`),
      ...clientRefs.map((x) => `src/${x}`),
      ...extraPublicScripts,
    ].map(toAbs)),
  ];
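The new `trackEsbuild` helper feeds esbuild's metafile back into the incremental engine: every non-ignored input becomes a tracked dependency of the calling work unit, so editing any transitively imported file invalidates the bundle. A minimal, self-contained sketch of that dependency-recording idea (the map shape below is an assumption for illustration, not the framework's actual storage):

// Sketch: recording file dependencies so changed files invalidate bundles.
type Metafile = { inputs: Record<string, unknown> };

const fileToWork = new Map<string, Set<string>>(); // file -> dependent work keys

function trackFile(workKey: string, file: string) {
  let dependents = fileToWork.get(file);
  if (!dependents) fileToWork.set(file, dependents = new Set());
  dependents.add(workKey);
}

function trackEsbuildInputs(workKey: string, metafile: Metafile) {
  for (const input of Object.keys(metafile.inputs)) {
    // mirrors isIgnoredSource() from this commit
    if (input.startsWith("vfs:") || input.includes("node_modules")) continue;
    trackFile(workKey, input);
  }
}

// When a file changes, everything that read it becomes stale.
function invalidate(file: string): Set<string> {
  return fileToWork.get(file) ?? new Set();
}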
@@ -68,13 +73,14 @@ export async function bundleClientJavaScript(
    )
  );
  const { metafile, outputFiles } = bundle;
  const p = [];
  p.push(trackEsbuild(io, metafile));
  const scripts: Record<string, string> = {};
  for (const file of outputFiles) {
    const { text } = file;
    let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
    const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
    const sources = Object.keys(inputs)
      .filter((x) => !x.startsWith("<define:"));
    const sources = Object.keys(inputs).filter((x) => !isIgnoredSource(x));

    // Register non-chunks as script entries.
    const chunk = route.startsWith("/js/c.");
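A note on the `out!` paths in this hunk: both bundlers pass `outdir: "out!"` so that the web route of each emitted file can be recovered by stripping everything up to the `!` sentinel, and the same suffix also keys `metafile.outputs`. A rough illustration of the trick (helper name is mine):

import * as path from "node:path";

// Sketch: recover a route from an esbuild output path when outdir is "out!".
function routeFromOutputPath(outputPath: string): string {
  // "/work/proj/out!/js/c.ABC123.js" -> "/js/c.ABC123.js"
  return outputPath.replace(/^.*!/, "").replaceAll("\\", "/");
}

// The metafile keys outputs by the relative outdir path, so the same route
// joins both views of the build: metafile.outputs["out!" + route].
console.log(routeFromOutputPath("/work/proj/out!/js/c.ABC123.js"));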
@@ -85,198 +91,184 @@ export async function bundleClientJavaScript(
    }
    // Register chunks and public scripts as assets.
    if (chunk || publicScriptRoutes.includes(route)) {
      io.writeAsset(route, text);
      p.push(io.writeAsset(route, text));
    }
  }
  await Promise.all(p);
  return scripts;
}

export type ServerPlatform = "node" | "passthru";
export async function bundleServerJavaScript(
  io: Io,
  {
    entry,
    platform,
  }: {
    entry: string,
    platform: ServerPlatform
  },
) {
  const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");

  const viewSource = [
    ...Array.from(
      incr.out.viewMetadata,
      ([, view], i) => `import * as view${i} from ${JSON.stringify(view.file)}`,
    ),
    `const styles = ${magicWord}[-2]`,
    `export const scripts = ${magicWord}[-1]`,
    "export const views = {",
    ...Array.from(incr.out.viewMetadata, ([key, view], i) =>
      [
        ` ${JSON.stringify(key)}: {`,
        ` component: view${i}.default,`,
        // ` meta: ${
        //   view.staticMeta ? JSON.stringify(view.staticMeta) : `view${i}.meta`
        // },`,
        ` meta: view${i}.meta,`,
        ` layout: ${view.hasLayout ? `view${i}.layout?.default` : "null"},`,
        ` inlineCss: styles[${magicWord}[${i}]]`,
        ` },`,
      ].join("\n")),
    "}",
  ].join("\n");

  // -- plugins --
  const serverPlugins: esbuild.Plugin[] = [
    virtualFiles({
      "$views": viewSource,
    }),
    projectRelativeResolution(),
    markoViaBuildCache(),
    {
      name: "replace client references",
      setup(b) {
        b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
          contents:
            hot.resolveClientRefs(await fs.readFile(file, "utf-8"), file).code,
          loader: path.extname(file).slice(1) as esbuild.Loader,
        }));
      },
    },
    {
      name: "mark css external",
      setup(b) {
        b.onResolve(
          { filter: /\.css$/ },
          () => ({ path: ".", namespace: "dropped" }),
        );
        b.onLoad(
          { filter: /./, namespace: "dropped" },
          () => ({ contents: "" }),
        );
      },
    },
  ];
  const pkg = await fs.readJson("package.json") as {
    dependencies: Record<string, string>;
  };
  const { metafile, outputFiles } = await esbuild.build({
    bundle: true,
    chunkNames: "c.[hash]",
    entryNames: "server",
    entryPoints: [
      path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
    ],
    platform: "node",
    format: "esm",
    minify: false,
    outdir: "out!",
    plugins: serverPlugins,
    splitting: true,
    logLevel: "silent",
    write: false,
    metafile: true,
    jsx: "automatic",
    jsxImportSource: "#ssr",
    jsxDev: false,
    define: {
      MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
    },
    external: Object.keys(pkg.dependencies)
      .filter((x) => !x.startsWith("@paperclover")),
  });

  const files: Record<string, Buffer> = {};
  let fileWithMagicWord: string | null = null;
  for (const output of outputFiles) {
    const basename = output.path.replace(/^.*?!/, "");
    const key = "out!" + basename.replaceAll("\\", "/");
    // If this contains the generated "$views" file, then
    // mark this file as the one for replacement. Because
    // `splitting` is `true`, esbuild will not emit this
    // file in more than one chunk.
    if (metafile.outputs[key].inputs["framework/lib/view.ts"]) {
      fileWithMagicWord = basename;
    }
    files[basename] = Buffer.from(output.contents);
  }
  incr.put({
    kind: "backendBundle",
    key: platform,
    value: {
      magicWord,
      files,
      fileWithMagicWord,
    },
    sources: Object.keys(metafile.inputs).filter((x) =>
      !x.includes("<define:") &&
      !x.startsWith("vfs:") &&
      !x.startsWith("dropped:") &&
      !x.includes("node_modules")
    ),
  });
}

export async function finalizeServerJavaScript(
  incr: Incremental,
export interface ServerSideOptions {
  entries: string[],
  viewItems: sg.FileItem[]
  viewRefs: incr.Ref<PreparedView>[],
  styleMap: Map<string, incr.Ref<string>>;
  scriptMap: incr.Ref<Record<string, string>>;
  platform: ServerPlatform,
}
export async function bundleServerJavaScript(
  { viewItems, viewRefs, styleMap, scriptMap: wScriptMap, entries, platform }: ServerSideOptions,
) {
  if (incr.hasArtifact("backendReplace", platform)) return;
  const {
    files,
    fileWithMagicWord,
    magicWord,
  } = UNWRAP(incr.getArtifact("backendBundle", platform));
  const wViewSource = incr.work(async (_, viewItems: sg.FileItem[]) => {
    const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
    return {
      magicWord,
      file: [
        ...viewItems.map((view, i) => `import * as view${i} from ${JSON.stringify(view.file)}`),
        `const styles = ${magicWord}[-2]`,
        `export const scripts = ${magicWord}[-1]`,
        "export const views = {",
        ...viewItems.map((view, i) => [
          ` ${JSON.stringify(view.id)}: {`,
          ` component: view${i}.default,`,
          ` meta: view${i}.meta,`,
          ` layout: view${i}.layout?.default ?? null,`,
          ` inlineCss: styles[${magicWord}[${i}]]`,
          ` },`,
        ].join("\n")),
        "}",
      ].join("\n"),
    };
  }, viewItems);

  if (!fileWithMagicWord) return;
  const wBundles = entries.map(entry => [entry, incr.work(async (io, entry) => {
    const pkg = await io.readJson<{ dependencies: Record<string, string>; }>("package.json");

    // Only the reachable resources need to be inserted into the bundle.
    const viewScriptsList = new Set(
      Array.from(incr.out.viewMetadata.values())
        .flatMap((view) => view.clientRefs),
    );
    const viewStyleKeys = Array.from(incr.out.viewMetadata.values())
      .map((view) => css.styleKey(view.cssImports, view.theme));
    const viewCssBundles = viewStyleKeys
      .map((key) => UNWRAP(incr.out.style.get(key), "Style key: " + key));
    let magicWord = null as string | null;
    // -- plugins --
    const serverPlugins: esbuild.Plugin[] = [
      virtualFiles({
        // only add dependency when imported.
        "$views": async () => {
          const view = await io.readWork(wViewSource);
          ({ magicWord } = view);
          return view.file;
        },
      }),
      projectRelativeResolution(),
      markoViaBuildCache(),
      {
        name: "replace client references",
        setup(b) {
          b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
            contents:
              hot.resolveClientRefs(await fs.readFile(file, "utf-8"), file).code,
            loader: path.extname(file).slice(1) as esbuild.Loader,
          }));
        },
      },
      {
        name: "mark css external",
        setup(b) {
          b.onResolve(
            { filter: /\.css$/ },
            () => ({ path: ".", namespace: "dropped" }),
          );
          b.onLoad(
            { filter: /./, namespace: "dropped" },
            () => ({ contents: "" }),
          );
        },
      },
    ];

    // Deduplicate styles
    const styleList = Array.from(new Set(viewCssBundles));
    const { metafile, outputFiles } = await esbuild.build({
      bundle: true,
      chunkNames: "c.[hash]",
      entryNames: path.basename(entry, path.extname(entry)),
      entryPoints: [
        path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
      ],
      platform: "node",
      format: "esm",
      minify: false,
      outdir: "out!",
      plugins: serverPlugins,
      splitting: true,
      logLevel: "silent",
      write: false,
      metafile: true,
      jsx: "automatic",
      jsxImportSource: "#ssr",
      jsxDev: false,
      define: {
        MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
        CLOVER_SERVER_ENTRY: JSON.stringify(entry),
      },
      external: Object.keys(pkg.dependencies)
        .filter((x) => !x.startsWith("@paperclover")),
    });
    await trackEsbuild(io, metafile);

    // Replace the magic word
    let text = files[fileWithMagicWord].toString("utf-8");
    text = text.replace(
      new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"),
      (_, i) => {
        i = Number(i);
        // Inline the styling data
        if (i === -2) {
          return JSON.stringify(styleList.map((cssText) => cssText));
    let fileWithMagicWord: {
      bytes: Buffer;
      basename: string;
      magicWord: string;
    } | null = null;
    for (const output of outputFiles) {
      const basename = output.path.replace(/^.*?!(?:\/|\\)/, "");
      const key = "out!" + basename.replaceAll("\\", "/");
      // If this contains the generated "$views" file, then
      // mark this file as the one for replacement. Because
      // `splitting` is `true`, esbuild will not emit this
      // file in more than one chunk.
      if (magicWord && metafile.outputs[key].inputs["framework/lib/view.ts"]) {
        ASSERT(!fileWithMagicWord);
        fileWithMagicWord = {
          basename,
          bytes: Buffer.from(output.contents),
          magicWord,
        };
      } else {
        io.writeFile(basename, Buffer.from(output.contents));
      }
        // Inline the script data
        if (i === -1) {
          return JSON.stringify(Object.fromEntries(incr.out.script));
        }
        // Reference an index into `styleList`
        return `${styleList.indexOf(viewCssBundles[i])}`;
      },
    );
    }
    return fileWithMagicWord;
  }, entry)] as const);

  incr.put({
    kind: "backendReplace",
    key: platform,
    sources: [
      // Backend input code (includes view code)
      ...incr.sourcesFor("backendBundle", platform),
      // Script
      ...Array.from(viewScriptsList)
        .flatMap((key) => incr.sourcesFor("script", hot.getScriptId(key))),
      // Style
      ...viewStyleKeys.flatMap((key) => incr.sourcesFor("style", key)),
    ],
    value: Buffer.from(text),
  });
  const wProcessed = wBundles.map(async ([entry, wBundle]) => {
    if (!await wBundle) return;
    await incr.work(async (io) => {
      // Only the reachable resources need to be read and inserted into the bundle.
      // This is what Map<string, incr.Ref> is for
      const { basename, bytes, magicWord } = UNWRAP(await io.readWork(wBundle));
      const views = await Promise.all(viewRefs.map(ref => io.readWork(ref)));

      // Client JS
      const scriptList = Object.entries(await io.readWork(wScriptMap));
      const viewScriptsList = new Set(views.flatMap(view => view.clientRefs));
      const neededScripts = scriptList.filter(([k]) => viewScriptsList.has(k));

      // CSS
      const viewStyleKeys = views.map((view) => view.styleKey);
      const viewCssBundles = await Promise.all(
        viewStyleKeys.map((key) => io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key))));
      const styleList = Array.from(new Set(viewCssBundles));

      // Replace the magic word
      const text = bytes.toString("utf-8").replace(
        new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"),
        (_, i) => {
          i = Number(i);
          // Inline the styling data
          if (i === -2) {
            return JSON.stringify(styleList.map((cssText) => cssText));
          }
          // Inline the script data
          if (i === -1) {
            return JSON.stringify(Object.fromEntries(neededScripts));
          }
          // Reference an index into `styleList`
          return `${styleList.indexOf(viewCssBundles[i])}`;
        },
      );

      io.writeFile(basename, text);
    });
  });

  await Promise.all(wProcessed);
}
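The magic-word scheme running through both versions of this file is worth spelling out: the generated `$views` module refers to data it cannot have yet as `C_<uuid>[i]` index expressions, and after esbuild finishes, the random token is located in the output text and substituted with real JSON. Late-produced data (inline CSS, the script map) is injected without a second bundling pass. A condensed, runnable sketch of the idea (the sample data is illustrative):

import * as crypto from "node:crypto";

// 1. A token that cannot collide with real identifiers.
const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");

// 2. Generated source references data it does not have yet.
const generated = `export const scripts = ${magicWord}[-1];\n` +
  `const inlineCss = ${magicWord}[0];`;

// 3. Pretend this text came back out of the bundler unchanged.
const bundled = generated;

// 4. Substitute each `magicWord[i]` with the now-known value.
const styleList = [".a{color:red}"];
const scripts = { "canvas.client.ts": "/* js */" };
const output = bundled.replace(
  new RegExp(magicWord + "\\[(-?\\d+)\\]", "g"),
  (_, i) => i === "-1"
    ? JSON.stringify(scripts)
    : JSON.stringify(styleList[Number(i)]),
);
console.log(output);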
@@ -284,9 +276,15 @@ import * as esbuild from "esbuild";
import * as path from "node:path";
import process from "node:process";
import * as hot from "./hot.ts";
import { markoViaBuildCache, projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
import {
  isIgnoredSource,
  markoViaBuildCache,
  projectRelativeResolution,
  virtualFiles,
} from "./esbuild-support.ts";
import { Io, toAbs, toRel } from "./incremental.ts";
import * as css from "./css.ts";
import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";
import * as incr from './incremental.ts';
import * as incr from "./incremental.ts";
import * as sg from "#sitegen";
import type { PreparedView } from "./generate2.ts";
import { meta } from "@/file-viewer/pages/file.cotyledon_speedbump.tsx";
@@ -62,7 +62,7 @@ export async function bundleCssFiles(
    dev: boolean,
  }
) {
  cssImports = await Promise.all(cssImports.map((file) => io.trackFile(file)));
  cssImports = await Promise.all(cssImports.map((file) => io.trackFile('src/' + file)));
  const plugin = {
    name: "clover css",
    setup(b) {
@@ -1,5 +1,7 @@
type Awaitable<T> = T | Promise<T>;

export function virtualFiles(
  map: Record<string, string | esbuild.OnLoadResult>,
  map: Record<string, string | esbuild.OnLoadResult | (() => Awaitable<string | esbuild.OnLoadResult>)>,
) {
  return {
    name: "clover vfs",
@@ -18,8 +20,9 @@ export function virtualFiles(
      );
      b.onLoad(
        { filter: /./, namespace: "vfs" },
        ({ path }) => {
          const entry = map[path];
        async ({ path }) => {
          let entry = map[path];
          if (typeof entry === 'function') entry = await entry();
          return ({
            resolveDir: ".",
            loader: "ts",
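With this change a virtual file's contents may be a function, so an expensive source string is only computed, and its dependencies only registered, if esbuild actually imports the module; that is exactly how `$views` becomes lazy in bundle.ts. A self-contained sketch of the pattern (plugin name and the resolve filter construction are mine):

import type * as esbuild from "esbuild";

type Lazy<T> = T | (() => T | Promise<T>);

// Sketch: an esbuild plugin serving in-memory modules, resolved lazily.
function lazyVirtualFiles(map: Record<string, Lazy<string>>): esbuild.Plugin {
  return {
    name: "lazy vfs",
    setup(b) {
      const filter = new RegExp(
        Object.keys(map)
          .map((k) => `^${k.replace(/[$^.*+?()[\]{}|\\]/g, "\\$&")}$`)
          .join("|"),
      );
      b.onResolve({ filter }, ({ path }) => ({ path, namespace: "vfs" }));
      b.onLoad({ filter: /./, namespace: "vfs" }, async ({ path }) => {
        let entry = map[path];
        if (typeof entry === "function") entry = await entry(); // computed on demand
        return { contents: entry, loader: "ts", resolveDir: "." };
      });
    },
  };
}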
@@ -99,6 +102,13 @@ export function markoViaBuildCache(): esbuild.Plugin {
  };
}

export function isIgnoredSource(source: string) {
  return source.includes("<define:") ||
    source.startsWith("vfs:") ||
    source.startsWith("dropped:") ||
    source.includes("node_modules");
}

import * as esbuild from "esbuild";
import * as string from "#sitegen/string";
import * as path from "node:path";
@@ -1,407 +1,330 @@
// This file contains the main site generator build process.
// By using `Incremental`'s ability to automatically purge stale
// assets, the `sitegen` function performs partial rebuilds.
const { toRel, toAbs } = incr;
const globalCssPath = toAbs("src/global.css");

export function main() {
  return withSpinner<Record<string, unknown>, any>({
    text: "Recovering State",
    successText,
    failureText: () => "sitegen FAIL",
  }, async (spinner) => {
    // const incr = Incremental.fromDisk();
    // await incr.statAllFiles();
    const incr = new Incremental();
    const result = await sitegen(spinner, incr);
    incr.toDisk(); // Allows picking up this state again
    return result;
  }) as ReturnType<typeof sitegen>;
export async function main() {
  await incr.restore();
  await incr.compile(generate);
}

export function successText({
  elapsed,
  inserted,
  referenced,
  unreferenced,
}: Awaited<ReturnType<typeof sitegen>>) {
  const s = (array: unknown[]) => array.length === 1 ? "" : "s";
  const kind = inserted.length === referenced.length ? "build" : "update";
  const status = inserted.length > 0
    ? `${kind} ${inserted.length} key${s(inserted)}`
    : unreferenced.length > 0
    ? `pruned ${unreferenced.length} key${s(unreferenced)}`
    : `checked ${referenced.length} key${s(referenced)}`;
  return `sitegen! ${status} in ${elapsed.toFixed(1)}s`;
}

export async function sitegen(
  status: Spinner,
  incr: Incremental,
) {
  const startTime = performance.now();

  let root = path.resolve(import.meta.dirname, "../src");
  const join = (...sub: string[]) => path.join(root, ...sub);

  // Sitegen reviews every defined section for resources to process
  const sections: sg.Section[] =
    require(path.join(root, "site.ts")).siteSections;

  // -- Scan for files --
  status.text = "Scanning Project";
  for (const section of sections) {
    const { root: sectionRoot } = section;
    const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
    const rootPrefix = root === sectionRoot
      ? ""
      : path.relative(root, sectionRoot) + "/";
    const kinds = [
      {
        dir: sectionPath("pages"),
        list: pages,
        prefix: "/",
        include: [".tsx", ".mdx", ".marko"],
        exclude: [".client.ts", ".client.tsx"],
      },
      {
        dir: sectionPath("static"),
        list: staticFiles,
        prefix: "/",
        ext: true,
      },
      {
        dir: sectionPath("scripts"),
        list: scripts,
        prefix: rootPrefix,
        include: [".client.ts", ".client.tsx"],
      },
      {
        dir: sectionPath("views"),
        list: views,
        prefix: rootPrefix,
        include: [".tsx", ".mdx", ".marko"],
        exclude: [".client.ts", ".client.tsx"],
      },
    ];
    for (
      const { dir, list, prefix, include = [""], exclude = [], ext = false }
        of kinds
    ) {
      const items = fs.readDirRecOptionalSync(dir);
      for (const subPath of items) {
        const file = path.join(dir, subPath);
        const stat = fs.statSync(file);
        if (stat.isDirectory()) continue;
        if (!include.some((e) => subPath.endsWith(e))) continue;
        if (exclude.some((e) => subPath.endsWith(e))) continue;
        const trim = ext
          ? subPath
          : subPath.slice(0, -path.extname(subPath).length).replaceAll(
            ".",
            "/",
          );
        let id = prefix + trim.replaceAll("\\", "/");
        if (prefix === "/" && id.endsWith("/index")) {
          id = id.slice(0, -"/index".length) || "/";
        }
        list.push({ id, file: file });
      }
    }
  }
const globalCssPath = join("global.css");
export async function generate() {
  // -- read config and discover files --
  const siteConfig = await incr.work(readManifest);
  const {
    staticFiles,
    scripts,
    views,
    pages,
  } = await discoverAllFiles(siteConfig);

  // TODO: make sure that `static` and `pages` does not overlap

  // -- inline style sheets, used and shared by pages and views --
  status.text = "Building";
  const cssOnce = new OnceMap();
  const cssQueue = new Queue({
    name: "Bundle",
    async fn([, key, files, theme]: [string, string, string[], css.Theme]) {
      const { text, sources } = await css.bundleCssFiles(files, theme);
      incr.put({
        kind: "style",
        key,
        sources,
        value: text,
      });
    },
    passive: true,
    getItemText: ([id]) => id,
    maxJobs: 2,
  // TODO: loadMarkoCache

  // -- perform build-time rendering --
  const builtPages = pages.map((item) => incr.work(preparePage, item));
  const builtViews = views.map((item) => incr.work(prepareView, item));
  const builtStaticFiles = Promise.all((staticFiles.map((item) =>
    incr.work(
      async (io, { id, file }) => void await io.writeAsset(id, await io.readFile(file)),
      item,
    )
  )));
  const routes = await Promise.all([...builtViews, ...builtPages]);

  // -- page resources --
  const scriptMap = incr.work(bundle.bundleClientJavaScript, {
    clientRefs: routes.flatMap((x) => x.clientRefs),
    extraPublicScripts: scripts.map((entry) => entry.file),
    dev: false,
  });
  function ensureCssGetsBuilt(
    cssImports: string[],
    theme: css.Theme,
    referrer: string,
  ) {
    const key = css.styleKey(cssImports, theme);
    cssOnce.get(
      key,
      async () => {
        incr.getArtifact("style", key) ??
          await cssQueue.add([referrer, key, cssImports, theme]);
      },
    );
  }
  const styleMap = prepareInlineCss(routes);

  // -- server side render pages --
  async function loadPageModule({ file }: FileItem) {
    require(file);
  }
  async function renderPage(item: FileItem) {
    // -- load and validate module --
    let {
      default: Page,
      meta: metadata,
      theme: pageTheme,
      layout,
    } = require(item.file);
    if (!Page) {
      throw new Error("Page is missing a 'default' export.");
    }
    if (!metadata) {
      throw new Error("Page is missing 'meta' export with a title.");
    }
  // -- backend --
  const builtBackend = bundle.bundleServerJavaScript({
    entries: siteConfig.backends,
    platform: 'node',
    styleMap,
    scriptMap,
    viewItems: views,
    viewRefs: builtViews,
  })

    // -- css --
    if (layout?.theme) pageTheme = layout.theme;
    const theme: css.Theme = {
      ...css.defaultTheme,
      ...pageTheme,
    };
    const cssImports = Array.from(
      new Set([globalCssPath, ...hot.getCssImports(item.file)]),
      (file) => path.relative(hot.projectSrc, file),
    );
    ensureCssGetsBuilt(cssImports, theme, item.id);

    // -- metadata --
    const renderedMetaPromise = Promise.resolve(
      typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
    ).then((m) => meta.renderMeta(m));

    // -- html --
    let page = [engine.kElement, Page, {}];
    if (layout?.default) {
      page = [engine.kElement, layout.default, { children: page }];
    }
    const bodyPromise = engine.ssrAsync(page, {
      sitegen: sg.initRender(),
    });

    const [{ text, addon }, renderedMeta] = await Promise.all([
      bodyPromise,
      renderedMetaPromise,
    ]);
    if (!renderedMeta.includes("<title>")) {
      throw new Error(
        "Page is missing 'meta.title'. " +
          "All pages need a title tag.",
      );
    }
    incr.put({
      kind: "pageMetadata",
      key: item.id,
      // Incremental integrates with `hot.ts` + `require`
      // to trace all the needed source files here.
      sources: [item.file],
      value: {
        html: text,
        meta: renderedMeta,
        cssImports,
        theme: theme ?? null,
        clientRefs: Array.from(addon.sitegen.scripts),
      },
    });
  }
  async function prepareView(item: FileItem) {
    const module = require(item.file);
    if (!module.meta) {
      throw new Error(`${item.file} is missing 'export const meta'`);
    }
    if (!module.default) {
      throw new Error(`${item.file} is missing a default export.`);
    }
    const pageTheme = module.layout?.theme ?? module.theme;
    const theme: css.Theme = {
      ...css.defaultTheme,
      ...pageTheme,
    };
    const cssImports = Array.from(
      new Set([globalCssPath, ...hot.getCssImports(item.file)]),
      (file) => path.relative(hot.projectSrc, file),
    );
    ensureCssGetsBuilt(cssImports, theme, item.id);
    incr.put({
      kind: "viewMetadata",
      key: item.id,
      sources: [item.file],
      value: {
        file: path.relative(hot.projectRoot, item.file),
        cssImports,
        theme,
        clientRefs: hot.getClientScriptRefs(item.file),
        hasLayout: !!module.layout?.default,
      },
    });
  }

  // Of the pages that are already built, a call to 'ensureCssGetsBuilt' is
  // required so that it's (1) re-built if needed, (2) not pruned from build.
  const neededPages = pages.filter((page) => {
    const existing = incr.getArtifact("pageMetadata", page.id);
    if (existing) {
      const { cssImports, theme } = existing;
      ensureCssGetsBuilt(cssImports, theme, page.id);
    }
    return !existing;
  });
  const neededViews = views.filter((view) => {
    const existing = incr.getArtifact("viewMetadata", view.id);
    if (existing) {
      const { cssImports, theme } = existing;
      ensureCssGetsBuilt(cssImports, theme, view.id);
    }
    return !existing;
  });

  // Load the marko cache before render modules are loaded
  incr.loadMarkoCache();

  // This is done in two passes so that a page that throws during evaluation
  // will report "Load Render Module" instead of "Render Static Page".
  const spinnerFormat = status.format;
  status.format = () => "";
  const moduleLoadQueue = new Queue({
    name: "Load Render Module",
    fn: loadPageModule,
    getItemText,
    maxJobs: 1,
  });
  moduleLoadQueue.addMany(neededPages);
  moduleLoadQueue.addMany(neededViews);
  await moduleLoadQueue.done({ method: "stop" });
  const pageQueue = new Queue({
    name: "Render Static Page",
    fn: renderPage,
    getItemText,
    maxJobs: 2,
  });
  pageQueue.addMany(neededPages);
  const viewQueue = new Queue({
    name: "Build Dynamic View",
    fn: prepareView,
    getItemText,
    maxJobs: 2,
  });
  viewQueue.addMany(neededViews);
  const pageAndViews = [
    pageQueue.done({ method: "stop" }),
    viewQueue.done({ method: "stop" }),
  ];
  await Promise.allSettled(pageAndViews);
  await Promise.all(pageAndViews);
  status.format = spinnerFormat;

  // -- bundle server javascript (backend and views) --
  status.text = "Bundle JavaScript";
  incr.snapshotMarkoCache();
  const serverJavaScriptPromise = bundle.bundleServerJavaScript(incr, "node");

  // -- bundle client javascript --
  const referencedScripts = Array.from(
    new Set(
      [
        ...pages.map((item) =>
          UNWRAP(
            incr.getArtifact("pageMetadata", item.id),
            `Missing pageMetadata ${item.id}`,
          )
        ),
        ...views.map((item) =>
          UNWRAP(
            incr.getArtifact("viewMetadata", item.id),
            `Missing viewMetadata ${item.id}`,
          )
        ),
      ].flatMap((item) => item.clientRefs),
    ),
    (script) => path.resolve(hot.projectSrc, script),
  ).filter((file) => !incr.hasArtifact("script", hot.getScriptId(file)));
  const extraPublicScripts = scripts.map((entry) => entry.file);
  const clientJavaScriptPromise = bundle.bundleClientJavaScript(
    referencedScripts,
    extraPublicScripts,
    incr,
  // -- assemble page assets --
  const pAssemblePages = builtPages.map((page) =>
    assembleAndWritePage(page, styleMap, scriptMap)
  );

  await Promise.all([
    serverJavaScriptPromise,
    clientJavaScriptPromise,
    cssQueue.done({ method: "stop" }),
    builtBackend,
    builtStaticFiles,
    ...pAssemblePages,
  ]);
  await bundle.finalizeServerJavaScript(incr, "node");
}

  // -- copy/compress static files --
  async function doStaticFile(item: FileItem) {
    const body = await fs.readFile(item.file);
    await incr.putAsset({
      sources: [item.file],
      key: item.id,
      body,
    });
  }
  const staticQueue = new Queue({
    name: "Load Static",
    fn: doStaticFile,
    getItemText,
    maxJobs: 16,
  });
  status.format = () => "";
  staticQueue.addMany(
    staticFiles.filter((file) => !incr.hasArtifact("asset", file.id)),
  );
  await staticQueue.done({ method: "stop" });
  status.format = spinnerFormat;

  // -- concatenate static rendered pages --
  status.text = `Concat Pages`;
  await Promise.all(pages.map(async (page) => {
  }));
  status.format = () => "";
  status.text = ``;
  // This will wait for all compression jobs to finish, which up
  // to this point have been left as dangling promises.
  await incr.wait();

  const { inserted, referenced, unreferenced } = incr.shake();

  // Flush the site to disk.
  status.format = spinnerFormat;
  status.text = `Incremental Flush`;
  incr.flush("node"); // Write outputs
export async function readManifest(io: Io) {
  const cfg = await io.import<typeof import("../src/site.ts")>("src/site.ts");
  return {
    incr,
    inserted,
    referenced,
    unreferenced,
    elapsed: (performance.now() - startTime) / 1000,
    siteSections: cfg.siteSections.map((section) => ({
      root: toRel(section.root),
    })),
    backends: cfg.backends.map(toRel),
  };
}

function getItemText({ file }: FileItem) {
  return path.relative(hot.projectSrc, file).replaceAll("\\", "/");
export async function discoverAllFiles(
  siteConfig: Awaited<ReturnType<typeof readManifest>>,
) {
  return (
    await Promise.all(
      siteConfig.siteSections.map(({ root: sectionRoot }) =>
        incr.work(scanSiteSection, toAbs(sectionRoot))
      ),
    )
  ).reduce((acc, next) => ({
    staticFiles: acc.staticFiles.concat(next.staticFiles),
    pages: acc.pages.concat(next.pages),
    views: acc.views.concat(next.views),
    scripts: acc.scripts.concat(next.scripts),
  }));
}

import { OnceMap, Queue } from "#sitegen/async";
import { Incremental } from "./incremental.ts";
export async function scanSiteSection(io: Io, sectionRoot: string) {
  // Static files are compressed and served as-is.
  // - "{section}/static/*.png"
  let staticFiles: FileItem[] = [];
  // Pages are rendered then served as static files.
  // - "{section}/pages/*.marko"
  let pages: FileItem[] = [];
  // Views are dynamically rendered pages called via backend code.
  // - "{section}/views/*.tsx"
  let views: FileItem[] = [];
  // Public scripts are bundled for the client as static assets under "/js/[...]"
  // This is used for the file viewer's canvases.
  // Note that '.client.ts' can be placed anywhere in the file structure.
  // - "{section}/scripts/*.client.ts"
  let scripts: FileItem[] = [];

  const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
  const rootPrefix = hot.projectSrc === sectionRoot
    ? ""
    : path.relative(hot.projectSrc, sectionRoot) + "/";
  const kinds = [
    {
      dir: sectionPath("pages"),
      list: pages,
      prefix: "/",
      include: [".tsx", ".mdx", ".marko"],
      exclude: [".client.ts", ".client.tsx"],
    },
    {
      dir: sectionPath("static"),
      list: staticFiles,
      prefix: "/",
      ext: true,
    },
    {
      dir: sectionPath("scripts"),
      list: scripts,
      prefix: rootPrefix,
      include: [".client.ts", ".client.tsx"],
    },
    {
      dir: sectionPath("views"),
      list: views,
      prefix: rootPrefix,
      include: [".tsx", ".mdx", ".marko"],
      exclude: [".client.ts", ".client.tsx"],
    },
  ];
  for (const kind of kinds) {
    const {
      dir,
      list,
      prefix,
      include = [""],
      exclude = [],
      ext = false,
    } = kind;

    let items;
    try {
      items = await io.readDirRecursive(dir);
    } catch (err: any) {
      if (err.code === "ENOENT") continue;
      throw err;
    }
    for (const subPath of items) {
      const file = path.join(dir, subPath);
      const stat = fs.statSync(file);
      if (stat.isDirectory()) continue;
      if (!include.some((e) => subPath.endsWith(e))) continue;
      if (exclude.some((e) => subPath.endsWith(e))) continue;
      const trim = ext
        ? subPath
        : subPath.slice(0, -path.extname(subPath).length).replaceAll(".", "/");
      let id = prefix + trim.replaceAll("\\", "/");
      if (prefix === "/" && id.endsWith("/index")) {
        id = id.slice(0, -"/index".length) || "/";
      }
      list.push({ id, file: path.relative(hot.projectRoot, file) });
    }
  }

  return { staticFiles, pages, views, scripts };
}

export async function preparePage(io: Io, item: sg.FileItem) {
  // -- load and validate module --
  let {
    default: Page,
    meta: metadata,
    theme: pageTheme,
    layout,
  } = await io.import<any>(item.file);
  if (!Page) {
    throw new Error("Page is missing a 'default' export.");
  }
  if (!metadata) {
    throw new Error("Page is missing 'meta' export with a title.");
  }

  // -- css --
  if (layout?.theme) pageTheme = layout.theme;
  const theme: css.Theme = {
    ...css.defaultTheme,
    ...pageTheme,
  };
  const cssImports = Array.from(
    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
    (file) => path.relative(hot.projectSrc, file),
  );

  // -- metadata --
  const renderedMetaPromise = Promise.resolve(
    typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
  ).then((m) => meta.renderMeta(m));

  // -- html --
  let page = [engine.kElement, Page, {}];
  if (layout?.default) {
    page = [engine.kElement, layout.default, { children: page }];
  }
  const bodyPromise = engine.ssrAsync(page, {
    sitegen: sg.initRender(),
  });

  const [{ text, addon }, renderedMeta] = await Promise.all([
    bodyPromise,
    renderedMetaPromise,
  ]);
  if (!renderedMeta.includes("<title>")) {
    throw new Error(
      "Page is missing 'meta.title'. " + "All pages need a title tag.",
    );
  }
  const styleKey = css.styleKey(cssImports, theme);
  return {
    id: item.id,
    html: text,
    meta: renderedMeta,
    cssImports,
    theme: theme ?? null,
    styleKey,
    clientRefs: Array.from(addon.sitegen.scripts),
  };
}

export async function prepareView(io: Io, item: sg.FileItem) {
  const module = await io.import<any>(item.file);
  if (!module.meta) {
    throw new Error(`${item.file} is missing 'export const meta'`);
  }
  if (!module.default) {
    throw new Error(`${item.file} is missing a default export.`);
  }
  const pageTheme = module.layout?.theme ?? module.theme;
  const theme: css.Theme = {
    ...css.defaultTheme,
    ...pageTheme,
  };
  const cssImports = Array.from(
    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
    (file) => path.relative(hot.projectSrc, file),
  );
  const styleKey = css.styleKey(cssImports, theme);
  return {
    file: path.relative(hot.projectRoot, item.file),
    cssImports,
    theme,
    clientRefs: hot.getClientScriptRefs(item.file),
    hasLayout: !!module.layout?.default,
    styleKey,
  };
}
export type PreparedView = Awaited<ReturnType<typeof prepareView>>;

export function prepareInlineCss(
  items: Array<{
    styleKey: string;
    cssImports: string[];
    theme: css.Theme;
  }>,
) {
  const map = new Map<string, incr.Ref<string>>();
  for (const { styleKey, cssImports, theme } of items) {
    if (map.has(styleKey)) continue;
    map.set(
      styleKey,
      incr.work(css.bundleCssFiles, {
        cssImports,
        theme,
        dev: false,
      }),
    );
  }
  return map;
}

export type PreparedPage = Awaited<ReturnType<typeof preparePage>>;
export async function assembleAndWritePage(
  pageWork: incr.Ref<PreparedPage>,
  styleMap: Map<string, incr.Ref<string>>,
  scriptWork: incr.Ref<Record<string, string>>,
) {
  const page = await pageWork;
  return incr.work(
    async (io, { id, html, meta, styleKey, clientRefs }) => {
      const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));

      const scriptIds = clientRefs.map(hot.getScriptId);
      const scriptMap = await io.readWork(scriptWork);
      const scripts = scriptIds.map((ref) =>
        UNWRAP(scriptMap[ref], `Missing script ${ref}`)
      )
        .map((x) => `{${x}}`).join("\n");

      const doc = wrapDocument({
        body: html,
        head: meta,
        inlineCss,
        scripts,
      });
      await io.writeAsset(id, doc, {
        "Content-Type": "text/html",
      });
    },
    page,
  );
}

import * as sg from "#sitegen";
import * as incr from "./incremental.ts";
import { Io } from "./incremental.ts";
import * as bundle from "./bundle.ts";
import * as css from "./css.ts";
import * as engine from "./engine/ssr.ts";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import * as sg from "#sitegen";
import type { FileItem } from "#sitegen";
import * as path from "node:path";
import * as meta from "#sitegen/meta";
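The rewritten generator expresses the whole build as `incr.work` units whose `Ref` results feed one another, so memoization falls out of the data flow rather than being bolted on with queues and artifact checks. A toy version of that composition, independent of the framework (the `work`/`Ref` shapes are simplified from this commit; the job bodies are illustrative):

// Toy composition of memoized work units (simplified shapes).
type Ref<T> = {
  key: string;
  then(done: (v: T) => void, reject?: (e: unknown) => void): void;
};

const cache = new Map<string, unknown>();
function work<I, O>(job: (input: I) => Promise<O>, input: I): Ref<O> {
  const key = job.name + ":" + JSON.stringify(input ?? null);
  return {
    key,
    then(done, reject) {
      if (cache.has(key)) return done(cache.get(key) as O);
      job(input).then((value) => {
        cache.set(key, value);
        done(value);
      }, reject);
    },
  };
}

// Usage mirroring generate(): discovery feeds page preparation.
async function readManifestToy(file: string) {
  return { pages: ["index"] }; // stand-in for parsing site config
}
async function generateToy() {
  const manifest = work(readManifestToy, "site.ts");
  const { pages } = await manifest; // a Ref is awaitable via `then`
  const built = pages.map((p) =>
    work(async (id: string) => `<html>${id}</html>`, p)
  );
  return Promise.all(built);
}
generateToy().then((html) => console.log(html));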
@@ -1,324 +0,0 @@
const { toRel, toAbs } = incr;
const globalCssPath = toAbs("src/global.css");

export async function main() {
  const startTime = performance.now();

  // -- read config and discover files --
  const siteConfig = await incr.work(readManifest);
  const {
    staticFiles,
    scripts,
    views,
    pages,
  } = await discoverAllFiles(siteConfig);

  // TODO: make sure that `static` and `pages` does not overlap

  // TODO: loadMarkoCache

  // -- perform build-time rendering --
  const builtPages = pages.map((item) => incr.work(preparePage, item));
  const builtViews = views.map((item) => incr.work(prepareView, item));
  const builtStaticFiles = staticFiles.map((item) =>
    incr.work(
      async (io, { id, file }) => io.writeAsset(id, await io.readFile(file)),
      item,
    )
  );
  const routes = await Promise.all([...builtViews, ...builtPages]);

  // -- bundle server javascript (backend and views) --
  const backends = siteConfig.backends.map((backend) => incr.work(bundle.bundleServerJavaScript, {}));

  // -- page resources --
  const scriptMap = incr.work(bundle.bundleClientJavaScript, {
    clientRefs: routes.flatMap((x) => x.clientRefs),
    extraPublicScripts: scripts.map((entry) => entry.file),
    dev: false,
  });
  const styleMap = prepareInlineCss(routes);

  // -- backend --

  // -- assemble page assets --
  const pAssemblePages = builtPages.map((page) =>
    assembleAndWritePage(page, styleMap, scriptMap)
  );

  incr.serializeToDisk();
}

readManifest.label = "reading manifest";
export async function readManifest(io: Io) {
  const cfg = await io.import<typeof import("../src/site.ts")>("src/site.ts");
  return {
    siteSections: cfg.siteSections.map((section) => ({
      root: toRel(section.root),
    })),
    backends: cfg.backends.map(toRel),
  };
}

export async function discoverAllFiles(
  siteConfig: Awaited<ReturnType<typeof readManifest>>,
) {
  return (
    await Promise.all(
      siteConfig.siteSections.map(({ root: sectionRoot }) =>
        incr.work(scanSiteSection, toAbs(sectionRoot))
      ),
    )
  ).reduce((acc, next) => ({
    staticFiles: acc.staticFiles.concat(next.staticFiles),
    pages: acc.pages.concat(next.pages),
    views: acc.views.concat(next.views),
    scripts: acc.scripts.concat(next.scripts),
  }));
}

scanSiteSection.getLabel = (input: string) =>
  "discovering files in " + toRel(input);
export async function scanSiteSection(io: Io, sectionRoot: string) {
  // Static files are compressed and served as-is.
  // - "{section}/static/*.png"
  let staticFiles: FileItem[] = [];
  // Pages are rendered then served as static files.
  // - "{section}/pages/*.marko"
  let pages: FileItem[] = [];
  // Views are dynamically rendered pages called via backend code.
  // - "{section}/views/*.tsx"
  let views: FileItem[] = [];
  // Public scripts are bundled for the client as static assets under "/js/[...]"
  // This is used for the file viewer's canvases.
  // Note that '.client.ts' can be placed anywhere in the file structure.
  // - "{section}/scripts/*.client.ts"
  let scripts: FileItem[] = [];

  const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
  const rootPrefix = hot.projectSrc === sectionRoot
    ? ""
    : path.relative(hot.projectSrc, sectionRoot) + "/";
  const kinds = [
    {
      dir: sectionPath("pages"),
      list: pages,
      prefix: "/",
      include: [".tsx", ".mdx", ".marko"],
      exclude: [".client.ts", ".client.tsx"],
    },
    {
      dir: sectionPath("static"),
      list: staticFiles,
      prefix: "/",
      ext: true,
    },
    {
      dir: sectionPath("scripts"),
      list: scripts,
      prefix: rootPrefix,
      include: [".client.ts", ".client.tsx"],
    },
    {
      dir: sectionPath("views"),
      list: views,
      prefix: rootPrefix,
      include: [".tsx", ".mdx", ".marko"],
      exclude: [".client.ts", ".client.tsx"],
    },
  ];
  for (const kind of kinds) {
    const {
      dir,
      list,
      prefix,
      include = [""],
      exclude = [],
      ext = false,
    } = kind;

    let items;
    try {
      items = await io.readDirRecursive(dir);
    } catch (err: any) {
      if (err.code === "ENOENT") continue;
      throw err;
    }
    for (const subPath of items) {
      const file = path.join(dir, subPath);
      const stat = fs.statSync(file);
      if (stat.isDirectory()) continue;
      if (!include.some((e) => subPath.endsWith(e))) continue;
      if (exclude.some((e) => subPath.endsWith(e))) continue;
      const trim = ext
        ? subPath
        : subPath.slice(0, -path.extname(subPath).length).replaceAll(".", "/");
      let id = prefix + trim.replaceAll("\\", "/");
      if (prefix === "/" && id.endsWith("/index")) {
        id = id.slice(0, -"/index".length) || "/";
      }
      list.push({ id, file: path.relative(hot.projectRoot, file) });
    }
  }

  return { staticFiles, pages, views, scripts };
}

export async function preparePage(io: Io, item: sg.FileItem) {
  // -- load and validate module --
  let {
    default: Page,
    meta: metadata,
    theme: pageTheme,
    layout,
  } = await io.import<any>(item.file);
  if (!Page) {
    throw new Error("Page is missing a 'default' export.");
  }
  if (!metadata) {
    throw new Error("Page is missing 'meta' export with a title.");
  }

  // -- css --
  if (layout?.theme) pageTheme = layout.theme;
  const theme: css.Theme = {
    ...css.defaultTheme,
    ...pageTheme,
  };
  const cssImports = Array.from(
    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
    (file) => path.relative(hot.projectSrc, file),
  );

  // -- metadata --
  const renderedMetaPromise = Promise.resolve(
    typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
  ).then((m) => meta.renderMeta(m));

  // -- html --
  let page = [engine.kElement, Page, {}];
  if (layout?.default) {
    page = [engine.kElement, layout.default, { children: page }];
  }
  const bodyPromise = engine.ssrAsync(page, {
    sitegen: sg.initRender(),
  });

  const [{ text, addon }, renderedMeta] = await Promise.all([
    bodyPromise,
    renderedMetaPromise,
  ]);
  if (!renderedMeta.includes("<title>")) {
    throw new Error(
      "Page is missing 'meta.title'. " + "All pages need a title tag.",
    );
  }
  const styleKey = css.styleKey(cssImports, theme);
  return {
    id: item.id,
    html: text,
    meta: renderedMeta,
    cssImports,
    theme: theme ?? null,
    styleKey,
    clientRefs: Array.from(addon.sitegen.scripts),
  };
}

export async function prepareView(io: Io, item: sg.FileItem) {
  const module = await io.import<any>(item.file);
  if (!module.meta) {
    throw new Error(`${item.file} is missing 'export const meta'`);
  }
  if (!module.default) {
    throw new Error(`${item.file} is missing a default export.`);
  }
  const pageTheme = module.layout?.theme ?? module.theme;
  const theme: css.Theme = {
    ...css.defaultTheme,
    ...pageTheme,
  };
  const cssImports = Array.from(
    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
    (file) => path.relative(hot.projectSrc, file),
  );
  const styleKey = css.styleKey(cssImports, theme);
  return {
    file: path.relative(hot.projectRoot, item.file),
    cssImports,
    theme,
    clientRefs: hot.getClientScriptRefs(item.file),
    hasLayout: !!module.layout?.default,
    styleKey,
  };
}

export function prepareInlineCss(
  items: Array<{
    styleKey: string;
    cssImports: string[];
    theme: css.Theme;
  }>,
) {
  const map = new Map<string, incr.Ref<string>>();
  for (const { styleKey, cssImports, theme } of items) {
    if (map.has(styleKey)) continue;
    map.set(
      styleKey,
      incr.work(css.bundleCssFiles, {
        cssImports,
        theme,
        dev: false,
      }),
    );
  }
  return map;
}

type PreparedPage = Awaited<ReturnType<typeof preparePage>>;
export async function assembleAndWritePage(
  pageWork: Promise<PreparedPage>,
  styleMap: Map<string, incr.Ref<string>>,
  scriptWork: incr.Ref<Record<string, string>>,
) {
  const page = await pageWork;
  return incr.work(
    async (io, { id, html, meta, styleKey, clientRefs }) => {
      const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));

      const scriptIds = clientRefs.map(hot.getScriptId);
      const scriptMap = await io.readWork(scriptWork);
      const scripts = scriptIds.map((ref) =>
        UNWRAP(scriptMap[ref], `Missing script ${ref}`)
      )
        .map((x) => `{${x}}`).join("\n");

      const doc = wrapDocument({
        body: html,
        head: meta,
        inlineCss,
        scripts,
      });
      io.writeAsset(id, doc, {
        "Content-Type": "text/html",
      });
    },
    page,
  );
}

import * as sg from "#sitegen";
import * as incr from "./incremental.ts";
import { Io } from "./incremental.ts";
import { OnceMap, Queue } from "#sitegen/async";
import * as bundle from "./bundle.ts";
import * as css from "./css.ts";
import * as engine from "./engine/ssr.ts";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import type { FileItem } from "#sitegen";
import * as path from "node:path";
import * as meta from "#sitegen/meta";
import { Spinner, withSpinner } from "@paperclover/console/Spinner";
import { wrapDocument } from "./lib/view.ts";
@@ -83,7 +83,7 @@ Module.prototype._compile = function (
        ? Array.from(new Set(cssImportsMaybe))
        : null,
      imports,
      lastModified: stat.mtimeMs,
      lastModified: Math.floor(stat.mtimeMs),
    });
  }
  return result;
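Flooring `stat.mtimeMs` is plausibly about stability: fractional milliseconds vary by filesystem and may not survive a serialization round-trip exactly, so an integer keeps cached timestamps comparable across builds. That is my reading of the change, not something the commit states; a small illustration:

import * as fs from "node:fs";

// Sketch: integer mtimes compare reliably across save/restore cycles.
const recorded = Math.floor(fs.statSync("package.json").mtimeMs);
const current = Math.floor(fs.statSync("package.json").mtimeMs);
// With raw floats, a lossy round-trip of the incremental state could make
// recorded !== current even though the file never changed.
console.log(recorded === current ? "fresh" : "stale");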
@@ -1,78 +1,183 @@
// Incremental compilation framework
let running = false;
let seenWorks = new Set<string>();
let jobs = 0;
let newKeys = 0;
let seenWorks = new Set<string>(); // for detecting conflict vs overwrite
let seenWrites = new Set<string>(); // for detecting conflict vs overwrite
let works = new Map<string, Work>();
let files = new Map<string, File>();
let assets = new Map<string, Asset>();
let files = new Map<string, TrackedFile>(); // keyed by `toRel` path
let writes = new Map<string, FileWrite>();
let assets = new Map<string, Asset>(); // keyed by hash

export interface Ref<T> extends Promise<T> {
export interface Ref<T> {
  /** This method is compatible with `await` syntax */
  then(
    onFulfilled: (value: T) => void,
    onRejected: (error: unknown) => void,
  ): void;
  key: string;
}
type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;

/**
 * Declare and begin a unit of work. Return value is memoized and
 * only re-run when inputs (via `Io`) change. Outputs are written
 * at the end of a compilation (see `compile`).
 * Declare a unit of work. Return value is memoized and
 * only rebuilt when inputs (declared via `Io`) change. Outputs
 * are written at the end of a compilation (see `compile`).
 *
 * If the returned `Ref` is not awaited or read
 * via io.readWork, the job is never started.
 */
export function work<O>(job: (io: Io) => Promise<O>): Ref<O>;
export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I): Ref<O>;
export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I = null as I): Ref<O> {
export function work<O>(job: Job<void, O>): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
  const keySource = [
    JSON.stringify(util.getCallSites(2)[1]),
    util.inspect(input),
  ];
  const key = crypto
    .createHash("sha1")
    .update(keySource.join(":"))
    .digest("base64url");
  ].join(":");
  const key = crypto.createHash("sha1").update(keySource).digest("base64url");
  ASSERT(running);
  ASSERT(
    !seenWorks.has(key),
    `Key '${key}' must be unique during the build.` +
      `To fix this, provide a manual 'key' argument.`,
    `Key '${key}' must be unique during the build. ` +
      `To fix this, provide a manual 'key' argument.`,
  );
  seenWorks.add(key);

  const prev = works.get(key) as Work<O> | null;
  if (prev) {
    const promise = Promise.resolve(prev.value) as Ref<O>;
    promise.key = key;
    return promise;
    return { key, then: (done) => done(prev.value) };
  }

  async function perform() {
    const io = new Io(key);
    jobs += 1;
    newKeys += 1;
    try {
      const value = await job(io, input);
      validateSerializable(value, "");
      const { reads, writes } = io;
      works.set(key, {
        value,
        affects: [],
        reads,
        writes,
      });
      for (const add of reads.files) {
        const { affects } = UNWRAP(files.get(add));
        ASSERT(!affects.includes(key));
        affects.push(key);
      }
      for (const add of reads.works) {
        const { affects } = UNWRAP(works.get(add));
        ASSERT(!affects.includes(key));
        affects.push(key);
      }
      return value;
    } finally {
      jobs -= 1;
    }
  }

  let cached: Promise<O>;
  return {
    key,
    then: (fulfill, reject) => void (cached ??= perform()).then(fulfill, reject),
  };

  const io = new Io();
  const promise = job(io, input).then((value) => {
    const { needs, writes } = io;

    // Apply the deltas to the graph
    applyDiff(key, files, [], needs.files);
    applyDiff(key, works, [], needs.works);

    validateSerializable(value, "");

    works.set(key, {
      value,
      affects: [],
      needs,
      writes,
    });
  }) as Ref<O>;
  promise.key = key;
  return promise;
}

export async function compile<T>(compiler: () => Promise<T>) {
  ASSERT(!running, `Cannot run twice`);
  running = true;
  try {
    running = true;
    ASSERT(jobs === 0);
    const start = performance.now();
    const timerSpinner = new Spinner({
      text: () =>
        `sitegen! [${
          ((performance.now() - start) / 1000).toFixed(
            1,
          )
        }s]`,
      fps: 10,
    });
    using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };

    const value = await compiler();
    ASSERT(jobs === 0);
    timerSpinner.text = "incremental flush";
    await flush(start);
    timerSpinner.stop();
    seenWorks.clear();
    ASSERT(!queue.active, `Queue was still running`);
    await queue.done();
    newKeys = 0;
    return { value };
  } finally {
    running = false;
  }
}

export async function flush(start: number) {
  // Trim
  const detachedFiles = new Set<string>;
  const referencedAssets = new Set<string>;
  for (const [k, { writes: { assets } }] of works) {
    if (seenWorks.has(k)) {
      for (const asset of assets.values()) referencedAssets.add(asset.hash);
      continue;
    }
    deleteWork(k);
  }
  for (const [k, file] of files) {
    if (file.affects.length > 0) continue;
    files.delete(k);
    detachedFiles.add(k);
  }
  for (const k of assets.keys()) {
    if (!referencedAssets.has(k))
      assets.delete(k);
  }

  const p = [];
  // File writes
  let dist = 0;
  for (const [key, { buffer, size }] of writes) {
    if (buffer) p.push(fs.writeMkdir(path.join(`.clover/o/${key}`), buffer));
    dist += size;
  }
  // Asset map
  {
    const { json, blob } = getAssetManifest();
    const jsonString = Buffer.from(JSON.stringify(json));
    p.push(fs.writeMkdir(".clover/o/static.json", jsonString));
    p.push(fs.writeMkdir(".clover/o/static.blob", blob));
    dist += blob.byteLength + jsonString.byteLength;
  }
  await Promise.all(p);

  // Incremental state
  const serialized = msgpackr.pack(serialize());
  await fs.writeMkdir(".clover/incr.state", serialized);
  const time = (performance.now() - start).toFixed(0);
  console.success(`sitegen! in ${time} ms`);
  console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
  console.writeLine(` - ${assets.size} static assets`);
  console.writeLine(
    ` - dist: ${formatSize(dist)}, incremental: ${
      formatSize(serialized.byteLength)
    }`,
  );
}

export async function restore() {
  let buffer;
  try {
    buffer = await fs.readFile(".clover/incr.state");
  } catch (err: any) {
    if (err.code !== "ENOENT") throw err;
  }
  if (!buffer) return;
  await deserialize(buffer);
}

export function forceInvalidate(file: string) {
  const resolved = toAbs(file);
  const key = toRel(resolved);
@@ -83,14 +188,39 @@ export function forceInvalidateEntry(entry: { affects: string[] }) {
  const queue = [...entry.affects];
  let key;
  while ((key = queue.shift())) {
    const { needs, affects } = UNWRAP(works.get(key));
    applyDiff(key, files, needs.files, []);
    applyDiff(key, works, needs.works, []);
    works.delete(key);
    const affects = deleteWork(key);
    queue.push(...affects);
  }
}

function deleteWork(key: string) {
  console.info({ key });
  const { reads, affects, writes: w } = UNWRAP(works.get(key));
  for (const remove of reads.files) {
    const { affects } = UNWRAP(files.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
  for (const remove of reads.works) {
    const { affects } = UNWRAP(works.get(remove), remove);
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
  for (const remove of affects) {
    const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
    ASSERT(list.has(key));
    list.delete(key);
  }
  for (const file of w.files) {
    if (UNWRAP(writes.get(file)).work === key) writes.delete(file);
  }
  // Assets are temporarily kept; they are trimmed via a manual GC pass after
  // compilation.

  works.delete(key);
  return affects;
}

export function reset() {
  ASSERT(!running);
  works.clear();
@@ -99,48 +229,182 @@ export function reset() {
}

export function serialize() {
  // Aiming for a compact JSON format.
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    "lastModified" in v ? "f" : "d",
    "lastModified" in v ? v.lastModified : v.contentHash,
    ...v.affects,
  ]);
  const workEntries = Array.from(works, ([k, v]) => [k, v.value, ...v.affects]);
  return JSON.stringify({
    file: fileEntries,
    work: workEntries,
  });
  const fileEntries = Array.from(files, ([k, v]) =>
    [
      k,
      v.type,
      v.type === "f" ? v.lastModified : v.type === "d" ? v.contentHash : null,
      ...v.affects,
    ] as const);
  const workEntries = Array.from(works, ([k, v]) =>
    [
      k,
      v.value,
      Array.from(v.reads.files),
      Array.from(v.reads.works),
      Array.from(v.writes.files),
      Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const),
      v.affects,
    ] as const);
  const expectedFilesOnDisk = Array.from(
    writes,
    ([k, { size, work }]) => [k, size, work] as const,
  );
  const assetEntries = Array.from(
    assets,
    ([k, asset]) => [k, asset.raw, asset.gzip, asset.zstd] as const,
  );
  return [
    1,
    fileEntries,
    workEntries,
    expectedFilesOnDisk,
    assetEntries,
  ] as const;
}
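// For reference (illustrative values, shape reconstructed from serialize()
// above): the packed state is a versioned tuple, e.g.
//   [1,
//    [["src/a.ts", "f", 1718000000000, "page:index"], ...],  // file entries
//    [["page:index", value, [readFiles], [readWorks],
//      [writeFiles], [[path, headers], ...], [affects]]],    // work entries
//    [["js/main.js", 1234, "page:index"], ...],              // files on disk
//    [[hash, rawBuf, gzipBuf, zstdBuf], ...]]                // asset entries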

export function serializeToDisk(file = ".clover/incr.state") {
  fs.writeMkdirSync(file, serialize());
type SerializedState = ReturnType<typeof serialize>;

/* No-op on failure */
async function deserialize(buffer: Buffer) {
  const decoded = msgpackr.decode(buffer) as SerializedState;
  if (!Array.isArray(decoded)) return false;
  if (decoded[0] !== 1) return false;
  const [, fileEntries, workEntries, expectedFilesOnDisk, assetEntries] =
    decoded;
  for (const [k, type, content, ...affects] of fileEntries) {
    if (type === "f") {
      ASSERT(typeof content === "number");
      files.set(k, { type, affects, lastModified: content });
    } else if (type === "d") {
      ASSERT(typeof content === "string");
      files.set(k, { type, affects, contentHash: content, contents: [] });
    } else {
      files.set(k, { type, affects });
    }
  }
  for (const entry of workEntries) {
    const [
      k,
      value,
      readFiles,
      readWorks,
      writeFiles,
      writeAssets,
      affects,
    ] = entry;
    works.set(k, {
      value,
      reads: {
        files: new Set(readFiles),
        works: new Set(readWorks),
      },
      writes: {
        files: new Set(writeFiles),
        assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, {
          hash: JSON.parse(UNWRAP(headers.etag)),
          headers,
        }])),
      },
      affects,
    });
  }
  const statFiles = await Promise.all(expectedFilesOnDisk
    .map(([k, size, work]) =>
      fs.stat(path.join(".clover/o", k))
        .catch((err) => {
          if (err.code === "ENOENT") return null;
          throw err;
        })
        .then((stat) => ({ k, size, work, stat }))
    ));
  for (const { k, stat, work, size } of statFiles) {
    if (stat?.size === size) {
      writes.set(k, {
        size,
        buffer: null,
        work,
      });
    } else {
      forceInvalidateEntry({ affects: [work] });
    }
  }
  for (const [hash, raw, gzip, zstd] of assetEntries) {
    assets.set(hash, { raw, gzip, zstd });
  }

  await Promise.all(Array.from(files, async ([k, file]) => {
    try {
      if (file.type === "d") {
        const contents = file.contents = await fs.readdir(k);
        contents.sort();
        const contentHash = crypto
          .createHash("sha1")
          .update(contents.join("\0"))
          .digest("base64url");
        if (file.contentHash !== contentHash) {
          file.contentHash = contentHash;
          throw new Error();
        }
      } else if (file.type === "f") {
        const lastModified = await fs.stat(k)
          .then((x) => Math.floor(x.mtimeMs), () => 0);
        if (file.lastModified !== lastModified) {
          file.lastModified = lastModified;
          throw new Error();
        }
      } else {
        file.type satisfies "null";
        const stat = await fs.stat(k).catch(() => null);
        if (stat) throw new Error();
      }
    } catch (e) {
      forceInvalidateEntry(file);
      if (file.type === "null") files.delete(k);
    }
  }));
}

export function getAssetManifest() {
  const writer = new BufferWriter();
  const asset = Object.fromEntries(
    Array.from(works, (work) => work[1].writes.assets)
      .filter((map) => map.size > 0)
      .flatMap((map) =>
        Array.from(map, ([key, { hash, headers }]) => {
          const { raw, gzip, zstd } = UNWRAP(
            assets.get(hash),
            `Asset ${key} (${hash})`,
          );
          return [key, {
            raw: writer.write(raw, "raw:" + hash),
            // Distinct keys per encoding, so BufferWriter's dedup does not
            // collapse the gzip/zstd views onto the raw bytes.
            gzip: writer.write(gzip, "gzip:" + hash),
            zstd: writer.write(zstd, "zstd:" + hash),
            headers,
          }] as const;
        })
      ),
  ) satisfies BuiltAssetMap;
  return { json: asset, blob: writer.get() };
}
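// Consumption sketch (illustrative): a consumer looks a route up in
// static.json, then slices static.blob with the stored BufferView, e.g.
//   const { raw: [start, end] } = manifest["/js/main.js"];
//   const body = blob.subarray(start, end);
// which is exactly what subarrayAsset() does on the serving side later in
// this diff.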

/* Input/Output with automatic tracking.
 * - Inputs read with Io are tracked to know when to rebuild.
 * - Outputs written with Io are deleted when abandoned.
 */
export class Io {
  needs: Needs = {
    files: new Set(),
    works: new Set(),
  };
  writes: Writes = {
    files: new Map(),
    assets: new Map(),
  };
  constructor(public key: string) {}
  reads: Reads = { files: new Set(), works: new Set() };
  writes: Writes = { files: new Set(), assets: new Map() };

  #trackFs(file: string) {
    const resolved = toAbs(file);
    const key = toRel(resolved);
    this.needs.files.add(key);
    this.reads.files.add(key);
    return { resolved, key };
  }
  readWork<T>(ref: Ref<T>): Promise<T> {
    this.needs.works.add(ref.key);
    return ref;
  async readWork<T>(ref: Ref<T>): Promise<T> {
    this.reads.works.add(ref.key);
    return await ref;
  }
  /** Track a file in the compilation without reading it. */
  async trackFile(file: string) {
@@ -148,33 +412,41 @@ export class Io {
    if (!files.get(key)) {
      let lastModified: number = 0;
      try {
        lastModified = (await fs.stat(file)).mtimeMs;
      } catch {}
      files.set(key, {
        affects: [],
        lastModified,
      });
        lastModified = Math.floor((await fs.stat(file)).mtimeMs);
        files.set(key, { type: "f", lastModified, affects: [] });
      } catch {
        files.set(key, { type: "null", affects: [] });
      }
    }
    return resolved;
  }
  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }
  async readJson<T>(file: string) {
    return JSON.parse(await this.readFile(file)) as T;
  }
  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    let result: string[] = [];
    const existing = files.get(key);
    try {
      result = await fs.readdir(resolved);
      return result;
    } finally {
      if (existing?.type === "d") return existing.contents;
      const contents = await fs.readdir(resolved);
      contents.sort();
      const contentHash = crypto
        .createHash("sha1")
        .update(result.join("\0"))
        .update(contents.join("\0"))
        .digest("base64url");
      files.set(key, {
        type: "d",
        affects: [],
        contentHash,
        contents,
      });
      return contents;
    } catch (err) {
      if (!existing) files.set(key, { type: "null", affects: [] });
      throw err;
    }
  }
  async readDirRecursive(dir: string): Promise<string[]> {
@@ -205,11 +477,12 @@ export class Io {
    const seen = new Set<string>();
    let current;
    while ((current = queue.shift())) {
      const stat = hot.getFileStat(resolved);
      const stat = hot.getFileStat(current);
      if (!stat) continue;
      const { key } = this.#trackFs(current);
      if (!files.get(key)) {
        files.set(key, {
          type: "f",
          affects: [],
          lastModified: stat?.lastModified ?? 0,
        });
@@ -223,74 +496,104 @@ export class Io {
      }
    }
  }
  writeAsset(pathname: string, blob: string | Buffer, headersOption?: HeadersInit) {
  async writeAsset(
    pathname: string,
    blob: string | Buffer,
    headersOption?: HeadersInit,
  ) {
    ASSERT(pathname.startsWith("/"));
    ASSERT(!seenWrites.has("a:" + pathname));

    const buffer = typeof blob === "string" ? Buffer.from(blob) : blob;

    const headers = new Headers(headersOption ?? {});
    const hash = crypto.createHash("sha1").update(blob).digest("hex");
    const hash = crypto.createHash("sha1").update(buffer).digest("hex");
    if (!headers.has("Content-Type")) {
      headers.set("Content-Type", mime.contentTypeFor(pathname));
    }
    headers.set("ETag", JSON.stringify(hash));
    ASSERT(!this.writes.assets.has(pathname));
    this.writes.assets.set(pathname, {
      hash,
      // @ts-expect-error TODO
      headers: Object.fromEntries(headers)
      headers: Object.fromEntries(headers),
    });
    if (!assets.has(hash)) {
      jobs += 1;
      assets.set(hash, undefined!);
      const [gzipBuffer, zstdBuffer] = await Promise.all([
        gzip(buffer),
        zstdCompress(buffer),
      ]);
      assets.set(hash, {
        raw: buffer,
        gzip: gzipBuffer,
        zstd: zstdBuffer,
      });
      jobs -= 1;
    }
  }
  writeFile(subPath: string, blob: string | Buffer) {
    ASSERT(!this.writes.assets.has(subPath));
    this.writes.files.set(subPath, Buffer.isBuffer(blob) ? blob : Buffer.from(blob));
    ASSERT(!subPath.startsWith("/"));
    ASSERT(
      !seenWrites.has("f:" + subPath),
      `File overwritten: ${JSON.stringify(subPath)}`,
    );
    seenWrites.add("f:" + subPath);
    const buffer = Buffer.isBuffer(blob) ? blob : Buffer.from(blob);
    writes.set(subPath, {
      buffer,
      size: buffer.byteLength,
      work: this.key,
    });
  }
}
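// A minimal usage sketch (editorial; `renderPage` and `render` are
// hypothetical): a job receives an Io, reads inputs through it so they are
// tracked for rebuilds, and writes outputs through it so abandoned files can
// be cleaned up.
//
//   async function renderPage(io: Io, input: { file: string }) {
//     const source = await io.readFile(input.file);   // tracked read
//     io.writeFile("pages/out.html", render(source)); // tracked write
//     return { length: source.length };               // must be serializable
//   }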

function applyDiff(
  key: string,
  list: Map<string, { affects: string[] }>,
  beforeIter: Iterable<string>,
  afterIter: Iterable<string>,
) {
  const before = Array.from(beforeIter);
  const after = Array.from(afterIter);
  for (const add of after.filter((x) => !before.includes(x))) {
    const { affects } = UNWRAP(list.get(add));
    ASSERT(!affects.includes(key));
    affects.push(key);
  }
  for (const remove of before.filter((x) => !after.includes(x))) {
    const { affects } = UNWRAP(list.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
}

class BufferWriter {
  size = 0;
  seen = new Map<string, BufferView>();
  buffers: Buffer[] = [];

  write(buffer: Buffer, hash: string): BufferView {
    let view = this.seen.get(hash);
    if (view) return view;
    view = [this.size, this.size += buffer.byteLength];
    this.seen.set(hash, view);
    this.buffers.push(buffer);
    return view;
  }

  get() {
    return Buffer.concat(this.buffers);
  }
}

export function validateSerializable(value: unknown, key: string) {
  if (typeof value === "string") {
    if (value.includes(hot.projectRoot)) {
      throw new Error(
        `Return value must not contain the CWD for portability, found at ${key}`,
      );
    }
  } else if (value && typeof value === "object") {
    if (Array.isArray(value)) {
      value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
    } else if (Object.getPrototypeOf(value) === Object.prototype) {
      Object.entries(value).forEach(([k, v]) =>
        validateSerializable(v, `${key}.${k}`)
      );
    } else {
      throw new Error(
        `Return value must be a plain JS object, found ${
          Object.getPrototypeOf(value).constructor.name
        } at ${key}`,
      );
    }
  } else if (["bigint", "function", "symbol"].includes(typeof value)) {
    throw new Error(
      `Return value must be a plain JS object, found ${typeof value} at ${key}`,
    );
  }
}
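// Examples (illustrative): validateSerializable({ n: 1, s: "ok" }, "") passes;
// validateSerializable(new Map(), "") throws "must be a plain JS object";
// any string containing hot.projectRoot throws, which keeps cached values
// portable across checkouts and machines.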

export function toAbs(absPath: string) {
  return path.resolve(hot.projectRoot, absPath);
@@ -300,39 +603,43 @@ export function toRel(absPath: string) {
  return path.relative(hot.projectRoot, absPath).replaceAll("\\", "/");
}

type BufferView = [start: number, end: number];
type File = TrackedFile | TrackedDir;
interface Needs {
export type BufferView = [start: number, end: number];
interface Reads {
  files: Set<string>;
  works: Set<string>;
}
interface FileWrite {
  buffer: Buffer | null;
  size: number;
  work: string;
}
interface Writes {
  files: Map<string, Buffer>;
  files: Set<string>;
  assets: Map<string, {
    hash: string,
    headers: Record<string, string>
    hash: string;
    headers: Record<string, string>;
  }>;
}
interface Asset {
  raw: Buffer;
  gzip: Buffer;
  zstd: Buffer;
  refs: number;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
  reads: Reads;
  writes: Writes;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}
type TrackedFile =
  & {
    affects: string[];
  }
  & (
    | { type: "f"; lastModified: number }
    | { type: "d"; contentHash: string; contents: string[] | null }
    | { type: "null" }
  );
export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}

@@ -343,12 +650,17 @@ export interface BuiltAsset {
  headers: Record<string, string>;
}

const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress);

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import * as mime from "#sitegen/mime";
import type { View } from "#sitegen/view";
import * as zlib from "node:zlib";
import * as console from "@paperclover/console";
import { Spinner } from "@paperclover/console/Spinner";
import { formatSize } from "@/file-viewer/format.ts";
import * as msgpackr from "msgpackr";

@@ -69,7 +69,7 @@ export function etagMatches(etag: string, ifNoneMatch: string) {
  return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1;
}

function subarrayAsset([start, end]: View) {
function subarrayAsset([start, end]: BufferView) {
  return assets!.buf.subarray(start, end);
}

@@ -115,6 +115,6 @@ process.on("message", (msg: any) => {
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status";
import type { BuiltAsset, BuiltAssetMap, View } from "../incremental.ts";
import type { BuiltAsset, BuiltAssetMap, BufferView } from "../incremental.ts";
import { Buffer } from "node:buffer";
import * as path from "node:path";

125 package-lock.json generated

@@ -15,6 +15,7 @@
        "hls.js": "^1.6.5",
        "hono": "^4.7.11",
        "marko": "^6.0.20",
        "msgpackr": "^1.11.5",
        "puppeteer": "^24.10.1",
        "sharp": "^0.34.2",
        "source-map-support": "^0.5.21",
@@ -1479,6 +1480,84 @@
        "url": "https://opencollective.com/unified"
      }
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz",
      "integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==",
      "cpu": [
        "arm64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "darwin"
      ]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz",
      "integrity": "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==",
      "cpu": [
        "x64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "darwin"
      ]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz",
      "integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==",
      "cpu": [
        "arm"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "linux"
      ]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz",
      "integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==",
      "cpu": [
        "arm64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "linux"
      ]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
      "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
      "cpu": [
        "x64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "linux"
      ]
    },
    "node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz",
      "integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==",
      "cpu": [
        "x64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "win32"
      ]
    },
    "node_modules/@paperclover/console": {
      "resolved": "git+https://git.paperclover.net/clo/console.git#1a6ac2b79fdd8a21a1c57d25723975872bc07e3e",
      "dependencies": {
@@ -3766,6 +3845,37 @@
      "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
      "license": "MIT"
    },
    "node_modules/msgpackr": {
      "version": "1.11.5",
      "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.5.tgz",
      "integrity": "sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==",
      "license": "MIT",
      "optionalDependencies": {
        "msgpackr-extract": "^3.0.2"
      }
    },
    "node_modules/msgpackr-extract": {
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
      "integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
      "hasInstallScript": true,
      "license": "MIT",
      "optional": true,
      "dependencies": {
        "node-gyp-build-optional-packages": "5.2.2"
      },
      "bin": {
        "download-msgpackr-prebuilds": "bin/download-prebuilds.js"
      },
      "optionalDependencies": {
        "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
        "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
      }
    },
    "node_modules/netmask": {
      "version": "2.0.2",
      "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz",
@@ -3775,6 +3885,21 @@
        "node": ">= 0.4.0"
      }
    },
    "node_modules/node-gyp-build-optional-packages": {
      "version": "5.2.2",
      "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
      "integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
      "license": "MIT",
      "optional": true,
      "dependencies": {
        "detect-libc": "^2.0.1"
      },
      "bin": {
        "node-gyp-build-optional-packages": "bin.js",
        "node-gyp-build-optional-packages-optional": "optional.js",
        "node-gyp-build-optional-packages-test": "build-test.js"
      }
    },
    "node_modules/node-releases": {
      "version": "2.0.19",
      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",

@@ -11,6 +11,7 @@
    "hls.js": "^1.6.5",
    "hono": "^4.7.11",
    "marko": "^6.0.20",
    "msgpackr": "^1.11.5",
    "puppeteer": "^24.10.1",
    "sharp": "^0.34.2",
    "source-map-support": "^0.5.21",

@@ -4,7 +4,9 @@
  font-weight: 400 750;
  font-style: normal;
  font-display: swap;
  font-variation-settings: "CASL" 0.25, "MONO" 0;
  font-variation-settings:
    "CASL" 0.25,
    "MONO" 0;
  font-style: oblique -15deg 0deg;
  unicode-range: U+0020-007E;
}
@@ -14,7 +16,9 @@
  font-weight: 400 800;
  font-style: normal;
  font-display: swap;
  font-variation-settings: "CASL" 0.25, "MONO" 1;
  font-variation-settings:
    "CASL" 0.25,
    "MONO" 1;
  font-style: oblique -15deg 0deg;
  unicode-range: U+0020-007E;
}
@@ -24,21 +28,13 @@
  font-weight: 400 800;
  font-style: normal;
  font-display: swap;
  font-variation-settings: "CASL" 0.25, "MONO" 1;
  font-variation-settings:
    "CASL" 0.25,
    "MONO" 1;
  font-style: oblique -15deg 0deg;
  unicode-range:
    U+00C0-00FF,
    U+00A9,
    U+2190-2193,
    U+2018,
    U+2019,
    U+201C,
    U+201D,
    U+2022,
    U+00A0-00A8,
    U+00AA-00BF,
    U+2194-2199,
    U+0100-017F;
    U+00C0-00FF, U+00A9, U+2190-2193, U+2018, U+2019, U+201C, U+201D, U+2022,
    U+00A0-00A8, U+00AA-00BF, U+2194-2199, U+0100-017F;
}

*,
@@ -63,7 +59,7 @@ main {
}

h1 {
  font-size: 2em;
  font-size: 2.5em;
}

h1,