rewrite incremental #21
11 changed files with 767 additions and 1298 deletions
@@ -1,17 +1,20 @@
 // This file implements client-side bundling, mostly wrapping esbuild.
+bundleClientJavaScript.label = "bundle client-side javascript";
 export async function bundleClientJavaScript(
-referencedScripts: string[],
+io: Io,
+{ clientRefs, extraPublicScripts, dev = false }: {
+clientRefs: string[],
 extraPublicScripts: string[],
-incr: Incremental,
+dev: boolean
-dev: boolean = false,
+}
 ) {
 const entryPoints = [
 ...new Set([
-...referencedScripts.map((file) => path.resolve(hot.projectSrc, file)),
+...clientRefs.map(x => `src/${x}`),
 ...extraPublicScripts,
-]),
+].map(toAbs)),
 ];
-if (entryPoints.length === 0) return;
+if (entryPoints.length === 0) return {};
 const invalidFiles = entryPoints
 .filter((file) => !file.match(/\.client\.[tj]sx?/));
 if (invalidFiles.length > 0) {

@@ -24,7 +27,7 @@ export async function bundleClientJavaScript(

 const clientPlugins: esbuild.Plugin[] = [
 projectRelativeResolution(),
-markoViaBuildCache(incr),
+markoViaBuildCache(),
 ];

 const bundle = await esbuild.build({

@@ -65,7 +68,7 @@ export async function bundleClientJavaScript(
 )
 );
 const { metafile, outputFiles } = bundle;
-const promises: Promise<void>[] = [];
+const scripts: Record<string, string> = {};
 for (const file of outputFiles) {
 const { text } = file;
 let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");

@@ -78,33 +81,27 @@ export async function bundleClientJavaScript(
 if (!chunk) {
 const key = hot.getScriptId(path.resolve(sources[sources.length - 1]));
 route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
-incr.put({
+scripts[key] = text;
-sources,
-kind: "script",
-key,
-value: text,
-});
 }
 // Register chunks and public scripts as assets.
 if (chunk || publicScriptRoutes.includes(route)) {
-promises.push(incr.putAsset({
+io.writeAsset(route, text);
-sources,
-key: route,
-body: text,
-}));
 }
 }
-await Promise.all(promises);
+return scripts;
 }

 export type ServerPlatform = "node" | "passthru";
 export async function bundleServerJavaScript(
-incr: Incremental,
+io: Io,
-platform: ServerPlatform = "node",
+{
+entry,
+platform,
+}: {
+entry: string,
+platform: ServerPlatform
+},
 ) {
-if (incr.hasArtifact("backendBundle", platform)) return;

-// Comment
 const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");

 const viewSource = [

@@ -136,7 +133,7 @@ export async function bundleServerJavaScript(
 "$views": viewSource,
 }),
 projectRelativeResolution(),
-markoViaBuildCache(incr),
+markoViaBuildCache(),
 {
 name: "replace client references",
 setup(b) {

@@ -282,39 +279,14 @@ export async function finalizeServerJavaScript(
 });
 }

-function markoViaBuildCache(incr: Incremental): esbuild.Plugin {
-return {
-name: "marko via build cache",
-setup(b) {
-b.onLoad(
-{ filter: /\.marko$/ },
-async ({ path: file }) => {
-const key = path.relative(hot.projectRoot, file)
-.replaceAll("\\", "/");
-const cacheEntry = incr.out.serverMarko.get(key);
-if (!cacheEntry) {
-if (!fs.existsSync(file)) {
-console.log(`File does not exist: ${file}`);
-}
-throw new Error("Marko file not in cache: " + file);
-}
-return ({
-loader: "ts",
-contents: cacheEntry.src,
-resolveDir: path.dirname(file),
-});
-},
-);
-},
-};
-}

 import * as esbuild from "esbuild";
 import * as path from "node:path";
 import process from "node:process";
 import * as hot from "./hot.ts";
-import { projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
+import { markoViaBuildCache, projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
-import { Incremental } from "./incremental.ts";
+import { Io, toAbs, toRel } from "./incremental.ts";
 import * as css from "./css.ts";
 import * as fs from "#sitegen/fs";
 import * as mime from "#sitegen/mime";
+import * as incr from './incremental.ts';

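The client bundle no longer writes into an `Incremental` instance; it returns a map of script id to bundled text and registers public assets through `Io`. A minimal sketch of how the reworked function is meant to be driven, mirroring the `incr.work` call site that appears later in this diff (the entry file names below are made up for illustration):

```ts
// Sketch only; follows the call shape used later in this diff.
// File names below are hypothetical examples, not part of the change.
import * as incr from "./incremental.ts";
import * as bundle from "./bundle.ts";

const scriptMap = incr.work(bundle.bundleClientJavaScript, {
  clientRefs: ["pages/index.client.ts"],      // hypothetical page script refs
  extraPublicScripts: ["js/theme.client.ts"], // hypothetical extra entry
  dev: false,
});
// Resolves to Record<scriptId, bundled text>; page assembly later reads it
// through io.readWork(scriptMap) when emitting HTML.
```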
@@ -40,11 +40,6 @@ export function preprocess(css: string, theme: Theme): string {
 );
 }

-export interface Output {
-text: string;
-sources: string[];
-}
-
 export function styleKey(
 cssImports: string[],
 theme: Theme,

@@ -60,11 +55,14 @@ export function styleKey(
 }

 export async function bundleCssFiles(
+io: Io,
+{ cssImports, theme, dev }: {
 cssImports: string[],
 theme: Theme,
-dev: boolean = false,
+dev: boolean,
-): Promise<Output> {
+}
-cssImports = cssImports.map((file) => path.resolve(hot.projectSrc, file));
+) {
+cssImports = await Promise.all(cssImports.map((file) => io.trackFile(file)));
 const plugin = {
 name: "clover css",
 setup(b) {

@@ -106,15 +104,11 @@ export async function bundleCssFiles(
 throw new AggregateError(warnings, "CSS Build Failed");
 }
 if (outputFiles.length > 1) throw new Error("Too many output files");
-return {
+return outputFiles[0].text;
-text: outputFiles[0].text,
-sources: Object.keys(metafile.outputs["$input$.css"].inputs)
-.filter((x) => !x.startsWith("vfs:")),
-};
 }

 import * as esbuild from "esbuild";
 import * as fs from "#sitegen/fs";
 import * as hot from "./hot.ts";
 import * as path from "node:path";
 import { virtualFiles } from "./esbuild-support.ts";
+import type { Io } from "./incremental.ts";

@@ -13,6 +13,7 @@ export function ssrSync<A extends Addons>(node: Node, addon: A = {} as A) {
 const resolved = resolveNode(r, node);
 return { text: renderNode(resolved), addon };
 }
+export { ssrSync as sync };

 export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
 const r = initRender(true, addon);

@@ -20,7 +21,7 @@ export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
 if (r.async === 0) {
 return Promise.resolve({ text: renderNode(resolved), addon });
 }
-const { resolve, reject, promise } = Promise.withResolvers<Result>();
+const { resolve, reject, promise } = Promise.withResolvers<Result<A>>();
 r.asyncDone = () => {
 const rejections = r.rejections;
 if (!rejections) return resolve({ text: renderNode(resolved), addon });

@@ -29,6 +30,7 @@ export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
 };
 return promise;
 }
+export { ssrAsync as async };

 /** Inline HTML into a render without escaping it */
 export function html(rawText: ResolvedNode): DirectHtml {

@@ -73,7 +73,35 @@ export function projectRelativeResolution(root = process.cwd() + "/src") {
 } satisfies esbuild.Plugin;
 }

+export function markoViaBuildCache(): esbuild.Plugin {
+return {
+name: "marko via build cache",
+setup(b) {
+b.onLoad(
+{ filter: /\.marko$/ },
+async ({ path: file }) => {
+const cacheEntry = markoCache.get(file);
+if (!cacheEntry) {
+if (!fs.existsSync(file)) {
+console.warn(`File does not exist: ${file}`);
+}
+console.log(markoCache.keys());
+throw new Error("Marko file not in cache: " + file);
+}
+return ({
+loader: "ts",
+contents: cacheEntry.src,
+resolveDir: path.dirname(file),
+});
+},
+);
+},
+};
+}

 import * as esbuild from "esbuild";
 import * as string from "#sitegen/string";
 import * as path from "node:path";
-import * as hot from "./hot.ts";
+import * as fs from "#sitegen/fs";
+import * as incr from "./incremental.ts";
+import * as hot from "./hot.ts";
+import { markoCache } from "./marko.ts";

@@ -368,40 +368,6 @@ export async function sitegen(
 // -- concatenate static rendered pages --
 status.text = `Concat Pages`;
 await Promise.all(pages.map(async (page) => {
-if (incr.hasArtifact("asset", page.id)) return;
-const {
-html,
-meta,
-cssImports,
-theme,
-clientRefs,
-} = UNWRAP(incr.out.pageMetadata.get(page.id));
-const scriptIds = clientRefs.map(hot.getScriptId);
-const styleKey = css.styleKey(cssImports, theme);
-const style = UNWRAP(
-incr.out.style.get(styleKey),
-`Missing style ${styleKey}`,
-);
-const doc = wrapDocument({
-body: html,
-head: meta,
-inlineCss: style,
-scripts: scriptIds.map(
-(ref) => UNWRAP(incr.out.script.get(ref), `Missing script ${ref}`),
-).map((x) => `{${x}}`).join("\n"),
-});
-await incr.putAsset({
-sources: [
-page.file,
-...incr.sourcesFor("style", styleKey),
-...scriptIds.flatMap((ref) => incr.sourcesFor("script", ref)),
-],
-key: page.id,
-body: doc,
-headers: {
-"Content-Type": "text/html",
-},
-});
 }));
 status.format = () => "";
 status.text = ``;

@@ -1,162 +1,86 @@
-export async function main() {
+const { toRel, toAbs } = incr;
-// const startTime = performance.now();
+const globalCssPath = toAbs("src/global.css");

-// -- readdir to find all site files --
+export async function main() {
-const siteConfig = await incr.work({
+const startTime = performance.now();
-label: "reading manifest",
-run: (io) => io.import<{ siteSections: sg.Section[] }>("site.ts"),
+// -- read config and discover files --
-});
+const siteConfig = await incr.work(readManifest);
 const {
 staticFiles,
 scripts,
 views,
 pages,
-} = (await Promise.all(
+} = await discoverAllFiles(siteConfig);

+// TODO: make sure that `static` and `pages` does not overlap

+// TODO: loadMarkoCache

+// -- perform build-time rendering --
+const builtPages = pages.map((item) => incr.work(preparePage, item));
+const builtViews = views.map((item) => incr.work(prepareView, item));
+const builtStaticFiles = staticFiles.map((item) =>
+incr.work(
+async (io, { id, file }) => io.writeAsset(id, await io.readFile(file)),
+item,
+)
+);
+const routes = await Promise.all([...builtViews, ...builtPages]);

+// -- bundle server javascript (backend and views) --
+const backends = siteConfig.backends.map((backend) => incr.work(bundle.bundleServerJavaScript, {}))

+// -- page resources --
+const scriptMap = incr.work(bundle.bundleClientJavaScript, {
+clientRefs: routes.flatMap((x) => x.clientRefs),
+extraPublicScripts: scripts.map((entry) => entry.file),
+dev: false,
+});
+const styleMap = prepareInlineCss(routes);

+// -- backend --

+// -- assemble page assets --
+const pAssemblePages = builtPages.map((page) =>
+assembleAndWritePage(page, styleMap, scriptMap)
+);

+incr.serializeToDisk();
+}

+readManifest.label = "reading manifest";
+export async function readManifest(io: Io) {
+const cfg = await io.import<typeof import("../src/site.ts")>("src/site.ts");
+return {
+siteSections: cfg.siteSections.map((section) => ({
+root: toRel(section.root),
+})),
+backends: cfg.backends.map(toRel),
+};
+}

+export async function discoverAllFiles(
+siteConfig: Awaited<ReturnType<typeof readManifest>>,
+) {
+return (
+await Promise.all(
 siteConfig.siteSections.map(({ root: sectionRoot }) =>
-incr.work({
+incr.work(scanSiteSection, toAbs(sectionRoot))
-key: sectionRoot,
-label: "discovering files in " + sectionRoot,
-run: (io) => scanSiteSection(io, sectionRoot),
-})
 ),
-)).reduce((acc, next) => ({
+)
+).reduce((acc, next) => ({
 staticFiles: acc.staticFiles.concat(next.staticFiles),
 pages: acc.pages.concat(next.pages),
 views: acc.views.concat(next.views),
 scripts: acc.scripts.concat(next.scripts),
 }));

-const globalCssPath = path.join(hot.projectSrc, "global.css");

-// TODO: loadMarkoCache

-const builtPages = pages.map((item) =>
-incr.work({
-label: item.id,
-key: item,
-async run(io) {
-// -- load and validate module --
-let {
-default: Page,
-meta: metadata,
-theme: pageTheme,
-layout,
-} = await io.import<any>(item.file);
-if (!Page) {
-throw new Error("Page is missing a 'default' export.");
-}
-if (!metadata) {
-throw new Error("Page is missing 'meta' export with a title.");
 }

-// -- css --
+scanSiteSection.getLabel = (input: string) =>
-if (layout?.theme) pageTheme = layout.theme;
+"discovering files in " + toRel(input);
-const theme: css.Theme = {
+export async function scanSiteSection(io: Io, sectionRoot: string) {
-...css.defaultTheme,
-...pageTheme,
-};
-const cssImports = Array.from(
-new Set([globalCssPath, ...hot.getCssImports(item.file)]),
-(file) => path.relative(hot.projectSrc, file),
-);

-// -- metadata --
-const renderedMetaPromise = Promise.resolve(
-typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
-).then((m) => meta.renderMeta(m));

-// -- html --
-let page = [engine.kElement, Page, {}];
-if (layout?.default) {
-page = [engine.kElement, layout.default, { children: page }];
-}
-const bodyPromise = engine.ssrAsync(page, {
-sitegen: sg.initRender(),
-});

-const [{ text, addon }, renderedMeta] = await Promise.all([
-bodyPromise,
-renderedMetaPromise,
-]);
-if (!renderedMeta.includes("<title>")) {
-throw new Error(
-"Page is missing 'meta.title'. " +
-"All pages need a title tag.",
-);
-}
-const styleKey = css.styleKey(cssImports, theme);
-return {
-html: text,
-meta: renderedMeta,
-cssImports,
-theme: theme ?? null,
-styleKey,
-clientRefs: Array.from(addon.sitegen.scripts),
-};
-},
-})
-);

-// const builtViews = views.map((item) =>
-// incr.work({
-// label: item.id,
-// key: item,
-// async run(io) {
-// const module = require(item.file);
-// if (!module.meta) {
-// throw new Error(`${item.file} is missing 'export const meta'`);
-// }
-// if (!module.default) {
-// throw new Error(`${item.file} is missing a default export.`);
-// }
-// const pageTheme = module.layout?.theme ?? module.theme;
-// const theme: css.Theme = {
-// ...css.defaultTheme,
-// ...pageTheme,
-// };
-// const cssImports = Array.from(
-// new Set([globalCssPath, ...hot.getCssImports(item.file)]),
-// (file) => path.relative(hot.projectSrc, file),
-// );
-// const styleKey = css.styleKey(cssImports, theme);
-// return {
-// file: path.relative(hot.projectRoot, item.file),
-// cssImports,
-// theme,
-// clientRefs: hot.getClientScriptRefs(item.file),
-// hasLayout: !!module.layout?.default,
-// styleKey,
-// };
-// },
-// })
-// );
-//
-// // -- inline style sheets, used and shared by pages and views --
-// const builtCss = Promise.all([...builtViews, ...builtPages]).then((items) => {
-// const map = new Map<string, {}>();
-// for (const { styleKey, cssImports, theme } of items) {
-// if (map.has(styleKey)) continue;
-// map.set(
-// styleKey,
-// incr.work({
-// label: `bundle css ${styleKey}`,
-// async run(io) {
-// await Promise.all(cssImports.map((file) => io.trackFile(file)));
-// const { text } = await css.bundleCssFiles(cssImports, theme);
-// return text;
-// },
-// }),
-// );
-// }
-// });

-// TODO: make sure that `static` and `pages` does not overlap
-await Promise.all(builtPages);
-incr.serializeToDisk();
-// -- bundle server javascript (backend and views) --
-}

-async function scanSiteSection(io: incr.Io, sectionRoot: string) {
 // Static files are compressed and served as-is.
 // - "{section}/static/*.png"
 let staticFiles: FileItem[] = [];
@@ -229,23 +153,164 @@ async function scanSiteSection(io: incr.Io, sectionRoot: string) {
 if (exclude.some((e) => subPath.endsWith(e))) continue;
 const trim = ext
 ? subPath
-: subPath.slice(0, -path.extname(subPath).length).replaceAll(
+: subPath.slice(0, -path.extname(subPath).length).replaceAll(".", "/");
-".",
-"/",
-);
 let id = prefix + trim.replaceAll("\\", "/");
 if (prefix === "/" && id.endsWith("/index")) {
 id = id.slice(0, -"/index".length) || "/";
 }
-list.push({ id, file: file });
+list.push({ id, file: path.relative(hot.projectRoot, file) });
 }
 }

 return { staticFiles, pages, views, scripts };
 }

+export async function preparePage(io: Io, item: sg.FileItem) {
+// -- load and validate module --
+let {
+default: Page,
+meta: metadata,
+theme: pageTheme,
+layout,
+} = await io.import<any>(item.file);
+if (!Page) {
+throw new Error("Page is missing a 'default' export.");
+}
+if (!metadata) {
+throw new Error("Page is missing 'meta' export with a title.");
+}

+// -- css --
+if (layout?.theme) pageTheme = layout.theme;
+const theme: css.Theme = {
+...css.defaultTheme,
+...pageTheme,
+};
+const cssImports = Array.from(
+new Set([globalCssPath, ...hot.getCssImports(item.file)]),
+(file) => path.relative(hot.projectSrc, file),
+);

+// -- metadata --
+const renderedMetaPromise = Promise.resolve(
+typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
+).then((m) => meta.renderMeta(m));

+// -- html --
+let page = [engine.kElement, Page, {}];
+if (layout?.default) {
+page = [engine.kElement, layout.default, { children: page }];
+}
+const bodyPromise = engine.ssrAsync(page, {
+sitegen: sg.initRender(),
+});

+const [{ text, addon }, renderedMeta] = await Promise.all([
+bodyPromise,
+renderedMetaPromise,
+]);
+if (!renderedMeta.includes("<title>")) {
+throw new Error(
+"Page is missing 'meta.title'. " + "All pages need a title tag.",
+);
+}
+const styleKey = css.styleKey(cssImports, theme);
+return {
+id: item.id,
+html: text,
+meta: renderedMeta,
+cssImports,
+theme: theme ?? null,
+styleKey,
+clientRefs: Array.from(addon.sitegen.scripts),
+};
+}

+export async function prepareView(io: Io, item: sg.FileItem) {
+const module = await io.import<any>(item.file);
+if (!module.meta) {
+throw new Error(`${item.file} is missing 'export const meta'`);
+}
+if (!module.default) {
+throw new Error(`${item.file} is missing a default export.`);
+}
+const pageTheme = module.layout?.theme ?? module.theme;
+const theme: css.Theme = {
+...css.defaultTheme,
+...pageTheme,
+};
+const cssImports = Array.from(
+new Set([globalCssPath, ...hot.getCssImports(item.file)]),
+(file) => path.relative(hot.projectSrc, file),
+);
+const styleKey = css.styleKey(cssImports, theme);
+return {
+file: path.relative(hot.projectRoot, item.file),
+cssImports,
+theme,
+clientRefs: hot.getClientScriptRefs(item.file),
+hasLayout: !!module.layout?.default,
+styleKey,
+};
+}

+export function prepareInlineCss(
+items: Array<{
+styleKey: string;
+cssImports: string[];
+theme: css.Theme;
+}>,
+) {
+const map = new Map<string, incr.Ref<string>>();
+for (const { styleKey, cssImports, theme } of items) {
+if (map.has(styleKey)) continue;
+map.set(
+styleKey,
+incr.work(css.bundleCssFiles, {
+cssImports,
+theme,
+dev: false,
+}),
+);
+}
+return map;
+}

+type PreparedPage = Awaited<ReturnType<typeof preparePage>>;
+export async function assembleAndWritePage(
+pageWork: Promise<PreparedPage>,
+styleMap: Map<string, incr.Ref<string>>,
+scriptWork: incr.Ref<Record<string, string>>,
+) {
+const page = await pageWork;
+return incr.work(
+async (io, { id, html, meta, styleKey, clientRefs }) => {
+const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));

+const scriptIds = clientRefs.map(hot.getScriptId);
+const scriptMap = await io.readWork(scriptWork);
+const scripts = scriptIds.map((ref) =>
+UNWRAP(scriptMap[ref], `Missing script ${ref}`)
+)
+.map((x) => `{${x}}`).join("\n");

+const doc = wrapDocument({
+body: html,
+head: meta,
+inlineCss,
+scripts,
+});
+io.writeAsset(id, doc, {
+"Content-Type": "text/html",
+});
+},
+page,
+);
+}

 import * as sg from "#sitegen";
-import * as incr from "./incremental2.ts";
+import * as incr from "./incremental.ts";
+import { Io } from "./incremental.ts";
 import { OnceMap, Queue } from "#sitegen/async";
 import * as bundle from "./bundle.ts";
 import * as css from "./css.ts";

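The rebuilt pipeline expresses every step as an `incr.work` unit: `readManifest`, `scanSiteSection`, `preparePage`/`prepareView`, CSS bundling via `prepareInlineCss`, and `assembleAndWritePage`, with cross-step reads declared through `io.readWork`. A hedged sketch of what an additional step could look like in the same style; the sitemap writer is invented for illustration and is not part of this change:

```ts
// Hypothetical extra build step, written in the style introduced above.
import * as incr from "./incremental.ts";
import type { Io } from "./incremental.ts";

export function writeSitemap(pageIds: string[]) {
  return incr.work(async (io: Io, ids: string[]) => {
    const body = ids.map((id) => `https://example.com${id}`).join("\n");
    // Assets written through Io are emitted when the build serializes.
    io.writeAsset("/sitemap.txt", body, { "Content-Type": "text/plain" });
  }, pageIds);
}
```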
@@ -114,7 +114,7 @@ function loadEsbuild(module: NodeJS.Module, filepath: string) {
 interface LoadOptions {
 scannedClientRefs?: string[];
 }
-function loadEsbuildCode(
+export function loadEsbuildCode(
 module: NodeJS.Module,
 filepath: string,
 src: string,

@@ -155,7 +155,7 @@ function loadEsbuildCode(
 return module._compile(src, filepath, "commonjs");
 }

-function resolveClientRef(sourcePath: string, ref: string) {
+export function resolveClientRef(sourcePath: string, ref: string) {
 const filePath = resolveFrom(sourcePath, ref);
 if (
 !filePath.endsWith(".client.ts") &&

@@ -166,44 +166,10 @@ function resolveClientRef(sourcePath: string, ref: string) {
 return path.relative(projectSrc, filePath);
 }

-// TODO: extract the marko compilation tools out, lazy load them
+let lazyMarko: typeof import('./marko.ts') | null = null;
-export interface MarkoCacheEntry {
-src: string;
-scannedClientRefs: string[];
-}
-export const markoCache = new Map<string, MarkoCacheEntry>();
 function loadMarko(module: NodeJS.Module, filepath: string) {
-let cache = markoCache.get(filepath);
+lazyMarko ??= require<typeof import('./marko.ts')>("./framework/marko.ts");
-if (!cache) {
+lazyMarko.loadMarko(module, filepath);
-let src = fs.readFileSync(filepath, "utf8");
-// A non-standard thing here is Clover Sitegen implements
-// its own client side scripting stuff, so it overrides
-// bare client import statements to it's own usage.
-const scannedClientRefs = new Set<string>();
-if (src.match(/^\s*client\s+import\s+["']/m)) {
-src = src.replace(
-/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
-(_, src) => {
-const ref = JSON.parse(`"${src.slice(1, -1)}"`);
-const resolved = resolveClientRef(filepath, ref);
-scannedClientRefs.add(resolved);
-return `<CloverScriptInclude=${
-JSON.stringify(getScriptId(resolved))
-} />`;
-},
-) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
-}

-src = marko.compileSync(src, filepath).code;
-src = src.replace("marko/debug/html", "#ssr/marko");
-cache = { src, scannedClientRefs: Array.from(scannedClientRefs) };
-markoCache.set(filepath, cache);
-}

-const { src, scannedClientRefs } = cache;
-return loadEsbuildCode(module, filepath, src, {
-scannedClientRefs,
-});
 }

 function loadMdx(module: NodeJS.Module, filepath: string) {

@@ -345,11 +311,10 @@ declare module "node:module" {
 ): unknown;
 }

-import * as fs from "./lib/fs.ts";
+import * as fs from "#sitegen/fs";
 import * as path from "node:path";
 import { pathToFileURL } from "node:url";
 import * as esbuild from "esbuild";
-import * as marko from "@marko/compiler";
 import { createRequire } from "node:module";
 import * as mdx from "@mdx-js/mdx";
 import * as self from "./hot.ts";

@@ -1,657 +1,354 @@
-// Incremental contains multiple maps for the different kinds
+// Incremental compilation framework
-// of Artifact, which contain a list of source files which
+let running = false;
-// were used to produce it. When files change, Incremental sees
+let seenWorks = new Set<string>();
-// that the `mtime` is newer, and purges the referenced artifacts.
+let works = new Map<string, Work>();
+let files = new Map<string, File>();
+let assets = new Map<string, Asset>();

-type SourceId = string; // relative to project root, e.g. 'src/global.css'
+export interface Ref<T> extends Promise<T> {
-type ArtifactId = string; // `${ArtifactType}\0${string}`
-type Sha1Id = string; // Sha1 hex string

-// -- artifact types --
-interface ArtifactMap {
-/* An asset (serve with "#sitegen/asset" */
-asset: Asset;
-/* The bundled text of a '.client.ts' script */
-// TODO: track imports this has into `asset`
-script: string;
-/* The bundled style tag contents. Keyed by 'css.styleKey' */
-style: string;
-/* Metadata about a static page */
-pageMetadata: PageMetadata;
-/* Metadata about a dynamic view */
-viewMetadata: ViewMetadata;
-/* Cached '.marko' server compilation */
-serverMarko: hot.MarkoCacheEntry;
-/* Backend source code, pre-replacement. Keyed by platform type. */
-backendBundle: BackendBundle;
-/* One file in the backend receives post-processing. */
-backendReplace: Buffer;
-}
-type ArtifactKind = keyof ArtifactMap;
-/* Automatic path tracing is performed to make it so that
-* specifying 'sources: [file]' refers to it and everything it imports.
-* These kinds do not have that behavior
-*/
-const exactDependencyKinds = ["serverMarko"];
-export interface Asset {
-buffer: Buffer;
-headers: Record<string, string | undefined>;
-hash: string;
-}
-/**
-* This interface intentionally omits the *contents*
-* of its scripts and styles for fine-grained rebuilds.
-*/
-export interface PageMetadata {
-html: string;
-meta: string;
-cssImports: string[];
-theme: css.Theme;
-clientRefs: string[];
-}
-/**
-* Like a page, this intentionally omits resources,
-* but additionally omits the bundled server code.
-*/
-export interface ViewMetadata {
-file: string;
-// staticMeta: string | null; TODO
-cssImports: string[];
-theme: css.Theme;
-clientRefs: string[];
-hasLayout: boolean;
-}
-export interface BackendBundle {
-magicWord: string;
-fileWithMagicWord: string | null;
-files: Record<string, Buffer>;
-}

-// -- incremental support types --
-export interface PutBase {
-sources: SourceId[];
 key: string;
 }
-export interface Put<T extends ArtifactKind> extends PutBase {
-kind: T;
-value: ArtifactMap[T];
-}
-export interface Invalidations {
-lastModified: number;
-outputs: Set<ArtifactId>;
-files: Set<SourceId>;
-}

-export class Incremental {
+/**
-/** The generated artifacts */
+* Declare and begin a unit of work. Return value is memoized and
-out: {
+* only re-run when inputs (via `Io`) change. Outputs are written
-[K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
+* at the end of a compilation (see `compile`).
-} = {
-asset: new Map(),
-script: new Map(),
-style: new Map(),
-pageMetadata: new Map(),
-viewMetadata: new Map(),
-serverMarko: new Map(),
-backendBundle: new Map(),
-backendReplace: new Map(),
-};
-/** Tracking filesystem entries to `srcId` */
-invals = new Map<SourceId, Invalidations>();
-/** Tracking output keys to files */
-sources = new Map<ArtifactId, SourceId[]>();

-/** Compressed resources */
-compress = new Map<Sha1Id, Compressed>();
-compressQueue = new Queue<CompressJob, void>({
-name: "Compress",
-maxJobs: 5,
-fn: this.compressImpl.bind(this),
-passive: true,
-getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
-});

-/** Reset at the end of each update */
-round = {
-inserted: new Set<ArtifactId>(),
-referenced: new Set<ArtifactId>(),
-};

-getArtifact<T extends ArtifactKind>(kind: T, key: string) {
-this.round.referenced.add(`${kind}\0${key}`);
-return this.out[kind].get(key);
-}

-hasArtifact(kind: ArtifactKind, key: string) {
-return this.getArtifact(kind, key) != null;
-}

-sourcesFor(kind: ArtifactKind, key: string) {
-return UNWRAP(
-this.sources.get(kind + "\0" + key),
-`No artifact '${kind}' '${key}'`,
-);
-}

-shake() {
-const toPublic = (str: string) => {
-const [kind, key] = str.split("\0");
-return { kind: kind as ArtifactKind, key };
-};
-const inserted = Array.from(this.round.inserted, toPublic);
-const referenced = Array.from(this.round.referenced, toPublic);
-const unreferenced: { kind: ArtifactKind; key: string }[] = [];

-for (const kind in this.out) {
-const map = this.out[kind as keyof typeof this.out];
-if (!map) continue;
-for (const key of map.keys()) {
-if (!this.round.referenced.has(`${kind}\0${key}`)) {
-unreferenced.push({ kind: kind as ArtifactKind, key });
-// this.out[kind as ArtifactKind].delete(key);
-}
-}
-}

-this.round.inserted.clear();
-this.round.referenced.clear();

-return { inserted, referenced, unreferenced };
-}

-/*
-* Put built artifacts into the incremental cache. The entry points
-* used to build this must be provided. 'Incremental' will trace JS
-* imports and file modification times tracked by 'hot.ts'.
 */
-put<T extends ArtifactKind>({
+export function work<O>(job: (io: Io) => Promise<O>): Ref<O>;
-sources,
+export function work<I, O>(job:(io: Io, input: I) => Promise<O>, input: I): Ref<O>;
-kind,
+export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I = null as I): Ref<O> {
-key,
+const keySource = [
-value,
+JSON.stringify(util.getCallSites(2)[1]),
-}: Put<T>) {
+util.inspect(input),
-// These three invariants affect incremental accuracy.
-if (this.round.inserted.has(`${kind}\0${key}`)) {
-console.error(
-`Artifact ${kind}:${key} was inserted multiple times in the same round!`,
-);
-} else if (!this.round.referenced.has(`${kind}\0${key}`)) {
-console.error(
-`Artifact ${kind}:${key} was inserted without checking if (!hasArtifact())`,
-);
-} else if (this.out[kind].has(key)) {
-console.error(
-`Artifact ${kind}:${key} is not stale, but overwritten.`,
-);
-}

-this.out[kind].set(key, value);

-this.round.inserted.add(`${kind}\0${key}`);

-// Update sources information
-ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key);
-sources = sources.map((src) => path.normalize(src));
-const fullKey = `${kind}\0${key}`;
-const prevSources = this.sources.get(fullKey);
-const newSources = new Set(
-sources.map((file) =>
-path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
-),
-);
-this.sources.set(fullKey, [...newSources]);
-for (const source of prevSources ?? []) {
-if (sources.includes(source)) {
-newSources.delete(source);
-continue;
-}
-const invals = UNWRAP(this.invals.get(source));
-ASSERT(invals.outputs.has(fullKey));
-invals.outputs.delete(fullKey);
-}
-// Use reflection from the plugin system to get imports.
-for (const source of newSources) {
-const invals = this.#getOrInitInvals(source);
-invals.outputs.add(fullKey);
-this.#followImports(source);
-}
-}

-// TODO: this doesnt remove stuff when it disappeary
-#getOrInitInvals(source: string) {
-let invals = this.invals.get(source);
-if (!invals) {
-const lastModified = hot.getFileStat(source)?.lastModified ??
-fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
-this.invals.set(
-source,
-invals = {
-lastModified,
-files: new Set(),
-outputs: new Set(),
-},
-);
-}
-return invals;
-}

-#followImports(file: string) {
-const stat = hot.getFileStat(file);
-if (!stat) return;
-for (const i of stat.imports) {
-const invals = this.#getOrInitInvals(i);
-invals.files.add(file);
-this.#followImports(i);
-}
-}

-async statAllFiles() {
-for (const file of this.invals.keys()) {
-try {
-const mtime = fs.statSync(file).mtimeMs;
-this.updateStat(file, mtime);
-} catch (err) {
-}
-}
-}

-updateStat(file: string, newLastModified: number | null) {
-file = path.relative(hot.projectRoot, file);
-const stat = this.invals.get(file);
-ASSERT(stat, "Updated stat on untracked file " + file);
-const hasUpdate = !newLastModified || stat.lastModified < newLastModified;
-if (hasUpdate) {
-// Invalidate
-console.info(file + " " + (newLastModified ? "updated" : "deleted"));
-hot.unload(file);
-const invalidQueue = [file];
-let currentInvalid;
-while (currentInvalid = invalidQueue.pop()) {
-const invalidations = this.invals.get(currentInvalid);
-ASSERT(
-invalidations,
-"No reason to track file '" + currentInvalid +
-"' if it has no invalidations",
-);
-const { files, outputs } = invalidations;
-for (const out of outputs) {
-const [kind, artifactKey] = out.split("\0");
-this.out[kind as ArtifactKind].delete(artifactKey);
-}
-invalidQueue.push(...files);
-}
-}
-if (newLastModified) {
-stat.lastModified = newLastModified;
-} else {
-this.invals.delete(file);
-}
-return hasUpdate;
-}

-async putAsset(info: PutAsset) {
-const { body, headers, key } = info;
-const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body);
-const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer))
-.toString("hex");
-const value: Asset = {
-buffer,
-headers: {
-"Content-Type": headers?.["Content-Type"] ?? mime.contentTypeFor(key),
-"ETag": JSON.stringify(hash),
-...headers,
-},
-hash,
-};
-const a = this.put({ ...info, kind: "asset", value });
-if (!this.compress.has(hash)) {
-const label = info.key;
-this.compress.set(hash, {
-zstd: undefined,
-gzip: undefined,
-});
-this.compressQueue.add({ label, buffer, algo: "zstd", hash });
-this.compressQueue.add({ label, buffer, algo: "gzip", hash });
-}
-return a;
-}

-async compressImpl({ algo, buffer, hash }: CompressJob) {
-let out;
-if (algo === "zstd") out = await zstd(buffer);
-else if (algo === "gzip") out = await gzip(buffer, { level: 9 });
-else algo satisfies never;

-let entry = this.compress.get(hash);
-if (!entry) {
-this.compress.set(
-hash,
-entry = {
-zstd: undefined,
-gzip: undefined,
-},
-);
-}
-entry![algo] = out;
-}

-serialize() {
-const writer = new BufferWriter();

-// -- artifact --
-const asset = Array.from(
-this.out.asset,
-([key, { buffer, hash, headers }]) => {
-const raw = writer.write(buffer, hash);
-const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
-const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
-const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
-return [key, {
-raw,
-gzip,
-zstd,
-hash,
-headers: headers as Record<string, string>,
-}] satisfies SerializedMeta["asset"][0];
-},
-);
-const script = Array.from(this.out.script);
-const style = Array.from(this.out.style);
-const pageMetadata = Array.from(this.out.pageMetadata);
-const viewMetadata = Array.from(this.out.viewMetadata);
-const serverMarko = Array.from(this.out.serverMarko);
-const backendBundle = Array.from(this.out.backendBundle, ([k, v]) => {
-return [k, {
-magicWord: v.magicWord,
-fileWithMagicWord: v.fileWithMagicWord,
-files: Object.entries(v.files).map(
-([file, contents]) => [
-file,
-writer.write(contents, "backendBundle" + k + ":" + file),
-],
-),
-}] satisfies SerializedMeta["backendBundle"][0];
-});
-const backendReplace = Array.from(
-this.out.backendReplace,
-([k, v]) =>
-[
-k,
-writer.write(v, "backendReplace" + k),
-] satisfies SerializedMeta["backendReplace"][0],
-);
-// -- incremental metadata --
-const invals = Array.from(this.invals, ([key, value]) => {
-const { lastModified, files, outputs } = value;
-return [key, {
-m: lastModified,
-f: [...files],
-o: [...outputs],
-}] satisfies SerializedMeta["invals"][0];
-});
-const sources = Array.from(this.sources, ([key, value]) => {
-return [key, ...value] as [string, ...string[]];
-});
-const json = {
-asset,
-script,
-invals,
-sources,
-style,
-pageMetadata,
-viewMetadata,
-serverMarko,
-backendBundle,
-backendReplace,
-} satisfies SerializedMeta;
-const meta = Buffer.from(JSON.stringify(json), "utf-8");

-const lengthBuffer = Buffer.alloc(4);
-lengthBuffer.writeUInt32LE(meta.byteLength, 0);

-return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
-}

-static fromSerialized(buffer: Buffer): Incremental {
-const metaLength = buffer.readUint32LE(0);
-const meta: SerializedMeta = JSON.parse(
-buffer.subarray(4, 4 + metaLength).toString("utf8"),
-);
-const view = ([start, end]: View) =>
-buffer.subarray(4 + metaLength + start, 4 + metaLength + end);

-const incr = new Incremental();
-incr.out = {
-asset: new Map(meta.asset.map(([key, value]) => {
-const { hash, raw, gzip, zstd, headers } = value;
-if ((gzip || zstd) && !incr.compress.has(hash)) {
-incr.compress.set(hash, {
-gzip: gzip ? view(gzip) : undefined,
-zstd: zstd ? view(zstd) : undefined,
-});
-}
-return [key, {
-buffer: view(raw),
-headers: headers,
-hash: hash,
-}];
-})),
-script: new Map(meta.script),
-style: new Map(meta.style),
-pageMetadata: new Map(meta.pageMetadata),
-viewMetadata: new Map(meta.viewMetadata),
-serverMarko: new Map(meta.serverMarko),
-backendBundle: new Map(meta.backendBundle.map(([key, value]) => {
-return [key, {
-magicWord: value.magicWord,
-fileWithMagicWord: value.fileWithMagicWord,
-files: Object.fromEntries(
-value.files.map(([file, contents]) => [file, view(contents)]),
-),
-}];
-})),
-backendReplace: new Map(
-meta.backendReplace.map(([key, contents]) => [key, view(contents)]),
-),
-};
-incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
-return [key, {
-lastModified: m,
-files: new Set(f),
-outputs: new Set(o),
-}];
-}));
-incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
-return incr;
-}

-/*
-* Move the cached (server) marko transpilations from this incremental
-* into the running process.
-*/
-loadMarkoCache() {
-hot.markoCache.clear();
-for (const [key, value] of this.out.serverMarko) {
-hot.markoCache.set(path.resolve(hot.projectRoot, key), value);
-}
-}

-/*
-* Move the cached (server) marko transpilations from this incremental
-* into the running process.
-*/
-snapshotMarkoCache() {
-for (const [file, value] of hot.markoCache) {
-const key = path.relative(hot.projectRoot, file).replaceAll("\\", "/");
-// Only insert if it doesn't exist. Calling 'put' when it
-// already exists would inform the user of extra calls to put.
-if (!this.hasArtifact("serverMarko", key)) {
-this.put({
-kind: "serverMarko",
-sources: [file],
-key,
-value,
-});
-}
-}
-}

-toDisk(file = ".clover/incr.state") {
-const buffer = this.serialize();
-fs.writeFileSync(file, buffer);
-}

-static fromDisk(file = ".clover/incr.state"): Incremental {
-try {
-const buffer = fs.readFileSync(file);
-return Incremental.fromSerialized(buffer);
-} catch (err: any) {
-if (err.code === "ENOENT") return new Incremental();
-throw err;
-}
-}

-async wait() {
-await this.compressQueue.done({ method: "success" });
-}

-async flush(
-platform: bundle.ServerPlatform,
-dir = path.resolve(".clover/out"),
-) {
-ASSERT(!this.compressQueue.active);
-const join = (...args: string[]) => path.join(dir, ...args);
-const writer = new BufferWriter();

-// TODO: ensure all compressed got compressed

-const asset = Object.fromEntries(
-Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
-const raw = writer.write(buffer, hash);
-const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
-const gzip = writer.write(UNWRAP(gzipBuf), hash + ".gz");
-const zstd = writer.write(UNWRAP(zstdBuf), hash + ".zstd");
-return [key, { raw, gzip, zstd, headers }];
-}),
-);
-const backendBundle = UNWRAP(this.out.backendBundle.get(platform));

-// Arrange output files
-const outFiles: Array<[file: string, contents: string | Buffer]> = [
-// Asset manifest
-["static.json", JSON.stringify(asset)],
-["static.blob", writer.get()],

-// Backend
-...Object.entries(backendBundle.files).map(([subPath, contents]) =>
-[
-subPath,
-subPath === backendBundle.fileWithMagicWord
-? UNWRAP(this.out.backendReplace.get(platform))
-: contents,
-] as [string, Buffer]
-),
 ];
||||||
|
const key = crypto
|
||||||
|
.createHash("sha1")
|
||||||
|
.update(keySource.join(":"))
|
||||||
|
.digest("base64url");
|
||||||
|
ASSERT(
|
||||||
|
!seenWorks.has(key),
|
||||||
|
`Key '${key}' must be unique during the build.` +
|
||||||
|
`To fix this, provide a manual 'key' argument.`,
|
||||||
|
);
|
||||||
|
seenWorks.add(key);
|
||||||
|
|
||||||
// TODO: check duplicates
|
const prev = works.get(key) as Work<O> | null;
|
||||||
|
if (prev) {
|
||||||
|
const promise = Promise.resolve(prev.value) as Ref<O>;
|
||||||
|
promise.key = key;
|
||||||
|
return promise;
|
||||||
|
};
|
||||||
|
|
||||||
// Perform all i/o
|
const io = new Io();
|
||||||
|
const promise = job(io, input).then((value) => {
|
||||||
|
const { needs, writes } = io;
|
||||||
|
|
||||||
|
// Apply the deltas to the graph
|
||||||
|
applyDiff(key, files, [], needs.files);
|
||||||
|
applyDiff(key, works, [], needs.works);
|
||||||
|
|
||||||
|
validateSerializable(value, "");
|
||||||
|
|
||||||
|
works.set(key, {
|
||||||
|
value,
|
||||||
|
affects: [],
|
||||||
|
needs,
|
||||||
|
writes
|
||||||
|
});
|
||||||
|
}) as Ref<O>;
|
||||||
|
promise.key = key;
|
||||||
|
return promise;
|
||||||
|
}
export async function compile<T>(compiler: () => Promise<T>) {
  ASSERT(!running, `Cannot run twice`);
  running = true;
  try {
    const value = await compiler();
    seenWorks.clear();
    ASSERT(!queue.active, `Queue was still running`);
    await queue.done();
    return { value };
  } finally {
    running = false;
  }
}

export function forceInvalidate(file: string) {
  const resolved = toAbs(file);
  const key = toRel(resolved);
  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
}

export function forceInvalidateEntry(entry: { affects: string[] }) {
  const queue = [...entry.affects];
  let key;
  while ((key = queue.shift())) {
    const { needs, affects } = UNWRAP(works.get(key));
    applyDiff(key, files, needs.files, []);
    applyDiff(key, works, needs.works, []);
    works.delete(key);
    queue.push(...affects);
  }
}

export function reset() {
  ASSERT(!running);
  works.clear();
  files.clear();
  assets.clear();
}

export function serialize() {
  // Aiming for a compact JSON format.
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    "lastModified" in v ? "f" : "d",
    "lastModified" in v ? v.lastModified : v.contentHash,
    ...v.affects,
  ]);
  const workEntries = Array.from(works, ([k, v]) => [k, v.value, ...v.affects]);
  return JSON.stringify({
    file: fileEntries,
    work: workEntries,
  });
}

export function serializeToDisk(file = ".clover/incr.state") {
  fs.writeMkdirSync(file, serialize());
}
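// Sketch (not part of the diff): roughly what serialize() above emits, assuming one
// cached work keyed "aB3dE9" that read one file and one directory. All keys and
// values below are invented for illustration.
const exampleIncrState = {
  // [key, "f" (file) | "d" (dir), lastModified | contentHash, ...affected work keys]
  file: [
    ["src/index.tsx", "f", 1712345678901, "aB3dE9"],
    ["src/pages", "d", "Qm9ndXNIYXNo", "aB3dE9"],
  ],
  // [key, serialized return value, ...affected work keys]
  work: [["aB3dE9", { routeCount: 12 }]],
};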
/* Input/Output with automatic tracking.
 * - Inputs read with Io are tracked to know when to rebuild.
 * - Outputs written with Io are deleted when abandoned.
 */
export class Io {
  needs: Needs = {
    files: new Set(),
    works: new Set(),
  };
  writes: Writes = {
    files: new Map(),
    assets: new Map(),
  };

  #trackFs(file: string) {
    const resolved = toAbs(file);
    const key = toRel(resolved);
    this.needs.files.add(key);
    return { resolved, key };
  }
  readWork<T>(ref: Ref<T>): Promise<T> {
    this.needs.works.add(ref.key);
    return ref;
  }
  /** Track a file in the compilation without reading it. */
  async trackFile(file: string) {
    const { key, resolved } = this.#trackFs(file);
    if (!files.get(key)) {
      let lastModified: number = 0;
      try {
        lastModified = (await fs.stat(file)).mtimeMs;
      } catch {}
      files.set(key, {
        affects: [],
        lastModified,
      });
    }
    return resolved;
  }
  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }
  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    let result: string[] = [];
    try {
      result = await fs.readdir(resolved);
      return result;
    } finally {
      const contentHash = crypto
        .createHash("sha1")
        .update(result.join("\0"))
        .digest("base64url");
      files.set(key, {
        affects: [],
        contentHash,
      });
    }
  }
  async readDirRecursive(dir: string): Promise<string[]> {
    const dirs = await this.readDir(dir);
    return (
      await Promise.all(
        dirs.map(async (child) => {
          const abs = path.join(dir, child);
          const stat = await fs.stat(abs);
          if (stat.isDirectory()) {
            return (await this.readDirRecursive(abs)).map((grand) =>
              path.join(child, grand)
            );
          } else {
            return child;
          }
        }),
      )
    ).flat();
  }
  /* Track all dependencies of a module. */
  async import<T>(file: string): Promise<T> {
    const { resolved } = this.#trackFs(file);
    try {
      return require(resolved) as T;
    } finally {
      const queue = [resolved];
      const seen = new Set<string>();
      let current;
      while ((current = queue.shift())) {
        const stat = hot.getFileStat(current);
        if (!stat) continue;
        const { key } = this.#trackFs(current);
        if (!files.get(key)) {
          files.set(key, {
            affects: [],
            lastModified: stat?.lastModified ?? 0,
          });
        }
        for (const imp of stat.imports) {
          if (!seen.has(imp)) {
            seen.add(imp);
            queue.push(imp);
          }
        }
      }
    }
  }
  writeAsset(pathname: string, blob: string | Buffer, headersOption?: HeadersInit) {
    ASSERT(pathname.startsWith("/"));
    const headers = new Headers(headersOption ?? {});
    const hash = crypto.createHash("sha1").update(blob).digest("hex");
    if (!headers.has("Content-Type")) {
      headers.set("Content-Type", mime.contentTypeFor(pathname));
    }
    headers.set("ETag", JSON.stringify(hash));
    ASSERT(!this.writes.assets.has(pathname));
    this.writes.assets.set(pathname, {
      hash,
      // @ts-expect-error TODO
      headers: Object.fromEntries(headers),
    });
  }
  writeFile(subPath: string, blob: string | Buffer) {
    ASSERT(!this.writes.files.has(subPath));
    this.writes.files.set(
      subPath,
      Buffer.isBuffer(blob) ? blob : Buffer.from(blob),
    );
  }
}
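// Sketch (not part of the diff): how a build step might use Io. The exact shape of
// `work()` in this revision isn't shown here, so treat the wiring as an assumption;
// the Io method calls themselves match the class above. `render` is hypothetical.
async function renderPageSketch(io: Io, input: { page: string }) {
  const source = await io.readFile(`src/pages/${input.page}.marko`); // tracked read
  const siblings = await io.readDirRecursive("src/pages");           // tracked dir walk
  io.writeAsset(`/pages/${input.page}.html`, render(source), {
    "Cache-Control": "max-age=60",
  });
  return { page: input.page, siblingCount: siblings.length };
}
declare function render(src: string): string; // hypothetical, for the sketch only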
function applyDiff(
  key: string,
  list: Map<string, { affects: string[] }>,
  beforeIter: Iterable<string>,
  afterIter: Iterable<string>,
) {
  const before = Array.from(beforeIter);
  const after = Array.from(afterIter);
  for (const add of after.filter((x) => !before.includes(x))) {
    const { affects } = UNWRAP(list.get(add));
    ASSERT(!affects.includes(key));
    affects.push(key);
  }
  for (const remove of before.filter((x) => !after.includes(x))) {
    const { affects } = UNWRAP(list.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
}
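// Worked example (not part of the diff): keys and file names below are invented.
// Suppose work "w1" previously needed "a.ts" and "b.ts" and now needs "b.ts" and
// "c.ts". Applying the delta updates the reverse edges:
const demoFiles = new Map([
  ["a.ts", { affects: ["w1"] }],
  ["b.ts", { affects: ["w1"] }],
  ["c.ts", { affects: [] as string[] }],
]);
applyDiff("w1", demoFiles, ["a.ts", "b.ts"], ["b.ts", "c.ts"]);
// demoFiles now reads: a.ts -> [], b.ts -> ["w1"], c.ts -> ["w1"], so editing c.ts
// later invalidates w1 while editing a.ts no longer does.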
export function validateSerializable(value: unknown, key: string) {
  if (typeof value === "string") {
    if (value.includes(hot.projectRoot)) {
      throw new Error(
        `Return value must not contain the CWD for portability, found at ${key}`,
      );
    }
  } else if (value && typeof value === "object") {
    if (Array.isArray(value)) {
      value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
    } else if (Object.getPrototypeOf(value) === Object.prototype) {
      Object.entries(value).forEach(([k, v]) =>
        validateSerializable(v, `${key}.${k}`)
      );
    } else {
      throw new Error(
        `Return value must be a plain JS object, found ${
          Object.getPrototypeOf(value).constructor.name
        } at ${key}`,
      );
    }
  } else if (["bigint", "function", "symbol"].includes(typeof value)) {
    throw new Error(
      `Return value must be a plain JS object, found ${typeof value} at ${key}`,
    );
  }
}
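// Sketch (not part of the diff): values that pass or fail the check above. The
// specific values are invented.
validateSerializable({ routes: ["/", "/about"], count: 2 }, ""); // ok: plain data
validateSerializable(123n, "");       // throws: bigint is not serializable
validateSerializable(new Date(), ""); // throws: not a plain JS object
validateSerializable(`${process.cwd()}/out.html`, ""); // throws when hot.projectRoot appears in the string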
export function toAbs(absPath: string) {
  return path.resolve(hot.projectRoot, absPath);
}

export function toRel(absPath: string) {
  return path.relative(hot.projectRoot, absPath).replaceAll("\\", "/");
}

type BufferView = [start: number, end: number];
type File = TrackedFile | TrackedDir;
interface Needs {
  files: Set<string>;
  works: Set<string>;
}
interface Writes {
  files: Map<string, Buffer>;
  assets: Map<string, {
    hash: string,
    headers: Record<string, string>
  }>;
}
interface Asset {
  raw: Buffer;
  gzip: Buffer;
  zstd: Buffer;
  refs: number;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
  writes: Writes;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}

export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}

export interface BuiltAsset {
  raw: BufferView;
  gzip: BufferView;
  zstd: BufferView;
  headers: Record<string, string>;
}

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import * as mime from "#sitegen/mime";
import type { View } from "#sitegen/view";

export interface PutAsset extends PutBase {
  body: string | Buffer;
  headers?: Record<string, string | undefined>;
}

export interface Compressed {
  gzip?: Buffer;
  zstd?: Buffer;
}

export interface CompressJob {
  algo: "zstd" | "gzip";
  buffer: Buffer;
  label: string;
  hash: string;
}

class BufferWriter {
  size = 0;
  seen = new Map<string, View>();
  buffers: Buffer[] = [];

  write(buffer: Buffer, hash: string): View {
    let view = this.seen.get(hash);
    if (view) return view;
    view = [this.size, this.size += buffer.byteLength];
    this.seen.set(hash, view);
    this.buffers.push(buffer);
    return view;
  }

  get() {
    return Buffer.concat(this.buffers);
  }
}

export type View = [start: number, end: number];

export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}

export interface BuiltAsset {
  raw: View;
  gzip: View;
  zstd: View;
  headers: Record<string, string>;
}

export interface SerializedMeta {
  asset: Array<[route: string, data: {
    raw: View;
    gzip: View | null;
    zstd: View | null;
    hash: string;
    headers: Record<string, string>;
  }]>;
  script: Array<[key: string, value: string]>;
  style: Array<[key: string, value: string]>;
  pageMetadata: Array<[key: string, PageMetadata]>;
  viewMetadata: Array<[key: string, ViewMetadata]>;
  serverMarko: Array<[key: string, hot.MarkoCacheEntry]>;
  backendBundle: Array<[platform: string, {
    magicWord: string;
    fileWithMagicWord: string | null;
    files: Array<[string, View]>;
  }]>;
  backendReplace: Array<[key: string, View]>;

  invals: Array<[key: string, {
    /** Modified */
    m: number;
    f: SourceId[];
    o: ArtifactId[];
  }]>;
  sources: Array<[string, ...string[]]>;
}

const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);

import * as fs from "#sitegen/fs";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "#sitegen/async";
import * as hot from "./hot.ts";
import * as mime from "#sitegen/mime";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import * as css from "./css.ts";
import type * as bundle from "./bundle.ts";
@ -1,264 +0,0 @@
// Incremental compilation framework built on a singleton function
// `work(label, inputs, io => promise)`. By using the I/O interface
// to pull input, dependencies are tracked for you, including pesky
// error conditions. This way, the file watching system always recovers.
let running = false;
let seenWorks = new Set<string>();
let works = new Map<string, Work>();
let files = new Map<string, File>();
let queue = new async.Queue({
  name: "sitegen!",
  fn: (
    item: { label: string; run: (spin: Spinner) => Promise<unknown> },
    spin,
  ) => item.run(spin),
  passive: true,
  getItemText: (item) => item.label,
  maxJobs: navigator.hardwareConcurrency,
});

interface Job<T> {
  label: string;
  wait?: Ref | null | (Ref | null)[];
  key?: unknown;
  cores?: number;
  run: (io: Io) => Promise<T>;
}

export function work<T>(job: Job<T>): Promise<T> {
  const key = crypto.createHash("sha1").update([
    JSON.stringify(util.getCallSites(2)[1]),
    util.inspect(job.key),
  ].join(":")).digest("base64url");
  ASSERT(!seenWorks.has(key), `Key '${key}' must be unique during the build.`);

  // Decide if the cached work is OK
  const prev = works.get(key) as Work<T>;
  if (prev?.value) return Promise.resolve(prev.value);

  const promise = Promise.all([job.wait].flat()).then(() =>
    queue.addReturn({
      label: job.label,
      run: async (spin) => {
        // Perform the work
        const io = new Io(spin);
        const value = await job.run(io);
        const { needs } = io;

        // Apply the deltas to the graph
        applyDiff(key, files, prev?.needs?.files ?? [], needs.files);
        applyDiff(key, works, prev?.needs?.works ?? [], needs.works);

        works.set(key, {
          value,
          affects: prev?.affects ?? [],
          needs,
        });

        return value;
      },
    })
  );

  return promise as Promise<T>;
}
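// Sketch (not part of the deleted file): how this work() API was driven. Field
// names follow the Job<T> interface above; the paths and labels are invented, and
// chaining one job off another this way is an assumption rather than something
// shown in the diff.
const routesSketch = work({
  label: "discover routes",
  run: async (io) =>
    (await io.readDir("src/pages")).filter((x) => x.endsWith(".marko")),
});
const routeCountSketch = work({
  label: "count routes",
  run: async () => (await routesSketch).length,
});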
function applyDiff(
  key: string,
  list: Map<string, { affects: string[] }>,
  beforeIter: Iterable<string>,
  afterIter: Iterable<string>,
) {
  const before = Array.from(beforeIter);
  const after = Array.from(afterIter);
  for (const add of after.filter((x) => !before.includes(x))) {
    const { affects } = UNWRAP(list.get(add));
    ASSERT(!affects.includes(key));
    affects.push(key);
  }
  for (const remove of before.filter((x) => !after.includes(x))) {
    const { affects } = UNWRAP(list.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
}

export async function compile<T>(compiler: () => Promise<Ref<T>>) {
  ASSERT(!running, `Cannot run twice`);
  running = true;
  try {
    const ref = await compiler();
    await ref.wait;
    seenWorks.clear();
    ASSERT(!queue.active);
    await queue.done();
    return {
      value: UNWRAP(works.get(ref.key), `Key '${ref.key}' did not finish`)
        .value as T,
    };
  } finally {
    running = false;
  }
}

export function forceInvalidate(file: string) {
  const resolved = path.resolve(hot.projectSrc, file);
  const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
}
export function forceInvalidateEntry(entry: { affects: string[] }) {
  const queue = [...entry.affects];
  let key;
  while ((key = queue.shift())) {
    const { needs, affects } = UNWRAP(works.get(key));
    applyDiff(key, files, needs.files, []);
    applyDiff(key, works, needs.works, []);
    works.delete(key);
    queue.push(...affects);
  }
}

export function reset() {
  ASSERT(!running);
  works.clear();
  files.clear();
}

export function serialize() {
  // Aiming for a compact JSON format.
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    "lastModified" in v ? "f" : "d",
    "lastModified" in v ? v.lastModified : v.contentHash,
    ...v.affects,
  ]);
  const workEntries = Array.from(works, ([k, v]) => [
    k,
    v.value,
    ...v.affects,
  ]);
  return devalue.uneval({
    file: fileEntries,
    work: workEntries,
  });
}
export function serializeToDisk(file = ".clover/incr.state") {
  fs.writeMkdirSync(file, serialize());
}

/* Input/Output with automatic tracking. */
export class Io {
  needs: Needs = {
    files: new Set(),
    works: new Set(),
  };
  constructor(public spin: Spinner) {}

  #trackFs(file: string) {
    const resolved = path.resolve(hot.projectSrc, file);
    const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
    this.needs.files.add(key);
    return { resolved, key };
  }
  async trackFile(file: string) {
    const { key, resolved } = this.#trackFs(file);
    if (!files.get(key)) {
      let lastModified: number = 0;
      try {
        lastModified = (await fs.stat(file)).mtimeMs;
      } catch {}
      files.set(key, {
        affects: [],
        lastModified,
      });
    }
    return resolved;
  }
  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }
  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    let result: string[] = [];
    try {
      result = await fs.readdir(resolved);
      return result;
    } finally {
      const contentHash = crypto.createHash("sha1").update(result.join("\0"))
        .digest("base64url");
      files.set(key, {
        affects: [],
        contentHash,
      });
    }
  }
  async readDirRecursive(dir: string): Promise<string[]> {
    const dirs = await this.readDir(dir);
    return (await Promise.all(dirs.map(async (child) => {
      const abs = path.join(dir, child);
      const stat = await fs.stat(abs);
      if (stat.isDirectory()) {
        return (await this.readDirRecursive(abs)).map((grand) =>
          path.join(child, grand)
        );
      } else {
        return child;
      }
    }))).flat();
  }
  async import<T>(file: string): Promise<T> {
    const { resolved } = this.#trackFs(file);
    try {
      return require(resolved) as T;
    } finally {
      const queue = [resolved];
      const seen = new Set<string>();
      let current;
      while ((current = queue.shift())) {
        const stat = hot.getFileStat(resolved);
        if (!stat) continue;
        const { key } = this.#trackFs(current);
        if (!files.get(key)) {
          files.set(key, {
            affects: [],
            lastModified: stat?.lastModified ?? 0,
          });
        }
        for (const imp of stat.imports) {
          if (!seen.has(imp)) {
            seen.add(imp);
            queue.push(imp);
          }
        }
      }
    }
  }
}

type File = TrackedFile | TrackedDir;
interface Needs {
  files: Set<string>;
  works: Set<string>;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import * as devalue from "devalue";
44 framework/marko.ts Normal file

@ -0,0 +1,44 @@
export interface MarkoCacheEntry {
  src: string;
  scannedClientRefs: string[];
}

export const markoCache = new Map<string, MarkoCacheEntry>();

export function loadMarko(module: NodeJS.Module, filepath: string) {
  let cache = markoCache.get(filepath);
  if (!cache) {
    let src = fs.readFileSync(filepath, "utf8");
    // A non-standard thing here: Clover Sitegen implements its own
    // client-side scripting stuff, so it rewrites bare "client import"
    // statements into its own usage.
    const scannedClientRefs = new Set<string>();
    if (src.match(/^\s*client\s+import\s+["']/m)) {
      src = src.replace(
        /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
        (_, src) => {
          const ref = JSON.parse(`"${src.slice(1, -1)}"`);
          const resolved = hot.resolveClientRef(filepath, ref);
          scannedClientRefs.add(resolved);
          return `<CloverScriptInclude=${
            JSON.stringify(hot.getScriptId(resolved))
          } />`;
        },
      ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
    }

    src = marko.compileSync(src, filepath).code;
    src = src.replace("marko/debug/html", "#ssr/marko");
    cache = { src, scannedClientRefs: Array.from(scannedClientRefs) };
    markoCache.set(filepath, cache);
  }

  const { src, scannedClientRefs } = cache;
  return hot.loadEsbuildCode(module, filepath, src, {
    scannedClientRefs,
  });
}

import * as marko from "@marko/compiler";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
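// Sketch (not part of the new file): exercising the same regex loadMarko uses, on
// an invented one-line .marko source, to show which part of the line gets rewritten
// into a <CloverScriptInclude /> tag.
const demoSrc = `client import "./counter.client.ts";\n<div>hi</div>`;
const demoMatch = demoSrc.match(/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m);
// demoMatch?.[1] === '"./counter.client.ts"' — this quoted ref is what gets passed
// through JSON.parse and hot.resolveClientRef before hot.getScriptId names the
// emitted client script.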
@ -17,9 +17,9 @@ export async function main() {
  const start = performance.now();
  const timerSpinner = new Spinner({
    text: () =>
      `paper clover's scan3 [${
        ((performance.now() - start) / 1000).toFixed(1)
      }s]`,
      `paper clover's scan3 [${((performance.now() - start) / 1000).toFixed(
        1,
      )}s]`,
    fps: 10,
  });
  using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };

@ -38,20 +38,23 @@ export async function main() {
    qList.addMany(items.map((subPath) => path.join(absPath, subPath)));

    if (mediaFile) {
      const deleted = mediaFile.getChildren()
      const deleted = mediaFile
        .getChildren()
        .filter((child) => !items.includes(child.basename))
        .flatMap((child) =>
          child.kind === MediaFileKind.directory
            ? child.getRecursiveFileChildren()
            : child
            : child,
        );

      qMeta.addMany(deleted.map((mediaFile) => ({
      qMeta.addMany(
        deleted.map((mediaFile) => ({
          absPath: path.join(root, mediaFile.path),
          publicPath: mediaFile.path,
          stat: null,
          mediaFile,
        })));
        })),
      );
    }

    return;

@ -96,13 +99,13 @@ export async function main() {
    if (
      mediaFile &&
      mediaFile.date.getTime() < stat.mtime.getTime() &&
      (Date.now() - stat.mtime.getTime()) < monthMilliseconds
      Date.now() - stat.mtime.getTime() < monthMilliseconds
    ) {
      date = mediaFile.date;
      console.warn(
        `M-time on ${publicPath} was likely corrupted. ${
          formatDate(mediaFile.date)
        } -> ${formatDate(stat.mtime)}`,
        `M-time on ${publicPath} was likely corrupted. ${formatDate(
          mediaFile.date,
        )} -> ${formatDate(stat.mtime)}`,
      );
    }
    mediaFile = MediaFile.createFile({

@ -129,7 +132,10 @@ export async function main() {
    await processor.run({ absPath, stat, mediaFile, spin });
    mediaFile.setProcessed(mediaFile.processed | (1 << (16 + index)));
    for (const dependantJob of after) {
      ASSERT(dependantJob.needs > 0, `dependantJob.needs > 0, ${dependantJob.needs}`);
      ASSERT(
        dependantJob.needs > 0,
        `dependantJob.needs > 0, ${dependantJob.needs}`,
      );
      dependantJob.needs -= 1;
      if (dependantJob.needs == 0) qProcess.add(dependantJob);
    }

@ -149,25 +155,27 @@ export async function main() {
    }));
  }

  async function queueProcessors(
    { absPath, stat, mediaFile }: Omit<ProcessFileArgs, "spin">,
  ) {
  async function queueProcessors({
    absPath,
    stat,
    mediaFile,
  }: Omit<ProcessFileArgs, "spin">) {
    const ext = mediaFile.extensionNonEmpty.toLowerCase();
    let possible = processors.filter((p) =>
      p.include ? p.include.has(ext) : !p.exclude?.has(ext)
      p.include ? p.include.has(ext) : !p.exclude?.has(ext),
    );
    if (possible.length === 0) return;

    const hash = possible.reduce((a, b) => a ^ b.hash, 0) | 1;
    ASSERT(hash <= 0xFFFF, `${hash.toString(16)} has no bits above 16 set`);
    ASSERT(hash <= 0xffff, `${hash.toString(16)} has no bits above 16 set`);
    let processed = mediaFile.processed;

    // If the hash has changed, migrate the bitfield over.
    // This also runs when the processor hash is in it's initial 0 state.
    const order = decodeProcessors(mediaFile.processors);
    if ((processed & 0xFFFF) !== hash) {
    if ((processed & 0xffff) !== hash) {
      const previous = order.filter((_, i) =>
        (processed & (1 << (16 + i))) !== 0
      );
      const previous = order.filter(
        (_, i) => (processed & (1 << (16 + i))) !== 0,
      );
      processed = hash;
      for (const { id, hash } of previous) {
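// Sketch (not part of the diff): the layout of `processed` as used above. The
// constants are invented; the bit positions follow the code.
//
//   bits 0..15   combined hash of the active processor set (always odd, due to `| 1`)
//   bit  16 + i  processor at index i in `order` has completed
//
const exampleHash = 0x9a30 | 1;            // imaginary processor-set hash
let exampleProcessed = exampleHash;        // nothing done yet
exampleProcessed |= 1 << (16 + 0);         // mark the first processor as done
const firstDone = (exampleProcessed & (1 << (16 + 0))) !== 0; // true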
@ -182,13 +190,13 @@ export async function main() {
      }
      mediaFile.setProcessors(
        processed,
        possible.map((p) =>
          p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xFF)
        ).join(";"),
        possible
          .map((p) => p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xff))
          .join(";"),
      );
    } else {
      possible = order.map(({ id }) =>
        UNWRAP(possible.find((p) => p.id === id))
        UNWRAP(possible.find((p) => p.id === id)),
      );
    }

@ -225,8 +233,9 @@ export async function main() {

  async function runUndoProcessors(mediaFile: MediaFile) {
    const { processed } = mediaFile;
    const previous = decodeProcessors(mediaFile.processors)
      .filter((_, i) => (processed & (1 << (16 + i))) !== 0);
    const previous = decodeProcessors(mediaFile.processors).filter(
      (_, i) => (processed & (1 << (16 + i))) !== 0,
    );
    for (const { id } of previous) {
      const p = processors.find((p) => p.id === id);
      if (!p) continue;

@ -244,22 +253,23 @@ export async function main() {
  await qProcess.done();

  // Update directory metadata
  const dirs = MediaFile.getDirectoriesToReindex()
    .sort((a, b) => b.path.length - a.path.length);
  const dirs = MediaFile.getDirectoriesToReindex().sort(
    (a, b) => b.path.length - a.path.length,
  );
  for (const dir of dirs) {
    const children = dir.getChildren();

    // readme.txt
    const readmeContent = children.find((x) =>
      x.basename === "readme.txt"
    )?.contents ?? "";
    const readmeContent =
      children.find((x) => x.basename === "readme.txt")?.contents ?? "";

    // dirsort
    let dirsort: string[] | null = null;
    const dirSortRaw =
      children.find((x) => x.basename === ".dirsort")?.contents ?? "";
    if (dirSortRaw) {
      dirsort = dirSortRaw.split("\n")
      dirsort = dirSortRaw
        .split("\n")
        .map((x) => x.trim())
        .filter(Boolean);
    }
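// Sketch (not part of the diff): what the .dirsort parsing above yields for an
// invented file. Blank lines and surrounding whitespace are dropped.
const demoDirSortRaw = "intro.txt\n  photos \n\nnotes.md\n";
const demoDirsort = demoDirSortRaw.split("\n").map((x) => x.trim()).filter(Boolean);
// demoDirsort === ["intro.txt", "photos", "notes.md"]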
@ -284,7 +294,8 @@ export async function main() {
      }
    }

    const dirHash = crypto.createHash("sha1")
    const dirHash = crypto
      .createHash("sha1")
      .update(dir.path + allHashes)
      .digest("hex");

@ -323,19 +334,21 @@ export async function main() {

  console.info(
    "Updated file viewer index in \x1b[1m" +
      ((performance.now() - start) / 1000).toFixed(1) + "s\x1b[0m",
      ((performance.now() - start) / 1000).toFixed(1) +
      "s\x1b[0m",
  );

  MediaFile.db.prepare("VACUUM").run();
  const { duration, count } = MediaFile.db.prepare<
    [],
    { count: number; duration: number }
  >(`
  const { duration, count } = MediaFile.db
    .prepare<[], { count: number; duration: number }>(
      `
    select
      count(*) as count,
      sum(duration) as duration
    from media_files
  `).getNonNull();
      `,
    )
    .getNonNull();

  console.info();
  console.info(

@ -365,7 +378,7 @@ const execFile: typeof execFileRaw = ((
) =>
  execFileRaw(...args).catch((e: any) => {
    if (e?.message?.startsWith?.("Command failed")) {
      if (e.code > (2 ** 31)) e.code |= 0;
      if (e.code > 2 ** 31) e.code |= 0;
      const code = e.signal ? `signal ${e.signal}` : `code ${e.code}`;
      e.message = `${e.cmd.split(" ")[0]} failed with ${code}`;
    }

@ -374,11 +387,7 @@ const execFile: typeof execFileRaw = ((
const ffprobeBin = testProgram("ffprobe", "--help");
const ffmpegBin = testProgram("ffmpeg", "--help");

const ffmpegOptions = [
  "-hide_banner",
  "-loglevel",
  "warning",
];
const ffmpegOptions = ["-hide_banner", "-loglevel", "warning"];

const procDuration: Process = {
  name: "calculate duration",

@ -496,13 +505,10 @@ const procImageSubsets: Process = {
    for (const size of targetSizes) {
      const { w, h } = resizeDimensions(width, height, size);
      for (const { ext, args } of transcodeRules.imagePresets) {
        spin.text = baseStatus +
          ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`;
        spin.text = baseStatus + ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`;

        stack.use(
          await produceAsset(
            `${mediaFile.hash}/${size}${ext}`,
            async (out) => {
          await produceAsset(`${mediaFile.hash}/${size}${ext}`, async (out) => {
            await fs.mkdir(path.dirname(out));
            await fs.rm(out, { force: true });
            await execFile(ffmpegBin!, [

@ -515,8 +521,7 @@ const procImageSubsets: Process = {
              out,
            ]);
            return [out];
            },
          ),
          }),
        );
      }
    }

@ -561,21 +566,17 @@ const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({
      if (config.encoder && typeof config.encoder.videoSrc === "string") {
        const { videoSrc, audioSrc, rate } = config.encoder;
        inputArgs = [
          ...rate ? ["-r", String(rate)] : [],
          ...(rate ? ["-r", String(rate)] : []),
          "-i",
          videoSrc,
          ...audioSrc ? ["-i", audioSrc] : [],
          ...(audioSrc ? ["-i", audioSrc] : []),
        ];
      }
    } catch (err: any) {
      if (err?.code !== "ENOENT") throw err;
    }

    const args = transcodeRules.getVideoArgs(
      preset,
      base,
      inputArgs,
    );
    const args = transcodeRules.getVideoArgs(preset, base, inputArgs);
    try {
      const fakeProgress = new Progress({ text: spin.text, spinner: null });
      fakeProgress.stop();

@ -612,7 +613,9 @@ const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({
const procCompression = [
  { name: "gzip", fn: () => zlib.createGzip({ level: 9 }) },
  { name: "zstd", fn: () => zlib.createZstdCompress() },
].map(({ name, fn }) => ({
].map(
  ({ name, fn }) =>
    ({
      name: `compress ${name}`,
      exclude: rules.extsPreCompressed,
      async run({ absPath, mediaFile }) {

@ -627,7 +630,8 @@ const procCompression = [
        return [base];
        });
      },
} satisfies Process as Process));
    }) satisfies Process as Process,
);

@ -637,16 +641,15 @@ const processors = [
  procImageSubsets,
  ...procVideos,
  ...procCompression,
]
  .map((process, id, all) => {
].map((process, id, all) => {
  const strIndex = (id: number) =>
    String.fromCharCode("a".charCodeAt(0) + id);
  const strIndex = (id: number) => String.fromCharCode("a".charCodeAt(0) + id);
  return {
    ...process as Process,
    ...(process as Process),
    id: strIndex(id),
    // Create a unique key.
    hash: new Uint16Array(
      crypto.createHash("sha1")
      crypto
        .createHash("sha1")
        .update(
          process.run.toString() +
            (process.version ? String(process.version) : ""),

@ -676,10 +679,7 @@ async function produceAsset(
  if (asset.refs === 1) {
    const paths = await builder(path.join(workDir, key));
    asset.addFiles(
      paths.map((file) =>
        path.relative(workDir, file)
          .replaceAll("\\", "/")
      ),
      paths.map((file) => path.relative(workDir, file).replaceAll("\\", "/")),
    );
  }
  return {

@ -719,7 +719,7 @@ interface ProcessJob {
  absPath: string;
  stat: fs.Stats;
  mediaFile: MediaFile;
  processor: typeof processors[0];
  processor: (typeof processors)[0];
  index: number;
  after: ProcessJob[];
  needs: number;