rewrite incremental #21
11 changed files with 767 additions and 1298 deletions
@@ -1,17 +1,20 @@
 // This file implements client-side bundling, mostly wrapping esbuild.
+bundleClientJavaScript.label = "bundle client-side javascript";
 export async function bundleClientJavaScript(
-  referencedScripts: string[],
-  extraPublicScripts: string[],
-  incr: Incremental,
-  dev: boolean = false,
+  io: Io,
+  { clientRefs, extraPublicScripts, dev = false }: {
+    clientRefs: string[],
+    extraPublicScripts: string[],
+    dev: boolean
+  }
 ) {
   const entryPoints = [
     ...new Set([
-      ...referencedScripts.map((file) => path.resolve(hot.projectSrc, file)),
+      ...clientRefs.map(x => `src/${x}`),
       ...extraPublicScripts,
-    ]),
+    ].map(toAbs)),
   ];
-  if (entryPoints.length === 0) return;
+  if (entryPoints.length === 0) return {};
   const invalidFiles = entryPoints
     .filter((file) => !file.match(/\.client\.[tj]sx?/));
   if (invalidFiles.length > 0) {
@@ -24,7 +27,7 @@ export async function bundleClientJavaScript(
   const clientPlugins: esbuild.Plugin[] = [
     projectRelativeResolution(),
-    markoViaBuildCache(incr),
+    markoViaBuildCache(),
   ];

   const bundle = await esbuild.build({
@@ -65,7 +68,7 @@ export async function bundleClientJavaScript(
     )
   );
   const { metafile, outputFiles } = bundle;
-  const promises: Promise<void>[] = [];
+  const scripts: Record<string, string> = {};
   for (const file of outputFiles) {
     const { text } = file;
     let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
@@ -78,33 +81,27 @@ export async function bundleClientJavaScript(
     if (!chunk) {
       const key = hot.getScriptId(path.resolve(sources[sources.length - 1]));
       route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
-      incr.put({
-        sources,
-        kind: "script",
-        key,
-        value: text,
-      });
+      scripts[key] = text;
     }
     // Register chunks and public scripts as assets.
     if (chunk || publicScriptRoutes.includes(route)) {
-      promises.push(incr.putAsset({
-        sources,
-        key: route,
-        body: text,
-      }));
+      io.writeAsset(route, text);
     }
   }
-  await Promise.all(promises);
+  return scripts;
 }

 export type ServerPlatform = "node" | "passthru";
 export async function bundleServerJavaScript(
-  incr: Incremental,
-  platform: ServerPlatform = "node",
+  io: Io,
+  {
+    entry,
+    platform,
+  }: {
+    entry: string,
+    platform: ServerPlatform
+  },
 ) {
-  if (incr.hasArtifact("backendBundle", platform)) return;

   // Comment
   const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");

   const viewSource = [
@@ -136,7 +133,7 @@ export async function bundleServerJavaScript(
       "$views": viewSource,
     }),
     projectRelativeResolution(),
-    markoViaBuildCache(incr),
+    markoViaBuildCache(),
     {
       name: "replace client references",
       setup(b) {
@@ -282,39 +279,14 @@ export async function finalizeServerJavaScript(
   });
 }

-function markoViaBuildCache(incr: Incremental): esbuild.Plugin {
-  return {
-    name: "marko via build cache",
-    setup(b) {
-      b.onLoad(
-        { filter: /\.marko$/ },
-        async ({ path: file }) => {
-          const key = path.relative(hot.projectRoot, file)
-            .replaceAll("\\", "/");
-          const cacheEntry = incr.out.serverMarko.get(key);
-          if (!cacheEntry) {
-            if (!fs.existsSync(file)) {
-              console.log(`File does not exist: ${file}`);
-            }
-            throw new Error("Marko file not in cache: " + file);
-          }
-          return ({
-            loader: "ts",
-            contents: cacheEntry.src,
-            resolveDir: path.dirname(file),
-          });
-        },
-      );
-    },
-  };
-}
-
 import * as esbuild from "esbuild";
 import * as path from "node:path";
 import process from "node:process";
 import * as hot from "./hot.ts";
-import { projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
-import { Incremental } from "./incremental.ts";
+import { markoViaBuildCache, projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
+import { Io, toAbs, toRel } from "./incremental.ts";
 import * as css from "./css.ts";
 import * as fs from "#sitegen/fs";
 import * as mime from "#sitegen/mime";
+import * as incr from './incremental.ts';
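Editor's note: bundleClientJavaScript no longer writes into an Incremental instance; it returns the bundled script map and lets the build graph own it. A minimal sketch of the caller side, assembled from calls that appear later in this diff (the wiring around it is assumed):

// TypeScript sketch; `clientRefs` and `extraPublicScripts` are placeholders.
const scriptWork = incr.work(bundle.bundleClientJavaScript, {
  clientRefs,
  extraPublicScripts,
  dev: false,
});
// Inside another unit of work, reading it records a dependency edge:
const scriptMap: Record<string, string> = await io.readWork(scriptWork);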
@@ -40,11 +40,6 @@ export function preprocess(css: string, theme: Theme): string {
   );
 }

-export interface Output {
-  text: string;
-  sources: string[];
-}
-
 export function styleKey(
   cssImports: string[],
   theme: Theme,
@@ -60,11 +55,14 @@ export function styleKey(
 }

 export async function bundleCssFiles(
-  cssImports: string[],
-  theme: Theme,
-  dev: boolean = false,
-): Promise<Output> {
-  cssImports = cssImports.map((file) => path.resolve(hot.projectSrc, file));
+  io: Io,
+  { cssImports, theme, dev }: {
+    cssImports: string[],
+    theme: Theme,
+    dev: boolean,
+  }
+) {
+  cssImports = await Promise.all(cssImports.map((file) => io.trackFile(file)));
   const plugin = {
     name: "clover css",
     setup(b) {
@@ -106,15 +104,11 @@ export async function bundleCssFiles(
     throw new AggregateError(warnings, "CSS Build Failed");
   }
   if (outputFiles.length > 1) throw new Error("Too many output files");
-  return {
-    text: outputFiles[0].text,
-    sources: Object.keys(metafile.outputs["$input$.css"].inputs)
-      .filter((x) => !x.startsWith("vfs:")),
-  };
+  return outputFiles[0].text;
 }

 import * as esbuild from "esbuild";
 import * as fs from "#sitegen/fs";
 import * as hot from "./hot.ts";
 import * as path from "node:path";
 import { virtualFiles } from "./esbuild-support.ts";
+import type { Io } from "./incremental.ts";
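Editor's note: bundleCssFiles now takes Io plus a single plain-object input and returns only the bundled text, since io.trackFile replaces the old sources array. A sketch of the intended call, mirroring prepareInlineCss further down in this diff (the import list is illustrative):

// One work unit per unique styleKey; io.trackFile records the css inputs.
const styleWork = incr.work(css.bundleCssFiles, {
  cssImports: ["global.css"], // hypothetical list
  theme: css.defaultTheme,
  dev: false,
});
const inlineCss: string = await io.readWork(styleWork);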
@@ -13,6 +13,7 @@ export function ssrSync<A extends Addons>(node: Node, addon: A = {} as A) {
   const resolved = resolveNode(r, node);
   return { text: renderNode(resolved), addon };
 }
+export { ssrSync as sync };

 export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
   const r = initRender(true, addon);
@@ -20,7 +21,7 @@ export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
   if (r.async === 0) {
     return Promise.resolve({ text: renderNode(resolved), addon });
   }
-  const { resolve, reject, promise } = Promise.withResolvers<Result>();
+  const { resolve, reject, promise } = Promise.withResolvers<Result<A>>();
   r.asyncDone = () => {
     const rejections = r.rejections;
     if (!rejections) return resolve({ text: renderNode(resolved), addon });
@@ -29,6 +30,7 @@ export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
   };
   return promise;
 }
+export { ssrAsync as async };

 /** Inline HTML into a render without escaping it */
 export function html(rawText: ResolvedNode): DirectHtml {
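Editor's note: the Result<A> change keeps the addon type parameter from being erased on the async path. A compressed illustration, with the surrounding types assumed from context:

// Before, Promise.withResolvers<Result>() resolved to an untyped addon.
// After, the addon keeps its shape through the promise:
const { text, addon } = await ssrAsync(page, { sitegen: sg.initRender() });
addon.sitegen; // typed, instead of falling back to a loose Result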
@@ -73,7 +73,35 @@ export function projectRelativeResolution(root = process.cwd() + "/src") {
   } satisfies esbuild.Plugin;
 }

+export function markoViaBuildCache(): esbuild.Plugin {
+  return {
+    name: "marko via build cache",
+    setup(b) {
+      b.onLoad(
+        { filter: /\.marko$/ },
+        async ({ path: file }) => {
+          const cacheEntry = markoCache.get(file);
+          if (!cacheEntry) {
+            if (!fs.existsSync(file)) {
+              console.warn(`File does not exist: ${file}`);
+            }
+            console.log(markoCache.keys());
+            throw new Error("Marko file not in cache: " + file);
+          }
+          return ({
+            loader: "ts",
+            contents: cacheEntry.src,
+            resolveDir: path.dirname(file),
+          });
+        },
+      );
+    },
+  };
+}
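Editor's note: this plugin moved here from bundle.ts and now consults the module-level markoCache rather than an Incremental instance. A sketch of how it plugs into a build, matching the plugin lists earlier in this diff (the entry points are assumed):

await esbuild.build({
  entryPoints, // hypothetical
  bundle: true,
  plugins: [projectRelativeResolution(), markoViaBuildCache()],
});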
 import * as esbuild from "esbuild";
 import * as string from "#sitegen/string";
 import * as path from "node:path";
-import * as hot from "./hot.ts";
+import * as fs from "#sitegen/fs";
+import * as incr from "./incremental.ts";
+import * as hot from "./hot.ts";
+import { markoCache } from "./marko.ts";
@@ -368,40 +368,6 @@ export async function sitegen(
   // -- concatenate static rendered pages --
   status.text = `Concat Pages`;
-  await Promise.all(pages.map(async (page) => {
-    if (incr.hasArtifact("asset", page.id)) return;
-    const {
-      html,
-      meta,
-      cssImports,
-      theme,
-      clientRefs,
-    } = UNWRAP(incr.out.pageMetadata.get(page.id));
-    const scriptIds = clientRefs.map(hot.getScriptId);
-    const styleKey = css.styleKey(cssImports, theme);
-    const style = UNWRAP(
-      incr.out.style.get(styleKey),
-      `Missing style ${styleKey}`,
-    );
-    const doc = wrapDocument({
-      body: html,
-      head: meta,
-      inlineCss: style,
-      scripts: scriptIds.map(
-        (ref) => UNWRAP(incr.out.script.get(ref), `Missing script ${ref}`),
-      ).map((x) => `{${x}}`).join("\n"),
-    });
-    await incr.putAsset({
-      sources: [
-        page.file,
-        ...incr.sourcesFor("style", styleKey),
-        ...scriptIds.flatMap((ref) => incr.sourcesFor("script", ref)),
-      ],
-      key: page.id,
-      body: doc,
-      headers: {
-        "Content-Type": "text/html",
-      },
-    });
-  }));
   status.format = () => "";
   status.text = ``;
@@ -1,162 +1,86 @@
-export async function main() {
-  // const startTime = performance.now();
+const { toRel, toAbs } = incr;
+const globalCssPath = toAbs("src/global.css");

-  // -- readdir to find all site files --
-  const siteConfig = await incr.work({
-    label: "reading manifest",
-    run: (io) => io.import<{ siteSections: sg.Section[] }>("site.ts"),
-  });
+export async function main() {
+  const startTime = performance.now();
+
+  // -- read config and discover files --
+  const siteConfig = await incr.work(readManifest);
   const {
     staticFiles,
     scripts,
     views,
     pages,
-  } = (await Promise.all(
-    siteConfig.siteSections.map(({ root: sectionRoot }) =>
-      incr.work({
-        key: sectionRoot,
-        label: "discovering files in " + sectionRoot,
-        run: (io) => scanSiteSection(io, sectionRoot),
-      })
-    ),
-  )).reduce((acc, next) => ({
+  } = await discoverAllFiles(siteConfig);
+
+  // TODO: make sure that `static` and `pages` does not overlap
+
+  // TODO: loadMarkoCache
+
+  // -- perform build-time rendering --
+  const builtPages = pages.map((item) => incr.work(preparePage, item));
+  const builtViews = views.map((item) => incr.work(prepareView, item));
+  const builtStaticFiles = staticFiles.map((item) =>
+    incr.work(
+      async (io, { id, file }) => io.writeAsset(id, await io.readFile(file)),
+      item,
+    )
+  );
+  const routes = await Promise.all([...builtViews, ...builtPages]);
+
+  // -- bundle server javascript (backend and views) --
+  const backends = siteConfig.backends.map((backend) => incr.work(bundle.bundleServerJavaScript, {}))
+
+  // -- page resources --
+  const scriptMap = incr.work(bundle.bundleClientJavaScript, {
+    clientRefs: routes.flatMap((x) => x.clientRefs),
+    extraPublicScripts: scripts.map((entry) => entry.file),
+    dev: false,
+  });
+  const styleMap = prepareInlineCss(routes);
+
+  // -- backend --
+
+  // -- assemble page assets --
+  const pAssemblePages = builtPages.map((page) =>
+    assembleAndWritePage(page, styleMap, scriptMap)
+  );
+
+  incr.serializeToDisk();
+}
+
+readManifest.label = "reading manifest";
+export async function readManifest(io: Io) {
+  const cfg = await io.import<typeof import("../src/site.ts")>("src/site.ts");
+  return {
+    siteSections: cfg.siteSections.map((section) => ({
+      root: toRel(section.root),
+    })),
+    backends: cfg.backends.map(toRel),
+  };
+}
+
+export async function discoverAllFiles(
+  siteConfig: Awaited<ReturnType<typeof readManifest>>,
+) {
+  return (
+    await Promise.all(
+      siteConfig.siteSections.map(({ root: sectionRoot }) =>
+        incr.work(scanSiteSection, toAbs(sectionRoot))
+      ),
+    )
+  ).reduce((acc, next) => ({
     staticFiles: acc.staticFiles.concat(next.staticFiles),
     pages: acc.pages.concat(next.pages),
     views: acc.views.concat(next.views),
     scripts: acc.scripts.concat(next.scripts),
   }));
+}

-  const globalCssPath = path.join(hot.projectSrc, "global.css");
-
-  // TODO: loadMarkoCache
-
-  const builtPages = pages.map((item) =>
-    incr.work({
-      label: item.id,
-      key: item,
-      async run(io) {
-        // -- load and validate module --
-        let {
-          default: Page,
-          meta: metadata,
-          theme: pageTheme,
-          layout,
-        } = await io.import<any>(item.file);
-        if (!Page) {
-          throw new Error("Page is missing a 'default' export.");
-        }
-        if (!metadata) {
-          throw new Error("Page is missing 'meta' export with a title.");
-        }
-
-        // -- css --
-        if (layout?.theme) pageTheme = layout.theme;
-        const theme: css.Theme = {
-          ...css.defaultTheme,
-          ...pageTheme,
-        };
-        const cssImports = Array.from(
-          new Set([globalCssPath, ...hot.getCssImports(item.file)]),
-          (file) => path.relative(hot.projectSrc, file),
-        );
-
-        // -- metadata --
-        const renderedMetaPromise = Promise.resolve(
-          typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
-        ).then((m) => meta.renderMeta(m));
-
-        // -- html --
-        let page = [engine.kElement, Page, {}];
-        if (layout?.default) {
-          page = [engine.kElement, layout.default, { children: page }];
-        }
-        const bodyPromise = engine.ssrAsync(page, {
-          sitegen: sg.initRender(),
-        });
-
-        const [{ text, addon }, renderedMeta] = await Promise.all([
-          bodyPromise,
-          renderedMetaPromise,
-        ]);
-        if (!renderedMeta.includes("<title>")) {
-          throw new Error(
-            "Page is missing 'meta.title'. " +
-              "All pages need a title tag.",
-          );
-        }
-        const styleKey = css.styleKey(cssImports, theme);
-        return {
-          html: text,
-          meta: renderedMeta,
-          cssImports,
-          theme: theme ?? null,
-          styleKey,
-          clientRefs: Array.from(addon.sitegen.scripts),
-        };
-      },
-    })
-  );
-
-  // const builtViews = views.map((item) =>
-  //   incr.work({
-  //     label: item.id,
-  //     key: item,
-  //     async run(io) {
-  //       const module = require(item.file);
-  //       if (!module.meta) {
-  //         throw new Error(`${item.file} is missing 'export const meta'`);
-  //       }
-  //       if (!module.default) {
-  //         throw new Error(`${item.file} is missing a default export.`);
-  //       }
-  //       const pageTheme = module.layout?.theme ?? module.theme;
-  //       const theme: css.Theme = {
-  //         ...css.defaultTheme,
-  //         ...pageTheme,
-  //       };
-  //       const cssImports = Array.from(
-  //         new Set([globalCssPath, ...hot.getCssImports(item.file)]),
-  //         (file) => path.relative(hot.projectSrc, file),
-  //       );
-  //       const styleKey = css.styleKey(cssImports, theme);
-  //       return {
-  //         file: path.relative(hot.projectRoot, item.file),
-  //         cssImports,
-  //         theme,
-  //         clientRefs: hot.getClientScriptRefs(item.file),
-  //         hasLayout: !!module.layout?.default,
-  //         styleKey,
-  //       };
-  //     },
-  //   })
-  // );
-  //
-  // // -- inline style sheets, used and shared by pages and views --
-  // const builtCss = Promise.all([...builtViews, ...builtPages]).then((items) => {
-  //   const map = new Map<string, {}>();
-  //   for (const { styleKey, cssImports, theme } of items) {
-  //     if (map.has(styleKey)) continue;
-  //     map.set(
-  //       styleKey,
-  //       incr.work({
-  //         label: `bundle css ${styleKey}`,
-  //         async run(io) {
-  //           await Promise.all(cssImports.map((file) => io.trackFile(file)));
-  //           const { text } = await css.bundleCssFiles(cssImports, theme);
-  //           return text;
-  //         },
-  //       }),
-  //     );
-  //   }
-  // });
-
-  // TODO: make sure that `static` and `pages` does not overlap
-  await Promise.all(builtPages);
-  incr.serializeToDisk();
-  // -- bundle server javascript (backend and views) --
-}
-
-async function scanSiteSection(io: incr.Io, sectionRoot: string) {
+scanSiteSection.getLabel = (input: string) =>
+  "discovering files in " + toRel(input);
+export async function scanSiteSection(io: Io, sectionRoot: string) {
   // Static files are compressed and served as-is.
   // - "{section}/static/*.png"
   let staticFiles: FileItem[] = [];
@@ -229,23 +153,164 @@ async function scanSiteSection(io: incr.Io, sectionRoot: string) {
       if (exclude.some((e) => subPath.endsWith(e))) continue;
       const trim = ext
         ? subPath
-        : subPath.slice(0, -path.extname(subPath).length).replaceAll(
-          ".",
-          "/",
-        );
+        : subPath.slice(0, -path.extname(subPath).length).replaceAll(".", "/");
       let id = prefix + trim.replaceAll("\\", "/");
       if (prefix === "/" && id.endsWith("/index")) {
        id = id.slice(0, -"/index".length) || "/";
       }
-      list.push({ id, file: file });
+      list.push({ id, file: path.relative(hot.projectRoot, file) });
     }
   }

   return { staticFiles, pages, views, scripts };
 }

+export async function preparePage(io: Io, item: sg.FileItem) {
+  // -- load and validate module --
+  let {
+    default: Page,
+    meta: metadata,
+    theme: pageTheme,
+    layout,
+  } = await io.import<any>(item.file);
+  if (!Page) {
+    throw new Error("Page is missing a 'default' export.");
+  }
+  if (!metadata) {
+    throw new Error("Page is missing 'meta' export with a title.");
+  }
+
+  // -- css --
+  if (layout?.theme) pageTheme = layout.theme;
+  const theme: css.Theme = {
+    ...css.defaultTheme,
+    ...pageTheme,
+  };
+  const cssImports = Array.from(
+    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
+    (file) => path.relative(hot.projectSrc, file),
+  );
+
+  // -- metadata --
+  const renderedMetaPromise = Promise.resolve(
+    typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
+  ).then((m) => meta.renderMeta(m));
+
+  // -- html --
+  let page = [engine.kElement, Page, {}];
+  if (layout?.default) {
+    page = [engine.kElement, layout.default, { children: page }];
+  }
+  const bodyPromise = engine.ssrAsync(page, {
+    sitegen: sg.initRender(),
+  });
+
+  const [{ text, addon }, renderedMeta] = await Promise.all([
+    bodyPromise,
+    renderedMetaPromise,
+  ]);
+  if (!renderedMeta.includes("<title>")) {
+    throw new Error(
+      "Page is missing 'meta.title'. " + "All pages need a title tag.",
+    );
+  }
+  const styleKey = css.styleKey(cssImports, theme);
+  return {
+    id: item.id,
+    html: text,
+    meta: renderedMeta,
+    cssImports,
+    theme: theme ?? null,
+    styleKey,
+    clientRefs: Array.from(addon.sitegen.scripts),
+  };
+}
+
+export async function prepareView(io: Io, item: sg.FileItem) {
+  const module = await io.import<any>(item.file);
+  if (!module.meta) {
+    throw new Error(`${item.file} is missing 'export const meta'`);
+  }
+  if (!module.default) {
+    throw new Error(`${item.file} is missing a default export.`);
+  }
+  const pageTheme = module.layout?.theme ?? module.theme;
+  const theme: css.Theme = {
+    ...css.defaultTheme,
+    ...pageTheme,
+  };
+  const cssImports = Array.from(
+    new Set([globalCssPath, ...hot.getCssImports(item.file)]),
+    (file) => path.relative(hot.projectSrc, file),
+  );
+  const styleKey = css.styleKey(cssImports, theme);
+  return {
+    file: path.relative(hot.projectRoot, item.file),
+    cssImports,
+    theme,
+    clientRefs: hot.getClientScriptRefs(item.file),
+    hasLayout: !!module.layout?.default,
+    styleKey,
+  };
+}
+
+export function prepareInlineCss(
+  items: Array<{
+    styleKey: string;
+    cssImports: string[];
+    theme: css.Theme;
+  }>,
+) {
+  const map = new Map<string, incr.Ref<string>>();
+  for (const { styleKey, cssImports, theme } of items) {
+    if (map.has(styleKey)) continue;
+    map.set(
+      styleKey,
+      incr.work(css.bundleCssFiles, {
+        cssImports,
+        theme,
+        dev: false,
+      }),
+    );
+  }
+  return map;
+}
+
+type PreparedPage = Awaited<ReturnType<typeof preparePage>>;
+export async function assembleAndWritePage(
+  pageWork: Promise<PreparedPage>,
+  styleMap: Map<string, incr.Ref<string>>,
+  scriptWork: incr.Ref<Record<string, string>>,
+) {
+  const page = await pageWork;
+  return incr.work(
+    async (io, { id, html, meta, styleKey, clientRefs }) => {
+      const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));
+
+      const scriptIds = clientRefs.map(hot.getScriptId);
+      const scriptMap = await io.readWork(scriptWork);
+      const scripts = scriptIds.map((ref) =>
+        UNWRAP(scriptMap[ref], `Missing script ${ref}`)
+      )
+        .map((x) => `{${x}}`).join("\n");
+
+      const doc = wrapDocument({
+        body: html,
+        head: meta,
+        inlineCss,
+        scripts,
+      });
+      io.writeAsset(id, doc, {
+        "Content-Type": "text/html",
+      });
+    },
+    page,
+  );
+}

 import * as sg from "#sitegen";
-import * as incr from "./incremental2.ts";
+import * as incr from "./incremental.ts";
+import { Io } from "./incremental.ts";
 import { OnceMap, Queue } from "#sitegen/async";
 import * as bundle from "./bundle.ts";
 import * as css from "./css.ts";
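Editor's note: the rewritten generate code leans on two conventions from the new incremental.ts: a work unit's identity is a sha1 of its call site plus util.inspect of its input, and progress labels hang off the job function itself. A condensed sketch using names from this diff (the section path is illustrative):

readManifest.label = "reading manifest";
const manifest = incr.work(readManifest); // no input: key comes from the call site
scanSiteSection.getLabel = (input: string) =>
  "discovering files in " + toRel(input);
const section = incr.work(scanSiteSection, toAbs("src")); // input folded into the key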
@@ -114,7 +114,7 @@ function loadEsbuild(module: NodeJS.Module, filepath: string) {
 interface LoadOptions {
   scannedClientRefs?: string[];
 }
-function loadEsbuildCode(
+export function loadEsbuildCode(
   module: NodeJS.Module,
   filepath: string,
   src: string,
@@ -155,7 +155,7 @@ function loadEsbuildCode(
   return module._compile(src, filepath, "commonjs");
 }

-function resolveClientRef(sourcePath: string, ref: string) {
+export function resolveClientRef(sourcePath: string, ref: string) {
   const filePath = resolveFrom(sourcePath, ref);
   if (
     !filePath.endsWith(".client.ts") &&
@@ -166,44 +166,10 @@ function resolveClientRef(sourcePath: string, ref: string) {
   return path.relative(projectSrc, filePath);
 }

-// TODO: extract the marko compilation tools out, lazy load them
-export interface MarkoCacheEntry {
-  src: string;
-  scannedClientRefs: string[];
-}
-export const markoCache = new Map<string, MarkoCacheEntry>();
+let lazyMarko: typeof import('./marko.ts') | null = null;
 function loadMarko(module: NodeJS.Module, filepath: string) {
-  let cache = markoCache.get(filepath);
-  if (!cache) {
-    let src = fs.readFileSync(filepath, "utf8");
-    // A non-standard thing here is Clover Sitegen implements
-    // its own client side scripting stuff, so it overrides
-    // bare client import statements to it's own usage.
-    const scannedClientRefs = new Set<string>();
-    if (src.match(/^\s*client\s+import\s+["']/m)) {
-      src = src.replace(
-        /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
-        (_, src) => {
-          const ref = JSON.parse(`"${src.slice(1, -1)}"`);
-          const resolved = resolveClientRef(filepath, ref);
-          scannedClientRefs.add(resolved);
-          return `<CloverScriptInclude=${
-            JSON.stringify(getScriptId(resolved))
-          } />`;
-        },
-      ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
-    }
-
-    src = marko.compileSync(src, filepath).code;
-    src = src.replace("marko/debug/html", "#ssr/marko");
-    cache = { src, scannedClientRefs: Array.from(scannedClientRefs) };
-    markoCache.set(filepath, cache);
-  }
-
-  const { src, scannedClientRefs } = cache;
-  return loadEsbuildCode(module, filepath, src, {
-    scannedClientRefs,
-  });
+  lazyMarko ??= require<typeof import('./marko.ts')>("./framework/marko.ts");
+  lazyMarko.loadMarko(module, filepath);
 }

 function loadMdx(module: NodeJS.Module, filepath: string) {
@@ -345,11 +311,10 @@ declare module "node:module" {
   ): unknown;
 }

-import * as fs from "./lib/fs.ts";
+import * as fs from "#sitegen/fs";
 import * as path from "node:path";
 import { pathToFileURL } from "node:url";
 import * as esbuild from "esbuild";
-import * as marko from "@marko/compiler";
 import { createRequire } from "node:module";
 import * as mdx from "@mdx-js/mdx";
 import * as self from "./hot.ts";
@@ -1,657 +1,354 @@
-// Incremental contains multiple maps for the different kinds
-// of Artifact, which contain a list of source files which
-// were used to produce it. When files change, Incremental sees
-// that the `mtime` is newer, and purges the referenced artifacts.
+// Incremental compilation framework
+let running = false;
+let seenWorks = new Set<string>();
+let works = new Map<string, Work>();
+let files = new Map<string, File>();
+let assets = new Map<string, Asset>();

-type SourceId = string; // relative to project root, e.g. 'src/global.css'
-type ArtifactId = string; // `${ArtifactType}\0${string}`
-type Sha1Id = string; // Sha1 hex string
-
-// -- artifact types --
-interface ArtifactMap {
-  /* An asset (serve with "#sitegen/asset" */
-  asset: Asset;
-  /* The bundled text of a '.client.ts' script */
-  // TODO: track imports this has into `asset`
-  script: string;
-  /* The bundled style tag contents. Keyed by 'css.styleKey' */
-  style: string;
-  /* Metadata about a static page */
-  pageMetadata: PageMetadata;
-  /* Metadata about a dynamic view */
-  viewMetadata: ViewMetadata;
-  /* Cached '.marko' server compilation */
-  serverMarko: hot.MarkoCacheEntry;
-  /* Backend source code, pre-replacement. Keyed by platform type. */
-  backendBundle: BackendBundle;
-  /* One file in the backend receives post-processing. */
-  backendReplace: Buffer;
-}
-type ArtifactKind = keyof ArtifactMap;
-/* Automatic path tracing is performed to make it so that
- * specifying 'sources: [file]' refers to it and everything it imports.
- * These kinds do not have that behavior
- */
-const exactDependencyKinds = ["serverMarko"];
-export interface Asset {
-  buffer: Buffer;
-  headers: Record<string, string | undefined>;
-  hash: string;
-}
-/**
- * This interface intentionally omits the *contents*
- * of its scripts and styles for fine-grained rebuilds.
- */
-export interface PageMetadata {
-  html: string;
-  meta: string;
-  cssImports: string[];
-  theme: css.Theme;
-  clientRefs: string[];
-}
-/**
- * Like a page, this intentionally omits resources,
- * but additionally omits the bundled server code.
- */
-export interface ViewMetadata {
-  file: string;
-  // staticMeta: string | null; TODO
-  cssImports: string[];
-  theme: css.Theme;
-  clientRefs: string[];
-  hasLayout: boolean;
-}
-export interface BackendBundle {
-  magicWord: string;
-  fileWithMagicWord: string | null;
-  files: Record<string, Buffer>;
-}
-
-// -- incremental support types --
-export interface PutBase {
-  sources: SourceId[];
+export interface Ref<T> extends Promise<T> {
   key: string;
 }
-export interface Put<T extends ArtifactKind> extends PutBase {
-  kind: T;
-  value: ArtifactMap[T];
-}
-export interface Invalidations {
-  lastModified: number;
-  outputs: Set<ArtifactId>;
-  files: Set<SourceId>;
-}

-export class Incremental {
-  /** The generated artifacts */
-  out: {
-    [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
-  } = {
-    asset: new Map(),
-    script: new Map(),
-    style: new Map(),
-    pageMetadata: new Map(),
-    viewMetadata: new Map(),
-    serverMarko: new Map(),
-    backendBundle: new Map(),
-    backendReplace: new Map(),
+/**
+ * Declare and begin a unit of work. Return value is memoized and
+ * only re-run when inputs (via `Io`) change. Outputs are written
+ * at the end of a compilation (see `compile`).
+ */
+export function work<O>(job: (io: Io) => Promise<O>): Ref<O>;
+export function work<I, O>(job:(io: Io, input: I) => Promise<O>, input: I): Ref<O>;
+export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I = null as I): Ref<O> {
+  const keySource = [
+    JSON.stringify(util.getCallSites(2)[1]),
+    util.inspect(input),
+  ];
+  const key = crypto
+    .createHash("sha1")
+    .update(keySource.join(":"))
+    .digest("base64url");
+  ASSERT(
+    !seenWorks.has(key),
+    `Key '${key}' must be unique during the build.` +
+      `To fix this, provide a manual 'key' argument.`,
+  );
+  seenWorks.add(key);
+
+  const prev = works.get(key) as Work<O> | null;
+  if (prev) {
+    const promise = Promise.resolve(prev.value) as Ref<O>;
+    promise.key = key;
+    return promise;
   };
-  /** Tracking filesystem entries to `srcId` */
-  invals = new Map<SourceId, Invalidations>();
-  /** Tracking output keys to files */
-  sources = new Map<ArtifactId, SourceId[]>();
-
-  /** Compressed resources */
-  compress = new Map<Sha1Id, Compressed>();
-  compressQueue = new Queue<CompressJob, void>({
-    name: "Compress",
-    maxJobs: 5,
-    fn: this.compressImpl.bind(this),
-    passive: true,
-    getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
-  });
+
+  const io = new Io();
+  const promise = job(io, input).then((value) => {
+    const { needs, writes } = io;
+
+    // Apply the deltas to the graph
+    applyDiff(key, files, [], needs.files);
+    applyDiff(key, works, [], needs.works);
+
+    validateSerializable(value, "");
+
+    works.set(key, {
+      value,
+      affects: [],
+      needs,
+      writes
+    });
+  }) as Ref<O>;
+  promise.key = key;
+  return promise;
+}
+export async function compile<T>(compiler: () => Promise<T>) {
+  ASSERT(!running, `Cannot run twice`);
+  running = true;
+  try {
+    const value = await compiler();
+    seenWorks.clear();
+    ASSERT(!queue.active, `Queue was still running`);
+    await queue.done();
+    return { value };
+  } finally {
+    running = false;
+  }
+}
+
+export function forceInvalidate(file: string) {
+  const resolved = toAbs(file);
+  const key = toRel(resolved);
+  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
+}
+
+export function forceInvalidateEntry(entry: { affects: string[] }) {
+  const queue = [...entry.affects];
+  let key;
+  while ((key = queue.shift())) {
+    const { needs, affects } = UNWRAP(works.get(key));
+    applyDiff(key, files, needs.files, []);
+    applyDiff(key, works, needs.works, []);
+    works.delete(key);
+    queue.push(...affects);
+  }
+}
+
+export function reset() {
+  ASSERT(!running);
+  works.clear();
+  files.clear();
+  assets.clear();
+}
+
+export function serialize() {
+  // Aiming for a compact JSON format.
+  const fileEntries = Array.from(files, ([k, v]) => [
+    k,
+    "lastModified" in v ? "f" : "d",
+    "lastModified" in v ? v.lastModified : v.contentHash,
+    ...v.affects,
+  ]);
+  const workEntries = Array.from(works, ([k, v]) => [k, v.value, ...v.affects]);
+  return JSON.stringify({
+    file: fileEntries,
+    work: workEntries,
+  });
+}
+export function serializeToDisk(file = ".clover/incr.state") {
+  fs.writeMkdirSync(file, serialize());
+}

-  /** Reset at the end of each update */
-  round = {
-    inserted: new Set<ArtifactId>(),
-    referenced: new Set<ArtifactId>(),
-  };

+/* Input/Output with automatic tracking.
+ * - Inputs read with Io are tracked to know when to rebuild
+ * - Outputs written with Io are deleted when abandoned.
+ */
+export class Io {
+  needs: Needs = {
+    files: new Set(),
+    works: new Set(),
+  };
+  writes: Writes = {
+    files: new Map(),
+    assets: new Map(),
+  };

-  getArtifact<T extends ArtifactKind>(kind: T, key: string) {
-    this.round.referenced.add(`${kind}\0${key}`);
-    return this.out[kind].get(key);
+  #trackFs(file: string) {
+    const resolved = toAbs(file);
+    const key = toRel(resolved);
+    this.needs.files.add(key);
+    return { resolved, key };
   }

-  hasArtifact(kind: ArtifactKind, key: string) {
-    return this.getArtifact(kind, key) != null;
+  readWork<T>(ref: Ref<T>): Promise<T> {
+    this.needs.works.add(ref.key);
+    return ref;
   }

-  sourcesFor(kind: ArtifactKind, key: string) {
-    return UNWRAP(
-      this.sources.get(kind + "\0" + key),
-      `No artifact '${kind}' '${key}'`,
-    );
-  }
-
-  shake() {
-    const toPublic = (str: string) => {
-      const [kind, key] = str.split("\0");
-      return { kind: kind as ArtifactKind, key };
-    };
-    const inserted = Array.from(this.round.inserted, toPublic);
-    const referenced = Array.from(this.round.referenced, toPublic);
-    const unreferenced: { kind: ArtifactKind; key: string }[] = [];
-
-    for (const kind in this.out) {
-      const map = this.out[kind as keyof typeof this.out];
-      if (!map) continue;
-      for (const key of map.keys()) {
-        if (!this.round.referenced.has(`${kind}\0${key}`)) {
-          unreferenced.push({ kind: kind as ArtifactKind, key });
-          // this.out[kind as ArtifactKind].delete(key);
-        }
-      }
-    }
-
-    this.round.inserted.clear();
-    this.round.referenced.clear();
-
-    return { inserted, referenced, unreferenced };
-  }
-
-  /*
-   * Put built artifacts into the incremental cache. The entry points
-   * used to build this must be provided. 'Incremental' will trace JS
-   * imports and file modification times tracked by 'hot.ts'.
-   */
-  put<T extends ArtifactKind>({
-    sources,
-    kind,
-    key,
-    value,
-  }: Put<T>) {
-    // These three invariants affect incremental accuracy.
-    if (this.round.inserted.has(`${kind}\0${key}`)) {
-      console.error(
-        `Artifact ${kind}:${key} was inserted multiple times in the same round!`,
-      );
-    } else if (!this.round.referenced.has(`${kind}\0${key}`)) {
-      console.error(
-        `Artifact ${kind}:${key} was inserted without checking if (!hasArtifact())`,
-      );
-    } else if (this.out[kind].has(key)) {
-      console.error(
-        `Artifact ${kind}:${key} is not stale, but overwritten.`,
-      );
-    }
-
-    this.out[kind].set(key, value);
-
-    this.round.inserted.add(`${kind}\0${key}`);
-
-    // Update sources information
-    ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key);
-    sources = sources.map((src) => path.normalize(src));
-    const fullKey = `${kind}\0${key}`;
-    const prevSources = this.sources.get(fullKey);
-    const newSources = new Set(
-      sources.map((file) =>
-        path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
-      ),
-    );
-    this.sources.set(fullKey, [...newSources]);
-    for (const source of prevSources ?? []) {
-      if (sources.includes(source)) {
-        newSources.delete(source);
-        continue;
-      }
-      const invals = UNWRAP(this.invals.get(source));
-      ASSERT(invals.outputs.has(fullKey));
-      invals.outputs.delete(fullKey);
-    }
-    // Use reflection from the plugin system to get imports.
-    for (const source of newSources) {
-      const invals = this.#getOrInitInvals(source);
-      invals.outputs.add(fullKey);
-      this.#followImports(source);
-    }
-  }
-
-  // TODO: this doesnt remove stuff when it disappeary
-  #getOrInitInvals(source: string) {
-    let invals = this.invals.get(source);
-    if (!invals) {
-      const lastModified = hot.getFileStat(source)?.lastModified ??
-        fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
-      this.invals.set(
-        source,
-        invals = {
-          lastModified,
-          files: new Set(),
-          outputs: new Set(),
-        },
-      );
-    }
-    return invals;
-  }
-
-  #followImports(file: string) {
-    const stat = hot.getFileStat(file);
-    if (!stat) return;
-    for (const i of stat.imports) {
-      const invals = this.#getOrInitInvals(i);
-      invals.files.add(file);
-      this.#followImports(i);
-    }
-  }
-
-  async statAllFiles() {
-    for (const file of this.invals.keys()) {
-      try {
-        const mtime = fs.statSync(file).mtimeMs;
-        this.updateStat(file, mtime);
-      } catch (err) {
-      }
-    }
-  }
-
-  updateStat(file: string, newLastModified: number | null) {
-    file = path.relative(hot.projectRoot, file);
-    const stat = this.invals.get(file);
-    ASSERT(stat, "Updated stat on untracked file " + file);
-    const hasUpdate = !newLastModified || stat.lastModified < newLastModified;
-    if (hasUpdate) {
-      // Invalidate
-      console.info(file + " " + (newLastModified ? "updated" : "deleted"));
-      hot.unload(file);
-      const invalidQueue = [file];
-      let currentInvalid;
-      while (currentInvalid = invalidQueue.pop()) {
-        const invalidations = this.invals.get(currentInvalid);
-        ASSERT(
-          invalidations,
-          "No reason to track file '" + currentInvalid +
-            "' if it has no invalidations",
-        );
-        const { files, outputs } = invalidations;
-        for (const out of outputs) {
-          const [kind, artifactKey] = out.split("\0");
-          this.out[kind as ArtifactKind].delete(artifactKey);
-        }
-        invalidQueue.push(...files);
-      }
-    }
-    if (newLastModified) {
-      stat.lastModified = newLastModified;
-    } else {
-      this.invals.delete(file);
-    }
-    return hasUpdate;
-  }
-
-  async putAsset(info: PutAsset) {
-    const { body, headers, key } = info;
-    const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body);
-    const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer))
-      .toString("hex");
-    const value: Asset = {
-      buffer,
-      headers: {
-        "Content-Type": headers?.["Content-Type"] ?? mime.contentTypeFor(key),
-        "ETag": JSON.stringify(hash),
-        ...headers,
-      },
-      hash,
-    };
-    const a = this.put({ ...info, kind: "asset", value });
-    if (!this.compress.has(hash)) {
-      const label = info.key;
-      this.compress.set(hash, {
-        zstd: undefined,
-        gzip: undefined,
-      });
-      this.compressQueue.add({ label, buffer, algo: "zstd", hash });
-      this.compressQueue.add({ label, buffer, algo: "gzip", hash });
-    }
-    return a;
-  }
-
-  async compressImpl({ algo, buffer, hash }: CompressJob) {
-    let out;
-    if (algo === "zstd") out = await zstd(buffer);
-    else if (algo === "gzip") out = await gzip(buffer, { level: 9 });
-    else algo satisfies never;
-
-    let entry = this.compress.get(hash);
-    if (!entry) {
-      this.compress.set(
-        hash,
-        entry = {
-          zstd: undefined,
-          gzip: undefined,
-        },
-      );
-    }
-    entry![algo] = out;
-  }
-
-  serialize() {
-    const writer = new BufferWriter();
-
-    // -- artifact --
-    const asset = Array.from(
-      this.out.asset,
-      ([key, { buffer, hash, headers }]) => {
-        const raw = writer.write(buffer, hash);
-        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
-        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
-        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
-        return [key, {
-          raw,
-          gzip,
-          zstd,
-          hash,
-          headers: headers as Record<string, string>,
-        }] satisfies SerializedMeta["asset"][0];
-      },
-    );
-    const script = Array.from(this.out.script);
-    const style = Array.from(this.out.style);
-    const pageMetadata = Array.from(this.out.pageMetadata);
-    const viewMetadata = Array.from(this.out.viewMetadata);
-    const serverMarko = Array.from(this.out.serverMarko);
-    const backendBundle = Array.from(this.out.backendBundle, ([k, v]) => {
-      return [k, {
-        magicWord: v.magicWord,
-        fileWithMagicWord: v.fileWithMagicWord,
-        files: Object.entries(v.files).map(
-          ([file, contents]) => [
-            file,
-            writer.write(contents, "backendBundle" + k + ":" + file),
-          ],
-        ),
-      }] satisfies SerializedMeta["backendBundle"][0];
-    });
-    const backendReplace = Array.from(
-      this.out.backendReplace,
-      ([k, v]) =>
-        [
-          k,
-          writer.write(v, "backendReplace" + k),
-        ] satisfies SerializedMeta["backendReplace"][0],
-    );
-    // -- incremental metadata --
-    const invals = Array.from(this.invals, ([key, value]) => {
-      const { lastModified, files, outputs } = value;
-      return [key, {
-        m: lastModified,
-        f: [...files],
-        o: [...outputs],
-      }] satisfies SerializedMeta["invals"][0];
-    });
-    const sources = Array.from(this.sources, ([key, value]) => {
-      return [key, ...value] as [string, ...string[]];
-    });
-    const json = {
-      asset,
-      script,
-      invals,
-      sources,
-      style,
-      pageMetadata,
-      viewMetadata,
-      serverMarko,
-      backendBundle,
-      backendReplace,
-    } satisfies SerializedMeta;
-    const meta = Buffer.from(JSON.stringify(json), "utf-8");
-
-    const lengthBuffer = Buffer.alloc(4);
-    lengthBuffer.writeUInt32LE(meta.byteLength, 0);
-
-    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
-  }
-
-  static fromSerialized(buffer: Buffer): Incremental {
-    const metaLength = buffer.readUint32LE(0);
-    const meta: SerializedMeta = JSON.parse(
-      buffer.subarray(4, 4 + metaLength).toString("utf8"),
-    );
-    const view = ([start, end]: View) =>
-      buffer.subarray(4 + metaLength + start, 4 + metaLength + end);
-
-    const incr = new Incremental();
-    incr.out = {
-      asset: new Map(meta.asset.map(([key, value]) => {
-        const { hash, raw, gzip, zstd, headers } = value;
-        if ((gzip || zstd) && !incr.compress.has(hash)) {
-          incr.compress.set(hash, {
-            gzip: gzip ? view(gzip) : undefined,
-            zstd: zstd ? view(zstd) : undefined,
-          });
-        }
-        return [key, {
-          buffer: view(raw),
-          headers: headers,
-          hash: hash,
-        }];
-      })),
-      script: new Map(meta.script),
-      style: new Map(meta.style),
-      pageMetadata: new Map(meta.pageMetadata),
-      viewMetadata: new Map(meta.viewMetadata),
-      serverMarko: new Map(meta.serverMarko),
-      backendBundle: new Map(meta.backendBundle.map(([key, value]) => {
-        return [key, {
-          magicWord: value.magicWord,
-          fileWithMagicWord: value.fileWithMagicWord,
-          files: Object.fromEntries(
-            value.files.map(([file, contents]) => [file, view(contents)]),
-          ),
-        }];
-      })),
-      backendReplace: new Map(
-        meta.backendReplace.map(([key, contents]) => [key, view(contents)]),
-      ),
-    };
-    incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
-      return [key, {
-        lastModified: m,
-        files: new Set(f),
-        outputs: new Set(o),
-      }];
-    }));
-    incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
-    return incr;
-  }
-
-  /*
-   * Move the cached (server) marko transpilations from this incremental
-   * into the running process.
-   */
-  loadMarkoCache() {
-    hot.markoCache.clear();
-    for (const [key, value] of this.out.serverMarko) {
-      hot.markoCache.set(path.resolve(hot.projectRoot, key), value);
-    }
-  }
-
-  /*
-   * Move the cached (server) marko transpilations from this incremental
-   * into the running process.
-   */
-  snapshotMarkoCache() {
-    for (const [file, value] of hot.markoCache) {
-      const key = path.relative(hot.projectRoot, file).replaceAll("\\", "/");
-      // Only insert if it doesn't exist. Calling 'put' when it
-      // already exists would inform the user of extra calls to put.
-      if (!this.hasArtifact("serverMarko", key)) {
-        this.put({
-          kind: "serverMarko",
-          sources: [file],
-          key,
-          value,
-        });
-      }
-    }
-  }
-
-  toDisk(file = ".clover/incr.state") {
-    const buffer = this.serialize();
-    fs.writeFileSync(file, buffer);
-  }
-
-  static fromDisk(file = ".clover/incr.state"): Incremental {
-    try {
-      const buffer = fs.readFileSync(file);
-      return Incremental.fromSerialized(buffer);
-    } catch (err: any) {
-      if (err.code === "ENOENT") return new Incremental();
-      throw err;
-    }
-  }
-
-  async wait() {
-    await this.compressQueue.done({ method: "success" });
-  }
-
-  async flush(
-    platform: bundle.ServerPlatform,
-    dir = path.resolve(".clover/out"),
-  ) {
-    ASSERT(!this.compressQueue.active);
-    const join = (...args: string[]) => path.join(dir, ...args);
-    const writer = new BufferWriter();
-
-    // TODO: ensure all compressed got compressed
-
-    const asset = Object.fromEntries(
-      Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
-        const raw = writer.write(buffer, hash);
-        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
-        const gzip = writer.write(UNWRAP(gzipBuf), hash + ".gz");
-        const zstd = writer.write(UNWRAP(zstdBuf), hash + ".zstd");
-        return [key, { raw, gzip, zstd, headers }];
-      }),
-    );
-    const backendBundle = UNWRAP(this.out.backendBundle.get(platform));
-
-    // Arrange output files
-    const outFiles: Array<[file: string, contents: string | Buffer]> = [
-      // Asset manifest
-      ["static.json", JSON.stringify(asset)],
-      ["static.blob", writer.get()],
-
-      // Backend
-      ...Object.entries(backendBundle.files).map(([subPath, contents]) =>
-        [
-          subPath,
-          subPath === backendBundle.fileWithMagicWord
-            ? UNWRAP(this.out.backendReplace.get(platform))
-            : contents,
-        ] as [string, Buffer]
-      ),
-    ];
-
-    // TODO: check duplicates
-
-    // Perform all i/o
-    await Promise.all(
-      outFiles.map(([subPath, contents]) =>
-        fs.writeMkdir(join(subPath), contents, { flush: true })
-      ),
-    );
-  }
+  /** Track a file in the compilation without reading it. */
+  async trackFile(file: string) {
+    const { key, resolved } = this.#trackFs(file);
+    if (!files.get(key)) {
+      let lastModified: number = 0;
+      try {
+        lastModified = (await fs.stat(file)).mtimeMs;
+      } catch {}
+      files.set(key, {
+        affects: [],
+        lastModified,
+      });
+    }
+    return resolved;
+  }
+
+  async readFile(file: string) {
+    return fs.readFile(await this.trackFile(file), "utf-8");
+  }
+  async readDir(dir: string) {
+    const { key, resolved } = this.#trackFs(dir);
+    let result: string[] = [];
+    try {
+      result = await fs.readdir(resolved);
+      return result;
+    } finally {
+      const contentHash = crypto
+        .createHash("sha1")
+        .update(result.join("\0"))
+        .digest("base64url");
+      files.set(key, {
+        affects: [],
+        contentHash,
+      });
+    }
+  }
+  async readDirRecursive(dir: string): Promise<string[]> {
+    const dirs = await this.readDir(dir);
+    return (
+      await Promise.all(
+        dirs.map(async (child) => {
+          const abs = path.join(dir, child);
+          const stat = await fs.stat(abs);
+          if (stat.isDirectory()) {
+            return (await this.readDirRecursive(abs)).map((grand) =>
+              path.join(child, grand)
+            );
+          } else {
+            return child;
+          }
+        }),
+      )
+    ).flat();
+  }
+
+  /* Track all dependencies of a module. */
+  async import<T>(file: string): Promise<T> {
+    const { resolved } = this.#trackFs(file);
+    try {
+      return require(resolved) as T;
+    } finally {
+      const queue = [resolved];
+      const seen = new Set<string>();
+      let current;
+      while ((current = queue.shift())) {
+        const stat = hot.getFileStat(resolved);
+        if (!stat) continue;
+        const { key } = this.#trackFs(current);
+        if (!files.get(key)) {
+          files.set(key, {
+            affects: [],
+            lastModified: stat?.lastModified ?? 0,
+          });
+        }
+        for (const imp of stat.imports) {
+          if (!seen.has(imp)) {
+            seen.add(imp);
+            queue.push(imp);
+          }
+        }
+      }
+    }
+  }
+
+  writeAsset(pathname: string, blob: string | Buffer, headersOption?: HeadersInit) {
+    ASSERT(pathname.startsWith("/"));
+    const headers = new Headers(headersOption ?? {});
+    const hash = crypto.createHash('sha1').update(blob).digest('hex');
+    if (!headers.has("Content-Type")) {
+      headers.set("Content-Type", mime.contentTypeFor(pathname));
+    }
+    headers.set("ETag", JSON.stringify(hash));
+    ASSERT(!this.writes.assets.has(pathname));
+    this.writes.assets.set(pathname, {
+      hash,
+      // @ts-expect-error TODO
+      headers: Object.fromEntries(headers)
+    });
+  }
+
+  writeFile(subPath: string, blob: string | Buffer) {
+    ASSERT(!this.writes.assets.has(subPath));
+    this.writes.files.set(subPath, Buffer.isBuffer(blob) ? blob : Buffer.from(blob));
+  }
 }

-export interface PutAsset extends PutBase {
-  body: string | Buffer;
-  headers?: Record<string, string | undefined>;
-}
-
-export interface Compressed {
-  gzip?: Buffer;
-  zstd?: Buffer;
-}
-
-export interface CompressJob {
-  algo: "zstd" | "gzip";
-  buffer: Buffer;
-  label: string;
-  hash: string;
-}
-
-class BufferWriter {
-  size = 0;
-  seen = new Map<string, View>();
-  buffers: Buffer[] = [];
-
-  write(buffer: Buffer, hash: string): View {
-    let view = this.seen.get(hash);
-    if (view) return view;
-    view = [this.size, this.size += buffer.byteLength];
-    this.seen.set(hash, view);
-    this.buffers.push(buffer);
-    return view;
-  }
-
-  get() {
-    return Buffer.concat(this.buffers);
-  }
-}
-
-export type View = [start: number, end: number];
+function applyDiff(
+  key: string,
+  list: Map<string, { affects: string[] }>,
+  beforeIter: Iterable<string>,
+  afterIter: Iterable<string>,
+) {
+  const before = Array.from(beforeIter);
+  const after = Array.from(afterIter);
+  for (const add of after.filter((x) => !before.includes(x))) {
+    const { affects } = UNWRAP(list.get(add));
+    ASSERT(!affects.includes(key));
+    affects.push(key);
+  }
+  for (const remove of before.filter((x) => !after.includes(x))) {
+    const { affects } = UNWRAP(list.get(remove));
+    ASSERT(affects.includes(key));
+    affects.splice(affects.indexOf(key), 1);
+  }
+}
+
+export function validateSerializable(value: unknown, key: string) {
+  if (typeof value === "string") {
+    if (value.includes(hot.projectRoot)) {
+      throw new Error(
+        `Return value must not contain the CWD for portability, found at ${key}`,
+      );
+    }
+  } else if (value && typeof value === "object") {
+    if (Array.isArray(value)) {
+      value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
+    } else if (Object.getPrototypeOf(value) === Object.prototype) {
+      Object.entries(value).forEach(([k, v]) =>
+        validateSerializable(v, `${key}.${k}`)
+      );
+    } else {
+      throw new Error(
+        `Return value must be a plain JS object, found ${
+          Object.getPrototypeOf(value).constructor.name
+        } at ${key}`,
+      );
+    }
+  } else if (["bigint", "function", "symbol"].includes(typeof value)) {
+    throw new Error(
+      `Return value must be a plain JS object, found ${typeof value} at ${key}`,
+    );
+  }
+}
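Editor's note: some concrete cases of what validateSerializable accepts and rejects, derived directly from the checks above (the values are illustrative):

validateSerializable({ html: "<p>hi</p>", refs: ["a.ts"] }, ""); // ok: plain JSON-like data
validateSerializable(hot.projectRoot + "/src/a.ts", ""); // throws: embeds the project root
validateSerializable(new Map(), ""); // throws: not a plain object
validateSerializable(() => {}, ""); // throws: function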
|
||||
export function toAbs(absPath: string) {
|
||||
return path.resolve(hot.projectRoot, absPath);
|
||||
}
|
||||
|
||||
export function toRel(absPath: string) {
|
||||
return path.relative(hot.projectRoot, absPath).replaceAll("\\", "/");
|
||||
}

type BufferView = [start: number, end: number];
type File = TrackedFile | TrackedDir;
interface Needs {
  files: Set<string>;
  works: Set<string>;
}
interface Writes {
  files: Map<string, Buffer>;
  assets: Map<string, {
    hash: string;
    headers: Record<string, string>;
  }>;
}
interface Asset {
  raw: Buffer;
  gzip: Buffer;
  zstd: Buffer;
  refs: number;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
  writes: Writes;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}
export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}

export interface BuiltAsset {
  raw: View;
  gzip: View;
  zstd: View;
  raw: BufferView;
  gzip: BufferView;
  zstd: BufferView;
  headers: Record<string, string>;
}

export interface SerializedMeta {
  asset: Array<[route: string, data: {
    raw: View;
    gzip: View | null;
    zstd: View | null;
    hash: string;
    headers: Record<string, string>;
  }]>;
  script: Array<[key: string, value: string]>;
  style: Array<[key: string, value: string]>;
  pageMetadata: Array<[key: string, PageMetadata]>;
  viewMetadata: Array<[key: string, ViewMetadata]>;
  serverMarko: Array<[key: string, hot.MarkoCacheEntry]>;
  backendBundle: Array<[platform: string, {
    magicWord: string;
    fileWithMagicWord: string | null;
    files: Array<[string, View]>;
  }]>;
  backendReplace: Array<[key: string, View]>;

  invals: Array<[key: string, {
    /** Modified */
    m: number;
    f: SourceId[];
    o: ArtifactId[];
  }]>;
  sources: Array<[string, ...string[]]>;
}

const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);

import * as fs from "#sitegen/fs";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "#sitegen/async";
import * as hot from "./hot.ts";
import * as mime from "#sitegen/mime";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import * as css from "./css.ts";
import type * as bundle from "./bundle.ts";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import type { View } from "#sitegen/view";
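
// A View/BufferView pair addresses a byte range inside one concatenated
// buffer, letting the serialized state pack many assets into a single blob.
// A minimal sketch of reading one back (names assumed, not from this diff):
//
//   function readView(blob: Buffer, [start, end]: View): Buffer {
//     return blob.subarray(start, end);
//   }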
@ -1,264 +0,0 @@

// Incremental compilation framework built on a singleton function
// `work(label, inputs, io => promise)`. By using the I/O interface
// to pull input, dependencies are tracked for you, including pesky
// error conditions. This way, the file watching system always recovers.
let running = false;
let seenWorks = new Set<string>();
let works = new Map<string, Work>();
let files = new Map<string, File>();
let queue = new async.Queue({
  name: "sitegen!",
  fn: (
    item: { label: string; run: (spin: Spinner) => Promise<unknown> },
    spin,
  ) => item.run(spin),
  passive: true,
  getItemText: (item) => item.label,
  maxJobs: navigator.hardwareConcurrency,
});

interface Job<T> {
  label: string;
  wait?: Ref | null | (Ref | null)[];
  key?: unknown;
  cores?: number;
  run: (io: Io) => Promise<T>;
}

export function work<T>(job: Job<T>): Promise<T> {
  const key = crypto.createHash("sha1").update([
    JSON.stringify(util.getCallSites(2)[1]),
    util.inspect(job.key),
  ].join(":")).digest("base64url");
  ASSERT(!seenWorks.has(key), `Key '${key}' must be unique during the build.`);
  // Claim the key so a duplicate call site trips the assertion above.
  seenWorks.add(key);

  // Decide if the cached work is OK
  const prev = works.get(key) as Work<T>;
  if (prev?.value) return Promise.resolve(prev.value);

  const promise = Promise.all([job.wait].flat()).then(() =>
    queue.addReturn({
      label: job.label,
      run: async (spin) => {
        // Perform the work
        const io = new Io(spin);
        const value = await job.run(io);
        const { needs } = io;

        // Apply the deltas to the graph
        applyDiff(key, files, prev?.needs?.files ?? [], needs.files);
        applyDiff(key, works, prev?.needs?.works ?? [], needs.works);

        works.set(key, {
          value,
          affects: prev?.affects ?? [],
          needs,
        });

        return value;
      },
    })
  );

  return promise as Promise<T>;
}
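
// A sketch of typical usage, assuming it runs inside compile(): reading the
// file through `io` records the dependency, so editing file.txt later
// invalidates exactly this work item and everything it affects.
//
//   const upper = await work({
//     label: "uppercase a file",
//     run: async (io) => (await io.readFile("file.txt")).toUpperCase(),
//   });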

function applyDiff(
  key: string,
  list: Map<string, { affects: string[] }>,
  beforeIter: Iterable<string>,
  afterIter: Iterable<string>,
) {
  const before = Array.from(beforeIter);
  const after = Array.from(afterIter);
  for (const add of after.filter((x) => !before.includes(x))) {
    const { affects } = UNWRAP(list.get(add));
    ASSERT(!affects.includes(key));
    affects.push(key);
  }
  for (const remove of before.filter((x) => !after.includes(x))) {
    const { affects } = UNWRAP(list.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
}

export async function compile<T>(compiler: () => Promise<Ref<T>>) {
  ASSERT(!running, `Cannot run twice`);
  running = true;
  try {
    const ref = await compiler();
    await ref.wait;
    seenWorks.clear();
    ASSERT(!queue.active);
    await queue.done();
    return {
      value: UNWRAP(works.get(ref.key), `Key '${ref.key}' did not finish`)
        .value as T,
    };
  } finally {
    running = false;
  }
}

export function forceInvalidate(file: string) {
  const resolved = path.resolve(hot.projectSrc, file);
  const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
}

export function forceInvalidateEntry(entry: { affects: string[] }) {
  const queue = [...entry.affects];
  let key;
  while (key = queue.shift()) {
    const { needs, affects } = UNWRAP(works.get(key));
    applyDiff(key, files, needs.files, []);
    applyDiff(key, works, needs.works, []);
    works.delete(key);
    queue.push(...affects);
  }
}
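
// Invalidation walks the `affects` edges transitively: discarding one tracked
// file's downstream work means the next compile() re-runs only that subgraph.
// For example (path hypothetical):
//
//   forceInvalidate("pages/index.marko");
//   // every work item that read pages/index.marko, and anything that
//   // depended on those items, is deleted from `works` and rebuilt later.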

export function reset() {
  ASSERT(!running);
  works.clear();
  files.clear();
}

export function serialize() {
  // Aiming for a compact JSON format.
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    "lastModified" in v ? "f" : "d",
    "lastModified" in v ? v.lastModified : v.contentHash,
    ...v.affects,
  ]);
  const workEntries = Array.from(works, ([k, v]) => [
    k,
    v.value,
    ...v.affects,
  ]);
  return devalue.uneval({
    file: fileEntries,
    work: workEntries,
  });
}

export function serializeToDisk(file = ".clover/incr.state") {
  fs.writeMkdirSync(file, serialize());
}
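
// No matching loader is shown in this file. Since devalue.uneval emits a
// JavaScript expression, a minimal sketch (assuming the state file is
// trusted) could rebuild the two maps like so; note that `needs` is not
// serialized, so only the `affects` edges survive a reload:
//
//   function deserialize(source: string) {
//     const state = (0, eval)(`(${source})`);
//     for (const [k, kind, data, ...affects] of state.file) {
//       files.set(k, kind === "f"
//         ? { lastModified: data, affects }
//         : { contentHash: data, affects });
//     }
//     for (const [k, value, ...affects] of state.work) {
//       works.set(k, {
//         value,
//         affects,
//         needs: { files: new Set(), works: new Set() },
//       });
//     }
//   }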

/* Input/Output with automatic tracking. */
export class Io {
  needs: Needs = {
    files: new Set(),
    works: new Set(),
  };
  constructor(public spin: Spinner) {}

  #trackFs(file: string) {
    const resolved = path.resolve(hot.projectSrc, file);
    const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
    this.needs.files.add(key);
    return { resolved, key };
  }
  async trackFile(file: string) {
    const { key, resolved } = this.#trackFs(file);
    if (!files.get(key)) {
      let lastModified: number = 0;
      try {
        lastModified = (await fs.stat(file)).mtimeMs;
      } catch {}
      files.set(key, {
        affects: [],
        lastModified,
      });
    }
    return resolved;
  }
  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }
  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    let result: string[] = [];
    try {
      result = await fs.readdir(resolved);
      return result;
    } finally {
      const contentHash = crypto.createHash("sha1").update(result.join("\0"))
        .digest("base64url");
      files.set(key, {
        affects: [],
        contentHash,
      });
    }
  }
  async readDirRecursive(dir: string): Promise<string[]> {
    const dirs = await this.readDir(dir);
    return (await Promise.all(dirs.map(async (child) => {
      const abs = path.join(dir, child);
      const stat = await fs.stat(abs);
      if (stat.isDirectory()) {
        return (await this.readDirRecursive(abs)).map((grand) =>
          path.join(child, grand)
        );
      } else {
        return child;
      }
    }))).flat();
  }
  async import<T>(file: string): Promise<T> {
    const { resolved } = this.#trackFs(file);
    try {
      return require(resolved) as T;
    } finally {
      // Track the transitive import graph of the loaded module.
      const queue = [resolved];
      const seen = new Set<string>();
      let current;
      while (current = queue.shift()) {
        const stat = hot.getFileStat(current);
        if (!stat) continue;
        const { key } = this.#trackFs(current);
        if (!files.get(key)) {
          files.set(key, {
            affects: [],
            lastModified: stat.lastModified ?? 0,
          });
        }
        for (const imp of stat.imports) {
          if (!seen.has(imp)) {
            seen.add(imp);
            queue.push(imp);
          }
        }
      }
    }
  }
}

type File = TrackedFile | TrackedDir;
interface Needs {
  files: Set<string>;
  works: Set<string>;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import * as devalue from "devalue";

44 framework/marko.ts Normal file

@ -0,0 +1,44 @@
export interface MarkoCacheEntry {
  src: string;
  scannedClientRefs: string[];
}

export const markoCache = new Map<string, MarkoCacheEntry>();

export function loadMarko(module: NodeJS.Module, filepath: string) {
  let cache = markoCache.get(filepath);
  if (!cache) {
    let src = fs.readFileSync(filepath, "utf8");
    // A non-standard thing here is that Clover Sitegen implements
    // its own client-side scripting stuff, so it overrides
    // bare client import statements to its own usage.
    const scannedClientRefs = new Set<string>();
    if (src.match(/^\s*client\s+import\s+["']/m)) {
      src = src.replace(
        /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
        (_, src) => {
          const ref = JSON.parse(`"${src.slice(1, -1)}"`);
          const resolved = hot.resolveClientRef(filepath, ref);
          scannedClientRefs.add(resolved);
          return `<CloverScriptInclude=${
            JSON.stringify(hot.getScriptId(resolved))
          } />`;
        },
      ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
    }

    src = marko.compileSync(src, filepath).code;
    src = src.replace("marko/debug/html", "#ssr/marko");
    cache = { src, scannedClientRefs: Array.from(scannedClientRefs) };
    markoCache.set(filepath, cache);
  }

  const { src, scannedClientRefs } = cache;
  return hot.loadEsbuildCode(module, filepath, src, {
    scannedClientRefs,
  });
}

import * as marko from "@marko/compiler";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
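
// Roughly, the rewrite above turns the non-standard `client import` statement
// into a rendered script include (file names hypothetical):
//
//   // input.marko
//   client import "./counter.client.ts"
//   <button>+1</button>
//
//   // after the replace, before Marko compilation:
//   <CloverScriptInclude="<script id of counter.client.ts>" />
//   <button>+1</button>
//   import { addScript as CloverScriptInclude } from "#sitegen";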

@ -17,9 +17,9 @@ export async function main() {
  const start = performance.now();
  const timerSpinner = new Spinner({
    text: () =>
      `paper clover's scan3 [${
        ((performance.now() - start) / 1000).toFixed(1)
      }s]`,
      `paper clover's scan3 [${((performance.now() - start) / 1000).toFixed(
        1,
      )}s]`,
    fps: 10,
  });
  using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };

@ -38,20 +38,23 @@ export async function main() {
      qList.addMany(items.map((subPath) => path.join(absPath, subPath)));

      if (mediaFile) {
        const deleted = mediaFile.getChildren()
        const deleted = mediaFile
          .getChildren()
          .filter((child) => !items.includes(child.basename))
          .flatMap((child) =>
            child.kind === MediaFileKind.directory
              ? child.getRecursiveFileChildren()
              : child
              : child,
          );

        qMeta.addMany(deleted.map((mediaFile) => ({
          absPath: path.join(root, mediaFile.path),
          publicPath: mediaFile.path,
          stat: null,
          mediaFile,
        })));
        qMeta.addMany(
          deleted.map((mediaFile) => ({
            absPath: path.join(root, mediaFile.path),
            publicPath: mediaFile.path,
            stat: null,
            mediaFile,
          })),
        );
      }

      return;

@ -96,13 +99,13 @@ export async function main() {
      if (
        mediaFile &&
        mediaFile.date.getTime() < stat.mtime.getTime() &&
        (Date.now() - stat.mtime.getTime()) < monthMilliseconds
        Date.now() - stat.mtime.getTime() < monthMilliseconds
      ) {
        date = mediaFile.date;
        console.warn(
          `M-time on ${publicPath} was likely corrupted. ${
            formatDate(mediaFile.date)
          } -> ${formatDate(stat.mtime)}`,
          `M-time on ${publicPath} was likely corrupted. ${formatDate(
            mediaFile.date,
          )} -> ${formatDate(stat.mtime)}`,
        );
      }
      mediaFile = MediaFile.createFile({

@ -129,7 +132,10 @@ export async function main() {
        await processor.run({ absPath, stat, mediaFile, spin });
        mediaFile.setProcessed(mediaFile.processed | (1 << (16 + index)));
        for (const dependantJob of after) {
          ASSERT(dependantJob.needs > 0, `dependantJob.needs > 0, ${dependantJob.needs}`);
          ASSERT(
            dependantJob.needs > 0,
            `dependantJob.needs > 0, ${dependantJob.needs}`,
          );
          dependantJob.needs -= 1;
          if (dependantJob.needs == 0) qProcess.add(dependantJob);
        }

@ -149,25 +155,27 @@ export async function main() {
    }));
  }

  async function queueProcessors(
    { absPath, stat, mediaFile }: Omit<ProcessFileArgs, "spin">,
  ) {
  async function queueProcessors({
    absPath,
    stat,
    mediaFile,
  }: Omit<ProcessFileArgs, "spin">) {
    const ext = mediaFile.extensionNonEmpty.toLowerCase();
    let possible = processors.filter((p) =>
      p.include ? p.include.has(ext) : !p.exclude?.has(ext)
      p.include ? p.include.has(ext) : !p.exclude?.has(ext),
    );
    if (possible.length === 0) return;

    const hash = possible.reduce((a, b) => a ^ b.hash, 0) | 1;
    ASSERT(hash <= 0xFFFF, `${hash.toString(16)} has no bits above 16 set`);
    ASSERT(hash <= 0xffff, `${hash.toString(16)} has no bits above 16 set`);
    let processed = mediaFile.processed;

    // If the hash has changed, migrate the bitfield over.
    // This also runs when the processor hash is in its initial 0 state.
    const order = decodeProcessors(mediaFile.processors);
    if ((processed & 0xFFFF) !== hash) {
      const previous = order.filter((_, i) =>
        (processed & (1 << (16 + i))) !== 0
    if ((processed & 0xffff) !== hash) {
      const previous = order.filter(
        (_, i) => (processed & (1 << (16 + i))) !== 0,
      );
      processed = hash;
      for (const { id, hash } of previous) {

@ -182,13 +190,13 @@ export async function main() {
    }
    mediaFile.setProcessors(
      processed,
      possible.map((p) =>
        p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xFF)
      ).join(";"),
      possible
        .map((p) => p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xff))
        .join(";"),
    );
  } else {
    possible = order.map(({ id }) =>
      UNWRAP(possible.find((p) => p.id === id))
      UNWRAP(possible.find((p) => p.id === id)),
    );
  }
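
// Layout of `mediaFile.processed`, as used above: the low 16 bits store the
// XOR-combined hash of the applicable processors (forced odd via `| 1` so it
// can never match the initial 0 state), and bit 16 + i marks processor i in
// the stored order as complete. A sketch of the two reads:
//
//   const storedHash = (processed: number) => processed & 0xffff;
//   const hasRun = (processed: number, i: number) =>
//     (processed & (1 << (16 + i))) !== 0;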

@ -225,8 +233,9 @@ export async function main() {

  async function runUndoProcessors(mediaFile: MediaFile) {
    const { processed } = mediaFile;
    const previous = decodeProcessors(mediaFile.processors)
      .filter((_, i) => (processed & (1 << (16 + i))) !== 0);
    const previous = decodeProcessors(mediaFile.processors).filter(
      (_, i) => (processed & (1 << (16 + i))) !== 0,
    );
    for (const { id } of previous) {
      const p = processors.find((p) => p.id === id);
      if (!p) continue;

@ -244,22 +253,23 @@ export async function main() {
  await qProcess.done();

  // Update directory metadata
  const dirs = MediaFile.getDirectoriesToReindex()
    .sort((a, b) => b.path.length - a.path.length);
  const dirs = MediaFile.getDirectoriesToReindex().sort(
    (a, b) => b.path.length - a.path.length,
  );
  for (const dir of dirs) {
    const children = dir.getChildren();

    // readme.txt
    const readmeContent = children.find((x) =>
      x.basename === "readme.txt"
    )?.contents ?? "";
    const readmeContent =
      children.find((x) => x.basename === "readme.txt")?.contents ?? "";

    // dirsort
    let dirsort: string[] | null = null;
    const dirSortRaw =
      children.find((x) => x.basename === ".dirsort")?.contents ?? "";
    if (dirSortRaw) {
      dirsort = dirSortRaw.split("\n")
      dirsort = dirSortRaw
        .split("\n")
        .map((x) => x.trim())
        .filter(Boolean);
    }

@ -284,7 +294,8 @@ export async function main() {
      }
    }

    const dirHash = crypto.createHash("sha1")
    const dirHash = crypto
      .createHash("sha1")
      .update(dir.path + allHashes)
      .digest("hex");

@ -323,19 +334,21 @@ export async function main() {

  console.info(
    "Updated file viewer index in \x1b[1m" +
      ((performance.now() - start) / 1000).toFixed(1) + "s\x1b[0m",
      ((performance.now() - start) / 1000).toFixed(1) +
      "s\x1b[0m",
  );

  MediaFile.db.prepare("VACUUM").run();
  const { duration, count } = MediaFile.db.prepare<
    [],
    { count: number; duration: number }
  >(`
  const { duration, count } = MediaFile.db
    .prepare<[], { count: number; duration: number }>(
      `
    select
      count(*) as count,
      sum(duration) as duration
    from media_files
  `).getNonNull();
  `,
    )
    .getNonNull();

  console.info();
  console.info(

@ -365,7 +378,7 @@ const execFile: typeof execFileRaw = ((
) =>
  execFileRaw(...args).catch((e: any) => {
    if (e?.message?.startsWith?.("Command failed")) {
      if (e.code > (2 ** 31)) e.code |= 0;
      if (e.code > 2 ** 31) e.code |= 0;
      const code = e.signal ? `signal ${e.signal}` : `code ${e.code}`;
      e.message = `${e.cmd.split(" ")[0]} failed with ${code}`;
    }

@ -374,11 +387,7 @@ const execFile: typeof execFileRaw = ((
const ffprobeBin = testProgram("ffprobe", "--help");
const ffmpegBin = testProgram("ffmpeg", "--help");

const ffmpegOptions = [
  "-hide_banner",
  "-loglevel",
  "warning",
];
const ffmpegOptions = ["-hide_banner", "-loglevel", "warning"];

const procDuration: Process = {
  name: "calculate duration",

@ -496,27 +505,23 @@ const procImageSubsets: Process = {
    for (const size of targetSizes) {
      const { w, h } = resizeDimensions(width, height, size);
      for (const { ext, args } of transcodeRules.imagePresets) {
        spin.text = baseStatus +
          ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`;
        spin.text = baseStatus + ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`;

        stack.use(
          await produceAsset(
            `${mediaFile.hash}/${size}${ext}`,
            async (out) => {
              await fs.mkdir(path.dirname(out));
              await fs.rm(out, { force: true });
              await execFile(ffmpegBin!, [
                ...ffmpegOptions,
                "-i",
                absPath,
                "-vf",
                `scale=${w}:${h}:force_original_aspect_ratio=increase,crop=${w}:${h}`,
                ...args,
                out,
              ]);
              return [out];
            },
          ),
          await produceAsset(`${mediaFile.hash}/${size}${ext}`, async (out) => {
            await fs.mkdir(path.dirname(out));
            await fs.rm(out, { force: true });
            await execFile(ffmpegBin!, [
              ...ffmpegOptions,
              "-i",
              absPath,
              "-vf",
              `scale=${w}:${h}:force_original_aspect_ratio=increase,crop=${w}:${h}`,
              ...args,
              out,
            ]);
            return [out];
          }),
        );
      }
    }
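
// The -vf chain above is a "cover" resize: scale preserves aspect ratio, but
// force_original_aspect_ratio=increase guarantees both dimensions reach at
// least w x h, and crop then trims the overflow to exactly w x h. For
// example, a 4000x3000 source at a 256x256 target scales to roughly 341x256
// before the crop brings the width down to 256.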

@ -561,21 +566,17 @@ const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({
    if (config.encoder && typeof config.encoder.videoSrc === "string") {
      const { videoSrc, audioSrc, rate } = config.encoder;
      inputArgs = [
        ...rate ? ["-r", String(rate)] : [],
        ...(rate ? ["-r", String(rate)] : []),
        "-i",
        videoSrc,
        ...audioSrc ? ["-i", audioSrc] : [],
        ...(audioSrc ? ["-i", audioSrc] : []),
      ];
    }
  } catch (err: any) {
    if (err?.code !== "ENOENT") throw err;
  }

  const args = transcodeRules.getVideoArgs(
    preset,
    base,
    inputArgs,
  );
  const args = transcodeRules.getVideoArgs(preset, base, inputArgs);
  try {
    const fakeProgress = new Progress({ text: spin.text, spinner: null });
    fakeProgress.stop();

@ -612,22 +613,25 @@ const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({
const procCompression = [
  { name: "gzip", fn: () => zlib.createGzip({ level: 9 }) },
  { name: "zstd", fn: () => zlib.createZstdCompress() },
].map(({ name, fn }) => ({
  name: `compress ${name}`,
  exclude: rules.extsPreCompressed,
  async run({ absPath, mediaFile }) {
    if ((mediaFile.size ?? 0) < 10) return;
    await produceAsset(`${mediaFile.hash}/${name}`, async (base) => {
      fs.mkdirSync(path.dirname(base));
      await stream.promises.pipeline(
        fs.createReadStream(absPath),
        fn(),
        fs.createWriteStream(base),
      );
      return [base];
    });
  },
} satisfies Process as Process));
].map(
  ({ name, fn }) =>
    ({
      name: `compress ${name}`,
      exclude: rules.extsPreCompressed,
      async run({ absPath, mediaFile }) {
        if ((mediaFile.size ?? 0) < 10) return;
        await produceAsset(`${mediaFile.hash}/${name}`, async (base) => {
          fs.mkdirSync(path.dirname(base));
          await stream.promises.pipeline(
            fs.createReadStream(absPath),
            fn(),
            fs.createWriteStream(base),
          );
          return [base];
        });
      },
    }) satisfies Process as Process,
);

const processors = [
  procDimensions,

@ -637,30 +641,29 @@ const processors = [
  procImageSubsets,
  ...procVideos,
  ...procCompression,
]
  .map((process, id, all) => {
    const strIndex = (id: number) =>
      String.fromCharCode("a".charCodeAt(0) + id);
    return {
      ...process as Process,
      id: strIndex(id),
      // Create a unique key.
      hash: new Uint16Array(
        crypto.createHash("sha1")
          .update(
            process.run.toString() +
              (process.version ? String(process.version) : ""),
          )
          .digest().buffer,
      ).reduce((a, b) => a ^ b),
      depends: (process.depends ?? []).map((depend) => {
        const index = all.findIndex((p) => p.name === depend);
        if (index === -1) throw new Error(`Cannot find depend '${depend}'`);
        if (index === id) throw new Error(`Cannot depend on self: '${depend}'`);
        return strIndex(index);
      }),
    };
  });
].map((process, id, all) => {
  const strIndex = (id: number) => String.fromCharCode("a".charCodeAt(0) + id);
  return {
    ...(process as Process),
    id: strIndex(id),
    // Create a unique key.
    hash: new Uint16Array(
      crypto
        .createHash("sha1")
        .update(
          process.run.toString() +
            (process.version ? String(process.version) : ""),
        )
        .digest().buffer,
    ).reduce((a, b) => a ^ b),
    depends: (process.depends ?? []).map((depend) => {
      const index = all.findIndex((p) => p.name === depend);
      if (index === -1) throw new Error(`Cannot find depend '${depend}'`);
      if (index === id) throw new Error(`Cannot depend on self: '${depend}'`);
      return strIndex(index);
    }),
  };
});
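
// decodeProcessors is not shown in this diff; given the encoding above (a
// one-letter id followed by a two-character big-endian hash, joined by ";"),
// a plausible sketch of the inverse, not the repo's actual implementation:
//
//   function decodeProcessors(encoded: string) {
//     return encoded
//       .split(";")
//       .filter(Boolean)
//       .map((s) => ({
//         id: s[0],
//         hash: (s.charCodeAt(1) << 8) | s.charCodeAt(2),
//       }));
//   }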

function resizeDimensions(w: number, h: number, desiredWidth: number) {
  ASSERT(desiredWidth < w, `${desiredWidth} < ${w}`);

@ -676,10 +679,7 @@ async function produceAsset(
  if (asset.refs === 1) {
    const paths = await builder(path.join(workDir, key));
    asset.addFiles(
      paths.map((file) =>
        path.relative(workDir, file)
          .replaceAll("\\", "/")
      ),
      paths.map((file) => path.relative(workDir, file).replaceAll("\\", "/")),
    );
  }
  return {

@ -719,7 +719,7 @@ interface ProcessJob {
  absPath: string;
  stat: fs.Stats;
  mediaFile: MediaFile;
  processor: typeof processors[0];
  processor: (typeof processors)[0];
  index: number;
  after: ProcessJob[];
  needs: number;