// Incremental compilation framework
//
// Module-level state forms the dependency graph: `files` and `works`
// carry `affects` edges pointing at the works that consume them.
let running = false;
let seenWorks = new Set<string>();
let works = new Map<string, Work>();
let files = new Map<string, File>();
let assets = new Map<string, Asset>();

export interface Ref<T> extends Promise<T> {
  key: string;
}

/**
 * Declare and begin a unit of work. Return value is memoized and
 * only re-run when inputs (via `Io`) change. Outputs are written
 * at the end of a compilation (see `compile`).
 */
export function work<O>(job: (io: Io) => Promise<O>): Ref<O>;
export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I): Ref<O>;
export function work<I, O>(job: (io: Io, input: I) => Promise<O>, input: I = null as I): Ref<O> {
  const keySource = [
    JSON.stringify(util.getCallSites(2)[1]),
    util.inspect(input),
  ];
  const key = crypto
    .createHash("sha1")
    .update(keySource.join(":"))
    .digest("base64url");
  ASSERT(
    !seenWorks.has(key),
    `Key '${key}' must be unique during the build. ` +
      `To fix this, provide a manual 'key' argument.`,
  );
  seenWorks.add(key);

  const prev = works.get(key) as Work<O> | undefined;
  if (prev) {
    const promise = Promise.resolve(prev.value) as Ref<O>;
    promise.key = key;
    return promise;
  }

  const io = new Io();
  const promise = job(io, input).then((value) => {
    const { needs, writes } = io;

    // Apply the deltas to the graph
    applyDiff(key, files, [], needs.files);
    applyDiff(key, works, [], needs.works);

    validateSerializable(value, "");

    works.set(key, {
      value,
      affects: [],
      needs,
      writes,
    });

    // Resolve the outer `Ref` with the computed value; without this
    // it would resolve to `undefined` on the first run.
    return value;
  }) as Ref<O>;
  promise.key = key;
  return promise;
}

export async function compile<T>(compiler: () => Promise<T>) {
  ASSERT(!running, `Cannot run compile() concurrently`);
  running = true;
  try {
    const value = await compiler();
    seenWorks.clear();
    ASSERT(!queue.active, `Queue was still running`);
    await queue.done();
    return { value };
  } finally {
    running = false;
  }
}
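
// Usage sketch: a hypothetical build script wiring `work` and `compile`
// together (the "posts" directory and the filtering are illustrative,
// not part of this module):
//
//   const { value } = await compile(async () => {
//     const posts = work(async (io) => {
//       const entries = await io.readDirRecursive("posts");
//       return entries.filter((name) => name.endsWith(".md"));
//     });
//     // On a later run with unchanged inputs, the memoized value is
//     // returned without re-running the job.
//     return (await posts).length;
//   });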

export function forceInvalidate(file: string) {
  const resolved = toAbs(file);
  const key = toRel(resolved);
  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
}

export function forceInvalidateEntry(entry: { affects: string[] }) {
  const queue = [...entry.affects];
  let key;
  while ((key = queue.shift())) {
    // A work can be queued more than once when several invalidated
    // entries affect it; skip keys that were already deleted.
    const work = works.get(key);
    if (!work) continue;
    const { needs, affects } = work;
    applyDiff(key, files, needs.files, []);
    applyDiff(key, works, needs.works, []);
    works.delete(key);
    queue.push(...affects);
  }
}
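
// Sketch of wiring invalidation to a file watcher (the `watcher` and
// `rebuild` names are assumed, not part of this module):
//
//   watcher.on("change", (file) => {
//     forceInvalidate(file); // drop every work downstream of `file`
//     rebuild();             // then re-run the compile entry point
//   });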

export function reset() {
  ASSERT(!running);
  works.clear();
  files.clear();
  assets.clear();
}

export function serialize() {
  // Aiming for a compact JSON format.
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    "lastModified" in v ? "f" : "d",
    "lastModified" in v ? v.lastModified : v.contentHash,
    ...v.affects,
  ]);
  const workEntries = Array.from(works, ([k, v]) => [k, v.value, ...v.affects]);
  return JSON.stringify({
    file: fileEntries,
    work: workEntries,
  });
}

export function serializeToDisk(file = ".clover/incr.state") {
  fs.writeMkdirSync(file, serialize());
}
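
// The resulting shape, sketched with illustrative values: files are tagged
// "f" (tracked file, mtime) or "d" (tracked dir, content hash), and each
// entry inlines its `affects` edges so the graph can be rebuilt directly.
//
//   {
//     "file": [["posts/a.md", "f", 1700000000000, "<workKey>"], ...],
//     "work": [["<workKey>", <value>, "<downstreamWorkKey>"], ...]
//   }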

/* Input/Output with automatic tracking.
 * - Inputs read with Io are tracked to know when to rebuild.
 * - Outputs written with Io are deleted when abandoned.
 */
export class Io {
  needs: Needs = {
    files: new Set(),
    works: new Set(),
  };
  writes: Writes = {
    files: new Map(),
    assets: new Map(),
  };

  #trackFs(file: string) {
    const resolved = toAbs(file);
    const key = toRel(resolved);
    this.needs.files.add(key);
    return { resolved, key };
  }

  readWork<T>(ref: Ref<T>): Promise<T> {
    this.needs.works.add(ref.key);
    return ref;
  }

  /** Track a file in the compilation without reading it. */
  async trackFile(file: string) {
    const { key, resolved } = this.#trackFs(file);
    if (!files.has(key)) {
      let lastModified: number = 0;
      try {
        // Stat the resolved path; the raw `file` argument may be
        // relative to the project root rather than the CWD.
        lastModified = (await fs.stat(resolved)).mtimeMs;
      } catch {}
      files.set(key, {
        affects: [],
        lastModified,
      });
    }
    return resolved;
  }

  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }

  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    let result: string[] = [];
    try {
      result = await fs.readdir(resolved);
      return result;
    } finally {
      const contentHash = crypto
        .createHash("sha1")
        .update(result.join("\0"))
        .digest("base64url");
      // Preserve existing `affects` edges rather than clobbering them.
      files.set(key, {
        affects: files.get(key)?.affects ?? [],
        contentHash,
      });
    }
  }

  async readDirRecursive(dir: string): Promise<string[]> {
    const dirs = await this.readDir(dir);
    return (
      await Promise.all(
        dirs.map(async (child) => {
          const abs = path.join(dir, child);
          const stat = await fs.stat(abs);
          if (stat.isDirectory()) {
            return (await this.readDirRecursive(abs)).map((grand) =>
              path.join(child, grand)
            );
          } else {
            return child;
          }
        }),
      )
    ).flat();
  }

  /** Import a module and track it along with its transitive imports. */
  async import<T>(file: string): Promise<T> {
    const { resolved } = this.#trackFs(file);
    try {
      return require(resolved) as T;
    } finally {
      // Walk the import graph breadth-first, tracking every module file.
      const queue = [resolved];
      const seen = new Set<string>();
      let current;
      while ((current = queue.shift())) {
        // Stat the module currently being visited, not the entry point.
        const stat = hot.getFileStat(current);
        if (!stat) continue;
        const { key } = this.#trackFs(current);
        if (!files.has(key)) {
          files.set(key, {
            affects: [],
            lastModified: stat.lastModified ?? 0,
          });
        }
        for (const imp of stat.imports) {
          if (!seen.has(imp)) {
            seen.add(imp);
            queue.push(imp);
          }
        }
      }
    }
  }

  writeAsset(
    pathname: string,
    blob: string | Buffer,
    headersOption?: HeadersInit,
  ) {
    ASSERT(pathname.startsWith("/"));
    const headers = new Headers(headersOption ?? {});
    const hash = crypto.createHash("sha1").update(blob).digest("hex");
    if (!headers.has("Content-Type")) {
      headers.set("Content-Type", mime.contentTypeFor(pathname));
    }
    headers.set("ETag", JSON.stringify(hash));
    ASSERT(!this.writes.assets.has(pathname));
    this.writes.assets.set(pathname, {
      hash,
      // @ts-expect-error TODO
      headers: Object.fromEntries(headers),
    });
  }

  writeFile(subPath: string, blob: string | Buffer) {
    // Guard against duplicate writes to the same output path.
    ASSERT(!this.writes.files.has(subPath));
    this.writes.files.set(
      subPath,
      Buffer.isBuffer(blob) ? blob : Buffer.from(blob),
    );
  }
}
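
// Sketch of dependent works chained through `readWork`, which records the
// edge so that invalidating `config` also invalidates `page`. `renderHtml`
// is a stand-in for real page logic, not part of this module:
//
//   const config = work(async (io) =>
//     JSON.parse(await io.readFile("config.json"))
//   );
//   const page = work(async (io) => {
//     const cfg = await io.readWork(config);
//     io.writeAsset("/index.html", renderHtml(cfg));
//     return { ok: true };
//   });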

/**
 * Reconcile reverse-dependency (`affects`) edges after a work re-runs:
 * entries newly present in `afterIter` gain an edge back to `key`, and
 * entries no longer present lose theirs.
 */
function applyDiff(
  key: string,
  list: Map<string, { affects: string[] }>,
  beforeIter: Iterable<string>,
  afterIter: Iterable<string>,
) {
  const before = Array.from(beforeIter);
  const after = Array.from(afterIter);
  for (const add of after.filter((x) => !before.includes(x))) {
    const { affects } = UNWRAP(list.get(add));
    ASSERT(!affects.includes(key));
    affects.push(key);
  }
  for (const remove of before.filter((x) => !after.includes(x))) {
    const { affects } = UNWRAP(list.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
}
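
// For example, if a re-run of work "w" stops reading a.md and starts
// reading b.md, the call
//
//   applyDiff("w", files, ["a.md"], ["b.md"]);
//
// removes "w" from a.md's `affects` list and appends it to b.md's.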

export function validateSerializable(value: unknown, key: string) {
  if (typeof value === "string") {
    if (value.includes(hot.projectRoot)) {
      throw new Error(
        `Return value must not contain the CWD for portability, found at ${key}`,
      );
    }
  } else if (value && typeof value === "object") {
    if (Array.isArray(value)) {
      value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
    } else if (Object.getPrototypeOf(value) === Object.prototype) {
      Object.entries(value).forEach(([k, v]) =>
        validateSerializable(v, `${key}.${k}`)
      );
    } else {
      throw new Error(
        `Return value must be a plain JS object, found ${
          Object.getPrototypeOf(value)?.constructor?.name ?? "null prototype"
        } at ${key}`,
      );
    }
  } else if (["bigint", "function", "symbol"].includes(typeof value)) {
    throw new Error(
      `Return value must be a plain JS object, found ${typeof value} at ${key}`,
    );
  }
}
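
// Examples (sketch): plain JSON-style data passes; class instances,
// functions, and strings containing the project root throw.
//
//   validateSerializable({ posts: ["a.md"], count: 1 }, ""); // ok
//   validateSerializable(new Date(), ""); // throws: found Date at ""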

/** Resolve a path (possibly relative to the project root) to an absolute one. */
export function toAbs(filePath: string) {
  return path.resolve(hot.projectRoot, filePath);
}

export function toRel(absPath: string) {
  return path.relative(hot.projectRoot, absPath).replaceAll("\\", "/");
}

type BufferView = [start: number, end: number];
type File = TrackedFile | TrackedDir;

interface Needs {
  files: Set<string>;
  works: Set<string>;
}
interface Writes {
  files: Map<string, Buffer>;
  assets: Map<string, {
    hash: string;
    headers: Record<string, string>;
  }>;
}
interface Asset {
  raw: Buffer;
  gzip: Buffer;
  zstd: Buffer;
  refs: number;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
  writes: Writes;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}
export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}
export interface BuiltAsset {
  raw: BufferView;
  gzip: BufferView;
  zstd: BufferView;
  headers: Record<string, string>;
}

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import * as mime from "#sitegen/mime";
import type { View } from "#sitegen/view";