// Incremental compilation framework built on a singleton function
// `work({ label, wait, run: io => promise })`. By using the I/O interface
// to pull input, dependencies are tracked for you, including pesky
// error conditions. This way, the file watching system always recovers.
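//
// Illustrative usage (the file name here is hypothetical, not from this
// project):
//
//   const posts = await work({
//     label: "scan posts",
//     run: (io) => io.readDirRecursive("pages/posts"),
//   });
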
let running = false;
let seenWorks = new Set<string>();
let works = new Map<string, Work>();
let files = new Map<string, File>();
let queue = new async.Queue({
  name: "sitegen!",
  fn: (
    item: { label: string; run: (spin: Spinner) => Promise<unknown> },
    spin,
  ) => item.run(spin),
  passive: true,
  getItemText: (item) => item.label,
  maxJobs: navigator.hardwareConcurrency,
});

interface Job<T> {
  label: string;
  wait?: Ref | null | (Ref | null)[];
  key?: unknown;
  cores?: number;
  run: (io: Io) => Promise<T>;
}

export function work<T>(job: Job<T>): Promise<T> {
  // Key the work by its callsite plus the user-provided `job.key`.
  const key = crypto.createHash("sha1").update([
    JSON.stringify(util.getCallSites(2)[1]),
    util.inspect(job.key),
  ].join(":")).digest("base64url");
  ASSERT(!seenWorks.has(key), `Key '${key}' must be unique during the build.`);
  seenWorks.add(key);

  // Decide if the cached work is OK
  const prev = works.get(key) as Work<T> | undefined;
  if (prev?.value) return Promise.resolve(prev.value);

  const promise = Promise.all([job.wait].flat()).then(() =>
    queue.addReturn({
      label: job.label,
      run: async (spin) => {
        // Perform the work
        const io = new Io(spin);
        const value = await job.run(io);
        const { needs } = io;

        // Apply the deltas to the graph
        applyDiff(key, files, prev?.needs?.files ?? [], needs.files);
        applyDiff(key, works, prev?.needs?.works ?? [], needs.works);

        works.set(key, {
          value,
          affects: prev?.affects ?? [],
          needs,
        });

        return value;
      },
    })
  );

  return promise as Promise<T>;
}
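
// Maintains the reverse dependency edges: for each entry that `key` gained
// or lost in its `needs`, add or remove `key` from that entry's `affects`
// list.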
function applyDiff(
  key: string,
  list: Map<string, { affects: string[] }>,
  beforeIter: Iterable<string>,
  afterIter: Iterable<string>,
) {
  const before = Array.from(beforeIter);
  const after = Array.from(afterIter);
  for (const add of after.filter((x) => !before.includes(x))) {
    const { affects } = UNWRAP(list.get(add));
    ASSERT(!affects.includes(key));
    affects.push(key);
  }
  for (const remove of before.filter((x) => !after.includes(x))) {
    const { affects } = UNWRAP(list.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
}

export async function compile<T>(compiler: () => Promise<Ref<T>>) {
  ASSERT(!running, `Cannot run twice`);
  running = true;
  try {
    const ref = await compiler();
    await ref.wait;
    seenWorks.clear();
    ASSERT(!queue.active);
    await queue.done();
    return {
      value: UNWRAP(works.get(ref.key), `Key '${ref.key}' did not finish`)
        .value as T,
    };
  } finally {
    running = false;
  }
}

export function forceInvalidate(file: string) {
  const resolved = path.resolve(hot.projectSrc, file);
  const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
}
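
// Cascading invalidation: deletes every work entry that transitively
// depends on this file or work, detaching each one from the graph as it
// is removed.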
export function forceInvalidateEntry(entry: { affects: string[] }) {
  const pending = [...entry.affects];
  let key;
  while (key = pending.shift()) {
    // A key can be reachable through more than one dependency path, so
    // skip entries that an earlier iteration already deleted.
    const entryWork = works.get(key);
    if (!entryWork) continue;
    const { needs, affects } = entryWork;
    applyDiff(key, files, needs.files, []);
    applyDiff(key, works, needs.works, []);
    works.delete(key);
    pending.push(...affects);
  }
}

export function reset() {
  ASSERT(!running);
  works.clear();
  files.clear();
  // Also drop the per-build uniqueness set, so a build that failed midway
  // does not trip the key assertion on the next run.
  seenWorks.clear();
}

export function serialize() {
  // Aiming for a compact JSON format.
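  // Illustrative row shapes (derived from the code below):
  //   file: [key, "f", lastModified, ...affects]
  //   dir:  [key, "d", contentHash, ...affects]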
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    "lastModified" in v ? "f" : "d",
    "lastModified" in v ? v.lastModified : v.contentHash,
    ...v.affects,
  ]);
  const workEntries = Array.from(works, ([k, v]) => [
    k,
    v.value,
    ...v.affects,
  ]);
  return devalue.uneval({
    file: fileEntries,
    work: workEntries,
  });
}

export function serializeToDisk(file = ".clover/incr.state") {
  fs.writeMkdirSync(file, serialize());
}

/* Input/Output with automatic tracking. */
export class Io {
  needs: Needs = {
    files: new Set(),
    works: new Set(),
  };
  constructor(public spin: Spinner) {}

  #trackFs(file: string) {
    const resolved = path.resolve(hot.projectSrc, file);
    const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
    this.needs.files.add(key);
    return { resolved, key };
  }
  async trackFile(file: string) {
    const { key, resolved } = this.#trackFs(file);
    if (!files.has(key)) {
      let lastModified: number = 0;
      try {
        lastModified = (await fs.stat(resolved)).mtimeMs;
      } catch {}
      files.set(key, {
        affects: [],
        lastModified,
      });
    }
    return resolved;
  }
  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }
  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    let result: string[] = [];
    try {
      result = await fs.readdir(resolved);
      return result;
    } finally {
      // Hash the listing even when `readdir` throws, so the error
      // condition itself is tracked and recovered from.
      const contentHash = crypto.createHash("sha1").update(result.join("\0"))
        .digest("base64url");
      files.set(key, {
        affects: [],
        contentHash,
      });
    }
  }
  async readDirRecursive(dir: string): Promise<string[]> {
    const entries = await this.readDir(dir);
    return (await Promise.all(entries.map(async (child) => {
      const abs = path.join(dir, child);
      const stat = await fs.stat(abs);
      if (stat.isDirectory()) {
        return (await this.readDirRecursive(abs)).map((grand) =>
          path.join(child, grand)
        );
      } else {
        return child;
      }
    }))).flat();
  }
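
  // Importing a module tracks not just the entry file but its entire
  // transitive import graph (as recorded by hot.ts), so a change to any
  // imported file invalidates the dependent work.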
  async import<T>(file: string): Promise<T> {
    const { resolved } = this.#trackFs(file);
    try {
      return require(resolved) as T;
    } finally {
      const pending = [resolved];
      const seen = new Set<string>();
      let current;
      while (current = pending.shift()) {
        const stat = hot.getFileStat(current);
        if (!stat) continue;
        const { key } = this.#trackFs(current);
        if (!files.has(key)) {
          files.set(key, {
            affects: [],
            lastModified: stat.lastModified ?? 0,
          });
        }
        for (const imp of stat.imports) {
          if (!seen.has(imp)) {
            seen.add(imp);
            pending.push(imp);
          }
        }
      }
    }
  }
}

type File = TrackedFile | TrackedDir;
interface Needs {
  files: Set<string>;
  works: Set<string>;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}
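
// `Ref`, `ASSERT`, and `UNWRAP` are provided by the surrounding project
// (globals / sibling modules), not this file; from their use in `work`
// and `compile`, a `Ref<T>` is assumed to pair a work's `key` with a
// `wait` promise.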

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import * as devalue from "devalue";