rewrite incremental #21
8 changed files with 596 additions and 25 deletions

@@ -6,12 +6,7 @@ globalThis.UNWRAP = (t, ...args) => {
   }
   return t;
 };
-globalThis.ASSERT = (t, ...args) => {
-  if (!t) {
-    throw new Error(
-      args.length > 0 ? util.format(...args) : "Assertion Failed",
-    );
-  }
-};
+globalThis.ASSERT = assert.ok;
 
 import * as util from "node:util";
+import * as assert from 'node:assert'

framework/definitions.d.ts (vendored, 2 changes)

@@ -1,4 +1,4 @@
 declare function UNWRAP<T>(value: T | null | undefined, ...log: unknown[]): T;
-declare function ASSERT(value: unknown, ...log: unknown[]): asserts value;
+declare function ASSERT(value: unknown, message?: string): asserts value;
 
 type Timer = ReturnType<typeof setTimeout>;
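
Note: since `ASSERT` now delegates to `assert.ok`, the declaration drops the `util.format`-style rest arguments, so call sites that passed several values need one preformatted message string. A hypothetical call site (not taken from this PR) showing the shape of the change:

    import * as assert from "node:assert";

    const port = Number(process.env.PORT ?? 3000);
    // old style: ASSERT(Number.isFinite(port), "bad port %o", process.env.PORT)
    // new style: a single message, formatted up front
    assert.ok(Number.isFinite(port), `bad port: ${process.env.PORT}`);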

@@ -46,20 +46,7 @@ export async function sitegen(
   const sections: sg.Section[] =
     require(path.join(root, "site.ts")).siteSections;
 
-  // Static files are compressed and served as-is.
-  // - "{section}/static/*.png"
-  let staticFiles: FileItem[] = [];
-  // Pages are rendered then served as static files.
-  // - "{section}/pages/*.marko"
-  let pages: FileItem[] = [];
-  // Views are dynamically rendered pages called via backend code.
-  // - "{section}/views/*.tsx"
-  let views: FileItem[] = [];
-  // Public scripts are bundled for the client as static assets under "/js/[...]"
-  // This is used for the file viewer's canvases.
-  // Note that '.client.ts' can be placed anywhere in the file structure.
-  // - "{section}/scripts/*.client.ts"
-  let scripts: FileItem[] = [];
 
   // -- Scan for files --
   status.text = "Scanning Project";

framework/generate2.ts (new file, 259 lines)

@@ -0,0 +1,259 @@
export async function main() {
  // const startTime = performance.now();

  // -- readdir to find all site files --
  const siteConfig = await incr.work({
    label: "reading manifest",
    run: (io) => io.import<{ siteSections: sg.Section[] }>("site.ts"),
  });
  const {
    staticFiles,
    scripts,
    views,
    pages,
  } = (await Promise.all(
    siteConfig.siteSections.map(({ root: sectionRoot }) =>
      incr.work({
        key: sectionRoot,
        label: "discovering files in " + sectionRoot,
        run: (io) => scanSiteSection(io, sectionRoot),
      })
    ),
  )).reduce((acc, next) => ({
    staticFiles: acc.staticFiles.concat(next.staticFiles),
    pages: acc.pages.concat(next.pages),
    views: acc.views.concat(next.views),
    scripts: acc.scripts.concat(next.scripts),
  }));

  const globalCssPath = path.join(hot.projectSrc, "global.css");

  // TODO: loadMarkoCache

  const builtPages = pages.map((item) =>
    incr.work({
      label: item.id,
      key: item,
      async run(io) {
        // -- load and validate module --
        let {
          default: Page,
          meta: metadata,
          theme: pageTheme,
          layout,
        } = await io.import<any>(item.file);
        if (!Page) {
          throw new Error("Page is missing a 'default' export.");
        }
        if (!metadata) {
          throw new Error("Page is missing 'meta' export with a title.");
        }

        // -- css --
        if (layout?.theme) pageTheme = layout.theme;
        const theme: css.Theme = {
          ...css.defaultTheme,
          ...pageTheme,
        };
        const cssImports = Array.from(
          new Set([globalCssPath, ...hot.getCssImports(item.file)]),
          (file) => path.relative(hot.projectSrc, file),
        );

        // -- metadata --
        const renderedMetaPromise = Promise.resolve(
          typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
        ).then((m) => meta.renderMeta(m));

        // -- html --
        let page = [engine.kElement, Page, {}];
        if (layout?.default) {
          page = [engine.kElement, layout.default, { children: page }];
        }
        const bodyPromise = engine.ssrAsync(page, {
          sitegen: sg.initRender(),
        });

        const [{ text, addon }, renderedMeta] = await Promise.all([
          bodyPromise,
          renderedMetaPromise,
        ]);
        if (!renderedMeta.includes("<title>")) {
          throw new Error(
            "Page is missing 'meta.title'. " +
              "All pages need a title tag.",
          );
        }
        const styleKey = css.styleKey(cssImports, theme);
        return {
          html: text,
          meta: renderedMeta,
          cssImports,
          theme: theme ?? null,
          styleKey,
          clientRefs: Array.from(addon.sitegen.scripts),
        };
      },
    })
  );

  // const builtViews = views.map((item) =>
  //   incr.work({
  //     label: item.id,
  //     key: item,
  //     async run(io) {
  //       const module = require(item.file);
  //       if (!module.meta) {
  //         throw new Error(`${item.file} is missing 'export const meta'`);
  //       }
  //       if (!module.default) {
  //         throw new Error(`${item.file} is missing a default export.`);
  //       }
  //       const pageTheme = module.layout?.theme ?? module.theme;
  //       const theme: css.Theme = {
  //         ...css.defaultTheme,
  //         ...pageTheme,
  //       };
  //       const cssImports = Array.from(
  //         new Set([globalCssPath, ...hot.getCssImports(item.file)]),
  //         (file) => path.relative(hot.projectSrc, file),
  //       );
  //       const styleKey = css.styleKey(cssImports, theme);
  //       return {
  //         file: path.relative(hot.projectRoot, item.file),
  //         cssImports,
  //         theme,
  //         clientRefs: hot.getClientScriptRefs(item.file),
  //         hasLayout: !!module.layout?.default,
  //         styleKey,
  //       };
  //     },
  //   })
  // );
  //
  // // -- inline style sheets, used and shared by pages and views --
  // const builtCss = Promise.all([...builtViews, ...builtPages]).then((items) => {
  //   const map = new Map<string, {}>();
  //   for (const { styleKey, cssImports, theme } of items) {
  //     if (map.has(styleKey)) continue;
  //     map.set(
  //       styleKey,
  //       incr.work({
  //         label: `bundle css ${styleKey}`,
  //         async run(io) {
  //           await Promise.all(cssImports.map((file) => io.trackFile(file)));
  //           const { text } = await css.bundleCssFiles(cssImports, theme);
  //           return text;
  //         },
  //       }),
  //     );
  //   }
  // });

  // TODO: make sure that `static` and `pages` does not overlap
  await Promise.all(builtPages);
  incr.serializeToDisk();
  // -- bundle server javascript (backend and views) --
}

async function scanSiteSection(io: incr.Io, sectionRoot: string) {
  // Static files are compressed and served as-is.
  // - "{section}/static/*.png"
  let staticFiles: FileItem[] = [];
  // Pages are rendered then served as static files.
  // - "{section}/pages/*.marko"
  let pages: FileItem[] = [];
  // Views are dynamically rendered pages called via backend code.
  // - "{section}/views/*.tsx"
  let views: FileItem[] = [];
  // Public scripts are bundled for the client as static assets under "/js/[...]"
  // This is used for the file viewer's canvases.
  // Note that '.client.ts' can be placed anywhere in the file structure.
  // - "{section}/scripts/*.client.ts"
  let scripts: FileItem[] = [];

  const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
  const rootPrefix = hot.projectSrc === sectionRoot
    ? ""
    : path.relative(hot.projectSrc, sectionRoot) + "/";
  const kinds = [
    {
      dir: sectionPath("pages"),
      list: pages,
      prefix: "/",
      include: [".tsx", ".mdx", ".marko"],
      exclude: [".client.ts", ".client.tsx"],
    },
    {
      dir: sectionPath("static"),
      list: staticFiles,
      prefix: "/",
      ext: true,
    },
    {
      dir: sectionPath("scripts"),
      list: scripts,
      prefix: rootPrefix,
      include: [".client.ts", ".client.tsx"],
    },
    {
      dir: sectionPath("views"),
      list: views,
      prefix: rootPrefix,
      include: [".tsx", ".mdx", ".marko"],
      exclude: [".client.ts", ".client.tsx"],
    },
  ];
  for (const kind of kinds) {
    const {
      dir,
      list,
      prefix,
      include = [""],
      exclude = [],
      ext = false,
    } = kind;

    let items;
    try {
      items = await io.readDirRecursive(dir);
    } catch (err: any) {
      if (err.code === "ENOENT") continue;
      throw err;
    }
    for (const subPath of items) {
      const file = path.join(dir, subPath);
      const stat = fs.statSync(file);
      if (stat.isDirectory()) continue;
      if (!include.some((e) => subPath.endsWith(e))) continue;
      if (exclude.some((e) => subPath.endsWith(e))) continue;
      const trim = ext
        ? subPath
        : subPath.slice(0, -path.extname(subPath).length).replaceAll(
          ".",
          "/",
        );
      let id = prefix + trim.replaceAll("\\", "/");
      if (prefix === "/" && id.endsWith("/index")) {
        id = id.slice(0, -"/index".length) || "/";
      }
      list.push({ id, file: file });
    }
  }

  return { staticFiles, pages, views, scripts };
}

import * as sg from "#sitegen";
import * as incr from "./incremental2.ts";
import { OnceMap, Queue } from "#sitegen/async";
import * as bundle from "./bundle.ts";
import * as css from "./css.ts";
import * as engine from "./engine/ssr.ts";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import type { FileItem } from "#sitegen";
import * as path from "node:path";
import * as meta from "#sitegen/meta";
import { Spinner, withSpinner } from "@paperclover/console/Spinner";
import { wrapDocument } from "./lib/view.ts";
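
Note: for pages (prefix "/"), the id derivation in scanSiteSection strips the extension, turns "." and "\" into "/", and collapses "/index". A hypothetical standalone sketch of that mapping, with made-up file names:

    import * as path from "node:path";

    // Sketch of the page branch only (prefix "/", ext stripped).
    function pageId(subPath: string): string {
      const trim = subPath
        .slice(0, -path.extname(subPath).length)
        .replaceAll(".", "/");
      let id = "/" + trim.replaceAll("\\", "/");
      if (id.endsWith("/index")) id = id.slice(0, -"/index".length) || "/";
      return id;
    }

    console.log(pageId("index.marko"));        // "/"
    console.log(pageId("file.cotyledon.tsx")); // "/file/cotyledon"
    console.log(pageId("nested/page.mdx"));    // "/nested/page"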

@@ -42,8 +42,7 @@ export function getFileStat(filepath: string) {
 }
 
 function shouldTrackPath(filename: string) {
-  return !filename.includes("node_modules") &&
-    !filename.includes(import.meta.dirname);
+  return !filename.includes("node_modules");
 }
 
 const Module = load<typeof import("node:module")>("node:module");

@@ -59,8 +58,8 @@ Module.prototype._compile = function (
     filename,
     format,
   );
-  const stat = fs.statSync(filename);
   if (shouldTrackPath(filename)) {
+    const stat = fs.statSync(filename);
     const cssImportsMaybe: string[] = [];
     const imports: string[] = [];
     for (const { filename: file, cloverClientRefs } of this.children) {
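
Note: the predicate now skips only node_modules, so framework files themselves become tracked dependencies; the stat also moves behind the check so untracked files are not stat'd. A quick sketch of the behavioral change, with hypothetical paths (assuming the framework lives at /repo/framework):

    // new predicate: only node_modules is excluded
    const shouldTrack = (filename: string) => !filename.includes("node_modules");

    shouldTrack("/repo/node_modules/marko/index.js"); // false
    shouldTrack("/repo/framework/incremental2.ts");   // true (previously false)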

framework/incremental.test.ts (new file, 56 lines)

@@ -0,0 +1,56 @@
test("trivial case", async () => {
  incr.reset();

  const file1 = tmpFile("example.txt");
  file1.write("one");

  async function compilation() {
    const first = incr.work({
      label: "first compute",
      async run (io) {
        await setTimeout(1000);
        const contents = await io.readFile(file1.path);
        return [contents, Math.random()] as const;
      }
    });
    const second = incr.work({
      label: "second compute",
      wait: first,
      async run (io) {
        await setTimeout(1000);
        return io.readWork(first)[0].toUpperCase();
      }
    });
    const third = incr.work({
      label: "third compute",
      wait: first,
      async run (io) {
        await setTimeout(1000);
        return io.readWork(first)[1] * 1000;
      }
    });
    return incr.work({
      label: "last compute",
      wait: [second, third],
      async run (io) {
        await setTimeout(1000);
        return {
          second: io.readWork(second),
          third: io.readWork(third),
        }
      }
    });
  }
  const { value: first } = await incr.compile(compilation);
  const { value: second } = await incr.compile(compilation);
  ASSERT(first === second);
  incr.forceInvalidate(file1.path);
  const { value: third } = await incr.compile(compilation);
  ASSERT(first !== third);
  ASSERT(first[0] === third[0]);
});

import * as incr from "./incremental2.ts";
import { beforeEach, test } from "node:test";
import { tmpFile } from "#sitegen/testing";
import { setTimeout } from "node:timers/promises";

framework/incremental2.ts (new file, 264 lines)

@@ -0,0 +1,264 @@
// Incremental compilation framework built on a singleton function
// `work(label, inputs, io => promise)`. By using the I/O interface
// to pull input, dependencies are tracked for you, including pesky
// error conditions. This way, the file watching system always recovers.
let running = false;
let seenWorks = new Set<string>();
let works = new Map<string, Work>();
let files = new Map<string, File>();
let queue = new async.Queue({
  name: "sitegen!",
  fn: (
    item: { label: string; run: (spin: Spinner) => Promise<unknown> },
    spin,
  ) => item.run(spin),
  passive: true,
  getItemText: (item) => item.label,
  maxJobs: navigator.hardwareConcurrency,
});

interface Job<T> {
  label: string;
  wait?: Ref | null | (Ref | null)[];
  key?: unknown;
  cores?: number;
  run: (io: Io) => Promise<T>;
}

export function work<T>(job: Job<T>): Promise<T> {
  const key = crypto.createHash("sha1").update([
    JSON.stringify(util.getCallSites(2)[1]),
    util.inspect(job.key),
  ].join(":")).digest("base64url");
  ASSERT(!seenWorks.has(key), `Key '${key}' must be unique during the build.`);

  // Decide if the cached work is OK
  const prev = works.get(key) as Work<T>;
  if (prev?.value) return Promise.resolve(prev.value);

  const promise = Promise.all([job.wait].flat()).then(() =>
    queue.addReturn({
      label: job.label,
      run: async (spin) => {
        // Perform the work
        const io = new Io(spin);
        const value = await job.run(io);
        const { needs } = io;

        // Apply the deltas to the graph
        applyDiff(key, files, prev?.needs?.files ?? [], needs.files);
        applyDiff(key, works, prev?.needs?.works ?? [], needs.works);

        works.set(key, {
          value,
          affects: prev?.affects ?? [],
          needs,
        });

        return value;
      },
    })
  );

  return promise as Promise<T>;
}

function applyDiff(
  key: string,
  list: Map<string, { affects: string[] }>,
  beforeIter: Iterable<string>,
  afterIter: Iterable<string>,
) {
  const before = Array.from(beforeIter);
  const after = Array.from(afterIter);
  for (const add of after.filter((x) => !before.includes(x))) {
    const { affects } = UNWRAP(list.get(add));
    ASSERT(!affects.includes(key));
    affects.push(key);
  }
  for (const remove of before.filter((x) => !after.includes(x))) {
    const { affects } = UNWRAP(list.get(remove));
    ASSERT(affects.includes(key));
    affects.splice(affects.indexOf(key), 1);
  }
}

export async function compile<T>(compiler: () => Promise<Ref<T>>) {
  ASSERT(!running, `Cannot run twice`);
  running = true;
  try {
    const ref = await compiler();
    await ref.wait;
    seenWorks.clear();
    ASSERT(!queue.active);
    await queue.done();
    return {
      value: UNWRAP(works.get(ref.key), `Key '${ref.key}' did not finish`)
        .value as T,
    };
  } finally {
    running = false;
  }
}

export function forceInvalidate(file: string) {
  const resolved = path.resolve(hot.projectSrc, file);
  const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
}
export function forceInvalidateEntry(entry: { affects: string[] }) {
  const queue = [...entry.affects];
  let key;
  while (key = queue.shift()) {
    const { needs, affects } = UNWRAP(works.get(key));
    applyDiff(key, files, needs.files, []);
    applyDiff(key, works, needs.works, []);
    works.delete(key);
    queue.push(...affects);
  }
}

export function reset() {
  ASSERT(!running);
  works.clear();
  files.clear();
}

export function serialize() {
  // Aiming for a compact JSON format.
  const fileEntries = Array.from(files, ([k, v]) => [
    k,
    'lastModified' in v ? 'f' : 'd',
    'lastModified' in v ? v.lastModified : v.contentHash,
    ...v.affects,
  ]);
  const workEntries = Array.from(works, ([k, v]) => [
    k,
    v.value,
    ...v.affects,
  ]);
  return devalue.uneval({
    file: fileEntries,
    work: workEntries,
  });
}
export function serializeToDisk(file = ".clover/incr.state") {
  fs.writeMkdirSync(file, serialize());
}

/* Input/Output with automatic tracking. */
export class Io {
  needs: Needs = {
    files: new Set(),
    works: new Set(),
  };
  constructor(public spin: Spinner) {}

  #trackFs(file: string) {
    const resolved = path.resolve(hot.projectSrc, file);
    const key = path.relative(hot.projectRoot, resolved).replaceAll("\\", "/");
    this.needs.files.add(key);
    return { resolved, key };
  }
  async trackFile(file: string) {
    const { key, resolved } = this.#trackFs(file);
    if (!files.get(key)) {
      let lastModified: number = 0;
      try {
        lastModified = (await fs.stat(file)).mtimeMs;
      } catch {}
      files.set(key, {
        affects: [],
        lastModified,
      });
    }
    return resolved;
  }
  async readFile(file: string) {
    return fs.readFile(await this.trackFile(file), "utf-8");
  }
  async readDir(dir: string) {
    const { key, resolved } = this.#trackFs(dir);
    let result: string[] = [];
    try {
      result = await fs.readdir(resolved);
      return result;
    } finally {
      const contentHash = crypto.createHash("sha1").update(result.join("\0"))
        .digest("base64url");
      files.set(key, {
        affects: [],
        contentHash,
      });
    }
  }
  async readDirRecursive(dir: string): Promise<string[]> {
    const dirs = await this.readDir(dir);
    return (await Promise.all(dirs.map(async (child) => {
      const abs = path.join(dir, child);
      const stat = await fs.stat(abs);
      if (stat.isDirectory()) {
        return (await this.readDirRecursive(abs)).map((grand) =>
          path.join(child, grand)
        );
      } else {
        return child;
      }
    }))).flat();
  }
  async import<T>(file: string): Promise<T> {
    const { resolved } = this.#trackFs(file);
    try {
      return require(resolved) as T;
    } finally {
      const queue = [resolved];
      const seen = new Set<string>();
      let current;
      while (current = queue.shift()) {
        const stat = hot.getFileStat(resolved);
        if (!stat) continue;
        const { key } = this.#trackFs(current);
        if (!files.get(key)) {
          files.set(key, {
            affects: [],
            lastModified: stat?.lastModified ?? 0,
          });
        }
        for (const imp of stat.imports) {
          if (!seen.has(imp)) {
            seen.add(imp);
            queue.push(imp);
          }
        }
      }
    }
  }
}

type File = TrackedFile | TrackedDir;
interface Needs {
  files: Set<string>;
  works: Set<string>;
}
interface Work<T = unknown> {
  value: T;
  affects: string[];
  needs: Needs;
}
interface TrackedFile {
  lastModified: number;
  affects: string[];
}
interface TrackedDir {
  contentHash: string;
  affects: string[];
}

import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as async from "#sitegen/async";
import type { Spinner } from "@paperclover/console/Spinner";
import * as devalue from 'devalue';
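
Note: the header comment says the file watching system always recovers; a hypothetical watch loop on top of `forceInvalidate` might look like the sketch below. The `fs.watch` wiring, the debounce, and the `rebuild` callback are assumptions for illustration, not part of this PR, and `rebuild` is assumed to guard against overlapping `compile` runs.

    import { watch } from "node:fs";
    import * as incr from "./incremental2.ts";

    export function watchAndRebuild(dir: string, rebuild: () => Promise<unknown>) {
      let timer: ReturnType<typeof setTimeout> | null = null;
      watch(dir, { recursive: true }, (_event, filename) => {
        if (!filename) return;
        try {
          incr.forceInvalidate(filename); // throws for files that were never tracked
        } catch {
          return; // nothing depends on this file
        }
        // debounce so a burst of saves triggers one rebuild
        if (timer) clearTimeout(timer);
        timer = setTimeout(() => void rebuild(), 50);
      });
    }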

framework/lib/testing.ts (new file, 11 lines)

@@ -0,0 +1,11 @@
export function tmpFile(basename: string) {
  const file = path.join(import.meta.dirname, '../../.clover/testing', basename);
  return {
    path: file,
    read: fs.readFile.bind(fs, file),
    write: fs.writeMkdir.bind(fs, file),
  };
}

import * as path from 'node:path';
import * as fs from './fs.ts';
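
Note: a quick usage sketch, matching how incremental.test.ts uses it; files land under .clover/testing/<basename>, and read/write are the bound #sitegen/fs helpers:

    import { tmpFile } from "#sitegen/testing";

    const f = tmpFile("example.txt");
    await f.write("one");          // writeMkdir creates parent directories
    const text = await f.read("utf-8");
    console.log(f.path, text);     // path inside .clover/testing, "one"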