start regeneration

This commit is contained in:
clover caruso 2025-08-03 00:31:56 -04:00
parent 568fd83c34
commit 15b0a6ff70
7 changed files with 345 additions and 249 deletions

View file

@ -111,7 +111,7 @@ export type ServerPlatform = "node" | "passthru";
export interface ServerSideOptions {
entries: string[];
viewItems: sg.FileItem[];
viewRefs: incr.Ref<PreparedView>[];
viewRefs: incr.Ref<PageOrView>[];
styleMap: Map<string, incr.Ref<string>>;
scriptMap: incr.Ref<Record<string, string>>;
platform: ServerPlatform;
@ -150,10 +150,24 @@ export async function bundleServerJavaScript({
};
}, viewItems);
const wBundles = entries.map(
(entry) =>
await incr.work(async (io) => {
io.writeFile(
"../ts/view.d.ts",
[
entry,
"export interface RegisteredViews {",
...viewItems
.filter((view) => !view.id.startsWith("page:"))
.map(
(view) =>
` ${JSON.stringify(view.id)}: ` +
`typeof import(${JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))}),`,
),
"}",
].join("\n"),
);
});
const wBundles = entries.map((entry) =>
incr.work(async (io, entry) => {
const pkg = await io.readJson<{
dependencies: Record<string, string>;
@ -203,10 +217,7 @@ export async function bundleServerJavaScript({
chunkNames: "c.[hash]",
entryNames: path.basename(entry, path.extname(entry)),
entryPoints: [
path.join(
import.meta.dirname,
"backend/entry-" + platform + ".ts",
),
path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
],
platform: "node",
format: "esm",
@ -258,10 +269,9 @@ export async function bundleServerJavaScript({
}
return fileWithMagicWord;
}, entry),
] as const,
);
const wProcessed = wBundles.map(async ([entry, wBundle]) => {
const wProcessed = wBundles.map(async (wBundle) => {
if (!(await wBundle)) return;
await incr.work(async (io) => {
// Only the reachable resources need to be read and inserted into the bundle.
@ -322,3 +332,4 @@ import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";
import * as incr from "./incremental.ts";
import * as sg from "#sitegen";
import type { PageOrView } from "./generate.ts";

View file

@ -18,6 +18,7 @@ export const createTemplate = (
const r = render.current;
// Support using Marko outside of Clover SSR
if (!r) return renderer(props, n);
render.setCurrent(null);
const markoResult = renderFn.call(renderer, {
...props,
$global: { clover: r, cloverAsyncMarker },

View file

@ -35,6 +35,11 @@ export async function generate() {
);
const routes = await Promise.all([...builtViews, ...builtPages]);
const viewsAndDynPages: incr.Ref<PageOrView>[] = [
...builtViews,
...builtPages.filter((page) => UNWRAP(page.value).regenerate),
];
// -- page resources --
const scriptMap = incr.work(bundle.bundleClientJavaScript, {
clientRefs: routes.flatMap((x) => x.clientRefs),
@ -49,8 +54,11 @@ export async function generate() {
platform: "node",
styleMap,
scriptMap,
viewItems: views,
viewRefs: builtViews,
viewItems: viewsAndDynPages.map((ref) => {
const { id, file, type } = UNWRAP(ref.value);
return { id: type === "page" ? `page:${id}` : id, file };
}),
viewRefs: viewsAndDynPages,
});
// -- assemble page assets --
@ -181,6 +189,7 @@ export async function preparePage(io: Io, item: sg.FileItem) {
meta: metadata,
theme: pageTheme,
layout,
regenerate,
} = await io.import<any>(item.file);
if (!Page) throw new Error("Page is missing a 'default' export.");
if (!metadata) throw new Error("Page is missing 'meta' export with a title.");
@ -219,16 +228,36 @@ export async function preparePage(io: Io, item: sg.FileItem) {
"Page is missing 'meta.title'. " + "All pages need a title tag.",
);
}
// -- regeneration --
let regeneration: Regeneration | null = null;
if (typeof regenerate?.seconds === "number") {
regeneration ??= {};
regeneration.seconds = regenerate.seconds;
}
if (regenerate?.tags) {
regeneration ??= {};
regeneration.tags = regenerate.tags;
}
const styleKey = css.styleKey(cssImports, theme);
return {
type: "page",
id: item.id,
file: item.file,
regenerate: regeneration,
html: text,
meta: renderedMeta,
cssImports,
theme: theme ?? null,
styleKey,
clientRefs: Array.from(addon[sg.userData.key].scripts),
};
} as const;
}
// Normalized regeneration policy for a page, assembled in `preparePage`
// from the page module's exported `regenerate` object. Either or both
// fields may be present; a page with neither stays fully static.
interface Regeneration {
// time-based TTL — NOTE(review): presumably re-rendered after this many
// seconds by the backend regeneration system; confirm against view.ts
seconds?: number;
// invalidation tags; copied verbatim from `regenerate.tags`
tags?: string[];
}
export async function prepareView(io: Io, item: sg.FileItem) {
@ -243,13 +272,15 @@ export async function prepareView(io: Io, item: sg.FileItem) {
);
const styleKey = css.styleKey(cssImports, theme);
return {
file: path.relative(hot.projectRoot, item.file),
type: "view",
id: item.id,
file: item.file,
cssImports,
theme,
clientRefs: hot.getClientScriptRefs(item.file),
hasLayout: !!module.layout?.default,
styleKey,
};
} as const;
}
export type PreparedView = Awaited<ReturnType<typeof prepareView>>;
@ -284,7 +315,7 @@ export async function assembleAndWritePage(
.map((x) => `{${x}}`)
.join("\n");
const doc = wrapDocument({
const doc = sg.wrapDocument({
body: html,
head: meta,
inlineCss,
@ -296,6 +327,8 @@ export async function assembleAndWritePage(
}, page);
}
export type PageOrView = PreparedPage | PreparedView;
import * as sg from "#sitegen";
import * as incr from "./incremental.ts";
import { Io } from "./incremental.ts";
@ -307,4 +340,3 @@ import * as fs from "#sitegen/fs";
import type { FileItem } from "#sitegen";
import * as path from "node:path";
import * as meta from "#sitegen/meta";
import { wrapDocument } from "./lib/view.ts";

View file

@ -21,6 +21,7 @@ export interface Ref<T> {
onRejected: (error: unknown) => void,
): void;
key: string;
get value(): T | null;
}
type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;
@ -48,7 +49,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const prev = works.get(key) as Work<O> | null;
if (prev) {
return { key, then: (done) => done(prev.value) };
return { key, then: (done) => done(prev.value), value: prev.value };
}
async function perform() {
@ -85,7 +86,12 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
let cached: Promise<O>;
return {
key,
then: (fufill, reject) => void (cached ??= perform()).then(fufill, reject),
then(fufill, reject) {
(cached ??= perform()).then(fufill, reject);
},
get value() {
return (works.get(this.key)?.value as O) ?? null;
},
};
}
@ -97,11 +103,7 @@ export async function compile<T>(compiler: () => Promise<T>) {
const start = performance.now();
const timerSpinner = new Spinner({
text: () =>
`sitegen! [${
((performance.now() - start) / 1000).toFixed(
1,
)
}s]`,
`sitegen! [${((performance.now() - start) / 1000).toFixed(1)}s]`,
fps: 10,
});
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@ -114,8 +116,10 @@ export async function compile<T>(compiler: () => Promise<T>) {
return {
value,
watchFiles: new Set(files.keys()),
newOutputs: Array.from(seenWrites).filter(x => x.startsWith('f:')).map(x => x.slice(2)),
newAssets: !Array.from(seenWrites).some(x => x.startsWith('a:')),
newOutputs: Array.from(seenWrites)
.filter((x) => x.startsWith("f:"))
.map((x) => x.slice(2)),
newAssets: !Array.from(seenWrites).some((x) => x.startsWith("a:")),
};
} finally {
running = false;
@ -127,9 +131,14 @@ export async function compile<T>(compiler: () => Promise<T>) {
export async function flush(start: number) {
// Trim
const detachedFiles = new Set<string>;
const referencedAssets = new Set<string>;
for (const [k, { writes: { assets } }] of works) {
const detachedFiles = new Set<string>();
const referencedAssets = new Set<string>();
for (const [
k,
{
writes: { assets },
},
] of works) {
if (seenWorks.has(k)) {
for (const asset of assets.values()) referencedAssets.add(asset.hash);
continue;
@ -142,8 +151,7 @@ export async function flush(start: number) {
detachedFiles.add(k);
}
for (const k of assets.keys()) {
if (!referencedAssets.has(k))
assets.delete(k);
if (!referencedAssets.has(k)) assets.delete(k);
}
const p = [];
@ -171,9 +179,9 @@ export async function flush(start: number) {
console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
console.writeLine(` - ${assets.size} static assets`);
console.writeLine(
` - dist: ${formatSize(dist)}, incremental: ${
formatSize(serialized.byteLength)
}`,
` - dist: ${formatSize(dist)}, incremental: ${formatSize(
serialized.byteLength,
)}`,
);
}
@ -212,13 +220,14 @@ function deleteWork(key: string) {
affects.splice(affects.indexOf(key), 1);
}
for (const remove of affects) {
const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
const {
reads: { works: list },
} = UNWRAP(works.get(remove), remove);
ASSERT(list.has(key));
list.delete(key);
}
for (const file of w.files) {
if (UNWRAP(writes.get(file)).work === key)
writes.delete(file);
if (UNWRAP(writes.get(file)).work === key) writes.delete(file);
}
// Assets are temporarily kept, trimmed via manual GC after compilation.
@ -234,23 +243,32 @@ export function reset() {
}
export function serialize() {
const fileEntries = Array.from(files, ([k, v]) =>
const fileEntries = Array.from(
files,
([k, v]) =>
[
k,
v.type,
v.type === 'f' ? v.lastModified : v.type === 'd' ? v.contentHash : null,
v.type === "f" ? v.lastModified : v.type === "d" ? v.contentHash : null,
...v.affects,
] as const);
const workEntries = Array.from(works, ([k, v]) =>
] as const,
);
const workEntries = Array.from(
works,
([k, v]) =>
[
k,
v.value,
Array.from(v.reads.files),
Array.from(v.reads.works),
Array.from(v.writes.files),
Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const),
Array.from(
v.writes.assets,
([k, { headers }]) => [k, headers] as const,
),
v.affects,
] as const);
] as const,
);
const expectedFilesOnDisk = Array.from(
writes,
([k, { size, work }]) => [k, size, work] as const,
@ -280,7 +298,7 @@ async function deserialize(buffer: Buffer) {
if (type === "f") {
ASSERT(typeof content === "number");
files.set(k, { type, affects, lastModified: content });
} else if (type === 'd') {
} else if (type === "d") {
ASSERT(typeof content === "string");
files.set(k, { type, affects, contentHash: content, contents: [] });
} else {
@ -288,15 +306,8 @@ async function deserialize(buffer: Buffer) {
}
}
for (const entry of workEntries) {
const [
k,
value,
readFiles,
readWorks,
writeFiles,
writeAssets,
affects,
] = entry;
const [k, value, readFiles, readWorks, writeFiles, writeAssets, affects] =
entry;
works.set(k, {
value,
reads: {
@ -305,23 +316,30 @@ async function deserialize(buffer: Buffer) {
},
writes: {
files: new Set(writeFiles),
assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, {
assets: new Map(
Array.from(writeAssets, ([k, headers]) => [
k,
{
hash: JSON.parse(UNWRAP(headers.etag)),
headers,
}])),
},
]),
),
},
affects,
});
}
const statFiles = await Promise.all(expectedFilesOnDisk
.map(([k, size, work]) =>
fs.stat(path.join(".clover/o", k))
const statFiles = await Promise.all(
expectedFilesOnDisk.map(([k, size, work]) =>
fs
.stat(path.join(".clover/o", k))
.catch((err) => {
if (err.code === "ENOENT") return null;
throw err;
})
.then((stat) => ({ k, size, work, stat }))
));
.then((stat) => ({ k, size, work, stat })),
),
);
for (const { k, stat, work, size } of statFiles) {
if (stat?.size === size) {
writes.set(k, {
@ -337,19 +355,24 @@ async function deserialize(buffer: Buffer) {
assets.set(hash, { raw, gzip, zstd });
}
await Promise.all(Array.from(files, ([key, file]) => invalidateEntry(key, file)));
await Promise.all(
Array.from(files, ([key, file]) => invalidateEntry(key, file)),
);
}
export async function invalidate(filePath: string): Promise<boolean> {
const key = toRel(toAbs(filePath));
const file = UNWRAP(files.get(key), `Untracked file '${key}'`)
return invalidateEntry(key, file)
const file = UNWRAP(files.get(key), `Untracked file '${key}'`);
return invalidateEntry(key, file);
}
export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
export async function invalidateEntry(
key: string,
file: TrackedFile,
): Promise<boolean> {
try {
if (file.type === "d") {
const contents = file.contents = await fs.readdir(key);
const contents = (file.contents = await fs.readdir(key));
contents.sort();
const contentHash = crypto
.createHash("sha1")
@ -359,15 +382,17 @@ export async function invalidateEntry(key: string, file: TrackedFile): Promise<b
file.contentHash = contentHash;
throw new Error();
}
} else if (file.type === 'f') {
const lastModified = await fs.stat(key)
.then(x => Math.floor(x.mtimeMs), () => 0);
} else if (file.type === "f") {
const lastModified = await fs.stat(key).then(
(x) => Math.floor(x.mtimeMs),
() => 0,
);
if (file.lastModified !== lastModified) {
file.lastModified = lastModified;
throw new Error();
}
} else {
file.type satisfies 'null';
file.type satisfies "null";
const stat = await fs.stat(key).catch(() => null);
if (stat) throw new Error();
}
@ -375,7 +400,7 @@ export async function invalidateEntry(key: string, file: TrackedFile): Promise<b
} catch (e) {
forceInvalidate(file);
hot.unload(toAbs(key));
if (file.type === 'null') files.delete(key);
if (file.type === "null") files.delete(key);
return true;
}
}
@ -391,13 +416,16 @@ export function getAssetManifest() {
assets.get(hash),
`Asset ${key} (${hash})`,
);
return [key, {
return [
key,
{
raw: writer.write(raw, "raw:" + hash),
gzip: writer.write(gzip, "gzip:" + hash),
zstd: writer.write(zstd, "zstd:" + hash),
headers,
}] as const;
})
},
] as const;
}),
),
) satisfies BuiltAssetMap;
return { json: asset, blob: writer.get() };
@ -446,7 +474,7 @@ export class Io {
const { key, resolved } = this.#trackFs(dir);
const existing = files.get(key);
try {
if (existing?.type === 'd') return existing.contents;
if (existing?.type === "d") return existing.contents;
const contents = await fs.readdir(resolved);
contents.sort();
const contentHash = crypto
@ -474,7 +502,7 @@ export class Io {
const stat = await fs.stat(abs);
if (stat.isDirectory()) {
return (await this.readDirRecursive(abs)).map((grand) =>
path.join(child, grand)
path.join(child, grand),
);
} else {
return child;
@ -572,7 +600,7 @@ class BufferWriter {
write(buffer: Buffer, hash: string): BufferView {
let view = this.seen.get(hash);
if (view) return view;
view = [this.size, this.size += buffer.byteLength];
view = [this.size, (this.size += buffer.byteLength)];
this.seen.set(hash, view);
this.buffers.push(buffer);
return view;
@ -593,9 +621,12 @@ export function validateSerializable(value: unknown, key: string) {
} else if (value && typeof value === "object") {
if (Array.isArray(value)) {
value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
} else if (Object.getPrototypeOf(value) === Object.prototype || Buffer.isBuffer(value)) {
} else if (
Object.getPrototypeOf(value) === Object.prototype ||
Buffer.isBuffer(value)
) {
Object.entries(value).forEach(([k, v]) =>
validateSerializable(v, `${key}.${k}`)
validateSerializable(v, `${key}.${k}`),
);
} else {
throw new Error(
@ -631,10 +662,13 @@ interface FileWrite {
}
interface Writes {
files: Set<string>;
assets: Map<string, {
assets: Map<
string,
{
hash: string;
headers: Record<string, string>;
}>;
}
>;
}
interface Asset {
raw: Buffer;
@ -648,15 +682,13 @@ interface Work<T = unknown> {
writes: Writes;
affects: string[];
}
type TrackedFile =
& {
type TrackedFile = {
affects: string[];
}
& (
} & (
| { type: "f"; lastModified: number }
| { type: "d"; contentHash: string; contents: string[] }
| { type: "null"; }
);
| { type: "null" }
);
export interface BuiltAssetMap {
[route: string]: BuiltAsset;
}

View file

@ -30,4 +30,22 @@ export function addScript(id: ScriptId | { value: ScriptId }) {
userData.get().scripts.add(typeof id === "string" ? id : id.value);
}
/**
 * Assembles a complete HTML document from pre-rendered fragments.
 * When `inlineCss` or `scripts` is the empty string, its wrapper tag
 * (`<style>` / `<script>`) is omitted from the output entirely.
 */
export function wrapDocument({
  body,
  head,
  inlineCss,
  scripts,
}: {
  head: string;
  body: string;
  inlineCss: string;
  scripts: string;
}) {
  const styleTag = inlineCss ? `<style>${inlineCss}</style>` : "";
  const scriptTag = scripts ? `<script>${scripts}</script>` : "";
  return (
    "<!doctype html><html lang=en>" +
    `<head>${head}${styleTag}</head>` +
    `<body>${body}${scriptTag}</body></html>`
  );
}
import * as render from "#engine/render";

View file

@ -1,3 +1,22 @@
// The "view" system allows rendering dynamic pages within backends.
// This is done by scanning all `views` dirs, bundling their client
// resources, and then providing `renderView` which renders a page.
//
// This system also implements page regeneration.
// "$views" is a virtual module emitted by the bundler (see `Codegen`
// below, "Generated in bundle.ts"). Outside a bundled backend the module
// does not exist, so fail eagerly with a clear message rather than a
// raw module-resolution error.
let codegen: Codegen;
try {
codegen = require("$views");
} catch {
throw new Error("Can only import '#sitegen/view' in backends.");
}
// Shape of the virtual "$views" module generated in `bundle.ts`.
export interface Codegen {
// view id -> pre-bundled view entry (component, css, meta, scripts)
views: Record<ViewKey, View>;
// script id -> bundled client script source (joined via `joinScripts`)
scripts: Record<string, string>;
// time-based regeneration entries collected from page `regenerate` exports
regenTtls: Ttl[];
// tag -> views to regenerate — NOTE(review): presumably keyed by the
// page's `regenerate.tags` values; confirm against bundle.ts codegen
regenTags: Record<string, ViewKey[]>;
}
export interface View {
component: render.Component;
meta:
@ -7,23 +26,30 @@ export interface View {
inlineCss: string;
scripts: Record<string, string>;
}
// One time-based regeneration entry emitted into `Codegen.regenTtls`.
export interface Ttl {
// regeneration interval, from the page's `regenerate.seconds` export
seconds: number;
// registered id of the view/page this TTL applies to
key: ViewKey;
}
// Union of registered view ids, derived from the generated
// `.clover/ts/view.d.ts` map (imported below as `ViewMap`).
type ViewKey = keyof ViewMap;
// Tables backing renderViewToString; initialized to null! and populated
// lazily from the "$views" virtual module on first render (see the
// `views ?? ({ views, scripts } = require("$views"))` line below).
let views: Record<string, View> = null!;
let scripts: Record<string, string> = null!;
export async function renderView(
export async function renderView<K extends ViewKey>(
context: hono.Context,
id: string,
props: Record<string, unknown>,
id: K,
props: PropsFromModule<ViewMap[K]>,
) {
return context.html(await renderViewToString(id, { context, ...props }));
}
export async function renderViewToString(
id: string,
props: Record<string, unknown>,
/**
 * Extracts the props type of a view module by inferring the first
 * parameter of its `default` component export. Resolves to `never` when
 * the module has no compatible default export.
 *
 * The original constrained the parameter with `extends any`, which is a
 * no-op constraint (every type satisfies it) and an `any` lint smell; an
 * unconstrained type parameter is equivalent and idiomatic.
 */
type PropsFromModule<M> = M extends {
  default: (props: infer T) => render.Node;
}
  ? T
  : never;
export async function renderViewToString<K extends ViewKey>(
id: K,
props: PropsFromModule<ViewMap[K]>,
) {
views ?? ({ views, scripts } = require("$views"));
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
@ -32,7 +58,7 @@ export async function renderViewToString(
inlineCss,
layout,
meta: metadata,
}: View = UNWRAP(views[id], `Missing view ${id}`);
}: View = UNWRAP(codegen.views[id], `Missing view ${id}`);
// -- metadata --
const renderedMetaPromise = Promise.resolve(
@ -48,48 +74,26 @@ export async function renderViewToString(
} = await render.async(page, { [sg.userData.key]: sg.initRender() });
// -- join document and send --
return wrapDocument({
return sg.wrapDocument({
body,
head: await renderedMetaPromise,
inlineCss,
scripts: joinScripts(
Array.from(sitegen.scripts, (id) =>
UNWRAP(scripts[id], `Missing script ${id}`),
UNWRAP(codegen.scripts[id], `Missing script ${id}`),
),
),
});
}
/**
 * Installs the view and script tables used by renderViewToString.
 * Called by generated backend code to supply the "$views" data.
 */
export function provideViewData(v: typeof views, s: typeof scripts) {
  views = v;
  scripts = s;
}
/**
 * Concatenates inline script sources into a single script body. Each
 * source is wrapped in its own `{}` block so top-level `let`/`const`
 * declarations cannot collide across scripts.
 */
export function joinScripts(scriptSources: string[]) {
  switch (scriptSources.length) {
    case 0:
      return "";
    case 1:
      return scriptSources[0];
    default:
      return scriptSources.map((src) => `{${src}}`).join(";");
  }
}
/**
 * Renders a complete HTML document around pre-rendered head/body text.
 * Empty `inlineCss` / `scripts` strings omit their wrapper tags.
 */
export function wrapDocument({
  body,
  head,
  inlineCss,
  scripts,
}: {
  head: string;
  body: string;
  inlineCss: string;
  scripts: string;
}) {
  const parts = ["<!doctype html><html lang=en><head>", head];
  if (inlineCss) parts.push(`<style>${inlineCss}</style>`);
  parts.push("</head><body>", body);
  if (scripts) parts.push(`<script>${scripts}</script>`);
  parts.push("</body></html>");
  return parts.join("");
}
import * as meta from "./meta.ts";
import type * as hono from "#hono";
import * as render from "#engine/render";
import * as sg from "./sitegen.ts";
import type { RegisteredViews as ViewMap } from "../../.clover/ts/view.d.ts";

View file

@ -1,11 +1,9 @@
export * as layout from "../layout.tsx";
export const regenerate = {
manual: true,
};
export interface Input {
admin?: boolean;
}
export * as layout from "../layout.tsx";
export const regenerate = { tags: ["q+a"] };
export const meta: Metadata = {
title: "paper clover q+a",
description: "ask clover a question",
@ -14,7 +12,7 @@ export const meta: Metadata = {
<const/{ admin = false } = input />
<const/questions = [...Question.getAll()] />
<if=true>
<if=!admin>
<question-form />
</>
<for|question| of=questions>