feat: dynamic page regeneration #24

Merged
clo merged 3 commits from regen into master 2025-08-11 22:43:27 -07:00
7 changed files with 345 additions and 249 deletions
Showing only changes of commit 15b0a6ff70
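The page-level contract added here, as far as this commit shows: a page module may export an optional `regenerate` object, `preparePage` picks up its `seconds` and `tags` fields, and `generate()` forwards such pages to the server bundle as `page:`-prefixed dynamic views. A minimal sketch with illustrative values:

    // Sketch of a page module opting into regeneration (values are illustrative).
    // preparePage() reads the optional `regenerate?.seconds` and `regenerate?.tags`.
    export const regenerate = {
      seconds: 300, // assumed example: time-based regeneration every 5 minutes
      tags: ["q+a"], // tag-based regeneration, as used by the q+a page in this PR
    };
    // The page's other exports (default component, `meta`, optional `layout`)
    // are unchanged by this feature and omitted here.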

View file

@@ -111,7 +111,7 @@ export type ServerPlatform = "node" | "passthru";
 export interface ServerSideOptions {
   entries: string[];
   viewItems: sg.FileItem[];
-  viewRefs: incr.Ref<PreparedView>[];
+  viewRefs: incr.Ref<PageOrView>[];
   styleMap: Map<string, incr.Ref<string>>;
   scriptMap: incr.Ref<Record<string, string>>;
   platform: ServerPlatform;
@@ -150,10 +150,24 @@ export async function bundleServerJavaScript({
     };
   }, viewItems);
-  const wBundles = entries.map(
-    (entry) =>
-      [
-        entry,
+  await incr.work(async (io) => {
+    io.writeFile(
+      "../ts/view.d.ts",
+      [
+        "export interface RegisteredViews {",
+        ...viewItems
+          .filter((view) => !view.id.startsWith("page:"))
+          .map(
+            (view) =>
+              ` ${JSON.stringify(view.id)}: ` +
+              `typeof import(${JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))}),`,
+          ),
+        "}",
+      ].join("\n"),
+    );
+  });
+  const wBundles = entries.map((entry) =>
     incr.work(async (io, entry) => {
       const pkg = await io.readJson<{
         dependencies: Record<string, string>;
@@ -203,10 +217,7 @@ export async function bundleServerJavaScript({
       chunkNames: "c.[hash]",
       entryNames: path.basename(entry, path.extname(entry)),
       entryPoints: [
-        path.join(
-          import.meta.dirname,
-          "backend/entry-" + platform + ".ts",
-        ),
+        path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
       ],
       platform: "node",
       format: "esm",
@@ -258,10 +269,9 @@ export async function bundleServerJavaScript({
       }
       return fileWithMagicWord;
     }, entry),
-    ] as const,
   );
-  const wProcessed = wBundles.map(async ([entry, wBundle]) => {
+  const wProcessed = wBundles.map(async (wBundle) => {
     if (!(await wBundle)) return;
     await incr.work(async (io) => {
       // Only the reachable resources need to be read and inserted into the bundle.
@@ -322,3 +332,4 @@ import * as fs from "#sitegen/fs";
 import * as mime from "#sitegen/mime";
 import * as incr from "./incremental.ts";
 import * as sg from "#sitegen";
+import type { PageOrView } from "./generate.ts";
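The new `incr.work` block above also emits a type declaration file so view ids can be checked at compile time. Based on the strings being written, the generated `.clover/ts/view.d.ts` would look roughly like this (the entry's id and path are hypothetical):

    // .clover/ts/view.d.ts (generated; example entry, id and path are made up)
    export interface RegisteredViews {
      "example-view": typeof import("../../src/views/example-view.tsx"),
    }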

View file

@@ -18,6 +18,7 @@ export const createTemplate = (
   const r = render.current;
   // Support using Marko outside of Clover SSR
   if (!r) return renderer(props, n);
+  render.setCurrent(null);
   const markoResult = renderFn.call(renderer, {
     ...props,
     $global: { clover: r, cloverAsyncMarker },

View file

@@ -35,6 +35,11 @@ export async function generate() {
   );
   const routes = await Promise.all([...builtViews, ...builtPages]);
+  const viewsAndDynPages: incr.Ref<PageOrView>[] = [
+    ...builtViews,
+    ...builtPages.filter((page) => UNWRAP(page.value).regenerate),
+  ];
   // -- page resources --
   const scriptMap = incr.work(bundle.bundleClientJavaScript, {
     clientRefs: routes.flatMap((x) => x.clientRefs),
@@ -49,8 +54,11 @@ export async function generate() {
     platform: "node",
     styleMap,
     scriptMap,
-    viewItems: views,
-    viewRefs: builtViews,
+    viewItems: viewsAndDynPages.map((ref) => {
+      const { id, file, type } = UNWRAP(ref.value);
+      return { id: type === "page" ? `page:${id}` : id, file };
+    }),
+    viewRefs: viewsAndDynPages,
   });
   // -- assemble page assets --
@@ -181,6 +189,7 @@ export async function preparePage(io: Io, item: sg.FileItem) {
     meta: metadata,
     theme: pageTheme,
     layout,
+    regenerate,
   } = await io.import<any>(item.file);
   if (!Page) throw new Error("Page is missing a 'default' export.");
   if (!metadata) throw new Error("Page is missing 'meta' export with a title.");
@@ -219,16 +228,36 @@ export async function preparePage(io: Io, item: sg.FileItem) {
       "Page is missing 'meta.title'. " + "All pages need a title tag.",
     );
   }
+  // -- regeneration --
+  let regeneration: Regeneration | null = null;
+  if (typeof regenerate?.seconds === "number") {
+    regeneration ??= {};
+    regeneration.seconds = regenerate.seconds;
+  }
+  if (regenerate?.tags) {
+    regeneration ??= {};
+    regeneration.tags = regenerate.tags;
+  }
   const styleKey = css.styleKey(cssImports, theme);
   return {
+    type: "page",
     id: item.id,
+    file: item.file,
+    regenerate: regeneration,
     html: text,
     meta: renderedMeta,
     cssImports,
     theme: theme ?? null,
     styleKey,
     clientRefs: Array.from(addon[sg.userData.key].scripts),
-  };
+  } as const;
+}
+interface Regeneration {
+  seconds?: number;
+  tags?: string[];
 }
 export async function prepareView(io: Io, item: sg.FileItem) {
@@ -243,13 +272,15 @@ export async function prepareView(io: Io, item: sg.FileItem) {
   );
   const styleKey = css.styleKey(cssImports, theme);
   return {
-    file: path.relative(hot.projectRoot, item.file),
+    type: "view",
+    id: item.id,
+    file: item.file,
     cssImports,
     theme,
     clientRefs: hot.getClientScriptRefs(item.file),
     hasLayout: !!module.layout?.default,
     styleKey,
-  };
+  } as const;
 }
 export type PreparedView = Awaited<ReturnType<typeof prepareView>>;
@@ -284,7 +315,7 @@ export async function assembleAndWritePage(
       .map((x) => `{${x}}`)
       .join("\n");
-    const doc = wrapDocument({
+    const doc = sg.wrapDocument({
       body: html,
       head: meta,
       inlineCss,
@@ -296,6 +327,8 @@
   }, page);
 }
+export type PageOrView = PreparedPage | PreparedView;
 import * as sg from "#sitegen";
 import * as incr from "./incremental.ts";
 import { Io } from "./incremental.ts";
@@ -307,4 +340,3 @@ import * as fs from "#sitegen/fs";
 import type { FileItem } from "#sitegen";
 import * as path from "node:path";
 import * as meta from "#sitegen/meta";
-import { wrapDocument } from "./lib/view.ts";
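Both prepare functions now tag their results, so `PageOrView` is a discriminated union; dynamic pages travel to the bundler under a `page:`-prefixed id, which is also why `bundle.ts` filters `page:` ids out of the generated `view.d.ts`. A consumer sketch, not code from this PR:

    // Sketch: narrowing the new PageOrView union.
    import type { PageOrView } from "./generate.ts";

    function describe(entry: PageOrView): string {
      if (entry.type === "page") {
        // PreparedPage: pre-rendered HTML plus an optional regeneration request.
        return `page ${entry.id}, regenerate=${JSON.stringify(entry.regenerate)}`;
      }
      // PreparedView: metadata for rendering later inside a backend.
      return `view ${entry.id}, hasLayout=${entry.hasLayout}`;
    }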

View file

@@ -21,6 +21,7 @@ export interface Ref<T> {
     onRejected: (error: unknown) => void,
   ): void;
   key: string;
+  get value(): T | null;
 }
 type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;
@@ -48,7 +49,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
   const prev = works.get(key) as Work<O> | null;
   if (prev) {
-    return { key, then: (done) => done(prev.value) };
+    return { key, then: (done) => done(prev.value), value: prev.value };
   }
   async function perform() {
@@ -85,7 +86,12 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
   let cached: Promise<O>;
   return {
     key,
-    then: (fufill, reject) => void (cached ??= perform()).then(fufill, reject),
+    then(fufill, reject) {
+      (cached ??= perform()).then(fufill, reject);
+    },
+    get value() {
+      return (works.get(this.key)?.value as O) ?? null;
+    },
   };
 }
@@ -97,11 +103,7 @@ export async function compile<T>(compiler: () => Promise<T>) {
   const start = performance.now();
   const timerSpinner = new Spinner({
     text: () =>
-      `sitegen! [${
-        ((performance.now() - start) / 1000).toFixed(
-          1,
-        )
-      }s]`,
+      `sitegen! [${((performance.now() - start) / 1000).toFixed(1)}s]`,
     fps: 10,
   });
   using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@@ -114,8 +116,10 @@ export async function compile<T>(compiler: () => Promise<T>) {
     return {
       value,
       watchFiles: new Set(files.keys()),
-      newOutputs: Array.from(seenWrites).filter(x => x.startsWith('f:')).map(x => x.slice(2)),
-      newAssets: !Array.from(seenWrites).some(x => x.startsWith('a:')),
+      newOutputs: Array.from(seenWrites)
+        .filter((x) => x.startsWith("f:"))
+        .map((x) => x.slice(2)),
+      newAssets: !Array.from(seenWrites).some((x) => x.startsWith("a:")),
     };
   } finally {
     running = false;
@@ -127,9 +131,14 @@
 export async function flush(start: number) {
   // Trim
-  const detachedFiles = new Set<string>;
-  const referencedAssets = new Set<string>;
-  for (const [k, { writes: { assets } }] of works) {
+  const detachedFiles = new Set<string>();
+  const referencedAssets = new Set<string>();
+  for (const [
+    k,
+    {
+      writes: { assets },
+    },
+  ] of works) {
     if (seenWorks.has(k)) {
       for (const asset of assets.values()) referencedAssets.add(asset.hash);
       continue;
@@ -142,8 +151,7 @@ export async function flush(start: number) {
     detachedFiles.add(k);
   }
   for (const k of assets.keys()) {
-    if (!referencedAssets.has(k))
-      assets.delete(k);
+    if (!referencedAssets.has(k)) assets.delete(k);
   }
   const p = [];
@@ -171,9 +179,9 @@ export async function flush(start: number) {
   console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
   console.writeLine(` - ${assets.size} static assets`);
   console.writeLine(
-    ` - dist: ${formatSize(dist)}, incremental: ${
-      formatSize(serialized.byteLength)
-    }`,
+    ` - dist: ${formatSize(dist)}, incremental: ${formatSize(
+      serialized.byteLength,
+    )}`,
   );
 }
@@ -212,13 +220,14 @@ function deleteWork(key: string) {
     affects.splice(affects.indexOf(key), 1);
   }
   for (const remove of affects) {
-    const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
+    const {
+      reads: { works: list },
+    } = UNWRAP(works.get(remove), remove);
     ASSERT(list.has(key));
     list.delete(key);
   }
   for (const file of w.files) {
-    if (UNWRAP(writes.get(file)).work === key)
-      writes.delete(file);
+    if (UNWRAP(writes.get(file)).work === key) writes.delete(file);
   }
   // Assets are temporarily kept, trimmed via manual GC after compilation.
@@ -234,23 +243,32 @@ export function reset() {
 }
 export function serialize() {
-  const fileEntries = Array.from(files, ([k, v]) =>
-    [
-      k,
-      v.type,
-      v.type === 'f' ? v.lastModified : v.type === 'd' ? v.contentHash : null,
-      ...v.affects,
-    ] as const);
-  const workEntries = Array.from(works, ([k, v]) =>
-    [
-      k,
-      v.value,
-      Array.from(v.reads.files),
-      Array.from(v.reads.works),
-      Array.from(v.writes.files),
-      Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const),
-      v.affects,
-    ] as const);
+  const fileEntries = Array.from(
+    files,
+    ([k, v]) =>
+      [
+        k,
+        v.type,
+        v.type === "f" ? v.lastModified : v.type === "d" ? v.contentHash : null,
+        ...v.affects,
+      ] as const,
+  );
+  const workEntries = Array.from(
+    works,
+    ([k, v]) =>
+      [
+        k,
+        v.value,
+        Array.from(v.reads.files),
+        Array.from(v.reads.works),
+        Array.from(v.writes.files),
+        Array.from(
+          v.writes.assets,
+          ([k, { headers }]) => [k, headers] as const,
+        ),
+        v.affects,
+      ] as const,
+  );
   const expectedFilesOnDisk = Array.from(
     writes,
     ([k, { size, work }]) => [k, size, work] as const,
@@ -280,7 +298,7 @@ async function deserialize(buffer: Buffer) {
     if (type === "f") {
       ASSERT(typeof content === "number");
       files.set(k, { type, affects, lastModified: content });
-    } else if (type === 'd') {
+    } else if (type === "d") {
       ASSERT(typeof content === "string");
       files.set(k, { type, affects, contentHash: content, contents: [] });
     } else {
@@ -288,15 +306,8 @@
     }
   }
   for (const entry of workEntries) {
-    const [
-      k,
-      value,
-      readFiles,
-      readWorks,
-      writeFiles,
-      writeAssets,
-      affects,
-    ] = entry;
+    const [k, value, readFiles, readWorks, writeFiles, writeAssets, affects] =
+      entry;
     works.set(k, {
       value,
       reads: {
@@ -305,23 +316,30 @@
       },
       writes: {
         files: new Set(writeFiles),
-        assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, {
-          hash: JSON.parse(UNWRAP(headers.etag)),
-          headers,
-        }])),
+        assets: new Map(
+          Array.from(writeAssets, ([k, headers]) => [
+            k,
+            {
+              hash: JSON.parse(UNWRAP(headers.etag)),
+              headers,
+            },
+          ]),
+        ),
       },
       affects,
     });
   }
-  const statFiles = await Promise.all(expectedFilesOnDisk
-    .map(([k, size, work]) =>
-      fs.stat(path.join(".clover/o", k))
-        .catch((err) => {
-          if (err.code === "ENOENT") return null;
-          throw err;
-        })
-        .then((stat) => ({ k, size, work, stat }))
-    ));
+  const statFiles = await Promise.all(
+    expectedFilesOnDisk.map(([k, size, work]) =>
+      fs
+        .stat(path.join(".clover/o", k))
+        .catch((err) => {
+          if (err.code === "ENOENT") return null;
+          throw err;
+        })
+        .then((stat) => ({ k, size, work, stat })),
+    ),
+  );
   for (const { k, stat, work, size } of statFiles) {
     if (stat?.size === size) {
       writes.set(k, {
@@ -337,19 +355,24 @@ async function deserialize(buffer: Buffer) {
     assets.set(hash, { raw, gzip, zstd });
   }
-  await Promise.all(Array.from(files, ([key, file]) => invalidateEntry(key, file)));
+  await Promise.all(
+    Array.from(files, ([key, file]) => invalidateEntry(key, file)),
+  );
 }
 export async function invalidate(filePath: string): Promise<boolean> {
   const key = toRel(toAbs(filePath));
-  const file = UNWRAP(files.get(key), `Untracked file '${key}'`)
-  return invalidateEntry(key, file)
+  const file = UNWRAP(files.get(key), `Untracked file '${key}'`);
+  return invalidateEntry(key, file);
 }
-export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
+export async function invalidateEntry(
+  key: string,
+  file: TrackedFile,
+): Promise<boolean> {
   try {
     if (file.type === "d") {
-      const contents = file.contents = await fs.readdir(key);
+      const contents = (file.contents = await fs.readdir(key));
       contents.sort();
       const contentHash = crypto
         .createHash("sha1")
@@ -359,15 +382,17 @@ export async function invalidateEntry(key: string, file: TrackedFile): Promise<b
         file.contentHash = contentHash;
         throw new Error();
       }
-    } else if (file.type === 'f') {
-      const lastModified = await fs.stat(key)
-        .then(x => Math.floor(x.mtimeMs), () => 0);
+    } else if (file.type === "f") {
+      const lastModified = await fs.stat(key).then(
+        (x) => Math.floor(x.mtimeMs),
+        () => 0,
+      );
       if (file.lastModified !== lastModified) {
         file.lastModified = lastModified;
         throw new Error();
       }
     } else {
-      file.type satisfies 'null';
+      file.type satisfies "null";
       const stat = await fs.stat(key).catch(() => null);
       if (stat) throw new Error();
     }
@@ -375,7 +400,7 @@ export async function invalidateEntry(key: string, file: TrackedFile): Promise<b
   } catch (e) {
     forceInvalidate(file);
     hot.unload(toAbs(key));
-    if (file.type === 'null') files.delete(key);
+    if (file.type === "null") files.delete(key);
     return true;
   }
 }
@@ -391,13 +416,16 @@ export function getAssetManifest() {
         assets.get(hash),
         `Asset ${key} (${hash})`,
       );
-      return [key, {
+      return [
+        key,
+        {
           raw: writer.write(raw, "raw:" + hash),
           gzip: writer.write(gzip, "gzip:" + hash),
           zstd: writer.write(zstd, "zstd:" + hash),
           headers,
-      }] as const;
-    })
+        },
+      ] as const;
+    }),
     ),
   ) satisfies BuiltAssetMap;
   return { json: asset, blob: writer.get() };
@@ -446,7 +474,7 @@ export class Io {
     const { key, resolved } = this.#trackFs(dir);
     const existing = files.get(key);
     try {
-      if (existing?.type === 'd') return existing.contents;
+      if (existing?.type === "d") return existing.contents;
       const contents = await fs.readdir(resolved);
       contents.sort();
       const contentHash = crypto
@@ -474,7 +502,7 @@ export class Io {
       const stat = await fs.stat(abs);
       if (stat.isDirectory()) {
         return (await this.readDirRecursive(abs)).map((grand) =>
-          path.join(child, grand)
+          path.join(child, grand),
         );
       } else {
         return child;
@@ -572,7 +600,7 @@ class BufferWriter {
   write(buffer: Buffer, hash: string): BufferView {
     let view = this.seen.get(hash);
     if (view) return view;
-    view = [this.size, this.size += buffer.byteLength];
+    view = [this.size, (this.size += buffer.byteLength)];
     this.seen.set(hash, view);
     this.buffers.push(buffer);
     return view;
@@ -593,9 +621,12 @@ export function validateSerializable(value: unknown, key: string) {
   } else if (value && typeof value === "object") {
     if (Array.isArray(value)) {
       value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
-    } else if (Object.getPrototypeOf(value) === Object.prototype || Buffer.isBuffer(value)) {
+    } else if (
+      Object.getPrototypeOf(value) === Object.prototype ||
+      Buffer.isBuffer(value)
+    ) {
       Object.entries(value).forEach(([k, v]) =>
-        validateSerializable(v, `${key}.${k}`)
+        validateSerializable(v, `${key}.${k}`),
       );
     } else {
       throw new Error(
@@ -631,10 +662,13 @@ interface FileWrite {
 }
 interface Writes {
   files: Set<string>;
-  assets: Map<string, {
-    hash: string;
-    headers: Record<string, string>;
-  }>;
+  assets: Map<
+    string,
+    {
+      hash: string;
+      headers: Record<string, string>;
+    }
+  >;
 }
 interface Asset {
   raw: Buffer;
@@ -648,15 +682,13 @@ interface Work<T = unknown> {
   writes: Writes;
   affects: string[];
 }
-type TrackedFile =
-  & {
-    affects: string[];
-  }
-  & (
+type TrackedFile = {
+  affects: string[];
+} & (
   | { type: "f"; lastModified: number }
   | { type: "d"; contentHash: string; contents: string[] }
-  | { type: "null"; }
+  | { type: "null" }
 );
 export interface BuiltAssetMap {
   [route: string]: BuiltAsset;
 }
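The `get value()` addition to `Ref<T>` is what `generate()` relies on above: it synchronously exposes the result of a work item that has already run, and `null` otherwise. A small usage sketch under that assumption (`someJob` and `someInput` are placeholders):

    // Sketch: synchronous peek at a finished work item.
    const ref = incr.work(someJob, someInput);
    await ref; // ensure the job has completed
    const result = ref.value; // non-null once the work is recorded in `works`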

View file

@@ -30,4 +30,22 @@ export function addScript(id: ScriptId | { value: ScriptId }) {
   userData.get().scripts.add(typeof id === "string" ? id : id.value);
 }
+export function wrapDocument({
+  body,
+  head,
+  inlineCss,
+  scripts,
+}: {
+  head: string;
+  body: string;
+  inlineCss: string;
+  scripts: string;
+}) {
+  return `<!doctype html><html lang=en><head>${head}${
+    inlineCss ? `<style>${inlineCss}</style>` : ""
+  }</head><body>${body}${
+    scripts ? `<script>${scripts}</script>` : ""
+  }</body></html>`;
+}
 import * as render from "#engine/render";
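`wrapDocument` moves here so the static generator (`assembleAndWritePage`) and the backend view renderer can share one implementation via `sg.wrapDocument`. An illustrative call:

    // Sketch: assembling a document from pre-rendered pieces (example values).
    import * as sg from "#sitegen";

    const html = sg.wrapDocument({
      head: "<title>example</title>",
      body: "<h1>hello</h1>",
      inlineCss: "", // empty string: no <style> tag is emitted
      scripts: "", // empty string: no <script> tag is emitted
    });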

View file

@@ -1,3 +1,22 @@
+// The "view" system allows rendering dynamic pages within backends.
+// This is done by scanning all `views` dirs, bundling their client
+// resources, and then providing `renderView` which renders a page.
+//
+// This system also implements page regeneration.
+let codegen: Codegen;
+try {
+  codegen = require("$views");
+} catch {
+  throw new Error("Can only import '#sitegen/view' in backends.");
+}
+// Generated in `bundle.ts`
+export interface Codegen {
+  views: Record<ViewKey, View>;
+  scripts: Record<string, string>;
+  regenTtls: Ttl[];
+  regenTags: Record<string, ViewKey[]>;
+}
 export interface View {
   component: render.Component;
   meta:
@@ -7,23 +26,30 @@ export interface View {
   inlineCss: string;
   scripts: Record<string, string>;
 }
-let views: Record<string, View> = null!;
-let scripts: Record<string, string> = null!;
-export async function renderView(
+export interface Ttl {
+  seconds: number;
+  key: ViewKey;
+}
+type ViewKey = keyof ViewMap;
+export async function renderView<K extends ViewKey>(
   context: hono.Context,
-  id: string,
-  props: Record<string, unknown>,
+  id: K,
+  props: PropsFromModule<ViewMap[K]>,
 ) {
   return context.html(await renderViewToString(id, { context, ...props }));
 }
-export async function renderViewToString(
-  id: string,
-  props: Record<string, unknown>,
+type PropsFromModule<M extends any> = M extends {
+  default: (props: infer T) => render.Node;
+}
+  ? T
+  : never;
+export async function renderViewToString<K extends ViewKey>(
+  id: K,
+  props: PropsFromModule<ViewMap[K]>,
 ) {
-  views ?? ({ views, scripts } = require("$views"));
   // The view contains pre-bundled CSS and scripts, but keeps the scripts
   // separate for run-time dynamic scripts. For example, the file viewer
   // includes the canvas for the current page, but only the current page.
@@ -32,7 +58,7 @@ export async function renderViewToString(
     inlineCss,
     layout,
     meta: metadata,
-  }: View = UNWRAP(views[id], `Missing view ${id}`);
+  }: View = UNWRAP(codegen.views[id], `Missing view ${id}`);
   // -- metadata --
   const renderedMetaPromise = Promise.resolve(
@@ -48,48 +74,26 @@
   } = await render.async(page, { [sg.userData.key]: sg.initRender() });
   // -- join document and send --
-  return wrapDocument({
+  return sg.wrapDocument({
     body,
     head: await renderedMetaPromise,
     inlineCss,
     scripts: joinScripts(
       Array.from(sitegen.scripts, (id) =>
-        UNWRAP(scripts[id], `Missing script ${id}`),
+        UNWRAP(codegen.scripts[id], `Missing script ${id}`),
       ),
     ),
   });
 }
-export function provideViewData(v: typeof views, s: typeof scripts) {
-  (views = v), (scripts = s);
-}
 export function joinScripts(scriptSources: string[]) {
   const { length } = scriptSources;
   if (length === 0) return "";
   if (length === 1) return scriptSources[0];
   return scriptSources.map((source) => `{${source}}`).join(";");
 }
-export function wrapDocument({
-  body,
-  head,
-  inlineCss,
-  scripts,
-}: {
-  head: string;
-  body: string;
-  inlineCss: string;
-  scripts: string;
-}) {
-  return `<!doctype html><html lang=en><head>${head}${
-    inlineCss ? `<style>${inlineCss}</style>` : ""
-  }</head><body>${body}${
-    scripts ? `<script>${scripts}</script>` : ""
-  }</body></html>`;
-}
 import * as meta from "./meta.ts";
 import type * as hono from "#hono";
 import * as render from "#engine/render";
 import * as sg from "./sitegen.ts";
+import type { RegisteredViews as ViewMap } from "../../.clover/ts/view.d.ts";
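With `ViewMap` derived from the generated `view.d.ts`, both the view id and its props are now checked at the call site. A backend route sketch (the view id and its props are hypothetical):

    // Sketch: calling the typed renderView from a hono route.
    import { Hono } from "hono";
    import { renderView } from "#sitegen/view";

    const app = new Hono();
    app.get("/example", (c) => renderView(c, "example-view", { admin: false }));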

View file

@@ -1,11 +1,9 @@
-export * as layout from "../layout.tsx";
-export const regenerate = {
-  manual: true,
-};
 export interface Input {
   admin?: boolean;
 }
+export * as layout from "../layout.tsx";
+export const regenerate = { tags: ["q+a"] };
 export const meta: Metadata = {
   title: "paper clover q+a",
   description: "ask clover a question",
@@ -14,7 +12,7 @@ export const meta: Metadata = {
 <const/{ admin = false } = input />
 <const/questions = [...Question.getAll()] />
-<if=true>
+<if=!admin>
   <question-form />
 </>
 <for|question| of=questions>