feat: dynamic page regeneration #24
7 changed files with 345 additions and 249 deletions
@@ -111,7 +111,7 @@ export type ServerPlatform = "node" | "passthru";
 export interface ServerSideOptions {
   entries: string[];
   viewItems: sg.FileItem[];
-  viewRefs: incr.Ref<PreparedView>[];
+  viewRefs: incr.Ref<PageOrView>[];
   styleMap: Map<string, incr.Ref<string>>;
   scriptMap: incr.Ref<Record<string, string>>;
   platform: ServerPlatform;
@@ -150,118 +150,128 @@ export async function bundleServerJavaScript({
     };
   }, viewItems);

-  const wBundles = entries.map(
-    (entry) =>
-      [
-        entry,
-        incr.work(async (io, entry) => {
-          const pkg = await io.readJson<{
-            dependencies: Record<string, string>;
-          }>("package.json");
-
-          let magicWord = null as string | null;
-          // -- plugins --
-          const serverPlugins: esbuild.Plugin[] = [
-            virtualFiles({
-              // only add dependency when imported.
-              $views: async () => {
-                const view = await io.readWork(wViewSource);
-                ({ magicWord } = view);
-                return view.file;
-              },
-            }),
-            projectRelativeResolution(),
-            markoViaBuildCache(),
-            {
-              name: "replace client references",
-              setup(b) {
-                b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
-                  contents: hot.resolveClientRefs(
-                    await fs.readFile(file, "utf-8"),
-                    file,
-                  ).code,
-                  loader: path.extname(file).slice(1) as esbuild.Loader,
-                }));
-              },
-            },
-            {
-              name: "mark css external",
-              setup(b) {
-                b.onResolve({ filter: /\.css$/ }, () => ({
-                  path: ".",
-                  namespace: "dropped",
-                }));
-                b.onLoad({ filter: /./, namespace: "dropped" }, () => ({
-                  contents: "",
-                }));
-              },
-            },
-          ];
-
-          const { metafile, outputFiles } = await esbuild.build({
-            bundle: true,
-            chunkNames: "c.[hash]",
-            entryNames: path.basename(entry, path.extname(entry)),
-            entryPoints: [
-              path.join(
-                import.meta.dirname,
-                "backend/entry-" + platform + ".ts",
-              ),
-            ],
-            platform: "node",
-            format: "esm",
-            minify: false,
-            outdir: "out!",
-            plugins: serverPlugins,
-            splitting: true,
-            logLevel: "silent",
-            write: false,
-            metafile: true,
-            jsx: "automatic",
-            jsxImportSource: "#engine",
-            jsxDev: false,
-            define: {
-              MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
-              "globalThis.CLOVER_SERVER_ENTRY": JSON.stringify(entry),
-            },
-            external: Object.keys(pkg.dependencies).filter(
-              (x) => !x.startsWith("@paperclover"),
-            ),
-          });
-          await trackEsbuild(io, metafile);
-
-          let fileWithMagicWord: {
-            bytes: Buffer;
-            basename: string;
-            magicWord: string;
-          } | null = null;
-          for (const output of outputFiles) {
-            const basename = output.path.replace(/^.*?!(?:\/|\\)/, "");
-            const key = "out!/" + basename.replaceAll("\\", "/");
-            // If this contains the generated "$views" file, then
-            // mark this file as the one for replacement. Because
-            // `splitting` is `true`, esbuild will not emit this
-            // file in more than one chunk.
-            if (
-              magicWord &&
-              metafile.outputs[key].inputs["framework/lib/view.ts"]
-            ) {
-              ASSERT(!fileWithMagicWord);
-              fileWithMagicWord = {
-                basename,
-                bytes: Buffer.from(output.contents),
-                magicWord,
-              };
-            } else {
-              io.writeFile(basename, Buffer.from(output.contents));
-            }
-          }
-          return fileWithMagicWord;
-        }, entry),
-      ] as const,
+  await incr.work(async (io) => {
+    io.writeFile(
+      "../ts/view.d.ts",
+      [
+        "export interface RegisteredViews {",
+        ...viewItems
+          .filter((view) => !view.id.startsWith("page:"))
+          .map(
+            (view) =>
+              ` ${JSON.stringify(view.id)}: ` +
+              `typeof import(${JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))}),`,
+          ),
+        "}",
+      ].join("\n"),
+    );
+  });
+
+  const wBundles = entries.map((entry) =>
+    incr.work(async (io, entry) => {
+      const pkg = await io.readJson<{
+        dependencies: Record<string, string>;
+      }>("package.json");
+
+      let magicWord = null as string | null;
+      // -- plugins --
+      const serverPlugins: esbuild.Plugin[] = [
+        virtualFiles({
+          // only add dependency when imported.
+          $views: async () => {
+            const view = await io.readWork(wViewSource);
+            ({ magicWord } = view);
+            return view.file;
+          },
+        }),
+        projectRelativeResolution(),
+        markoViaBuildCache(),
+        {
+          name: "replace client references",
+          setup(b) {
+            b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
+              contents: hot.resolveClientRefs(
+                await fs.readFile(file, "utf-8"),
+                file,
+              ).code,
+              loader: path.extname(file).slice(1) as esbuild.Loader,
+            }));
+          },
+        },
+        {
+          name: "mark css external",
+          setup(b) {
+            b.onResolve({ filter: /\.css$/ }, () => ({
+              path: ".",
+              namespace: "dropped",
+            }));
+            b.onLoad({ filter: /./, namespace: "dropped" }, () => ({
+              contents: "",
+            }));
+          },
+        },
+      ];
+
+      const { metafile, outputFiles } = await esbuild.build({
+        bundle: true,
+        chunkNames: "c.[hash]",
+        entryNames: path.basename(entry, path.extname(entry)),
+        entryPoints: [
+          path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
+        ],
+        platform: "node",
+        format: "esm",
+        minify: false,
+        outdir: "out!",
+        plugins: serverPlugins,
+        splitting: true,
+        logLevel: "silent",
+        write: false,
+        metafile: true,
+        jsx: "automatic",
+        jsxImportSource: "#engine",
+        jsxDev: false,
+        define: {
+          MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
+          "globalThis.CLOVER_SERVER_ENTRY": JSON.stringify(entry),
+        },
+        external: Object.keys(pkg.dependencies).filter(
+          (x) => !x.startsWith("@paperclover"),
+        ),
+      });
+      await trackEsbuild(io, metafile);
+
+      let fileWithMagicWord: {
+        bytes: Buffer;
+        basename: string;
+        magicWord: string;
+      } | null = null;
+      for (const output of outputFiles) {
+        const basename = output.path.replace(/^.*?!(?:\/|\\)/, "");
+        const key = "out!/" + basename.replaceAll("\\", "/");
+        // If this contains the generated "$views" file, then
+        // mark this file as the one for replacement. Because
+        // `splitting` is `true`, esbuild will not emit this
+        // file in more than one chunk.
+        if (
+          magicWord &&
+          metafile.outputs[key].inputs["framework/lib/view.ts"]
+        ) {
+          ASSERT(!fileWithMagicWord);
+          fileWithMagicWord = {
+            basename,
+            bytes: Buffer.from(output.contents),
+            magicWord,
+          };
+        } else {
+          io.writeFile(basename, Buffer.from(output.contents));
+        }
+      }
+      return fileWithMagicWord;
+    }, entry),
   );

-  const wProcessed = wBundles.map(async ([entry, wBundle]) => {
+  const wProcessed = wBundles.map(async (wBundle) => {
     if (!(await wBundle)) return;
     await incr.work(async (io) => {
       // Only the reachable resources need to be read and inserted into the bundle.
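Note: the new `incr.work` block above emits `.clover/ts/view.d.ts` so that view ids can be type-checked by the backend. A rough sketch of what the generated file could look like, assuming two registered views (the ids and import paths here are hypothetical, not taken from this diff):

// .clover/ts/view.d.ts (generated; example entries are hypothetical)
export interface RegisteredViews {
  "q+a": typeof import("../../src/q+a/views/question.marko"),
  "file-viewer": typeof import("../../src/file-viewer/views/list.marko"),
}

Entries whose id starts with "page:" (regenerable pages) are still bundled into `$views`, but the `.filter(...)` above intentionally excludes them from this interface.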
@@ -322,3 +332,4 @@ import * as fs from "#sitegen/fs";
 import * as mime from "#sitegen/mime";
 import * as incr from "./incremental.ts";
 import * as sg from "#sitegen";
+import type { PageOrView } from "./generate.ts";
@@ -18,6 +18,7 @@ export const createTemplate = (
   const r = render.current;
   // Support using Marko outside of Clover SSR
   if (!r) return renderer(props, n);
+  render.setCurrent(null);
   const markoResult = renderFn.call(renderer, {
     ...props,
     $global: { clover: r, cloverAsyncMarker },
@@ -35,6 +35,11 @@ export async function generate() {
   );
   const routes = await Promise.all([...builtViews, ...builtPages]);

+  const viewsAndDynPages: incr.Ref<PageOrView>[] = [
+    ...builtViews,
+    ...builtPages.filter((page) => UNWRAP(page.value).regenerate),
+  ];
+
   // -- page resources --
   const scriptMap = incr.work(bundle.bundleClientJavaScript, {
     clientRefs: routes.flatMap((x) => x.clientRefs),
@@ -49,8 +54,11 @@ export async function generate() {
     platform: "node",
     styleMap,
     scriptMap,
-    viewItems: views,
-    viewRefs: builtViews,
+    viewItems: viewsAndDynPages.map((ref) => {
+      const { id, file, type } = UNWRAP(ref.value);
+      return { id: type === "page" ? `page:${id}` : id, file };
+    }),
+    viewRefs: viewsAndDynPages,
   });

   // -- assemble page assets --
@@ -181,6 +189,7 @@ export async function preparePage(io: Io, item: sg.FileItem) {
     meta: metadata,
     theme: pageTheme,
     layout,
+    regenerate,
   } = await io.import<any>(item.file);
   if (!Page) throw new Error("Page is missing a 'default' export.");
   if (!metadata) throw new Error("Page is missing 'meta' export with a title.");
@@ -219,16 +228,36 @@ export async function preparePage(io: Io, item: sg.FileItem) {
       "Page is missing 'meta.title'. " + "All pages need a title tag.",
     );
   }

+  // -- regeneration --
+  let regeneration: Regeneration | null = null;
+  if (typeof regenerate?.seconds === "number") {
+    regeneration ??= {};
+    regeneration.seconds = regenerate.seconds;
+  }
+  if (regenerate?.tags) {
+    regeneration ??= {};
+    regeneration.tags = regenerate.tags;
+  }
+
   const styleKey = css.styleKey(cssImports, theme);
   return {
+    type: "page",
     id: item.id,
+    file: item.file,
+    regenerate: regeneration,
     html: text,
     meta: renderedMeta,
     cssImports,
    theme: theme ?? null,
    styleKey,
    clientRefs: Array.from(addon[sg.userData.key].scripts),
-  };
+  } as const;
+}
+
+interface Regeneration {
+  seconds?: number;
+  tags?: string[];
 }

 export async function prepareView(io: Io, item: sg.FileItem) {
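For context, the `regenerate` export parsed above is declared by individual page modules. A minimal sketch of the supported shape (the page itself is hypothetical; the q+a page at the end of this diff uses the `tags` form, and both fields are optional per the parsing logic):

// Hypothetical page module opting into dynamic regeneration.
export const regenerate = {
  seconds: 60,     // re-render after a time-to-live, and/or
  tags: ["q+a"],   // re-render when one of these tags is invalidated
};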
@@ -243,13 +272,15 @@ export async function prepareView(io: Io, item: sg.FileItem) {
   );
   const styleKey = css.styleKey(cssImports, theme);
   return {
-    file: path.relative(hot.projectRoot, item.file),
+    type: "view",
+    id: item.id,
+    file: item.file,
     cssImports,
     theme,
     clientRefs: hot.getClientScriptRefs(item.file),
     hasLayout: !!module.layout?.default,
     styleKey,
-  };
+  } as const;
 }
 export type PreparedView = Awaited<ReturnType<typeof prepareView>>;

@@ -284,7 +315,7 @@ export async function assembleAndWritePage(
     .map((x) => `{${x}}`)
     .join("\n");

-  const doc = wrapDocument({
+  const doc = sg.wrapDocument({
     body: html,
     head: meta,
     inlineCss,
@@ -296,6 +327,8 @@ export async function assembleAndWritePage(
   }, page);
 }

+export type PageOrView = PreparedPage | PreparedView;
+
 import * as sg from "#sitegen";
 import * as incr from "./incremental.ts";
 import { Io } from "./incremental.ts";
@@ -307,4 +340,3 @@ import * as fs from "#sitegen/fs";
 import type { FileItem } from "#sitegen";
 import * as path from "node:path";
 import * as meta from "#sitegen/meta";
-import { wrapDocument } from "./lib/view.ts";
@@ -21,6 +21,7 @@ export interface Ref<T> {
     onRejected: (error: unknown) => void,
   ): void;
   key: string;
+  get value(): T | null;
 }
 type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;

@@ -48,7 +49,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {

   const prev = works.get(key) as Work<O> | null;
   if (prev) {
-    return { key, then: (done) => done(prev.value) };
+    return { key, then: (done) => done(prev.value), value: prev.value };
   }

   async function perform() {
@@ -85,7 +86,12 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
   let cached: Promise<O>;
   return {
     key,
-    then: (fufill, reject) => void (cached ??= perform()).then(fufill, reject),
+    then(fufill, reject) {
+      (cached ??= perform()).then(fufill, reject);
+    },
+    get value() {
+      return (works.get(this.key)?.value as O) ?? null;
+    },
   };
 }

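A short sketch of how the new `value` getter is meant to be read (this mirrors what `generate.ts` does above: the job must have settled, e.g. after awaiting the ref, before `value` is non-null; `UNWRAP` is the project's assert-non-null helper):

const wPage = incr.work(preparePage, item); // Ref<PreparedPage>
await wPage;                                // run the job, or reuse the cached result
const page = UNWRAP(wPage.value);           // synchronous read of the settled value
if (page.regenerate) {
  // this page participates in dynamic regeneration
}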
@@ -97,11 +103,7 @@ export async function compile<T>(compiler: () => Promise<T>) {
   const start = performance.now();
   const timerSpinner = new Spinner({
     text: () =>
-      `sitegen! [${
-        ((performance.now() - start) / 1000).toFixed(
-          1,
-        )
-      }s]`,
+      `sitegen! [${((performance.now() - start) / 1000).toFixed(1)}s]`,
     fps: 10,
   });
   using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@@ -114,8 +116,10 @@ export async function compile<T>(compiler: () => Promise<T>) {
     return {
       value,
       watchFiles: new Set(files.keys()),
-      newOutputs: Array.from(seenWrites).filter(x => x.startsWith('f:')).map(x => x.slice(2)),
-      newAssets: !Array.from(seenWrites).some(x => x.startsWith('a:')),
+      newOutputs: Array.from(seenWrites)
+        .filter((x) => x.startsWith("f:"))
+        .map((x) => x.slice(2)),
+      newAssets: !Array.from(seenWrites).some((x) => x.startsWith("a:")),
     };
   } finally {
     running = false;
@@ -127,9 +131,14 @@ export async function compile<T>(compiler: () => Promise<T>) {

 export async function flush(start: number) {
   // Trim
-  const detachedFiles = new Set<string>;
-  const referencedAssets = new Set<string>;
-  for (const [k, { writes: { assets } }] of works) {
+  const detachedFiles = new Set<string>();
+  const referencedAssets = new Set<string>();
+  for (const [
+    k,
+    {
+      writes: { assets },
+    },
+  ] of works) {
     if (seenWorks.has(k)) {
       for (const asset of assets.values()) referencedAssets.add(asset.hash);
       continue;
@@ -142,8 +151,7 @@ export async function flush(start: number) {
     detachedFiles.add(k);
   }
   for (const k of assets.keys()) {
-    if (!referencedAssets.has(k))
-      assets.delete(k);
+    if (!referencedAssets.has(k)) assets.delete(k);
   }

   const p = [];
@@ -171,9 +179,9 @@ export async function flush(start: number) {
   console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
   console.writeLine(` - ${assets.size} static assets`);
   console.writeLine(
-    ` - dist: ${formatSize(dist)}, incremental: ${
-      formatSize(serialized.byteLength)
-    }`,
+    ` - dist: ${formatSize(dist)}, incremental: ${formatSize(
+      serialized.byteLength,
+    )}`,
   );
 }

@@ -212,13 +220,14 @@ function deleteWork(key: string) {
     affects.splice(affects.indexOf(key), 1);
   }
   for (const remove of affects) {
-    const { reads: { works: list } } = UNWRAP(works.get(remove), remove);
+    const {
+      reads: { works: list },
+    } = UNWRAP(works.get(remove), remove);
     ASSERT(list.has(key));
     list.delete(key);
   }
   for (const file of w.files) {
-    if (UNWRAP(writes.get(file)).work === key)
-      writes.delete(file);
+    if (UNWRAP(writes.get(file)).work === key) writes.delete(file);
   }
   // Assets are temporarily kept, trimmed via manual GC after compilation.

@@ -234,23 +243,32 @@ export function reset() {
 }

 export function serialize() {
-  const fileEntries = Array.from(files, ([k, v]) =>
-    [
-      k,
-      v.type,
-      v.type === 'f' ? v.lastModified : v.type === 'd' ? v.contentHash : null,
-      ...v.affects,
-    ] as const);
-  const workEntries = Array.from(works, ([k, v]) =>
-    [
-      k,
-      v.value,
-      Array.from(v.reads.files),
-      Array.from(v.reads.works),
-      Array.from(v.writes.files),
-      Array.from(v.writes.assets, ([k, { headers }]) => [k, headers] as const),
-      v.affects,
-    ] as const);
+  const fileEntries = Array.from(
+    files,
+    ([k, v]) =>
+      [
+        k,
+        v.type,
+        v.type === "f" ? v.lastModified : v.type === "d" ? v.contentHash : null,
+        ...v.affects,
+      ] as const,
+  );
+  const workEntries = Array.from(
+    works,
+    ([k, v]) =>
+      [
+        k,
+        v.value,
+        Array.from(v.reads.files),
+        Array.from(v.reads.works),
+        Array.from(v.writes.files),
+        Array.from(
+          v.writes.assets,
+          ([k, { headers }]) => [k, headers] as const,
+        ),
+        v.affects,
+      ] as const,
+  );
   const expectedFilesOnDisk = Array.from(
     writes,
     ([k, { size, work }]) => [k, size, work] as const,
@@ -280,7 +298,7 @@ async function deserialize(buffer: Buffer) {
     if (type === "f") {
       ASSERT(typeof content === "number");
       files.set(k, { type, affects, lastModified: content });
-    } else if (type === 'd') {
+    } else if (type === "d") {
       ASSERT(typeof content === "string");
       files.set(k, { type, affects, contentHash: content, contents: [] });
     } else {
@@ -288,15 +306,8 @@ async function deserialize(buffer: Buffer) {
     }
   }
   for (const entry of workEntries) {
-    const [
-      k,
-      value,
-      readFiles,
-      readWorks,
-      writeFiles,
-      writeAssets,
-      affects,
-    ] = entry;
+    const [k, value, readFiles, readWorks, writeFiles, writeAssets, affects] =
+      entry;
     works.set(k, {
       value,
       reads: {
@@ -305,23 +316,30 @@ async function deserialize(buffer: Buffer) {
       },
       writes: {
         files: new Set(writeFiles),
-        assets: new Map(Array.from(writeAssets, ([k, headers]) => [k, {
-          hash: JSON.parse(UNWRAP(headers.etag)),
-          headers,
-        }])),
+        assets: new Map(
+          Array.from(writeAssets, ([k, headers]) => [
+            k,
+            {
+              hash: JSON.parse(UNWRAP(headers.etag)),
+              headers,
+            },
+          ]),
+        ),
       },
       affects,
     });
   }
-  const statFiles = await Promise.all(expectedFilesOnDisk
-    .map(([k, size, work]) =>
-      fs.stat(path.join(".clover/o", k))
+  const statFiles = await Promise.all(
+    expectedFilesOnDisk.map(([k, size, work]) =>
+      fs
+        .stat(path.join(".clover/o", k))
         .catch((err) => {
           if (err.code === "ENOENT") return null;
           throw err;
         })
-      .then((stat) => ({ k, size, work, stat }))
-  ));
+        .then((stat) => ({ k, size, work, stat })),
+    ),
+  );
   for (const { k, stat, work, size } of statFiles) {
     if (stat?.size === size) {
       writes.set(k, {
@@ -337,19 +355,24 @@ async function deserialize(buffer: Buffer) {
     assets.set(hash, { raw, gzip, zstd });
   }

-  await Promise.all(Array.from(files, ([key, file]) => invalidateEntry(key, file)));
+  await Promise.all(
+    Array.from(files, ([key, file]) => invalidateEntry(key, file)),
+  );
 }

 export async function invalidate(filePath: string): Promise<boolean> {
   const key = toRel(toAbs(filePath));
-  const file = UNWRAP(files.get(key), `Untracked file '${key}'`)
-  return invalidateEntry(key, file)
+  const file = UNWRAP(files.get(key), `Untracked file '${key}'`);
+  return invalidateEntry(key, file);
 }

-export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
+export async function invalidateEntry(
+  key: string,
+  file: TrackedFile,
+): Promise<boolean> {
   try {
     if (file.type === "d") {
-      const contents = file.contents = await fs.readdir(key);
+      const contents = (file.contents = await fs.readdir(key));
       contents.sort();
       const contentHash = crypto
         .createHash("sha1")
@@ -359,23 +382,25 @@ export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
        file.contentHash = contentHash;
        throw new Error();
      }
-    } else if (file.type === 'f') {
-      const lastModified = await fs.stat(key)
-        .then(x => Math.floor(x.mtimeMs), () => 0);
+    } else if (file.type === "f") {
+      const lastModified = await fs.stat(key).then(
+        (x) => Math.floor(x.mtimeMs),
+        () => 0,
+      );
       if (file.lastModified !== lastModified) {
         file.lastModified = lastModified;
         throw new Error();
       }
     } else {
-      file.type satisfies 'null';
+      file.type satisfies "null";
       const stat = await fs.stat(key).catch(() => null);
       if (stat) throw new Error();
     }
     return false;
   } catch (e) {
     forceInvalidate(file);
     hot.unload(toAbs(key));
-    if (file.type === 'null') files.delete(key);
+    if (file.type === "null") files.delete(key);
     return true;
   }
 }
@@ -391,13 +416,16 @@ export function getAssetManifest() {
         assets.get(hash),
         `Asset ${key} (${hash})`,
       );
-      return [key, {
-        raw: writer.write(raw, "raw:" + hash),
-        gzip: writer.write(gzip, "gzip:" + hash),
-        zstd: writer.write(zstd, "zstd:" + hash),
-        headers,
-      }] as const;
-    })
+      return [
+        key,
+        {
+          raw: writer.write(raw, "raw:" + hash),
+          gzip: writer.write(gzip, "gzip:" + hash),
+          zstd: writer.write(zstd, "zstd:" + hash),
+          headers,
+        },
+      ] as const;
+    }),
   ),
  ) satisfies BuiltAssetMap;
  return { json: asset, blob: writer.get() };
@@ -444,9 +472,9 @@ export class Io {
   }
   async readDir(dir: string) {
     const { key, resolved } = this.#trackFs(dir);
     const existing = files.get(key);
     try {
-      if (existing?.type === 'd') return existing.contents;
+      if (existing?.type === "d") return existing.contents;
       const contents = await fs.readdir(resolved);
       contents.sort();
       const contentHash = crypto
@@ -474,7 +502,7 @@ export class Io {
       const stat = await fs.stat(abs);
       if (stat.isDirectory()) {
         return (await this.readDirRecursive(abs)).map((grand) =>
-          path.join(child, grand)
+          path.join(child, grand),
         );
       } else {
         return child;
@@ -572,7 +600,7 @@ class BufferWriter {
   write(buffer: Buffer, hash: string): BufferView {
     let view = this.seen.get(hash);
     if (view) return view;
-    view = [this.size, this.size += buffer.byteLength];
+    view = [this.size, (this.size += buffer.byteLength)];
     this.seen.set(hash, view);
     this.buffers.push(buffer);
     return view;
@@ -593,9 +621,12 @@ export function validateSerializable(value: unknown, key: string) {
   } else if (value && typeof value === "object") {
     if (Array.isArray(value)) {
       value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
-    } else if (Object.getPrototypeOf(value) === Object.prototype || Buffer.isBuffer(value)) {
+    } else if (
+      Object.getPrototypeOf(value) === Object.prototype ||
+      Buffer.isBuffer(value)
+    ) {
       Object.entries(value).forEach(([k, v]) =>
-        validateSerializable(v, `${key}.${k}`)
+        validateSerializable(v, `${key}.${k}`),
       );
     } else {
       throw new Error(
@@ -631,10 +662,13 @@ interface FileWrite {
 }
 interface Writes {
   files: Set<string>;
-  assets: Map<string, {
-    hash: string;
-    headers: Record<string, string>;
-  }>;
+  assets: Map<
+    string,
+    {
+      hash: string;
+      headers: Record<string, string>;
+    }
+  >;
 }
 interface Asset {
   raw: Buffer;
@@ -648,15 +682,13 @@ interface Work<T = unknown> {
   writes: Writes;
   affects: string[];
 }
-type TrackedFile =
-  & {
-    affects: string[];
-  }
-  & (
-    | { type: "f"; lastModified: number }
-    | { type: "d"; contentHash: string; contents: string[] }
-    | { type: "null"; }
-  );
+type TrackedFile = {
+  affects: string[];
+} & (
+  | { type: "f"; lastModified: number }
+  | { type: "d"; contentHash: string; contents: string[] }
+  | { type: "null" }
+);
 export interface BuiltAssetMap {
   [route: string]: BuiltAsset;
 }
@@ -30,4 +30,22 @@ export function addScript(id: ScriptId | { value: ScriptId }) {
   userData.get().scripts.add(typeof id === "string" ? id : id.value);
 }

+export function wrapDocument({
+  body,
+  head,
+  inlineCss,
+  scripts,
+}: {
+  head: string;
+  body: string;
+  inlineCss: string;
+  scripts: string;
+}) {
+  return `<!doctype html><html lang=en><head>${head}${
+    inlineCss ? `<style>${inlineCss}</style>` : ""
+  }</head><body>${body}${
+    scripts ? `<script>${scripts}</script>` : ""
+  }</body></html>`;
+}
+
 import * as render from "#engine/render";
@@ -1,3 +1,22 @@
+// The "view" system allows rendering dynamic pages within backends.
+// This is done by scanning all `views` dirs, bundling their client
+// resources, and then providing `renderView` which renders a page.
+//
+// This system also implements page regeneration.
+let codegen: Codegen;
+try {
+  codegen = require("$views");
+} catch {
+  throw new Error("Can only import '#sitegen/view' in backends.");
+}
+
+// Generated in `bundle.ts`
+export interface Codegen {
+  views: Record<ViewKey, View>;
+  scripts: Record<string, string>;
+  regenTtls: Ttl[];
+  regenTags: Record<string, ViewKey[]>;
+}
 export interface View {
   component: render.Component;
   meta:
@@ -7,23 +26,30 @@ export interface View {
   inlineCss: string;
   scripts: Record<string, string>;
 }
+export interface Ttl {
+  seconds: number;
+  key: ViewKey;
+}
+type ViewKey = keyof ViewMap;

-let views: Record<string, View> = null!;
-let scripts: Record<string, string> = null!;
-
-export async function renderView(
+export async function renderView<K extends ViewKey>(
   context: hono.Context,
-  id: string,
-  props: Record<string, unknown>,
+  id: K,
+  props: PropsFromModule<ViewMap[K]>,
 ) {
   return context.html(await renderViewToString(id, { context, ...props }));
 }

-export async function renderViewToString(
-  id: string,
-  props: Record<string, unknown>,
+type PropsFromModule<M extends any> = M extends {
+  default: (props: infer T) => render.Node;
+}
+  ? T
+  : never;
+
+export async function renderViewToString<K extends ViewKey>(
+  id: K,
+  props: PropsFromModule<ViewMap[K]>,
 ) {
-  views ?? ({ views, scripts } = require("$views"));
   // The view contains pre-bundled CSS and scripts, but keeps the scripts
   // separate for run-time dynamic scripts. For example, the file viewer
   // includes the canvas for the current page, but only the current page.
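With `ViewMap` derived from the generated `RegisteredViews`, backend calls are now checked against the registered view ids and their prop types. A minimal hono-style sketch (the route, view id, and props here are hypothetical):

import { renderView } from "#sitegen/view";

// `app` is assumed to be a hono instance; "q+a" must be a key of RegisteredViews.
app.get("/q+a", async (c) => renderView(c, "q+a", { admin: false }));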
@@ -32,7 +58,7 @@ export async function renderViewToString(
     inlineCss,
     layout,
     meta: metadata,
-  }: View = UNWRAP(views[id], `Missing view ${id}`);
+  }: View = UNWRAP(codegen.views[id], `Missing view ${id}`);

   // -- metadata --
   const renderedMetaPromise = Promise.resolve(
@@ -48,48 +74,26 @@ export async function renderViewToString(
   } = await render.async(page, { [sg.userData.key]: sg.initRender() });

   // -- join document and send --
-  return wrapDocument({
+  return sg.wrapDocument({
     body,
     head: await renderedMetaPromise,
     inlineCss,
     scripts: joinScripts(
       Array.from(sitegen.scripts, (id) =>
-        UNWRAP(scripts[id], `Missing script ${id}`),
+        UNWRAP(codegen.scripts[id], `Missing script ${id}`),
       ),
     ),
   });
 }

-export function provideViewData(v: typeof views, s: typeof scripts) {
-  (views = v), (scripts = s);
-}
-
 export function joinScripts(scriptSources: string[]) {
   const { length } = scriptSources;
   if (length === 0) return "";
   if (length === 1) return scriptSources[0];
   return scriptSources.map((source) => `{${source}}`).join(";");
 }

-export function wrapDocument({
-  body,
-  head,
-  inlineCss,
-  scripts,
-}: {
-  head: string;
-  body: string;
-  inlineCss: string;
-  scripts: string;
-}) {
-  return `<!doctype html><html lang=en><head>${head}${
-    inlineCss ? `<style>${inlineCss}</style>` : ""
-  }</head><body>${body}${
-    scripts ? `<script>${scripts}</script>` : ""
-  }</body></html>`;
-}
-
 import * as meta from "./meta.ts";
 import type * as hono from "#hono";
 import * as render from "#engine/render";
 import * as sg from "./sitegen.ts";
+import type { RegisteredViews as ViewMap } from "../../.clover/ts/view.d.ts";
@@ -1,11 +1,9 @@
-export * as layout from "../layout.tsx";
-export const regenerate = {
-  manual: true,
-};
-
 export interface Input {
   admin?: boolean;
 }

+export * as layout from "../layout.tsx";
+export const regenerate = { tags: ["q+a"] };
 export const meta: Metadata = {
   title: "paper clover q+a",
   description: "ask clover a question",
@@ -14,7 +12,7 @@ export const meta: Metadata = {
 <const/{ admin = false } = input />
 <const/questions = [...Question.getAll()] />

-<if=true>
+<if=!admin>
   <question-form />
 </>
 <for|question| of=questions>