feat: dynamic page regeneration #24

Merged
clo merged 3 commits from regen into master 2025-08-11 22:43:27 -07:00
38 changed files with 736 additions and 755 deletions
Showing only changes of commit 56f13c676c - Show all commits

View file

@ -3,14 +3,8 @@
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
utils.url = "github:numtide/flake-utils";
};
outputs =
{ nixpkgs, utils, ... }:
utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};
in
{
outputs = inputs: inputs.utils.lib.eachDefaultSystem (system:
with inputs.nixpkgs.legacyPackages.${system}; {
devShells.default = pkgs.mkShell {
buildInputs = [
pkgs.nodejs_24 # runtime
@ -24,6 +18,12 @@
pkgs.rsync
];
};
}
);
devShells.min = pkgs.mkShell {
buildInputs = [
pkgs.nodejs_24 # runtime
pkgs.deno # formatter
pkgs.rsync
];
};
});
}

View file

@ -88,19 +88,19 @@ export async function bundleClientJavaScript(
for (const file of outputFiles) {
const { text } = file;
let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
const sources = Object.keys(inputs).filter((x) => !isIgnoredSource(x));
const { entryPoint } = UNWRAP(metafile.outputs["out!" + route]);
// Register non-chunks as script entries.
const chunk = route.startsWith("/js/c.");
if (!chunk) {
const key = hot.getScriptId(path.resolve(sources[sources.length - 1]));
const key = hot.getScriptId(toAbs(UNWRAP(entryPoint)));
console.log(route, key);
route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
scripts[key] = text;
}
// Register chunks and public scripts as assets.
if (chunk || publicScriptRoutes.includes(route)) {
p.push(io.writeAsset(route, text));
p.push(io.writeAsset({ pathname: route, buffer: text }));
}
}
await Promise.all(p);
@ -124,13 +124,39 @@ export async function bundleServerJavaScript({
entries,
platform,
}: ServerSideOptions) {
const wViewSource = incr.work(async (_, viewItems: sg.FileItem[]) => {
const regenKeys: Record<string, string[]> = {};
const regenTtls: view.Ttl[] = [];
for (const ref of viewRefs) {
const value = UNWRAP(ref.value);
if (value.type === "page" && (value.regenerate?.tags?.length ?? 0) > 0) {
for (const tag of value.regenerate!.tags!) {
(regenKeys[tag] ??= []).push(`page:${value.id}`);
}
}
if (value.type === "page" && (value.regenerate?.seconds ?? 0) > 0) {
regenTtls.push({
key: `page:${value.id}` as view.Key,
seconds: value.regenerate!.seconds!,
});
}
}
const wViewSource = incr.work(
async (
_,
{ viewItems, regenKeys, regenTtls }: {
viewItems: sg.FileItem[];
regenKeys: Record<string, string[]>;
regenTtls: view.Ttl[];
},
) => {
const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
return {
magicWord,
file: [
...viewItems.map(
(view, i) => `import * as view${i} from ${JSON.stringify(view.file)}`,
(view, i) =>
`import * as view${i} from ${JSON.stringify(view.file)}`,
),
`const styles = ${magicWord}[-2]`,
`export const scripts = ${magicWord}[-1]`,
@ -143,14 +169,19 @@ export async function bundleServerJavaScript({
` layout: view${i}.layout?.default ?? null,`,
` inlineCss: styles[${magicWord}[${i}]]`,
` },`,
].join("\n"),
].join("\n")
),
"}",
`export const regenTags = ${JSON.stringify(regenKeys)};`,
`export const regenTtls = ${JSON.stringify(regenTtls)};`,
].join("\n"),
};
}, viewItems);
},
{ viewItems, regenKeys, regenTtls },
);
await incr.work(async (io) => {
await incr.work(
async (io, { regenKeys, viewItems }) => {
io.writeFile(
"../ts/view.d.ts",
[
@ -160,12 +191,19 @@ export async function bundleServerJavaScript({
.map(
(view) =>
` ${JSON.stringify(view.id)}: ` +
`typeof import(${JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))}),`,
`typeof import(${
JSON.stringify(path.relative(".clover/ts", toAbs(view.file)))
}),`,
),
"}",
"export type RegenKey = " +
(regenKeys.map((key) => JSON.stringify(key)).join(" | ") ||
"never"),
].join("\n"),
);
});
},
{ regenKeys: Object.keys(regenKeys), viewItems },
);
const wBundles = entries.map((entry) =>
incr.work(async (io, entry) => {
@ -255,7 +293,7 @@ export async function bundleServerJavaScript({
// file in more than one chunk.
if (
magicWord &&
metafile.outputs[key].inputs["framework/lib/view.ts"]
UNWRAP(metafile.outputs[key]).inputs["framework/lib/view.ts"]
) {
ASSERT(!fileWithMagicWord);
fileWithMagicWord = {
@ -268,7 +306,7 @@ export async function bundleServerJavaScript({
}
}
return fileWithMagicWord;
}, entry),
}, entry)
);
const wProcessed = wBundles.map(async (wBundle) => {
@ -288,7 +326,7 @@ export async function bundleServerJavaScript({
const viewStyleKeys = views.map((view) => view.styleKey);
const viewCssBundles = await Promise.all(
viewStyleKeys.map((key) =>
io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key)),
io.readWork(UNWRAP(styleMap.get(key), "Style key: " + key))
),
);
const styleList = Array.from(new Set(viewCssBundles));
@ -307,7 +345,7 @@ export async function bundleServerJavaScript({
return JSON.stringify(Object.fromEntries(neededScripts));
}
// Reference an index into `styleList`
return `${styleList.indexOf(viewCssBundles[i])}`;
return `${styleList.indexOf(UNWRAP(viewCssBundles[i]))}`;
});
io.writeFile(basename, text);
@ -333,3 +371,4 @@ import * as mime from "#sitegen/mime";
import * as incr from "./incremental.ts";
import * as sg from "#sitegen";
import type { PageOrView } from "./generate.ts";
import type * as view from "#sitegen/view";

View file

@ -57,12 +57,14 @@ export function styleKey(
export async function bundleCssFiles(
io: Io,
{ cssImports, theme, dev }: {
cssImports: string[],
theme: Theme,
dev: boolean,
}
cssImports: string[];
theme: Theme;
dev: boolean;
},
) {
cssImports = await Promise.all(cssImports.map((file) => io.trackFile('src/' + file)));
cssImports = await Promise.all(
cssImports.map((file) => io.trackFile("src/" + file)),
);
const plugin = {
name: "clover css",
setup(b) {
@ -111,4 +113,5 @@ import * as esbuild from "esbuild";
import * as fs from "#sitegen/fs";
import * as hot from "./hot.ts";
import * as path from "node:path";
import { virtualFiles } from "./esbuild-support.ts";import type { Io } from "./incremental.ts";
import { virtualFiles } from "./esbuild-support.ts";
import type { Io } from "./incremental.ts";

View file

@ -9,4 +9,4 @@ globalThis.UNWRAP = (t, ...args) => {
globalThis.ASSERT = assert.ok;
import * as util from "node:util";
import * as assert from 'node:assert'
import * as assert from "node:assert";

View file

@ -51,4 +51,4 @@ declare global {
}
}
import * as render from "./render.ts";
import * as render from "#engine/render";

View file

@ -1,5 +1,5 @@
import { test } from "node:test";
import * as render from "./render.ts";
import * as render from "#engine/render";
test("sanity", (t) => t.assert.equal(render.sync("gm <3").text, "gm &lt;3"));
test("simple tree", (t) =>

View file

@ -118,8 +118,11 @@ export function resolveNode(r: State, node: unknown): ResolvedNode {
if (!node && node !== 0) return ""; // falsy, non numeric
if (typeof node !== "object") {
if (node === true) return ""; // booleans are ignored
if (typeof node === "string") return escapeHtml(node);
if (typeof node === "string") return escapeHtmlContent(node);
if (typeof node === "number") return String(node); // no escaping ever
if (typeof node === "symbol" && node.toString() === kElement.toString()) {
throw new Error(`There are two instances of Clover SSR loaded!`);
}
throw new Error(`Cannot render ${inspect(node)} to HTML`);
}
if (node instanceof Promise) {
@ -217,12 +220,14 @@ function stringifyElement(element: ResolvedElement) {
let attr;
switch (prop) {
default:
attr = `${prop}=${quoteIfNeeded(escapeHtml(String(value)))}`;
attr = `${prop}=${quoteIfNeeded(escapeAttribute(String(value)))}`;
break;
case "className":
// Legacy React Compat
case "class":
attr = `class=${quoteIfNeeded(escapeHtml(clsx(value as ClsxInput)))}`;
attr = `class=${
quoteIfNeeded(escapeAttribute(clsx(value as ClsxInput)))
}`;
break;
case "htmlFor":
throw new Error("Do not use the `htmlFor` attribute. Use `for`");
@ -233,7 +238,7 @@ function stringifyElement(element: ResolvedElement) {
case "key":
continue;
}
if (needSpace) (out += " "), (needSpace = !attr.endsWith('"'));
if (needSpace) ((out += " "), (needSpace = !attr.endsWith('"')));
out += attr;
}
out += ">";
@ -254,14 +259,16 @@ export function stringifyStyleAttribute(style: Record<string, string>) {
let out = ``;
for (const styleName in style) {
if (out) out += ";";
out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${escapeHtml(
out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${
escapeAttribute(
String(style[styleName]),
)}`;
)
}`;
}
return "style=" + quoteIfNeeded(out);
}
export function quoteIfNeeded(text: string) {
if (text.includes(" ")) return '"' + text + '"';
if (text.match(/["/>]/)) return '"' + text + '"';
return text;
}
@ -303,6 +310,21 @@ export function clsx(mix: ClsxInput) {
return str;
}
/**
 * Escape text for safe interpolation into HTML element content.
 * Ampersands are handled first (single-pass replacer), so already-escaped
 * entities in the input are themselves escaped rather than preserved.
 */
export const escapeHtmlContent = (unsafeText: string) => {
  const entities: Record<string, string> = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
    '"': "&quot;",
  };
  return String(unsafeText).replace(/[&<>"]/g, (ch) => entities[ch] ?? ch);
};
// TODO: combine into one function which decides if an attribute needs quotes
// and escapes it correctly depending on the context.
const escapeAttribute = (unsafeText: string) =>
  String(unsafeText).replace(/[&<>"]/g, (match) => {
    switch (match) {
      case "&":
        return "&amp;";
      case "<":
        return "&lt;";
      case ">":
        return "&gt;";
      default:
        return "&quot;";
    }
  });
/** @deprecated */
export const escapeHtml = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;")

View file

@ -42,7 +42,7 @@ export function Suspense({ children, fallback }: SuspenseProps): render.Node {
r.asyncDone = () => {
const rejections = r.rejections;
if (rejections && rejections.length > 0) throw new Error("TODO");
state.pushChunk?.(name, (ip[0] = resolved));
state.pushChunk?.(name, ip[0] = resolved);
};
return render.raw(ip);
}
@ -99,4 +99,4 @@ export async function* renderStreaming<
return addonOutput as unknown as T;
}
import * as render from "./render.ts";
import * as render from "#engine/render";

View file

@ -1,7 +1,12 @@
type Awaitable<T> = T | Promise<T>;
export function virtualFiles(
map: Record<string, string | esbuild.OnLoadResult | (() => Awaitable<string | esbuild.OnLoadResult>)>,
map: Record<
string,
| string
| esbuild.OnLoadResult
| (() => Awaitable<string | esbuild.OnLoadResult>)
>,
) {
return {
name: "clover vfs",
@ -22,7 +27,7 @@ export function virtualFiles(
{ filter: /./, namespace: "vfs" },
async ({ path }) => {
let entry = map[path];
if (typeof entry === 'function') entry = await entry();
if (typeof entry === "function") entry = await entry();
return ({
resolveDir: ".",
loader: "ts",
@ -88,7 +93,6 @@ export function markoViaBuildCache(): esbuild.Plugin {
if (!fs.existsSync(file)) {
console.warn(`File does not exist: ${file}`);
}
console.log(markoCache.keys());
throw new Error("Marko file not in cache: " + file);
}
return ({
@ -106,7 +110,7 @@ export function isIgnoredSource(source: string) {
return source.includes("<define:") ||
source.startsWith("vfs:") ||
source.startsWith("dropped:") ||
source.includes("node_modules")
source.includes("node_modules");
}
import * as esbuild from "esbuild";
@ -114,4 +118,5 @@ import * as string from "#sitegen/string";
import * as path from "node:path";
import * as fs from "#sitegen/fs";
import * as incr from "./incremental.ts";
import * as hot from "./hot.ts";import { markoCache } from "./marko.ts";
import * as hot from "./hot.ts";
import { markoCache } from "./marko.ts";

View file

@ -7,15 +7,16 @@ const { toRel, toAbs } = incr;
const globalCssPath = toAbs("src/global.css");
export async function main() {
await incr.restore();
if (!process.argv.includes("-f")) await incr.restore();
await incr.compile(generate);
}
export async function generate() {
// -- read config and discover files --
const siteConfig = await incr.work(readManifest);
const { staticFiles, scripts, views, pages } =
await discoverAllFiles(siteConfig);
const { staticFiles, scripts, views, pages } = await discoverAllFiles(
siteConfig,
);
// TODO: make sure that `static` and `pages` does not overlap
@ -28,9 +29,12 @@ export async function generate() {
staticFiles.map((item) =>
incr.work(
async (io, { id, file }) =>
void (await io.writeAsset(id, await io.readFile(file))),
void (await io.writeAsset({
pathname: id,
buffer: await io.readFile(file),
})),
item,
),
)
),
);
const routes = await Promise.all([...builtViews, ...builtPages]);
@ -63,7 +67,7 @@ export async function generate() {
// -- assemble page assets --
const pAssemblePages = builtPages.map((page) =>
assembleAndWritePage(page, styleMap, scriptMap),
assembleAndWritePage(page, styleMap, scriptMap)
);
await Promise.all([builtBackend, builtStaticFiles, ...pAssemblePages]);
@ -85,7 +89,7 @@ export async function discoverAllFiles(
return (
await Promise.all(
siteConfig.siteSections.map(({ root: sectionRoot }) =>
incr.work(scanSiteSection, toAbs(sectionRoot)),
incr.work(scanSiteSection, toAbs(sectionRoot))
),
)
).reduce((acc, next) => ({
@ -113,8 +117,7 @@ export async function scanSiteSection(io: Io, sectionRoot: string) {
let scripts: FileItem[] = [];
const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
const rootPrefix =
hot.projectSrc === sectionRoot
const rootPrefix = hot.projectSrc === sectionRoot
? ""
: path.relative(hot.projectSrc, sectionRoot) + "/";
const kinds = [
@ -190,7 +193,7 @@ export async function preparePage(io: Io, item: sg.FileItem) {
theme: pageTheme,
layout,
regenerate,
} = await io.import<any>(item.file);
} = await io.import<sg.PageExports>(item.file);
if (!Page) throw new Error("Page is missing a 'default' export.");
if (!metadata) throw new Error("Page is missing 'meta' export with a title.");
@ -229,23 +232,12 @@ export async function preparePage(io: Io, item: sg.FileItem) {
);
}
// -- regeneration --
let regeneration: Regeneration | null = null;
if (typeof regenerate?.seconds === "number") {
regeneration ??= {};
regeneration.seconds = regenerate.seconds;
}
if (regenerate?.tags) {
regeneration ??= {};
regeneration.tags = regenerate.tags;
}
const styleKey = css.styleKey(cssImports, theme);
return {
type: "page",
id: item.id,
file: item.file,
regenerate: regeneration,
regenerate,
html: text,
meta: renderedMeta,
cssImports,
@ -255,11 +247,6 @@ export async function preparePage(io: Io, item: sg.FileItem) {
} as const;
}
interface Regeneration {
seconds?: number;
tags?: string[];
}
export async function prepareView(io: Io, item: sg.FileItem) {
const module = await io.import<any>(item.file);
if (!module.meta) throw new Error(`View is missing 'export const meta'`);
@ -305,7 +292,8 @@ export async function assembleAndWritePage(
scriptWork: incr.Ref<Record<string, string>>,
) {
const page = await pageWork;
return incr.work(async (io, { id, html, meta, styleKey, clientRefs }) => {
return incr.work(
async (io, { id, html, meta, styleKey, clientRefs, regenerate }) => {
const inlineCss = await io.readWork(UNWRAP(styleMap.get(styleKey)));
const scriptIds = clientRefs.map(hot.getScriptId);
@ -315,16 +303,23 @@ export async function assembleAndWritePage(
.map((x) => `{${x}}`)
.join("\n");
const doc = sg.wrapDocument({
const buffer = sg.wrapDocument({
body: html,
head: meta,
inlineCss,
scripts,
});
await io.writeAsset(id, doc, {
await io.writeAsset({
pathname: id,
buffer,
headers: {
"Content-Type": "text/html",
},
regenerative: !!regenerate,
});
}, page);
},
page,
);
}
export type PageOrView = PreparedPage | PreparedView;

View file

@ -78,8 +78,7 @@ Module.prototype._compile = function (
}
}
fileStats.set(filename, {
cssImportsRecursive:
cssImportsMaybe.length > 0
cssImportsRecursive: cssImportsMaybe.length > 0
? Array.from(new Set(cssImportsMaybe))
: null,
imports,
@ -137,8 +136,7 @@ export function loadEsbuildCode(
src = code;
}
if (src.includes("import.meta")) {
src =
`
src = `
import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())};
import.meta.dirname = ${JSON.stringify(path.dirname(filepath))};
import.meta.filename = ${JSON.stringify(filepath)};

View file

@ -11,7 +11,7 @@ test("trivial case", async () => {
await setTimeout(1000);
const contents = await io.readFile(file1.path);
return [contents, Math.random()] as const;
}
},
});
const second = incr.work({
label: "second compute",
@ -19,7 +19,7 @@ test("trivial case", async () => {
async run(io) {
await setTimeout(1000);
return io.readWork(first)[0].toUpperCase();
}
},
});
const third = incr.work({
label: "third compute",
@ -27,7 +27,7 @@ test("trivial case", async () => {
async run(io) {
await setTimeout(1000);
return io.readWork(first)[1] * 1000;
}
},
});
return incr.work({
label: "last compute",
@ -37,8 +37,8 @@ test("trivial case", async () => {
return {
second: io.readWork(second),
third: io.readWork(third),
}
}
};
},
});
}
const { value: first } = await incr.compile(compilation);
@ -52,5 +52,5 @@ test("trivial case", async () => {
import * as incr from "./incremental2.ts";
import { beforeEach, test } from "node:test";
import { tmpFile } from "#sitegen/testing";import { setTimeout } from "node:timers/promises";
import { tmpFile } from "#sitegen/testing";
import { setTimeout } from "node:timers/promises";

View file

@ -2,8 +2,7 @@
// See `work()`, `compile()`, and `invalidate()` for details.
//
// All state is serializable to allow recovering state across sessions.
// This framework special-cases the asset map, but is otherwise
// agnostic of what it is a compiler for.
// This library special-cases the asset map, but is otherwise agnostic.
let running = false;
let jobs = 0;
let newKeys = 0;
@ -15,28 +14,25 @@ let writes = new Map<string, FileWrite>();
let assets = new Map<string, Asset>(); // keyed by hash
export interface Ref<T> {
/** This method is compatible with `await` syntax */
then(
onFulfilled: (value: T) => void,
onRejected: (error: unknown) => void,
): void;
key: string;
/** This method is compatible with `await` syntax */
then(resolve: (value: T) => void, reject: (error: unknown) => void): void;
get value(): T | null;
}
type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;
/**
* Declare and a unit of work. Return value is memoized and
* only rebuilt when inputs (declared via `Io`) change. Outputs
* are written at the end of a compilation (see `compile`).
 * Declare a unit of work. Return value is memoized and only rebuilt when
* inputs change. Inputs are tracked via the `io` interface, as well as a hash
* of the `input` param and caller source code. Outputs are written at the end
* of a compilation (see `compile`).
*
* If the returned `Ref` is not awaited or read
* via io.readWork, the job is never started.
* Work items are lazy, only started when `Ref` is awaited or `io.readWork`ed.
*/
export function work<O>(job: Job<void, O>): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I): Ref<O>;
export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const source = JSON.stringify(util.getCallSites(2)[1]);
const source = JSON.stringify(UNWRAP(util.getCallSites(2)[1]));
const keySource = [source, util.inspect(input)].join(":");
const key = crypto.createHash("sha1").update(keySource).digest("base64url");
ASSERT(running);
@ -60,13 +56,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
const value = await job(io, input);
validateSerializable(value, "");
const { reads, writes } = io;
works.set(key, {
value,
affects: [],
reads,
writes,
debug: source,
});
works.set(key, { value, affects: [], reads, writes });
for (const add of reads.files) {
const { affects } = UNWRAP(files.get(add));
ASSERT(!affects.includes(key));
@ -133,12 +123,8 @@ export async function flush(start: number) {
// Trim
const detachedFiles = new Set<string>();
const referencedAssets = new Set<string>();
for (const [
k,
{
writes: { assets },
},
] of works) {
for (const [k, v] of works) {
const assets = v.writes.assets;
if (seenWorks.has(k)) {
for (const asset of assets.values()) referencedAssets.add(asset.hash);
continue;
@ -150,11 +136,9 @@ export async function flush(start: number) {
files.delete(k);
detachedFiles.add(k);
}
for (const k of assets.keys()) {
if (!referencedAssets.has(k)) assets.delete(k);
}
for (const k of assets.keys()) if (!referencedAssets.has(k)) assets.delete(k);
const p = [];
const p: Promise<void>[] = [];
// File writes
let dist = 0;
for (const [key, { buffer, size }] of writes) {
@ -163,10 +147,14 @@ export async function flush(start: number) {
}
// Asset map
{
const { json, blob } = getAssetManifest();
const { json, blob, dynamic, dts } = getAssetManifest();
const jsonString = Buffer.from(JSON.stringify(json));
p.push(fs.writeMkdir(".clover/o/static.json", jsonString));
p.push(fs.writeMkdir(".clover/o/static.blob", blob));
p.push(fs.writeMkdir(".clover/o/asset.json", jsonString));
p.push(fs.writeMkdir(".clover/o/asset.blob", blob));
p.push(fs.writeMkdir(".clover/ts/asset.d.ts", dts));
for (const [k, v] of dynamic) {
p.push(fs.writeMkdir(`.clover/o/dynamic/${k}`, v));
}
dist += blob.byteLength + jsonString.byteLength;
}
await Promise.all(p);
@ -179,9 +167,8 @@ export async function flush(start: number) {
console.writeLine(` - ${works.size} keys (${works.size - newKeys} cached)`);
console.writeLine(` - ${assets.size} static assets`);
console.writeLine(
` - dist: ${formatSize(dist)}, incremental: ${formatSize(
serialized.byteLength,
)}`,
` - dist: ${formatSize(dist)},` +
` incremental: ${formatSize(serialized.byteLength)}`,
);
}
@ -337,7 +324,7 @@ async function deserialize(buffer: Buffer) {
if (err.code === "ENOENT") return null;
throw err;
})
.then((stat) => ({ k, size, work, stat })),
.then((stat) => ({ k, size, work, stat }))
),
);
for (const { k, stat, work, size } of statFiles) {
@ -356,19 +343,23 @@ async function deserialize(buffer: Buffer) {
}
await Promise.all(
Array.from(files, ([key, file]) => invalidateEntry(key, file)),
Array.from(files, ([key, file]) => invalidateEntry(key, file, false)),
);
}
export async function invalidate(filePath: string): Promise<boolean> {
export async function invalidate(
filePath: string,
unloadModule: boolean = true,
): Promise<boolean> {
const key = toRel(toAbs(filePath));
const file = UNWRAP(files.get(key), `Untracked file '${key}'`);
return invalidateEntry(key, file);
return invalidateEntry(key, file, unloadModule);
}
export async function invalidateEntry(
key: string,
file: TrackedFile,
unloadModule: boolean,
): Promise<boolean> {
try {
if (file.type === "d") {
@ -399,36 +390,62 @@ export async function invalidateEntry(
return false;
} catch (e) {
forceInvalidate(file);
if (unloadModule) {
// TODO: handle when this triggers unloading of `generate.ts`
hot.unload(toAbs(key));
}
if (file.type === "null") files.delete(key);
return true;
}
}
export function getAssetManifest() {
const dynamic = new Map<string, Buffer>();
const writer = new BufferWriter();
const asset = Object.fromEntries(
const assetMap = Object.fromEntries(
Array.from(works, (work) => work[1].writes.assets)
.filter((map) => map.size > 0)
.flatMap((map) =>
Array.from(map, ([key, { hash, headers }]) => {
Array.from(map, ([key, { hash, headers, regenerative }]) => {
const { raw, gzip, zstd } = UNWRAP(
assets.get(hash),
`Asset ${key} (${hash})`,
);
if (regenerative) {
const id = crypto
.createHash("sha1")
.update(key)
.digest("hex")
.slice(0, 16); /* TODO */
dynamic.set(
id,
manifest.packDynamicBuffer(raw, gzip, zstd, headers),
);
return [key, { type: 1, id }] as const;
}
return [
key,
{
type: 0,
raw: writer.write(raw, "raw:" + hash),
gzip: writer.write(gzip, "gzip:" + hash),
zstd: writer.write(zstd, "zstd:" + hash),
headers,
},
] as const;
}),
})
),
) satisfies BuiltAssetMap;
return { json: asset, blob: writer.get() };
) satisfies manifest.Manifest;
return {
json: assetMap,
blob: writer.get(),
dynamic,
dts: "export type AssetKey = " +
Object.keys(assetMap)
.map((key) => JSON.stringify(key))
.join(" | ") +
"\n",
};
}
/* Input/Output with automatic tracking.
@ -502,7 +519,7 @@ export class Io {
const stat = await fs.stat(abs);
if (stat.isDirectory()) {
return (await this.readDirRecursive(abs)).map((grand) =>
path.join(child, grand),
path.join(child, grand)
);
} else {
return child;
@ -540,26 +557,27 @@ export class Io {
}
}
}
async writeAsset(
pathname: string,
blob: string | Buffer,
headersOption?: HeadersInit,
) {
ASSERT(pathname.startsWith("/"));
ASSERT(!seenWrites.has("a:" + pathname));
const buffer = typeof blob === "string" ? Buffer.from(blob) : blob;
const headers = new Headers(headersOption ?? {});
async writeAsset(asset: {
pathname: string;
buffer: string | Buffer;
regenerative?: boolean;
headers?: HeadersInit;
}) {
ASSERT(asset.pathname.startsWith("/"));
ASSERT(!seenWrites.has("a:" + asset.pathname));
const buffer = typeof asset.buffer === "string"
? Buffer.from(asset.buffer)
: asset.buffer;
const headers = new Headers(asset.headers ?? {});
const hash = crypto.createHash("sha1").update(buffer).digest("hex");
if (!headers.has("Content-Type")) {
headers.set("Content-Type", mime.contentTypeFor(pathname));
headers.set("Content-Type", mime.contentTypeFor(asset.pathname));
}
headers.set("ETag", JSON.stringify(hash));
this.writes.assets.set(pathname, {
headers.set("etag", JSON.stringify(hash));
this.writes.assets.set(asset.pathname, {
hash,
// @ts-expect-error TODO
headers: Object.fromEntries(headers),
regenerative: !!asset.regenerative,
});
if (!assets.has(hash)) {
jobs += 1;
@ -600,7 +618,7 @@ class BufferWriter {
write(buffer: Buffer, hash: string): BufferView {
let view = this.seen.get(hash);
if (view) return view;
view = [this.size, (this.size += buffer.byteLength)];
view = [this.size, this.size += buffer.byteLength];
this.seen.set(hash, view);
this.buffers.push(buffer);
return view;
@ -626,7 +644,7 @@ export function validateSerializable(value: unknown, key: string) {
Buffer.isBuffer(value)
) {
Object.entries(value).forEach(([k, v]) =>
validateSerializable(v, `${key}.${k}`),
validateSerializable(v, `${key}.${k}`)
);
} else {
throw new Error(
@ -662,54 +680,48 @@ interface FileWrite {
}
interface Writes {
files: Set<string>;
assets: Map<
string,
{
hash: string;
headers: Record<string, string>;
}
>;
assets: Map<string, AssetWrite>;
}
interface Asset {
raw: Buffer;
gzip: Buffer;
zstd: Buffer;
}
interface AssetWrite {
hash: string;
headers: Record<string, string>;
regenerative: boolean;
}
interface Work<T = unknown> {
debug?: string;
value: T;
reads: Reads;
writes: Writes;
affects: string[];
}
type TrackedFile = {
affects: string[];
} & (
type TrackedFile =
& { affects: string[] }
& (
| { type: "f"; lastModified: number }
| { type: "d"; contentHash: string; contents: string[] }
| { type: "null" }
);
export interface BuiltAssetMap {
[route: string]: BuiltAsset;
}
export interface BuiltAsset {
raw: BufferView;
gzip: BufferView;
zstd: BufferView;
headers: Record<string, string>;
}
const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress);
import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";
import * as manifest from "#sitegen/assets";
import * as path from "node:path";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as mime from "#sitegen/mime";
import * as zlib from "node:zlib";
import * as console from "@paperclover/console";
import { Spinner } from "@paperclover/console/Spinner";
import { formatSize } from "@/file-viewer/format.ts";
import * as msgpackr from "msgpackr";

View file

@ -1,73 +1,114 @@
interface Loaded {
map: BuiltAssetMap;
buf: Buffer;
}
let assets: Loaded | null = null;
// Static and dynamic assets are built alongside the server binary.
// This module implements decoding and serving of the asset blobs,
// but also implements patching of dynamic assets. The `Manifest`
// is generated by `incremental.ts`
const root = import.meta.dirname;
let current: Loaded | null = null;
export type StaticPageId = string;
// TODO: rename all these types
type DynamicId = string;
export type { Key };
export type Manifest =
& {
[K in Key]: StaticAsset | DynamicAsset;
}
& {
[string: string]: StaticAsset | DynamicAsset;
};
export interface StaticAsset extends AssetBase {
type: 0;
}
interface AssetBase {
headers: Record<string, string> & { etag: string };
raw: BufferView;
gzip: BufferView;
zstd: BufferView;
}
export interface DynamicAsset {
type: 1;
id: DynamicId;
}
interface Loaded {
map: Manifest;
static: Buffer;
dynamic: Map<DynamicId, DynamicEntry>;
}
export interface DynamicEntry extends AssetBase {
buffer: Buffer;
}
export async function reload() {
const [map, buf] = await Promise.all([
fs.readFile(path.join(import.meta.dirname, "static.json"), "utf8"),
fs.readFile(path.join(import.meta.dirname, "static.blob")),
]);
return (assets = { map: JSON.parse(map), buf });
}
export function reloadSync() {
const map = fs.readFileSync(
path.join(import.meta.dirname, "static.json"),
"utf8",
const map = await fs.readJson<Manifest>(path.join(root, "asset.json"));
const statics = await fs.readFile(path.join(root, "asset.blob"));
const dynamic = new Map(
await Promise.all(
Object.entries(map)
.filter((entry): entry is [string, DynamicAsset] => entry[1].type === 1)
.map(async ([k, v]) =>
[
v.id,
await fs.readFile(path.join(root, "dynamic", v.id))
.then(loadRegenerative),
] as const
),
),
);
const buf = fs.readFileSync(path.join(import.meta.dirname, "static.blob"));
return (assets = { map: JSON.parse(map), buf });
return (current = { map, static: statics, dynamic });
}
export async function middleware(c: Context, next: Next) {
if (!assets) await reload();
const asset = assets!.map[c.req.path];
if (!current) current = await reload();
const asset = current.map[c.req.path];
if (asset) return assetInner(c, asset, 200);
return next();
}
export async function notFound(c: Context) {
if (!assets) await reload();
if (!current) current = await reload();
let pathname = c.req.path;
do {
const asset = assets!.map[pathname + "/404"];
const asset = current.map[pathname + "/404"];
if (asset) return assetInner(c, asset, 404);
pathname = pathname.slice(0, pathname.lastIndexOf("/"));
} while (pathname);
const asset = assets!.map["/404"];
const asset = current.map["/404"];
if (asset) return assetInner(c, asset, 404);
return c.text("the 'Not Found' page was not found", 404);
}
export async function serveAsset(
c: Context,
id: StaticPageId,
status: StatusCode,
) {
return assetInner(c, (assets ?? (await reload())).map[id], status);
export async function serveAsset(c: Context, id: Key, status: StatusCode) {
return assetInner(c, (current ?? (await reload())).map[id], status);
}
/** @deprecated */
export function hasAsset(id: string) {
return (assets ?? reloadSync()).map[id] !== undefined;
return UNWRAP(current).map[id] !== undefined;
}
export function etagMatches(etag: string, ifNoneMatch: string) {
return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1;
}
function subarrayAsset([start, end]: BufferView) {
return assets!.buf.subarray(start, end);
function assetInner(c: Context, asset: Manifest[Key], status: StatusCode) {
ASSERT(current);
if (asset.type === 0) {
return respondWithBufferAndViews(c, current.static, asset, status);
} else {
const entry = UNWRAP(current.dynamic.get(asset.id));
return respondWithBufferAndViews(c, entry.buffer, entry, status);
}
}
function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
const ifnonematch = c.req.header("If-None-Match");
if (ifnonematch) {
const etag = asset.headers.ETag;
if (etagMatches(etag, ifnonematch)) {
function respondWithBufferAndViews(
c: Context,
buffer: Buffer,
asset: AssetBase,
status: StatusCode,
) {
const ifNoneMatch = c.req.header("If-None-Match");
if (ifNoneMatch) {
const etag = asset.headers.etag;
if (etagMatches(etag, ifNoneMatch)) {
return (c.res = new Response(null, {
status: 304,
statusText: "Not Modified",
@ -80,24 +121,103 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
const acceptEncoding = c.req.header("Accept-Encoding") ?? "";
let body;
let headers = asset.headers;
if (acceptEncoding.includes("zstd") && asset.zstd) {
body = subarrayAsset(asset.zstd);
if (acceptEncoding.includes("zstd")) {
body = buffer.subarray(...asset.zstd);
headers = {
...asset.headers,
"Content-Encoding": "zstd",
};
} else if (acceptEncoding.includes("gzip") && asset.gzip) {
body = subarrayAsset(asset.gzip);
} else if (acceptEncoding.includes("gzip")) {
body = buffer.subarray(...asset.gzip);
headers = {
...asset.headers,
"Content-Encoding": "gzip",
};
} else {
body = subarrayAsset(asset.raw);
body = buffer.subarray(...asset.raw);
}
return (c.res = new Response(body, { headers, status }));
}
/**
 * Serializes a dynamic asset into the packed format read by
 * `loadRegenerative`: a 12-byte table of three little-endian u32 end-offsets
 * (end of headers, end of raw body, end of gzip body), followed by the
 * "key:value\n" header text and the raw, gzip, and zstd bodies.
 *
 * Header names must not contain ":" and neither names nor values may contain
 * "\n", since those characters delimit the serialized header text.
 */
export function packDynamicBuffer(
  raw: Buffer,
  gzip: Buffer,
  zstd: Buffer,
  headers: Record<string, string>,
) {
  const headersBuffer = Buffer.from(
    Object.entries(headers)
      .map((entry) => entry.join(":"))
      .join("\n"),
    "utf-8",
  );
  // Write offsets explicitly little-endian. The previous
  // `Buffer.from(new Uint32Array(...).buffer)` approach emitted platform
  // endianness, which would corrupt the table for readers (readUInt32LE)
  // on big-endian machines.
  const header = Buffer.alloc(12);
  const headersEnd = header.byteLength + headersBuffer.byteLength;
  header.writeUInt32LE(headersEnd, 0);
  header.writeUInt32LE(headersEnd + raw.byteLength, 4);
  header.writeUInt32LE(headersEnd + raw.byteLength + gzip.byteLength, 8);
  return Buffer.concat([header, headersBuffer, raw, gzip, zstd]);
}
/**
 * Parses the packed on-disk format produced by `packDynamicBuffer`:
 * a 12-byte table of three little-endian u32 end-offsets (headers, raw,
 * gzip), followed by the header text and the three body encodings.
 */
function loadRegenerative(buffer: Buffer): DynamicEntry {
  const headersEnd = buffer.readUInt32LE(0);
  const rawEnd = buffer.readUInt32LE(4);
  const gzipEnd = buffer.readUInt32LE(8);
  const headerText = buffer.subarray(12, headersEnd).toString("utf-8");
  const headers: Record<string, string> = {};
  for (const line of headerText.split("\n")) {
    const sep = line.indexOf(":");
    headers[line.slice(0, sep)] = line.slice(sep + 1);
  }
  // Every packed asset must carry an etag; narrow the type to prove it.
  const hasEtag = (v: object): v is typeof v & { etag: string } =>
    "etag" in v && typeof v.etag === "string";
  ASSERT(hasEtag(headers));
  return {
    headers,
    buffer,
    raw: [headersEnd, rawEnd],
    gzip: [rawEnd, gzipEnd],
    zstd: [gzipEnd, buffer.byteLength],
  };
}
const gzip = util.promisify(zlib.gzip);
const zstdCompress = util.promisify(zlib.zstdCompress);
/**
 * Replaces the contents of a dynamic (regenerable) asset at runtime.
 * Recomputes the etag, re-compresses the body, swaps the in-memory manifest
 * entry, and persists the packed buffer so the new content survives restarts.
 *
 * @param key asset key; must refer to a dynamic (`type === 1`) asset.
 * @param value the new response body.
 * @param headers response headers to store; an `etag` derived from the body
 *                is always appended, overriding any caller-provided one.
 */
export async function overwriteDynamic(
  key: Key,
  value: string | Buffer,
  headers: Record<string, string>,
) {
  if (!current) current = await reload();
  const asset = UNWRAP(current.map[key]);
  ASSERT(asset.type === 1);
  // Was `UNWRAP(...)`: UNWRAP only rejects null/undefined, so a plain
  // `false` from Map.has() slipped through. ASSERT enforces the condition.
  ASSERT(current.dynamic.has(asset.id));
  const buffer = Buffer.from(value);
  // Quoted etag (JSON.stringify wraps the hex digest in double quotes).
  const etag = JSON.stringify(
    crypto.createHash("sha1").update(buffer).digest("hex"),
  );
  const [gzipBuffer, zstdBuffer] = await Promise.all([
    gzip(buffer),
    zstdCompress(buffer),
  ]);
  const packed = packDynamicBuffer(buffer, gzipBuffer, zstdBuffer, {
    ...headers,
    etag,
  });
  // Update memory first so requests see the new content immediately,
  // then persist the packed form for future reloads.
  current.dynamic.set(asset.id, loadRegenerative(packed));
  await fs.writeFile(path.join(root, "dynamic", asset.id), packed);
}
// IPC hook: when the parent process sends a "clover.assets.reload" message,
// re-read the asset manifest. reload() is fire-and-forget here; `current`
// is replaced once its promise resolves.
process.on("message", (msg: any) => {
  if (msg?.type === "clover.assets.reload") reload();
});
@ -105,6 +225,10 @@ process.on("message", (msg: any) => {
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status";
import type { BuiltAsset, BuiltAssetMap, BufferView } from "../incremental.ts";
import type { BufferView } from "../incremental.ts";
import { Buffer } from "node:buffer";
import * as path from "node:path";
import type { AssetKey as Key } from "../../.clover/ts/asset.d.ts";
import * as crypto from "node:crypto";
import * as zlib from "node:zlib";
import * as util from "node:util";

View file

@ -51,8 +51,8 @@ export class Queue<T, R> {
let n = 0;
for (const item of active) {
let itemText = "- " + item.format(now);
text +=
`\n` + itemText.slice(0, Math.max(0, process.stdout.columns - 1));
text += `\n` +
itemText.slice(0, Math.max(0, process.stdout.columns - 1));
if (n > 10) {
text += `\n ... + ${active.length - n} more`;
break;

View file

@ -4,6 +4,7 @@ export {
createReadStream,
createWriteStream,
existsSync,
type FileHandle,
open,
readdir,
readdirSync,
@ -15,7 +16,6 @@ export {
statSync,
writeFile,
writeFileSync,
type FileHandle,
};
export function mkdir(dir: string) {
@ -98,6 +98,7 @@ import {
writeFileSync,
} from "node:fs";
import {
type FileHandle,
mkdir as nodeMkdir,
open,
readdir,
@ -106,6 +107,5 @@ import {
rmdir,
stat,
writeFile,
type FileHandle,
} from "node:fs/promises";
export { Stats } from "node:fs";

View file

@ -1,9 +1,11 @@
/* Impementation of CommonMark specification for markdown with support
/* Implementation of [CommonMark] specification for markdown with support
* for custom syntax extensions via the parser options. Instead of
* returning an AST that has a second conversion pass to JSX, the
* returned value of 'parse' is 'engine.Node' which can be stringified
* via clover's SSR engine. This way, generation optimizations, async
* via Clover's SSR engine. This way, generation optimizations, async
* components, and other features are gained for free here.
*
* [CommonMark]: https://spec.commonmark.org/0.31.2/
*/
function parse(src: string, options: Partial<ParseOpts> = {}) {}
@ -16,6 +18,9 @@ export function Markdown({
return parse(src, options);
}
// TODO: This implementation is flawed because it is impossible to sanely handle
// emphasis and strong emphasis with all their edge cases. Instead of implementing
// these via the extensions interface, they should be special-cased.
function parseInline(src: string, options: Partial<InlineOpts> = {}) {
const { rules = inlineRules, links = new Map() } = options;
const opts: InlineOpts = { rules, links };
@ -110,12 +115,11 @@ export const inlineRules: Record<string, InlineRule> = {
} else if (afterText[0] === "[") {
const splitTarget = splitFirst(afterText.slice(1), /]/);
if (!splitTarget) return null;
const name =
splitTarget.first.trim().length === 0
? // Collapsed reference link
textSrc.trim()
: // Full Reference Link
splitTarget.first.trim();
const name = splitTarget.first.trim().length === 0
// Collapsed reference link
? textSrc.trim()
// Full Reference Link
: splitTarget.first.trim();
const target = opts.links.get(name);
if (!target) return null;
({ href, title } = target);

View file

@ -1,6 +1,25 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string;
export interface PageExports extends ViewExports {
regenerate?: PageRegenerateOptions;
}
export interface ViewExports {
default: render.Component;
meta: meta.Meta | ((props: { ssr: true }) => Promise<meta.Meta> | meta.Meta);
theme?: css.Theme;
layout?: Layout;
}
export interface Layout {
default: render.Component;
theme?: css.Theme;
// TODO: nested layout
}
export interface PageRegenerateOptions {
tags?: string[];
seconds?: number;
debounce?: number;
}
/**
* A filesystem object associated with some ID,
* such as a page's route to it's source file.
@ -49,3 +68,5 @@ export function wrapDocument({
}
import * as render from "#engine/render";
import type * as meta from "./meta.ts";
import type * as css from "../css.ts";

View file

@ -10,7 +10,9 @@ export function getDb(file: string) {
if (db) return db;
const fileWithExt = file.includes(".") ? file : file + ".sqlite";
db = new WrappedDatabase(
new DatabaseSync(path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt)),
new DatabaseSync(
path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt),
),
);
map.set(file, db);
return db;

View file

@ -1,5 +1,9 @@
export function tmpFile(basename: string) {
const file = path.join(import.meta.dirname, '../../.clover/testing', basename);
const file = path.join(
import.meta.dirname,
"../../.clover/testing",
basename,
);
return {
path: file,
read: fs.readFile.bind(fs, file),
@ -7,5 +11,5 @@ export function tmpFile(basename: string) {
};
}
import * as path from 'node:path';
import * as fs from './fs.ts';
import * as path from "node:path";
import * as fs from "./fs.ts";

View file

@ -1,6 +1,6 @@
// The "view" system allows rendering dynamic pages within backends.
// This is done by scanning all `views` dirs, bundling their client
// resources, and then providing `renderView` which renders a page.
// resources, and then providing `serve` which renders a page.
//
// This system also implements page regeneration.
let codegen: Codegen;
@ -12,53 +12,53 @@ try {
// Generated in `bundle.ts`
export interface Codegen {
views: Record<ViewKey, View>;
views: { [K in Key]: View<PropsFromModule<ViewMap[K]>> };
scripts: Record<string, string>;
regenTtls: Ttl[];
regenTags: Record<string, ViewKey[]>;
regenTags: Record<RegenKey, Key[]>;
}
export interface View {
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
export interface View<Props extends Record<string, unknown>> {
component: render.Component;
meta:
| meta.Meta
| ((props: { context?: hono.Context }) => Promise<meta.Meta> | meta.Meta);
meta: meta.Meta | ((props: Props) => Promise<meta.Meta> | meta.Meta);
layout?: render.Component;
inlineCss: string;
scripts: Record<string, string>;
}
export interface Ttl {
seconds: number;
key: ViewKey;
key: Key;
}
type ViewKey = keyof ViewMap;
export type Key = keyof ViewMap;
export async function renderView<K extends ViewKey>(
export async function serve<K extends Key>(
context: hono.Context,
id: K,
props: PropsFromModule<ViewMap[K]>,
) {
return context.html(await renderViewToString(id, { context, ...props }));
return context.html(await renderToString(id, { context, ...props }));
}
type PropsFromModule<M extends any> = M extends {
default: (props: infer T) => render.Node;
}
? T
} ? T
: never;
export async function renderViewToString<K extends ViewKey>(
export async function renderToString<K extends Key>(
id: K,
props: PropsFromModule<ViewMap[K]>,
) {
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
const {
component,
inlineCss,
layout,
meta: metadata,
}: View = UNWRAP(codegen.views[id], `Missing view ${id}`);
}: View<PropsFromModule<ViewMap[K]>> = UNWRAP(
codegen.views[id],
`Missing view ${id}`,
);
// -- metadata --
const renderedMetaPromise = Promise.resolve(
@ -79,21 +79,43 @@ export async function renderViewToString<K extends ViewKey>(
head: await renderedMetaPromise,
inlineCss,
scripts: joinScripts(
Array.from(sitegen.scripts, (id) =>
UNWRAP(codegen.scripts[id], `Missing script ${id}`),
Array.from(
sitegen!.scripts,
(id) => UNWRAP(codegen.scripts[id], `Missing script ${id}`),
),
),
});
}
export function joinScripts(scriptSources: string[]) {
/**
 * Re-renders every page registered under `tag` and overwrites its dynamic
 * asset in place. Rendering is fire-and-forget: each page renders
 * independently and failures are logged without affecting the others.
 */
export function regenerate(tag: RegenKey) {
  for (const viewId of codegen.regenTags[tag]) {
    // Asset keys drop the "page:" prefix used by view identifiers.
    const assetKey = viewId.slice("page:".length);
    renderToString(viewId, {}).then((html) => {
      console.info(`regenerate ${assetKey}`);
      asset.overwriteDynamic(assetKey as asset.Key, html, {
        "content-type": "text/html",
      });
    }).catch((err) => {
      console.error(`Failed regenerating ${viewId} from tag ${tag}`, err);
    });
  }
}
function joinScripts(scriptSources: string[]) {
const { length } = scriptSources;
if (length === 0) return "";
if (length === 1) return scriptSources[0];
if (0 in scriptSources) return scriptSources[0];
return scriptSources.map((source) => `{${source}}`).join(";");
}
import * as meta from "./meta.ts";
import type * as hono from "#hono";
import * as render from "#engine/render";
import * as sg from "./sitegen.ts";
import type { RegisteredViews as ViewMap } from "../../.clover/ts/view.d.ts";
import * as asset from "./assets.ts";
import type {
RegenKey,
RegisteredViews as ViewMap,
} from "../../.clover/ts/view.d.ts";

View file

@ -14,16 +14,17 @@ export function loadMarko(module: NodeJS.Module, filepath: string) {
// bare client import statements to it's own usage.
const scannedClientRefs = new Set<string>();
if (src.match(/^\s*client\s+import\s+["']/m)) {
src =
src.replace(
src = src.replace(
/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
(_, src) => {
const ref = JSON.parse(`"${src.slice(1, -1)}"`);
const resolved = hot.resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved);
return `<CloverScriptInclude=${JSON.stringify(
return `<CloverScriptInclude=${
JSON.stringify(
hot.getScriptId(resolved),
)} />`;
)
} />`;
},
) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
}

View file

@ -11,7 +11,7 @@ let watch: Watch;
export async function main() {
// Catch up state by running a main build.
await incr.restore();
if (!process.argv.includes("-f")) await incr.restore();
watch = new Watch(rebuild);
rebuild([]);
}
@ -36,15 +36,16 @@ function onSubprocessClose(code: number | null, signal: string | null) {
}
async function rebuild(files: string[]) {
const hasInvalidated = files.length === 0
|| (await Promise.all(files.map(incr.invalidate))).some(Boolean);
const hasInvalidated = files.length === 0 ||
(await Promise.all(files.map((file) => incr.invalidate(file))))
.some(Boolean);
if (!hasInvalidated) return;
incr.compile(generate.generate).then(({
watchFiles,
newOutputs,
newAssets
newAssets,
}) => {
const removeWatch = [...watch.files].filter(x => !watchFiles.has(x))
const removeWatch = [...watch.files].filter((x) => !watchFiles.has(x));
for (const file of removeWatch) watch.remove(file);
watch.add(...watchFiles);
// Restart the server if it was changed or not running.
@ -60,8 +61,8 @@ async function rebuild(files: string[]) {
function statusLine() {
console.info(
`Watching ${watch.files.size} files `
+ `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`,
`Watching ${watch.files.size} files ` +
`\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`,
);
}
@ -142,7 +143,7 @@ class Watch {
#getFiles(absPath: string, event: fs.WatchEventType) {
const files = [];
if (this.files.has(absPath)) files.push(absPath);
if (event === 'rename') {
if (event === "rename") {
const dir = path.dirname(absPath);
if (this.files.has(dir)) files.push(dir);
}

View file

@ -1,6 +1,6 @@
// This is the main file for the backend
// This is the main file for paperclover.net's server.
const app = new Hono();
const logHttp = scoped("http", { color: "magenta" });
const logHttp = console.scoped("http", { color: "magenta" });
// Middleware
app.use(trimTrailingSlash());
@ -38,4 +38,4 @@ import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts";
import { scoped } from "@paperclover/console";
import * as console from "@paperclover/console";

View file

@ -1,10 +0,0 @@
// Blog post metadata. `draft: true` keeps the post out of published listings.
export const blog: BlogMeta = {
  title: "Marko is the coziest HTML templating language",
  desc: "...todo...",
  date: "2025-07-08",
  draft: true,
};
// Was `formatBlogMeta(blob)` — `blob` is undefined; the constant above is
// named `blog`.
export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx";

View file

@ -1,290 +0,0 @@
// Blog post metadata. `draft: true` keeps the post out of published listings.
export const blog: BlogMeta = {
  title: "Marko is the coziest HTML templating language",
  desc: "...todo...",
  date: "2025-06-13",
  draft: true,
};
// Was `formatBlogMeta(blob)` — `blob` is undefined; the constant above is
// named `blog`.
export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx";
I've been recently playing around with [Marko][1], and after adding limited support
for it in my website generator, [sitegen][2], I instantly fell in love with how
minimalistic it is in comparison to JSX, Astro components, and Svelte.
## Introduction
If JSX was taking HTML and shoving its syntax into JavaScript, Marko is shoving
JavaScript into HTML. Attributes are JavaScript expressions.
```marko
<div>
// `input` is like props, but given in the top-level scope
<time datetime=input.date.toISOString()>
// Interpolation with JS template string syntax
${formatTimeNicely(input.date)}
</time>
<div>
<a href=`/users/${input.user.id}`>${input.user.name}</a>
</div>
// Capital letter variables for imported components
<MarkdownContent message=input.message />
// Components also can be auto-imported by lowercase.
// This will look upwards for a `tags/` folder containing
// "custom-footer.marko", similar to how Node.js finds
// package names in all upwards `node_modules` folders.
<custom-footer />
</div>
// ESM `import` / `export` just work as expected.
// I prefer my imports at the end, to highlight the markup.
import MarkdownContent from "./MarkdownContent.marko";
import { formatTimeNicely } from "../date-helpers.ts";
```
Tags with the `value` attribute have a shorthand, which is used by the built-in
`<if>` for conditional rendering.
```marko
// Sugar for <input value="string" />
<input="string" />
// and it composes amazingly to the 'if' built-in
<if=input.user>
<UserProfile=input.user />
</if>
```
Tags can also return values into the scope for use in the template using `/`, such as `<id>` for unique ID generation. This is available to components that `<return=output/>`.
```
<id/uniqueId />
<input id=uniqueId type="checkbox" name="allow_trans_rights" />
<label for=uniqueId>click me!</>
// ^ oh, you can also omit the
// closing tag name if you want.
```
It's important that I started with the two forms of "Tag I/O": `=` for input
and `/` for output. With those building blocks, we introduce local variables
with `const`
```
<const/rendered = markdownToHtml(input.value) />
// This is how you insert raw HTML to the document
<inline-html=rendered />
// It supports all of the cozy destructuring syntax JS has
<const/{ id, name } = user />
```
Unlike JSX, when you pass content within a tag (`input.content` instead of
JSX's `children`), instead of it being a JSX element, it is actually a
function. This means that the `for` tag can render the content multiple times.
```
<ul>
<for from=1 to=10>
// Renders a new random number for each iteration.
<li>${Math.random()}</li>
</>
</ul>
```
Since `content` is a function, it can take arguments. This is done with `|`
```
<h1>my friends</h1>
<ul>
// I tend to omit the closing tag names for the built-in control
// flow tags, but I keep them for HTML tags. It's kinda like how
// in JavaScript you just write `}` to close your `if`s and loops.
//
// Anyways <for> also has 'of'
<for|item| of=user.friends>
<li class="friend">${item.name}</li>
</>
// They support the same syntax JavaScript function params allows,
// so you can have destructuring here too, and multiple params.
<for|{ name }, index| of=user.friends>
// By the way you can also use emmet-style class and ID shorthands.
<li.friend>My #${index + 1} friend is ${name}</li>
</>
</ul>
```
Instead of named slots, Marko has attribute tags. These are more powerful than
slots since they are functions, and can also act as sugar for more complicated
attributes.
```
<Layout title="Welcome">
<@header variant="big">
<h1>the next big thing</h1>
</@header>
<p>body text...</p>
</Layout>
// The `input` variable inside of <Layout /> is:
//
// {
// title: "Welcome",
// header: {
// content: /* function rendering "<h1>the next big thing</h1>" */,
// variant: "big",
// },
// content: /* function rendering "<p>body text</p>" */
// }
```
This layout could be implemented as such:
```marko
<main>
<if=input.header />
<const/{ ...headerProps, content }=input.header />
<header ...headerProps>
// Instead of assigning to a variable with a capital letter,
// template interpolation works on tag names. This can also
// be a string to render the native HTML tag of that kind.
<${content} />
</header>
<hr />
</>
<${input.content} />
</main>
```
The last syntax feature missing is calling a tag with parameters. That is done
just like a regular function call, with '('.
```
<Something(item, index) />
```
In fact, attributes can just be sugar over this syntax; _this technically isn't
true but it's close enough for the example_
```
<SpecialButton type="submit" class="red" />
// is equal to
<SpecialButton({ type: "submit", class: "red" }) />
```
All of the above is about how Marko's syntax works, and how it performs HTML
generation with components. Marko also allows interactive components, but an
explanation of that is beyond the scope of this page, mostly since I have not
used it. A brief example of it, modified from their documentation.
```marko
// Reactive variables with <let/> just work...
<let/basicCounter=0 />
<button onClick() { basicCounter += 1 }>${basicCounter}</button>
// ...but a counter is boring.
<let/todos=[
{ id: 0, text: "Learn Marko" },
{ id: 1, text: "Make a Website" },
]/>
// 'by' is like React JSX's "key" property, but it's optional.
<ul><for|todo, i| of=todos by=(todo => todo.id)>
<li.todo>
// this variable remains stable even if the list
// re-orders, because 'by' was specified.
<let/done=false/>
<label>
<span>${todo.text}</span>
// ':=' creates a two-way reactive binding,
// (it passes a callback for `checkedChanged`)
<input type="checkbox" checked:=done />
</label>
<button
title="delete"
disabled=!done
onClick() {
todos = todos.toSpliced(i, 1);
}
> &times; </button>
</li>
</></ul>
// Form example
<let/nextId=2/>
<form onSubmit(e) {
e.preventDefault();
todos = todos.concat({
id: nextId++,
// HTMLFormElement exposes all its named input
// elements as extra properties on the object.
text: e.target.text.value,
});
// And you can clear it with 'reset()'
e.target.reset();
}>
// We don't 'onChange' like a React loser. The form
// value can be read in the submit event like normal.
<input name="text" placeholder="Another Item">
<button type="submit">Add</button>
</form>
```
## Usage on `paperclover.net`
TODO: document a lot of feedback, how i embedded Marko
My website uses statically generated HTML. That is why I have not needed to use
reactive variables. My generator doesn't even try compiling components
client-side.
Here is the actual component used to render [questions on the clover q+a](/q+a).
```marko
// Renders a `Question` entry including its markdown body.
export interface Input {
question: Question;
admin?: boolean;
}
// 2024-12-31 05:00:00 EST
export const transitionDate = 1735639200000;
<const/{ question, admin } = input />
<const/{ id, date, text } = question/>
<${"e-"}
f=(date > transitionDate ? true : undefined)
id=admin ? `q${id}` : undefined
>
<if=admin>
<a
style="margin-right: 0.5rem"
href=`/admin/q+a/${id}`
>[EDIT]</a>
</>
<a>
<time
datetime=formatQuestionISOTimestamp(date)
>${formatQuestionTimestamp(date)}</time>
</a>
<CloverMarkdown ...{ text } />
</>
// this singleton script will make all the '<time>' tags clickable.
client import "./clickable-links.client.ts";
import type { Question } from "@/q+a/models/Question.ts";
import { formatQuestionTimestamp, formatQuestionISOTimestamp } from "@/q+a/format.ts";
import { CloverMarkdown } from "@/q+a/clover-markdown.tsx";
```
import { type BlogMeta, formatBlogMeta } from '@/blog/helpers.ts';

View file

@ -93,7 +93,7 @@ app.get("/file/*", async (c, next) => {
} satisfies APIDirectoryList;
return c.json(json);
}
c.res = await renderView(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
c.res = await view.serve(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
file,
hasCotyledonCookie,
});
@ -111,7 +111,7 @@ app.get("/file/*", async (c, next) => {
!lofi
) {
prefetchFile(file.path);
c.res = await renderView(c, "file-viewer/clofi", {
c.res = await view.serve(c, "file-viewer/clofi", {
file,
hasCotyledonCookie,
});
@ -125,11 +125,10 @@ app.get("/file/*", async (c, next) => {
let encoding = decideEncoding(c.req.header("Accept-Encoding"));
let sizeHeader =
encoding === "raw"
let sizeHeader = encoding === "raw"
? expectedSize
: // Size cannot be known because of compression modes
undefined;
// Size cannot be known because of compression modes
: undefined;
// Etag
{
@ -217,7 +216,7 @@ app.get("/canvas/:script", async (c, next) => {
if (!hasAsset(`/js/canvas/${script}.js`)) {
return next();
}
return renderView(c, "file-viewer/canvas", {
return view.serve(c, "file-viewer/canvas", {
script,
});
});
@ -295,8 +294,7 @@ function handleRanges(
): Response {
// TODO: multiple ranges
const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0);
const rangeBody =
streamOrBuffer instanceof ReadableStream
const rangeBody = streamOrBuffer instanceof ReadableStream
? applySingleRangeToStream(streamOrBuffer, ranges)
: applyRangesToBuffer(streamOrBuffer, ranges, rangeSize);
return new Response(rangeBody, {
@ -420,7 +418,7 @@ import { type Context, Hono } from "hono";
import * as render from "#engine/render";
import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets";
import { renderView } from "#sitegen/view";
import * as view from "#sitegen/view";
import { contentTypeFor } from "#sitegen/mime";
import { requireFriendAuth } from "@/friend-auth.ts";

View file

@ -17,9 +17,11 @@ export async function main() {
const start = performance.now();
const timerSpinner = new Spinner({
text: () =>
`paper clover's scan3 [${((performance.now() - start) / 1000).toFixed(
`paper clover's scan3 [${
((performance.now() - start) / 1000).toFixed(
1,
)}s]`,
)
}s]`,
fps: 10,
});
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
@ -44,7 +46,7 @@ export async function main() {
.flatMap((child) =>
child.kind === MediaFileKind.directory
? child.getRecursiveFileChildren()
: child,
: child
);
qMeta.addMany(
@ -103,9 +105,11 @@ export async function main() {
) {
date = mediaFile.date;
console.warn(
`M-time on ${publicPath} was likely corrupted. ${formatDate(
`M-time on ${publicPath} was likely corrupted. ${
formatDate(
mediaFile.date,
)} -> ${formatDate(stat.mtime)}`,
)
} -> ${formatDate(stat.mtime)}`,
);
}
mediaFile = MediaFile.createFile({
@ -162,7 +166,7 @@ export async function main() {
}: Omit<ProcessFileArgs, "spin">) {
const ext = mediaFile.extensionNonEmpty.toLowerCase();
let possible = processors.filter((p) =>
p.include ? p.include.has(ext) : !p.exclude?.has(ext),
p.include ? p.include.has(ext) : !p.exclude?.has(ext)
);
if (possible.length === 0) return;
@ -196,7 +200,7 @@ export async function main() {
);
} else {
possible = order.map(({ id }) =>
UNWRAP(possible.find((p) => p.id === id)),
UNWRAP(possible.find((p) => p.id === id))
);
}
@ -260,8 +264,9 @@ export async function main() {
const children = dir.getChildren();
// readme.txt
const readmeContent =
children.find((x) => x.basename === "readme.txt")?.contents ?? "";
const readmeContent = children.find((x) =>
x.basename === "readme.txt"
)?.contents ?? "";
// dirsort
let dirsort: string[] | null = null;
@ -354,7 +359,9 @@ export async function main() {
console.info(
"Global Stats:\n" +
`- File Count: \x1b[1m${count}\x1b[0m\n` +
`- Canonical Size: \x1b[1m${formatSize(MediaFile.getByPath("/")!.size)}\x1b[0m\n` +
`- Canonical Size: \x1b[1m${
formatSize(MediaFile.getByPath("/")!.size)
}\x1b[0m\n` +
`- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`,
);
}
@ -459,7 +466,6 @@ const procLoadTextContents: Process = {
async run({ absPath, mediaFile, stat }) {
if (stat.size > 1_000_000) return;
const text = await fs.readFile(absPath, "utf-8");
console.log({ text });
mediaFile.setContents(text);
},
};
@ -696,7 +702,7 @@ async function unproduceAsset(key: string) {
const ref = AssetRef.get(key);
if (ref) {
ref.unref();
console.log(`unref ${key}`);
console.warn(`TODO: unref ${key}`);
// TODO: remove associated files from target
}
}

View file

@ -29,7 +29,9 @@ export function formatDurationLong(seconds: number) {
const hours = Math.floor(seconds / 3600);
const minutes = Math.floor((seconds % 3600) / 60);
const remainingSeconds = seconds % 60;
return `${hours}:${minutes.toString().padStart(2, "0")}:${remainingSeconds.toString().padStart(2, "0")}`;
return `${hours}:${minutes.toString().padStart(2, "0")}:${
remainingSeconds.toString().padStart(2, "0")
}`;
}
export function escapeUri(uri: string) {
@ -100,21 +102,27 @@ export function highlightLinksInTextView(
// Case 1: https:// or http:// URLs
if (match.startsWith("http")) {
if (match.includes(findDomain)) {
return `<a href="${match
return `<a href="${
match
.replace(/https?:\/\/paperclover\.net\/+/, "/")
.replace(/\/\/+/g, "/")}">${match}</a>`;
.replace(/\/\/+/g, "/")
}">${match}</a>`;
}
return `<a href="${match.replace(
return `<a href="${
match.replace(
/\/\/+/g,
"/",
)}" target="_blank" rel="noopener noreferrer">${match}</a>`;
)
}" target="_blank" rel="noopener noreferrer">${match}</a>`;
}
// Case 2: domain URLs without protocol
if (match.startsWith(findDomain)) {
return `<a href="${match
return `<a href="${
match
.replace(findDomain + "/", "/")
.replace(/\/\/+/g, "/")}">${match}</a>`;
.replace(/\/\/+/g, "/")
}">${match}</a>`;
}
// Case 3: /file/ URLs
@ -145,7 +153,7 @@ export function highlightLinksInTextView(
// Match sibling file names (only if they're not already part of a link)
if (siblingFiles.length > 0) {
const escapedBasenames = siblingFiles.map((f) =>
f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"),
f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
);
const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g");
const parts = processedText.split(/(<[^>]*>)/);
@ -155,9 +163,11 @@ export function highlightLinksInTextView(
parts[i] = parts[i].replace(pattern, (match: string) => {
const file = siblingLookup[match];
if (file) {
return `<a href="/file/${file.path
return `<a href="/file/${
file.path
.replace(/^\//, "")
.replace(/\/\/+/g, "/")}">${match}</a>`;
.replace(/\/\/+/g, "/")
}">${match}</a>`;
}
return match;
});
@ -240,9 +250,11 @@ export function highlightConvo(text: string) {
return paras
.map(({ speaker, lines }) => {
return `<div class="s-${speaker}">${lines
return `<div class="s-${speaker}">${
lines
.map((line) => `<div class="line">${line}</div>`)
.join("\n")}</div>`;
.join("\n")
}</div>`;
})
.join("\n");
}
@ -267,10 +279,12 @@ export function formatDate(dateTime: Date) {
? dateTime < unknownDate
? "??.??.??"
: `xx.xx.${21 + Math.floor(dateTime.getTime() / 86400000)}`
: `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${dateTime
: `${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${
dateTime
.getDate()
.toString()
.padStart(2, "0")}.${dateTime.getFullYear().toString().slice(2)}`;
.padStart(2, "0")
}.${dateTime.getFullYear().toString().slice(2)}`;
}
import type { MediaFile } from "@/file-viewer/models/MediaFile.ts";

View file

@ -131,8 +131,12 @@ export class Parse {
percentage: Number(percentageStr),
timeElapsed,
transferNumber: this.currentTransfer,
filesToCheck: toCheckStr ? this.toCheck = Number(toCheckStr) : this.toCheck,
totalFiles: totalStr ? this.totalFiles = Number(totalStr) : this.totalFiles,
filesToCheck: toCheckStr
? this.toCheck = Number(toCheckStr)
: this.toCheck,
totalFiles: totalStr
? this.totalFiles = Number(totalStr)
: this.totalFiles,
speed: speed || null,
};
}

View file

@ -4,9 +4,7 @@
font-weight: 400 750;
font-style: normal;
font-display: swap;
font-variation-settings:
"CASL" 0.25,
"MONO" 0;
font-variation-settings: "CASL" 0.25, "MONO" 0;
font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E;
}
@ -16,9 +14,7 @@
font-weight: 400 800;
font-style: normal;
font-display: swap;
font-variation-settings:
"CASL" 0.25,
"MONO" 1;
font-variation-settings: "CASL" 0.25, "MONO" 1;
font-style: oblique -15deg 0deg;
unicode-range: U+0020-007E;
}
@ -28,13 +24,21 @@
font-weight: 400 800;
font-style: normal;
font-display: swap;
font-variation-settings:
"CASL" 0.25,
"MONO" 1;
font-variation-settings: "CASL" 0.25, "MONO" 1;
font-style: oblique -15deg 0deg;
unicode-range:
U+00C0-00FF, U+00A9, U+2190-2193, U+2018, U+2019, U+201C, U+201D, U+2022,
U+00A0-00A8, U+00AA-00BF, U+2194-2199, U+0100-017F;
U+00C0-00FF,
U+00A9,
U+2190-2193,
U+2018,
U+2019,
U+201C,
U+201D,
U+2022,
U+00A0-00A8,
U+00AA-00BF,
U+2194-2199,
U+0100-017F;
}
*,

View file

@ -45,7 +45,7 @@ app.post("/q+a", async (c) => {
return sendSuccess(c, new Date());
}
const ipAddr = c.req.header("cf-connecting-ip");
const ipAddr = c.req.header("x-forwarded-for");
if (ipAddr) {
input.sourceName = uniqueNamesGenerator({
dictionaries: [adjectives, colors, animals],
@ -54,11 +54,6 @@ app.post("/q+a", async (c) => {
});
}
const cfIPCountry = c.req.header("cf-ipcountry");
if (cfIPCountry) {
input.sourceLocation = cfIPCountry;
}
if (ipAddr && PROXYCHECK_API_KEY) {
const proxyCheck = await fetch(
`https://proxycheck.io/v2/?key=${PROXYCHECK_API_KEY}&risk=1&vpn=1`,
@ -77,17 +72,19 @@ app.post("/q+a", async (c) => {
proxyCheck[ipAddr].organisation ??
proxyCheck[ipAddr].provider ?? "unknown";
}
if (Number(proxyCheck[ipAddr].risk) > 72) {
if (Number(proxyCheck[ipAddr].risk) > 78) {
return questionFailure(
c,
403,
"This IP address has been flagged as a high risk IP address. If you are using a VPN/Proxy, please disable it and try again.",
"This IP address has been flagged as a high risk IP address. If " +
"you are using a VPN/Proxy, please disable it and try again.",
text,
);
}
}
}
view.regenerate("q+a inbox");
const date = Question.create(
QuestionType.pending,
JSON.stringify(input),
@ -104,7 +101,7 @@ async function sendSuccess(c: Context, date: Date) {
id: formatQuestionId(date),
}, { status: 200 });
}
c.res = await renderView(c, "q+a/success", {
c.res = await view.serve(c, "q+a/success", {
permalink: `https://paperclover.net/q+a/${formatQuestionId(date)}`,
});
}
@ -130,7 +127,7 @@ app.get("/q+a/:id", async (c, next) => {
if (image) {
return getQuestionImage(question, c.req.method === "HEAD");
}
return renderView(c, "q+a/permalink", { question });
return view.serve(c, "q+a/permalink", { question });
});
// Admin
@ -138,7 +135,7 @@ app.get("/admin/q+a", async (c) => {
return serveAsset(c, "/admin/q+a", 200);
});
app.get("/admin/q+a/inbox", async (c) => {
return renderView(c, "q+a/backend-inbox", {});
return view.serve(c, "q+a/backend-inbox", {});
});
app.delete("/admin/q+a/:id", async (c, next) => {
const id = c.req.param("id");
@ -152,6 +149,7 @@ app.delete("/admin/q+a/:id", async (c, next) => {
} else {
Question.rejectByQmid(question.qmid);
}
view.regenerate("q+a");
return c.json({ success: true, message: "ok" });
});
app.patch("/admin/q+a/:id", async (c, next) => {
@ -165,6 +163,7 @@ app.patch("/admin/q+a/:id", async (c, next) => {
return questionFailure(c, 400, "Bad Request");
}
Question.updateByQmid(question.qmid, form.text, form.type);
view.regenerate("q+a");
return c.json({ success: true, message: "ok" });
});
app.get("/admin/q+a/:id", async (c, next) => {
@ -177,20 +176,22 @@ app.get("/admin/q+a/:id", async (c, next) => {
let pendingInfo: null | PendingQuestionData = null;
if (question.type === QuestionType.pending) {
pendingInfo = JSON.parse(question.text) as PendingQuestionData;
question.text = pendingInfo.prompt.trim().split("\n").map((line) =>
line.trim().length === 0 ? "" : `q: ${line.trim()}`
).join("\n") + "\n\n";
question.text = pendingInfo.prompt
.trim()
.split("\n")
.map((line) => (line.trim().length === 0 ? "" : `q: ${line.trim()}`))
.join("\n") + "\n\n";
question.type = QuestionType.normal;
}
return renderView(c, "q+a/editor", {
return view.serve(c, "q+a/editor", {
pendingInfo,
question,
});
});
app.get("/q+a/things/random", async (c) => {
c.res = await renderView(c, "q+a/things-random", {});
c.res = await view.serve(c, "q+a/things-random", {});
});
async function questionFailure(
@ -202,7 +203,7 @@ async function questionFailure(
if (c.req.header("Accept")?.includes("application/json")) {
return c.json({ success: false, message, id: null }, { status });
}
return await renderView(c, "q+a/fail", {
return await view.serve(c, "q+a/fail", {
error: message,
content,
});
@ -218,11 +219,8 @@ import {
} from "unique-names-generator";
import { hasAdminToken } from "../admin.ts";
import { serveAsset } from "#sitegen/assets";
import {
PendingQuestion,
PendingQuestionData,
} from "./models/PendingQuestion.ts";
import type { PendingQuestionData } from "./models/PendingQuestion.ts";
import { Question, QuestionType } from "./models/Question.ts";
import { renderView } from "#sitegen/view";
import * as view from "#sitegen/view";
import { getQuestionImage } from "./image.tsx";
import { formatQuestionId, questionIdToTimestamp } from "./format.ts";

View file

@ -144,9 +144,7 @@ function ListRenderer(node: ASTNode, children: any[]) {
const T = node.ordered ? "ol" : "ul";
return (
<T>
{children.map((child) => (
<li>{child}</li>
))}
{children.map((child) => <li>{child}</li>)}
</T>
);
}

View file

@ -11,7 +11,7 @@ const getBrowser = RefCountedExpirable(
);
export async function renderQuestionImage(question: Question) {
const html = await renderViewToString("q+a/image-embed", { question });
const html = await view.renderToString("q+a/image-embed", { question });
// this browser session will be reused if multiple images are generated
// either at the same time or within a 5-minute time span. the dispose
@ -44,14 +44,15 @@ export async function getQuestionImage(
question: Question,
headOnly: boolean,
): Promise<Response> {
const hash = crypto.createHash("sha1")
const hash = crypto
.createHash("sha1")
.update(question.qmid + question.type + question.text)
.digest("hex");
const headers = {
"Content-Type": "image/png",
"Cache-Control": "public, max-age=31536000",
"ETag": `"${hash}"`,
ETag: `"${hash}"`,
"Last-Modified": question.date.toUTCString(),
};
@ -78,4 +79,4 @@ import * as path from "node:path";
import * as puppeteer from "puppeteer";
import { Question } from "@/q+a/models/Question.ts";
import { RefCountedExpirable } from "#sitegen/async";
import { renderViewToString } from "#sitegen/view";
import * as view from "#sitegen/view";

View file

@ -7,7 +7,7 @@ export const meta: Metadata = {
description: "ask clover a question",
};
export const regenerate = {
manual: true,
tags: ["q+a", "q+a inbox"]
};
<const/inboxSize = PendingQuestion.getAll().length />

View file

@ -19,8 +19,8 @@ export const meta = { title: 'question answer inbox' };
</time>
<div style="color: dodgerblue; margin-bottom: 0.25rem">
${q.sourceName}
${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : null}
${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : null}
${q.sourceLocation !== "unknown" ? `from ${q.sourceLocation}` : ''}
${q.sourceVPN ? `(VPN: ${q.sourceVPN})` : ''}
</div>
<p style="white-space: pre-wrap">${q.prompt}</p>
<p>

View file

@ -33,8 +33,12 @@ const rawFileRoot = process.env.CLOVER_FILE_RAW ??
const derivedFileRoot = process.env.CLOVER_FILE_DERIVED ??
path.join(nasRoot, "Documents/Config/paperclover/derived");
if (!fs.existsSync(rawFileRoot)) throw new Error(`${rawFileRoot} does not exist`);
if (!fs.existsSync(derivedFileRoot)) throw new Error(`${derivedFileRoot} does not exist`);
if (!fs.existsSync(rawFileRoot)) {
throw new Error(`${rawFileRoot} does not exist`);
}
if (!fs.existsSync(derivedFileRoot)) {
throw new Error(`${derivedFileRoot} does not exist`);
}
type Awaitable<T> = T | Promise<T>;

View file

@ -5,7 +5,7 @@
"incremental": true,
"jsx": "react-jsxdev",
"jsxImportSource": "#engine",
"lib": ["dom", "esnext", "esnext.iterator"],
"lib": ["esnext", "dom", "dom.iterable"],
"module": "nodenext",
"noEmit": true,
"outDir": ".clover/ts",
@ -14,7 +14,8 @@
"skipLibCheck": true,
"strict": true,
"verbatimModuleSyntax": true,
"target": "es2022"
"target": "es2022",
"noUncheckedIndexedAccess": true
},
"include": ["framework/**/*", "src/**/*"]
}