rewrite incremental #21
6 changed files with 144 additions and 149 deletions
@@ -173,7 +173,7 @@ export async function bundleServerJavaScript(
     },
   ];

-  const { metafile, outputFiles } = await esbuild.build({
+  const { metafile, outputFiles, errors, warnings } = await esbuild.build({
     bundle: true,
     chunkNames: "c.[hash]",
     entryNames: path.basename(entry, path.extname(entry)),
@@ -194,7 +194,7 @@ export async function bundleServerJavaScript(
    jsxDev: false,
    define: {
      MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
-      CLOVER_SERVER_ENTRY: JSON.stringify(entry),
+      'globalThis.CLOVER_SERVER_ENTRY': JSON.stringify(entry),
    },
    external: Object.keys(pkg.dependencies)
      .filter((x) => !x.startsWith("@paperclover")),
@@ -208,7 +208,7 @@ export async function bundleServerJavaScript(
  } | null = null;
  for (const output of outputFiles) {
    const basename = output.path.replace(/^.*?!(?:\/|\\)/, "");
-    const key = "out!" + basename.replaceAll("\\", "/");
+    const key = "out!/" + basename.replaceAll("\\", "/");
    // If this contains the generated "$views" file, then
    // mark this file as the one for replacement. Because
    // `splitting` is `true`, esbuild will not emit this

@@ -64,7 +64,8 @@ Module.prototype._compile = function (
  const stat = fs.statSync(filename);
  const cssImportsMaybe: string[] = [];
  const imports: string[] = [];
-  for (const { filename: file, cloverClientRefs } of this.children) {
+  for (const childModule of this.children) {
+    const { filename: file, cloverClientRefs } = childModule;
    if (file.endsWith(".css")) cssImportsMaybe.push(file);
    else {
      const child = fileStats.get(file);
@@ -72,6 +73,7 @@ Module.prototype._compile = function (
      const { cssImportsRecursive } = child;
      if (cssImportsRecursive) cssImportsMaybe.push(...cssImportsRecursive);
      imports.push(file);
+      (childModule.cloverImporters ??= []).push(this);
      if (cloverClientRefs && cloverClientRefs.length > 0) {
        (this.cloverClientRefs ??= [])
          .push(...cloverClientRefs);
@@ -193,9 +195,13 @@ export function reloadRecursive(filepath: string) {

 export function unload(filepath: string) {
   filepath = path.resolve(filepath);
-  const existing = cache[filepath];
-  if (existing) delete cache[filepath];
-  fileStats.delete(filepath);
+  const module = cache[filepath];
+  if (!module) return;
+  delete cache[filepath];
+  lazyMarko?.markoCache.delete(filepath)
+  for (const importer of module.cloverImporters ?? []) {
+    unload(importer.filename);
+  }
 }

 function deleteRecursiveInner(id: string, module: any) {
@@ -294,6 +300,7 @@ declare global {
  interface Module {
    cloverClientRefs?: string[];
    cloverSourceCode?: string;
+    cloverImporters?: Module[],

    _compile(
      this: NodeJS.Module,

@@ -30,10 +30,8 @@ type Job<I = any, O = any> = (io: Io, input: I) => Promise<O>;
 export function work<O>(job: Job<void, O>): Ref<O>;
 export function work<I, O>(job: Job<I, O>, input: I): Ref<O>;
 export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
-  const keySource = [
-    JSON.stringify(util.getCallSites(2)[1]),
-    util.inspect(input),
-  ].join(":");
+  const source = JSON.stringify(util.getCallSites(2)[1]);
+  const keySource = [source, util.inspect(input)].join(":");
   const key = crypto.createHash("sha1").update(keySource).digest("base64url");
   ASSERT(running);
   ASSERT(
@@ -61,6 +59,7 @@ export function work<I, O>(job: Job<I, O>, input: I = null as I): Ref<O> {
    affects: [],
    reads,
    writes,
+    debug: source,
  });
  for (const add of reads.files) {
    const { affects } = UNWRAP(files.get(add));
@@ -107,11 +106,17 @@ export async function compile<T>(compiler: () => Promise<T>) {
    timerSpinner.text = "incremental flush";
    await flush(start);
    timerSpinner.stop();
-    seenWorks.clear();
-    newKeys = 0;
-    return { value };
+    return {
+      value,
+      watchFiles: new Set(files.keys()),
+      newOutputs: Array.from(seenWrites).filter(x => x.startsWith('f:')).map(x => x.slice(2)),
+      newAssets: !Array.from(seenWrites).some(x => x.startsWith('a:')),
+    };
  } finally {
    running = false;
+    newKeys = 0;
+    seenWrites.clear();
+    seenWorks.clear();
  }
 }

@@ -178,13 +183,7 @@ export async function restore() {
   await deserialize(buffer);
 }

-export function forceInvalidate(file: string) {
-  const resolved = toAbs(file);
-  const key = toRel(resolved);
-  forceInvalidateEntry(UNWRAP(files.get(key), `Untracked file '${file}'`));
-}
-
-export function forceInvalidateEntry(entry: { affects: string[] }) {
+function forceInvalidate(entry: { affects: string[] }) {
   const queue = [...entry.affects];
   let key;
   while ((key = queue.shift())) {
@@ -194,8 +193,9 @@ export function forceInvalidateEntry(entry: { affects: string[] }) {
 }

 function deleteWork(key: string) {
-  console.info({ key });
-  const { reads, affects, writes: w } = UNWRAP(works.get(key));
+  const work = works.get(key);
+  if (!work) return [];
+  const { reads, affects, writes: w } = work;
   for (const remove of reads.files) {
     const { affects } = UNWRAP(files.get(remove));
     ASSERT(affects.includes(key));
@@ -325,43 +325,54 @@ async function deserialize(buffer: Buffer) {
        work,
      });
    } else {
-      forceInvalidateEntry({ affects: [work] });
+      forceInvalidate({ affects: [work] });
    }
  }
  for (const [hash, raw, gzip, zstd] of assetEntries) {
    assets.set(hash, { raw, gzip, zstd });
  }

-  await Promise.all(Array.from(files, async ([k, file]) => {
-    try {
-      if (file.type === "d") {
-        const contents = file.contents = await fs.readdir(k);
-        contents.sort();
-        const contentHash = crypto
-          .createHash("sha1")
-          .update(contents.join("\0"))
-          .digest("base64url");
-        if (file.contentHash !== contentHash) {
-          file.contentHash = contentHash;
-          throw new Error();
-        }
-      } else if (file.type === 'f') {
-        const lastModified = await fs.stat(k)
-          .then(x => Math.floor(x.mtimeMs), () => 0);
-        if (file.lastModified !== lastModified) {
-          file.lastModified = lastModified;
-          throw new Error();
-        }
-      } else {
-        file.type satisfies 'null';
-        const stat = await fs.stat(k).catch(() => null);
-        if (stat) throw new Error();
-      }
-    } catch (e) {
-      forceInvalidateEntry(file);
-      if (file.type === 'null') files.delete(k);
-    }
-  }));
+  await Promise.all(Array.from(files, ([key, file]) => invalidateEntry(key, file)));
+}
+
+export async function invalidate(filePath: string): Promise<boolean> {
+  const key = toRel(toAbs(filePath));
+  const file = UNWRAP(files.get(key), `Untracked file '${key}'`)
+  return invalidateEntry(key, file)
+}
+
+export async function invalidateEntry(key: string, file: TrackedFile): Promise<boolean> {
+  try {
+    if (file.type === "d") {
+      const contents = file.contents = await fs.readdir(key);
+      contents.sort();
+      const contentHash = crypto
+        .createHash("sha1")
+        .update(contents.join("\0"))
+        .digest("base64url");
+      if (file.contentHash !== contentHash) {
+        file.contentHash = contentHash;
+        throw new Error();
+      }
+    } else if (file.type === 'f') {
+      const lastModified = await fs.stat(key)
+        .then(x => Math.floor(x.mtimeMs), () => 0);
+      if (file.lastModified !== lastModified) {
+        file.lastModified = lastModified;
+        throw new Error();
+      }
+    } else {
+      file.type satisfies 'null';
+      const stat = await fs.stat(key).catch(() => null);
+      if (stat) throw new Error();
+    }
+    return false;
+  } catch (e) {
+    forceInvalidate(file);
+    hot.unload(toAbs(key));
+    if (file.type === 'null') files.delete(key);
+    return true;
+  }
 }

 export function getAssetManifest() {
@@ -377,8 +388,8 @@ export function getAssetManifest() {
      );
      return [key, {
        raw: writer.write(raw, "raw:" + hash),
-        gzip: writer.write(gzip, "raw:" + hash),
-        zstd: writer.write(zstd, "raw:" + hash),
+        gzip: writer.write(gzip, "gzip:" + hash),
+        zstd: writer.write(zstd, "zstd:" + hash),
        headers,
      }] as const;
    })
@@ -577,7 +588,7 @@ export function validateSerializable(value: unknown, key: string) {
  } else if (value && typeof value === "object") {
    if (Array.isArray(value)) {
      value.forEach((item, i) => validateSerializable(item, `${key}[${i}]`));
-    } else if (Object.getPrototypeOf(value) === Object.prototype) {
+    } else if (Object.getPrototypeOf(value) === Object.prototype || Buffer.isBuffer(value)) {
      Object.entries(value).forEach(([k, v]) =>
        validateSerializable(v, `${key}.${k}`)
      );
@@ -626,6 +637,7 @@ interface Asset {
  zstd: Buffer;
 }
 interface Work<T = unknown> {
+  debug?: string;
  value: T;
  reads: Reads;
  writes: Writes;
@@ -637,7 +649,7 @@ type TrackedFile =
  }
  & (
    | { type: "f"; lastModified: number }
-    | { type: "d"; contentHash: string; contents: string[] | null }
+    | { type: "d"; contentHash: string; contents: string[] }
    | { type: "null"; }
  );
 export interface BuiltAssetMap {

@@ -2,102 +2,67 @@
 const debounceMilliseconds = 25;

+let subprocess: child_process.ChildProcess | null = null;
+process.on("beforeExit", () => {
+  subprocess?.removeListener("close", onSubprocessClose);
+});
+
+let watch: Watch;
+
 export async function main() {
-  let subprocess: child_process.ChildProcess | null = null;
-
   // Catch up state by running a main build.
-  const { incr } = await generate.main();
-  // ...and watch the files that cause invals.
-  const watch = new Watch(rebuild);
-  watch.add(...incr.invals.keys());
-  statusLine();
-  // ... and then serve it!
-  serve();
-
-  function serve() {
-    if (subprocess) {
-      subprocess.removeListener("close", onSubprocessClose);
-      subprocess.kill();
-    }
-    subprocess = child_process.fork(".clover/out/server.js", [
-      "--development",
-    ], {
-      stdio: "inherit",
-    });
-    subprocess.on("close", onSubprocessClose);
-  }
-
-  function onSubprocessClose(code: number | null, signal: string | null) {
-    subprocess = null;
-    const status = code != null ? `code ${code}` : `signal ${signal}`;
-    console.error(`Backend process exited with ${status}`);
-  }
-
-  process.on("beforeExit", () => {
-    subprocess?.removeListener("close", onSubprocessClose);
-  });
-
-  function rebuild(files: string[]) {
-    files = files.map((file) => path.relative(hot.projectRoot, file));
-    const changed: string[] = [];
-    for (const file of files) {
-      let mtimeMs: number | null = null;
-      try {
-        mtimeMs = fs.statSync(file).mtimeMs;
-      } catch (err: any) {
-        if (err?.code !== "ENOENT") throw err;
-      }
-      if (incr.updateStat(file, mtimeMs)) changed.push(file);
-    }
-    if (changed.length === 0) {
-      console.warn("Files were modified but the 'modify' time did not change.");
-      return;
-    }
-    withSpinner<any, Awaited<ReturnType<typeof generate.sitegen>>>({
-      text: "Rebuilding",
-      successText: generate.successText,
-      failureText: () => "sitegen FAIL",
-    }, async (spinner) => {
-      console.info("---");
-      console.info(
-        "Updated" +
-          (changed.length === 1
-            ? " " + changed[0]
-            : changed.map((file) => "\n- " + file)),
-      );
-      const result = await generate.sitegen(spinner, incr);
-      incr.toDisk(); // Allows picking up this state again
-      for (const file of watch.files) {
-        const relative = path.relative(hot.projectRoot, file);
-        if (!incr.invals.has(relative)) watch.remove(file);
-      }
-      return result;
-    }).then((result) => {
-      // Restart the server if it was changed or not running.
-      if (
-        !subprocess ||
-        result.inserted.some(({ kind }) => kind === "backendReplace")
-      ) {
-        serve();
-      } else if (
-        subprocess &&
-        result.inserted.some(({ kind }) => kind === "asset")
-      ) {
-        subprocess.send({ type: "clover.assets.reload" });
-      }
-      return result;
-    }).catch((err) => {
-      console.error(util.inspect(err));
-    }).finally(statusLine);
-  }
-
-  function statusLine() {
-    console.info(
-      `Watching ${incr.invals.size} files \x1b[36m[last change: ${
-        new Date().toLocaleTimeString()
-      }]\x1b[39m`,
-    );
-  }
+  await incr.restore();
+  watch = new Watch(rebuild);
+  rebuild([]);
+}
+
+function serve() {
+  if (subprocess) {
+    subprocess.removeListener("close", onSubprocessClose);
+    subprocess.kill();
+  }
+  subprocess = child_process.fork(".clover/o/backend.js", [
+    "--development",
+  ], {
+    stdio: "inherit",
+  });
+  subprocess.on("close", onSubprocessClose);
+}
+
+function onSubprocessClose(code: number | null, signal: string | null) {
+  subprocess = null;
+  const status = code != null ? `code ${code}` : `signal ${signal}`;
+  console.error(`Backend process exited with ${status}`);
+}
+
+function rebuild(files: string[]) {
+  for (const file of files) {
+    incr.invalidate(file);
+  }
+  incr.compile(generate.generate).then(({
+    watchFiles,
+    newOutputs,
+    newAssets
+  }) => {
+    const removeWatch = [...watch.files].filter(x => !watchFiles.has(x))
+    for (const file of removeWatch) watch.remove(file);
+    watch.add(...watchFiles);
+    // Restart the server if it was changed or not running.
+    if (!subprocess || newOutputs.includes("backend.js")) {
+      serve();
+    } else if (subprocess && newAssets) {
+      subprocess.send({ type: "clover.assets.reload" });
+    }
+  }).catch((err) => {
+    console.error(util.inspect(err));
+  }).finally(statusLine);
+}
+
+function statusLine() {
+  console.info(
+    `Watching ${watch.files.size} files `
+    + `\x1b[36m[last change: ${new Date().toLocaleTimeString()}]\x1b[39m`,
+  );
 }

 class Watch {
@@ -174,11 +139,21 @@ class Watch {
    for (const w of this.watchers) w.close();
  }

+  #getFiles(absPath: string, event: fs.WatchEventType) {
+    const files = [];
+    if (this.files.has(absPath)) files.push(absPath);
+    if (event === 'rename') {
+      const dir = path.dirname(absPath);
+      if (this.files.has(dir)) files.push(dir);
+    }
+    return files;
+  }
+
  #handleEvent(root: string, event: fs.WatchEventType, subPath: string | null) {
    if (!subPath) return;
-    const file = path.join(root, subPath);
-    if (!this.files.has(file)) return;
-    this.stale.add(file);
+    const files = this.#getFiles(path.join(root, subPath), event);
+    if (files.length === 0) return;
+    for(const file of files) this.stale.add(file);
    const { debounce } = this;
    if (debounce !== null) clearTimeout(debounce);
    this.debounce = setTimeout(() => {
@@ -192,6 +167,7 @@ class Watch {
 import * as fs from "node:fs";
 import { withSpinner } from "@paperclover/console/Spinner";
 import * as generate from "./generate.ts";
+import * as incr from "./incremental.ts";
 import * as path from "node:path";
 import * as util from "node:util";
 import * as hot from "./hot.ts";

@@ -59,7 +59,7 @@ main {
 }

 h1 {
-  font-size: 2.5em;
+  font-size: 2em;
 }

 h1,

@@ -29,7 +29,7 @@ export const meta: Meta = {
  <main>
    <div>
      <h2>posts</h2>
-      <p>song: <span>in the summer</span> (coming soon, 2025-07-12)</p>
+      <p>song: <a href="/in-the-summer">in the summer</a> (2025-01-01)</p>
      <p>song: <a href="/waterfalls">waterfalls</a> (2025-01-01)</p>
      <h2>things</h2>
      <p><a href="/q+a">questions and answers</a></p>