From c538c151c7462ad0395ff2c15c5e11e89e362aa8 Mon Sep 17 00:00:00 2001
From: Striven <sg.striven@cutecat.club>
Date: Sat, 04 Apr 2026 19:47:16 +0000
Subject: [PATCH] Initial commit
---
quartz/build.ts | 345 +++++++++++++++++++++++++++++++++++++++------------------
1 file changed, 234 insertions(+), 111 deletions(-)
diff --git a/quartz/build.ts b/quartz/build.ts
index ae5fd40..b98f4a8 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -1,42 +1,55 @@
import sourceMapSupport from "source-map-support"
-sourceMapSupport.install({
- retrieveSourceMap(source) {
- // source map hack to get around query param
- // import cache busting
- if (source.includes(".quartz-cache")) {
- let realSource = fileURLToPath(source.split("?", 2)[0] + ".map")
- return {
- map: fs.readFileSync(realSource, "utf8"),
- }
- } else {
- return null
- }
- },
-})
-
+sourceMapSupport.install(options)
import path from "path"
-import { PerfTimer } from "./perf"
-import { rimraf } from "rimraf"
-import { isGitIgnored } from "globby"
-import chalk from "chalk"
+import { PerfTimer } from "./util/perf"
+import { rm } from "fs/promises"
+import { GlobbyFilterFunction, isGitIgnored } from "globby"
+import { styleText } from "util"
import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
-import { FilePath, joinSegments, slugifyFilePath } from "./path"
+import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
-import { Argv, BuildCtx } from "./ctx"
-import { glob, toPosixPath } from "./glob"
-import { trace } from "./trace"
-import { fileURLToPath } from "url"
-import fs from "fs"
+import { Argv, BuildCtx } from "./util/ctx"
+import { glob, toPosixPath } from "./util/glob"
+import { trace } from "./util/trace"
+import { options } from "./util/sourcemap"
+import { Mutex } from "async-mutex"
+import { getStaticResourcesFromPlugins } from "./plugins"
+import { randomIdNonSecure } from "./util/random"
+import { ChangeEvent } from "./plugins/types"
+import { minimatch } from "minimatch"
-async function buildQuartz(argv: Argv, clientRefresh: () => void) {
+type ContentMap = Map<
+ FilePath,
+ | {
+ type: "markdown"
+ content: ProcessedContent
+ }
+ | {
+ type: "other"
+ }
+>
+
+type BuildData = {
+ ctx: BuildCtx
+ ignored: GlobbyFilterFunction
+ mut: Mutex
+ contentMap: ContentMap
+ changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
+ lastBuildMs: number
+}
+
+async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
const ctx: BuildCtx = {
+ buildId: randomIdNonSecure(),
argv,
cfg,
allSlugs: [],
+ allFiles: [],
+ incremental: false,
}
const perf = new PerfTimer()
@@ -52,131 +65,241 @@
console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
}
+ const release = await mut.acquire()
perf.addEvent("clean")
- await rimraf(output)
+ await rm(output, { recursive: true, force: true })
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
perf.addEvent("glob")
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
- const fps = allFiles.filter((fp) => fp.endsWith(".md"))
+ const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
console.log(
- `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
+ `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
)
- const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ ctx.allFiles = allFiles
ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
const parsedFiles = await parseMarkdown(ctx, filePaths)
const filteredContent = filterContent(ctx, parsedFiles)
- await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
- if (argv.serve) {
- return startServing(ctx, parsedFiles, clientRefresh)
+ await emitContent(ctx, filteredContent)
+ console.log(
+ styleText("green", `Done processing ${markdownPaths.length} files in ${perf.timeSince()}`),
+ )
+ release()
+
+ if (argv.watch) {
+ ctx.incremental = true
+ return startWatching(ctx, mut, parsedFiles, clientRefresh)
}
}
// setup watcher for rebuilds
-async function startServing(
+async function startWatching(
ctx: BuildCtx,
+ mut: Mutex,
initialContent: ProcessedContent[],
clientRefresh: () => void,
) {
- const { argv } = ctx
+ const { argv, allFiles } = ctx
- const ignored = await isGitIgnored()
- const contentMap = new Map<FilePath, ProcessedContent>()
- for (const content of initialContent) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
+ const contentMap: ContentMap = new Map()
+ for (const filePath of allFiles) {
+ contentMap.set(filePath, {
+ type: "other",
+ })
}
- let timeoutId: ReturnType<typeof setTimeout> | null = null
- let toRebuild: Set<FilePath> = new Set()
- let toRemove: Set<FilePath> = new Set()
- let trackedAssets: Set<FilePath> = new Set()
- async function rebuild(fp: string, action: "add" | "change" | "delete") {
- // don't do anything for gitignored files
- if (ignored(fp)) {
- return
- }
+ for (const content of initialContent) {
+ const [_tree, vfile] = content
+ contentMap.set(vfile.data.relativePath!, {
+ type: "markdown",
+ content,
+ })
+ }
- // dont bother rebuilding for non-content files, just track and refresh
- if (path.extname(fp) !== ".md") {
- fp = toPosixPath(fp)
- const filePath = joinSegments(argv.directory, fp) as FilePath
- if (action === "add" || action === "change") {
- trackedAssets.add(filePath)
- } else if (action === "delete") {
- trackedAssets.add(filePath)
- }
- clientRefresh()
- return
- }
-
- fp = toPosixPath(fp)
- const filePath = joinSegments(argv.directory, fp) as FilePath
- if (action === "add" || action === "change") {
- toRebuild.add(filePath)
- } else if (action === "delete") {
- toRemove.add(filePath)
- }
-
- if (timeoutId) {
- clearTimeout(timeoutId)
- }
-
- // debounce rebuilds every 250ms
- timeoutId = setTimeout(async () => {
- const perf = new PerfTimer()
- console.log(chalk.yellow("Detected change, rebuilding..."))
- try {
- const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
-
- ctx.allSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
- .filter((fp) => !toRemove.has(fp))
- .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
-
- const parsedContent = await parseMarkdown(ctx, filesToRebuild)
- for (const content of parsedContent) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
+ const gitIgnoredMatcher = await isGitIgnored()
+ const buildData: BuildData = {
+ ctx,
+ mut,
+ contentMap,
+ ignored: (fp) => {
+ const pathStr = toPosixPath(fp.toString())
+ if (pathStr.startsWith(".git/")) return true
+ if (gitIgnoredMatcher(pathStr)) return true
+ for (const pattern of cfg.configuration.ignorePatterns) {
+ if (minimatch(pathStr, pattern)) {
+ return true
}
-
- for (const fp of toRemove) {
- contentMap.delete(fp)
- }
-
- await rimraf(argv.output)
- const parsedFiles = [...contentMap.values()]
- const filteredContent = filterContent(ctx, parsedFiles)
- await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
- } catch {
- console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
}
- clientRefresh()
- toRebuild.clear()
- toRemove.clear()
- }, 250)
+ return false
+ },
+
+ changesSinceLastBuild: {},
+ lastBuildMs: 0,
}
const watcher = chokidar.watch(".", {
+ awaitWriteFinish: { stabilityThreshold: 250 },
persistent: true,
cwd: argv.directory,
ignoreInitial: true,
})
+ const changes: ChangeEvent[] = []
watcher
- .on("add", (fp) => rebuild(fp, "add"))
- .on("change", (fp) => rebuild(fp, "change"))
- .on("unlink", (fp) => rebuild(fp, "delete"))
+ .on("add", (fp) => {
+ fp = toPosixPath(fp)
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "add" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+ .on("change", (fp) => {
+ fp = toPosixPath(fp)
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "change" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+ .on("unlink", (fp) => {
+ fp = toPosixPath(fp)
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "delete" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+
+ return async () => {
+ await watcher.close()
+ }
}
-export default async (argv: Argv, clientRefresh: () => void) => {
+async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
+ const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
+ const { argv, cfg } = ctx
+
+ const buildId = randomIdNonSecure()
+ ctx.buildId = buildId
+ buildData.lastBuildMs = new Date().getTime()
+ const numChangesInBuild = changes.length
+ const release = await mut.acquire()
+
+ // if there's another build after us, release and let them do it
+ if (ctx.buildId !== buildId) {
+ release()
+ return
+ }
+
+ const perf = new PerfTimer()
+ perf.addEvent("rebuild")
+ console.log(styleText("yellow", "Detected change, rebuilding..."))
+
+ // update changesSinceLastBuild
+ for (const change of changes) {
+ changesSinceLastBuild[change.path] = change.type
+ }
+
+ const staticResources = getStaticResourcesFromPlugins(ctx)
+ const pathsToParse: FilePath[] = []
+ for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
+ if (type === "delete" || path.extname(fp) !== ".md") continue
+ const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
+ pathsToParse.push(fullPath)
+ }
+
+ const parsed = await parseMarkdown(ctx, pathsToParse)
+ for (const content of parsed) {
+ contentMap.set(content[1].data.relativePath!, {
+ type: "markdown",
+ content,
+ })
+ }
+
+ // update state using changesSinceLastBuild
+ // we do this weird play of add => compute change events => remove
+ // so that partialEmitters can do appropriate cleanup based on the content of deleted files
+ for (const [file, change] of Object.entries(changesSinceLastBuild)) {
+ if (change === "delete") {
+ // universal delete case
+ contentMap.delete(file as FilePath)
+ }
+
+ // manually track non-markdown files as processed files only
+ // contains markdown files
+ if (change === "add" && path.extname(file) !== ".md") {
+ contentMap.set(file as FilePath, {
+ type: "other",
+ })
+ }
+ }
+
+ const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
+ const path = fp as FilePath
+ const processedContent = contentMap.get(path)
+ if (processedContent?.type === "markdown") {
+ const [_tree, file] = processedContent.content
+ return {
+ type,
+ path,
+ file,
+ }
+ }
+
+ return {
+ type,
+ path,
+ }
+ })
+
+ // update allFiles and then allSlugs with the consistent view of content map
+ ctx.allFiles = Array.from(contentMap.keys())
+ ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))
+ let processedFiles = filterContent(
+ ctx,
+ Array.from(contentMap.values())
+ .filter((file) => file.type === "markdown")
+ .map((file) => file.content),
+ )
+
+ let emittedFiles = 0
+ for (const emitter of cfg.plugins.emitters) {
+ // Try to use partialEmit if available, otherwise assume the output is static
+ const emitFn = emitter.partialEmit ?? emitter.emit
+ const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
+ if (emitted === null) {
+ continue
+ }
+
+ if (Symbol.asyncIterator in emitted) {
+ // Async generator case
+ for await (const file of emitted) {
+ emittedFiles++
+ if (ctx.argv.verbose) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ }
+ }
+ } else {
+ // Array case
+ emittedFiles += emitted.length
+ if (ctx.argv.verbose) {
+ for (const file of emitted) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ }
+ }
+ }
+ }
+
+ console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
+ console.log(styleText("green", `Done rebuilding in ${perf.timeSince()}`))
+ changes.splice(0, numChangesInBuild)
+ clientRefresh()
+ release()
+}
+
+export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {
try {
- return await buildQuartz(argv, clientRefresh)
+ return await buildQuartz(argv, mut, clientRefresh)
} catch (err) {
trace("\nExiting Quartz due to a fatal error", err as Error)
}
--
Gitblit v1.10.0