From c538c151c7462ad0395ff2c15c5e11e89e362aa8 Mon Sep 17 00:00:00 2001
From: Striven <sg.striven@cutecat.club>
Date: Sat, 04 Apr 2026 19:47:16 +0000
Subject: [PATCH] Initial commit
---
quartz/build.ts | 325 ++++++++++++++++++++++++++++++++++++++++-------------
1 file changed, 245 insertions(+), 80 deletions(-)
diff --git a/quartz/build.ts b/quartz/build.ts
index 26baa1b..b98f4a8 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -1,28 +1,57 @@
-import "source-map-support/register.js"
+import sourceMapSupport from "source-map-support"
+sourceMapSupport.install(options)
import path from "path"
-import { PerfTimer } from "./perf"
-import { rimraf } from "rimraf"
-import { globby, isGitIgnored } from "globby"
-import chalk from "chalk"
-import http from "http"
-import serveHandler from "serve-handler"
+import { PerfTimer } from "./util/perf"
+import { rm } from "fs/promises"
+import { GlobbyFilterFunction, isGitIgnored } from "globby"
+import { styleText } from "util"
import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
-import { FilePath } from "./path"
+import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
-import WebSocket, { WebSocketServer } from "ws"
-import { Argv, BuildCtx } from "./ctx"
+import { Argv, BuildCtx } from "./util/ctx"
+import { glob, toPosixPath } from "./util/glob"
+import { trace } from "./util/trace"
+import { options } from "./util/sourcemap"
+import { Mutex } from "async-mutex"
+import { getStaticResourcesFromPlugins } from "./plugins"
+import { randomIdNonSecure } from "./util/random"
+import { ChangeEvent } from "./plugins/types"
+import { minimatch } from "minimatch"
-async function buildQuartz(argv: Argv, version: string) {
+type ContentMap = Map<
+ FilePath,
+ | {
+ type: "markdown"
+ content: ProcessedContent
+ }
+ | {
+ type: "other"
+ }
+>
+
+type BuildData = {
+ ctx: BuildCtx
+ ignored: GlobbyFilterFunction
+ mut: Mutex
+ contentMap: ContentMap
+ changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
+ lastBuildMs: number
+}
+
+async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
const ctx: BuildCtx = {
+ buildId: randomIdNonSecure(),
argv,
cfg,
+ allSlugs: [],
+ allFiles: [],
+ incremental: false,
}
- console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
const perf = new PerfTimer()
const output = argv.output
@@ -36,106 +65,242 @@
console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
}
+ const release = await mut.acquire()
perf.addEvent("clean")
- await rimraf(output)
+ await rm(output, { recursive: true, force: true })
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
perf.addEvent("glob")
- const fps = await globby("**/*.md", {
- cwd: argv.directory,
- ignore: cfg.configuration.ignorePatterns,
- gitignore: true,
- })
+ const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
+ const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
console.log(
- `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
+ `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
)
- const filePaths = fps.map((fp) => `${argv.directory}${path.sep}${fp}` as FilePath)
+ const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ ctx.allFiles = allFiles
+ ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
+
const parsedFiles = await parseMarkdown(ctx, filePaths)
const filteredContent = filterContent(ctx, parsedFiles)
- await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
- if (argv.serve) {
- await startServing(ctx, parsedFiles)
+ await emitContent(ctx, filteredContent)
+ console.log(
+ styleText("green", `Done processing ${markdownPaths.length} files in ${perf.timeSince()}`),
+ )
+ release()
+
+ if (argv.watch) {
+ ctx.incremental = true
+ return startWatching(ctx, mut, parsedFiles, clientRefresh)
}
}
-async function startServing(ctx: BuildCtx, initialContent: ProcessedContent[]) {
- const { argv } = ctx
- const wss = new WebSocketServer({ port: 3001 })
- const connections: WebSocket[] = []
- wss.on("connection", (ws) => connections.push(ws))
+// setup watcher for rebuilds
+async function startWatching(
+ ctx: BuildCtx,
+ mut: Mutex,
+ initialContent: ProcessedContent[],
+ clientRefresh: () => void,
+) {
+ const { argv, allFiles } = ctx
- const ignored = await isGitIgnored()
- const contentMap = new Map<FilePath, ProcessedContent>()
- for (const content of initialContent) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
+ const contentMap: ContentMap = new Map()
+ for (const filePath of allFiles) {
+ contentMap.set(filePath, {
+ type: "other",
+ })
}
- async function rebuild(fp: string, action: "add" | "change" | "unlink") {
- const perf = new PerfTimer()
- if (!ignored(fp)) {
- console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`))
- const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath
+ for (const content of initialContent) {
+ const [_tree, vfile] = content
+ contentMap.set(vfile.data.relativePath!, {
+ type: "markdown",
+ content,
+ })
+ }
- try {
- if (action === "add" || action === "change") {
- const [parsedContent] = await parseMarkdown(ctx, [fullPath])
- contentMap.set(fullPath, parsedContent)
- } else if (action === "unlink") {
- contentMap.delete(fullPath)
+ const gitIgnoredMatcher = await isGitIgnored()
+ const buildData: BuildData = {
+ ctx,
+ mut,
+ contentMap,
+ ignored: (fp) => {
+ const pathStr = toPosixPath(fp.toString())
+ if (pathStr.startsWith(".git/")) return true
+ if (gitIgnoredMatcher(pathStr)) return true
+ for (const pattern of cfg.configuration.ignorePatterns) {
+ if (minimatch(pathStr, pattern)) {
+ return true
}
-
- await rimraf(argv.output)
- const parsedFiles = [...contentMap.values()]
- const filteredContent = filterContent(ctx, parsedFiles)
- await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
- } catch {
- console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
}
- connections.forEach((conn) => conn.send("rebuild"))
- }
+ return false
+ },
+
+ changesSinceLastBuild: {},
+ lastBuildMs: 0,
}
const watcher = chokidar.watch(".", {
+ awaitWriteFinish: { stabilityThreshold: 250 },
persistent: true,
cwd: argv.directory,
ignoreInitial: true,
})
+ const changes: ChangeEvent[] = []
watcher
- .on("add", (fp) => rebuild(fp, "add"))
- .on("change", (fp) => rebuild(fp, "change"))
- .on("unlink", (fp) => rebuild(fp, "unlink"))
-
- const server = http.createServer(async (req, res) => {
- await serveHandler(req, res, {
- public: argv.output,
- directoryListing: false,
+ .on("add", (fp) => {
+ fp = toPosixPath(fp)
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "add" })
+ void rebuild(changes, clientRefresh, buildData)
})
- const status = res.statusCode
- const statusString =
- status >= 200 && status < 300
- ? chalk.green(`[${status}]`)
- : status >= 300 && status < 400
- ? chalk.yellow(`[${status}]`)
- : chalk.red(`[${status}]`)
- console.log(statusString + chalk.grey(` ${req.url}`))
- })
- server.listen(argv.port)
- console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`))
- console.log("hint: exit with ctrl+c")
+ .on("change", (fp) => {
+ fp = toPosixPath(fp)
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "change" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+ .on("unlink", (fp) => {
+ fp = toPosixPath(fp)
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "delete" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+
+ return async () => {
+ await watcher.close()
+ }
}
-export default async (argv: Argv, version: string) => {
+async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
+ const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
+ const { argv, cfg } = ctx
+
+ const buildId = randomIdNonSecure()
+ ctx.buildId = buildId
+ buildData.lastBuildMs = new Date().getTime()
+ const numChangesInBuild = changes.length
+ const release = await mut.acquire()
+
+ // if there's another build after us, release and let them do it
+ if (ctx.buildId !== buildId) {
+ release()
+ return
+ }
+
+ const perf = new PerfTimer()
+ perf.addEvent("rebuild")
+ console.log(styleText("yellow", "Detected change, rebuilding..."))
+
+ // update changesSinceLastBuild
+ for (const change of changes) {
+ changesSinceLastBuild[change.path] = change.type
+ }
+
+ const staticResources = getStaticResourcesFromPlugins(ctx)
+ const pathsToParse: FilePath[] = []
+ for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
+ if (type === "delete" || path.extname(fp) !== ".md") continue
+ const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
+ pathsToParse.push(fullPath)
+ }
+
+ const parsed = await parseMarkdown(ctx, pathsToParse)
+ for (const content of parsed) {
+ contentMap.set(content[1].data.relativePath!, {
+ type: "markdown",
+ content,
+ })
+ }
+
+ // update state using changesSinceLastBuild
+ // we do this weird play of add => compute change events => remove
+ // so that partialEmitters can do appropriate cleanup based on the content of deleted files
+ for (const [file, change] of Object.entries(changesSinceLastBuild)) {
+ if (change === "delete") {
+ // universal delete case
+ contentMap.delete(file as FilePath)
+ }
+
+    // manually track added non-markdown files as "other" entries,
+    // since the markdown parse above only covers .md files
+ if (change === "add" && path.extname(file) !== ".md") {
+ contentMap.set(file as FilePath, {
+ type: "other",
+ })
+ }
+ }
+
+ const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
+ const path = fp as FilePath
+ const processedContent = contentMap.get(path)
+ if (processedContent?.type === "markdown") {
+ const [_tree, file] = processedContent.content
+ return {
+ type,
+ path,
+ file,
+ }
+ }
+
+ return {
+ type,
+ path,
+ }
+ })
+
+ // update allFiles and then allSlugs with the consistent view of content map
+ ctx.allFiles = Array.from(contentMap.keys())
+ ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))
+ let processedFiles = filterContent(
+ ctx,
+ Array.from(contentMap.values())
+ .filter((file) => file.type === "markdown")
+ .map((file) => file.content),
+ )
+
+ let emittedFiles = 0
+ for (const emitter of cfg.plugins.emitters) {
+ // Try to use partialEmit if available, otherwise assume the output is static
+ const emitFn = emitter.partialEmit ?? emitter.emit
+ const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
+ if (emitted === null) {
+ continue
+ }
+
+ if (Symbol.asyncIterator in emitted) {
+ // Async generator case
+ for await (const file of emitted) {
+ emittedFiles++
+ if (ctx.argv.verbose) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ }
+ }
+ } else {
+ // Array case
+ emittedFiles += emitted.length
+ if (ctx.argv.verbose) {
+ for (const file of emitted) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ }
+ }
+ }
+ }
+
+ console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
+ console.log(styleText("green", `Done rebuilding in ${perf.timeSince()}`))
+ changes.splice(0, numChangesInBuild)
+ clientRefresh()
+ release()
+}
+
+export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {
try {
- await buildQuartz(argv, version)
- } catch {
- console.log(chalk.red("\nExiting Quartz due to a fatal error"))
- process.exit(1)
+ return await buildQuartz(argv, mut, clientRefresh)
+ } catch (err) {
+ trace("\nExiting Quartz due to a fatal error", err as Error)
}
}
--
Gitblit v1.10.0