From 951d1dec24eb8e0bea4ec548cc79c5ce718bf02f Mon Sep 17 00:00:00 2001
From: fl0werpowers <47599466+fl0werpowers@users.noreply.github.com>
Date: Wed, 28 May 2025 08:40:51 +0000
Subject: [PATCH] chore(deps): replace `chalk` and `rimraf` with builtin functions (#1879)
---
quartz/build.ts | 310 ++++++++++++++++++++++++++++++++++++---------------
 1 file changed, 220 insertions(+), 90 deletions(-)
diff --git a/quartz/build.ts b/quartz/build.ts
index b5b1f9e..9e657b6 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -2,9 +2,9 @@
sourceMapSupport.install(options)
import path from "path"
import { PerfTimer } from "./util/perf"
-import { rimraf } from "rimraf"
-import { isGitIgnored } from "globby"
-import chalk from "chalk"
+import { rm } from "fs/promises"
+import { GlobbyFilterFunction, isGitIgnored } from "globby"
+import { styleText } from "util"
import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
@@ -16,12 +16,40 @@
import { glob, toPosixPath } from "./util/glob"
import { trace } from "./util/trace"
import { options } from "./util/sourcemap"
+import { Mutex } from "async-mutex"
+import { getStaticResourcesFromPlugins } from "./plugins"
+import { randomIdNonSecure } from "./util/random"
+import { ChangeEvent } from "./plugins/types"
+import { minimatch } from "minimatch"
-async function buildQuartz(argv: Argv, clientRefresh: () => void) {
+type ContentMap = Map<
+ FilePath,
+ | {
+ type: "markdown"
+ content: ProcessedContent
+ }
+ | {
+ type: "other"
+ }
+>
+
+type BuildData = {
+ ctx: BuildCtx
+ ignored: GlobbyFilterFunction
+ mut: Mutex
+ contentMap: ContentMap
+ changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
+ lastBuildMs: number
+}
+
+async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
const ctx: BuildCtx = {
+ buildId: randomIdNonSecure(),
argv,
cfg,
allSlugs: [],
+ allFiles: [],
+ incremental: false,
}
const perf = new PerfTimer()
@@ -37,114 +65,80 @@
console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
}
+ const release = await mut.acquire()
perf.addEvent("clean")
- await rimraf(output)
+ await rm(output, { recursive: true, force: true })
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
perf.addEvent("glob")
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
- const fps = allFiles.filter((fp) => fp.endsWith(".md"))
+ const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
console.log(
- `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
+ `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
)
- const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ ctx.allFiles = allFiles
ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
const parsedFiles = await parseMarkdown(ctx, filePaths)
const filteredContent = filterContent(ctx, parsedFiles)
- await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
- if (argv.serve) {
- return startServing(ctx, parsedFiles, clientRefresh)
+ await emitContent(ctx, filteredContent)
+ console.log(
+ styleText("green", `Done processing ${markdownPaths.length} files in ${perf.timeSince()}`),
+ )
+ release()
+
+ if (argv.watch) {
+ ctx.incremental = true
+ return startWatching(ctx, mut, parsedFiles, clientRefresh)
}
}
// setup watcher for rebuilds
-async function startServing(
+async function startWatching(
ctx: BuildCtx,
+ mut: Mutex,
initialContent: ProcessedContent[],
clientRefresh: () => void,
) {
- const { argv } = ctx
+ const { argv, allFiles } = ctx
- const ignored = await isGitIgnored()
- const contentMap = new Map<FilePath, ProcessedContent>()
- for (const content of initialContent) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
+ const contentMap: ContentMap = new Map()
+ for (const filePath of allFiles) {
+ contentMap.set(filePath, {
+ type: "other",
+ })
}
- const initialSlugs = ctx.allSlugs
- let timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
- let toRebuild: Set<FilePath> = new Set()
- let toRemove: Set<FilePath> = new Set()
- let trackedAssets: Set<FilePath> = new Set()
- async function rebuild(fp: string, action: "add" | "change" | "delete") {
- // don't do anything for gitignored files
- if (ignored(fp)) {
- return
- }
+ for (const content of initialContent) {
+ const [_tree, vfile] = content
+ contentMap.set(vfile.data.relativePath!, {
+ type: "markdown",
+ content,
+ })
+ }
- // dont bother rebuilding for non-content files, just track and refresh
- fp = toPosixPath(fp)
- const filePath = joinSegments(argv.directory, fp) as FilePath
- if (path.extname(fp) !== ".md") {
- if (action === "add" || action === "change") {
- trackedAssets.add(filePath)
- } else if (action === "delete") {
- trackedAssets.delete(filePath)
- }
- clientRefresh()
- return
- }
-
- if (action === "add" || action === "change") {
- toRebuild.add(filePath)
- } else if (action === "delete") {
- toRemove.add(filePath)
- }
-
- timeoutIds.forEach((id) => clearTimeout(id))
-
- // debounce rebuilds every 250ms
- timeoutIds.add(
- setTimeout(async () => {
- const perf = new PerfTimer()
- console.log(chalk.yellow("Detected change, rebuilding..."))
- try {
- const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
-
- const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
- .filter((fp) => !toRemove.has(fp))
- .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
-
- ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
- const parsedContent = await parseMarkdown(ctx, filesToRebuild)
- for (const content of parsedContent) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
- }
-
- for (const fp of toRemove) {
- contentMap.delete(fp)
- }
-
- await rimraf(argv.output)
- const parsedFiles = [...contentMap.values()]
- const filteredContent = filterContent(ctx, parsedFiles)
- await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
- } catch {
- console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
+ const gitIgnoredMatcher = await isGitIgnored()
+ const buildData: BuildData = {
+ ctx,
+ mut,
+ contentMap,
+ ignored: (path) => {
+ if (gitIgnoredMatcher(path)) return true
+ const pathStr = path.toString()
+ for (const pattern of cfg.configuration.ignorePatterns) {
+ if (minimatch(pathStr, pattern)) {
+ return true
}
+ }
- clientRefresh()
- toRebuild.clear()
- toRemove.clear()
- }, 250),
- )
+ return false
+ },
+
+ changesSinceLastBuild: {},
+ lastBuildMs: 0,
}
const watcher = chokidar.watch(".", {
@@ -153,15 +147,151 @@
ignoreInitial: true,
})
+ const changes: ChangeEvent[] = []
watcher
- .on("add", (fp) => rebuild(fp, "add"))
- .on("change", (fp) => rebuild(fp, "change"))
- .on("unlink", (fp) => rebuild(fp, "delete"))
+ .on("add", (fp) => {
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "add" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+ .on("change", (fp) => {
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "change" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+ .on("unlink", (fp) => {
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "delete" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+
+ return async () => {
+ await watcher.close()
+ }
}
-export default async (argv: Argv, clientRefresh: () => void) => {
+async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
+ const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
+ const { argv, cfg } = ctx
+
+ const buildId = randomIdNonSecure()
+ ctx.buildId = buildId
+ buildData.lastBuildMs = new Date().getTime()
+ const numChangesInBuild = changes.length
+ const release = await mut.acquire()
+
+ // if there's another build after us, release and let them do it
+ if (ctx.buildId !== buildId) {
+ release()
+ return
+ }
+
+ const perf = new PerfTimer()
+ perf.addEvent("rebuild")
+ console.log(styleText("yellow", "Detected change, rebuilding..."))
+
+ // update changesSinceLastBuild
+ for (const change of changes) {
+ changesSinceLastBuild[change.path] = change.type
+ }
+
+ const staticResources = getStaticResourcesFromPlugins(ctx)
+ const pathsToParse: FilePath[] = []
+ for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
+ if (type === "delete" || path.extname(fp) !== ".md") continue
+ const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
+ pathsToParse.push(fullPath)
+ }
+
+ const parsed = await parseMarkdown(ctx, pathsToParse)
+ for (const content of parsed) {
+ contentMap.set(content[1].data.relativePath!, {
+ type: "markdown",
+ content,
+ })
+ }
+
+ // update state using changesSinceLastBuild
+ // we do this weird play of add => compute change events => remove
+ // so that partialEmitters can do appropriate cleanup based on the content of deleted files
+ for (const [file, change] of Object.entries(changesSinceLastBuild)) {
+ if (change === "delete") {
+ // universal delete case
+ contentMap.delete(file as FilePath)
+ }
+
+ // manually track non-markdown files here, since the parse step above
+ // only populates contentMap with markdown entries
+ if (change === "add" && path.extname(file) !== ".md") {
+ contentMap.set(file as FilePath, {
+ type: "other",
+ })
+ }
+ }
+
+ const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
+ const path = fp as FilePath
+ const processedContent = contentMap.get(path)
+ if (processedContent?.type === "markdown") {
+ const [_tree, file] = processedContent.content
+ return {
+ type,
+ path,
+ file,
+ }
+ }
+
+ return {
+ type,
+ path,
+ }
+ })
+
+ // update allFiles and then allSlugs with the consistent view of content map
+ ctx.allFiles = Array.from(contentMap.keys())
+ ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))
+ const processedFiles = Array.from(contentMap.values())
+ .filter((file) => file.type === "markdown")
+ .map((file) => file.content)
+
+ let emittedFiles = 0
+ for (const emitter of cfg.plugins.emitters) {
+ // Try to use partialEmit if available, otherwise assume the output is static
+ const emitFn = emitter.partialEmit ?? emitter.emit
+ const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
+ if (emitted === null) {
+ continue
+ }
+
+ if (Symbol.asyncIterator in emitted) {
+ // Async generator case
+ for await (const file of emitted) {
+ emittedFiles++
+ if (ctx.argv.verbose) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ }
+ }
+ } else {
+ // Array case
+ emittedFiles += emitted.length
+ if (ctx.argv.verbose) {
+ for (const file of emitted) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ }
+ }
+ }
+ }
+
+ console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
+ console.log(styleText("green", `Done rebuilding in ${perf.timeSince()}`))
+ changes.splice(0, numChangesInBuild)
+ clientRefresh()
+ release()
+}
+
+export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {
try {
- return await buildQuartz(argv, clientRefresh)
+ return await buildQuartz(argv, mut, clientRefresh)
} catch (err) {
trace("\nExiting Quartz due to a fatal error", err as Error)
}
--
Gitblit v1.10.0