From d2f52549955ff7600cc5897e67806df4ebf85f91 Mon Sep 17 00:00:00 2001
From: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
Date: Wed, 23 Aug 2023 16:05:01 +0000
Subject: [PATCH] fix(esbuild): conflict with esbuild-sass-plugin (#402)

---
 quartz/build.ts |  125 ++++++++++++++++++++---------------------
 1 file changed, 61 insertions(+), 64 deletions(-)

diff --git a/quartz/build.ts b/quartz/build.ts
index 05edf86..8b1d318 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -1,21 +1,7 @@
 import sourceMapSupport from "source-map-support"
-sourceMapSupport.install({
-  retrieveSourceMap(source) {
-    // source map hack to get around query param
-    // import cache busting
-    if (source.includes(".quartz-cache")) {
-      let realSource = fileURLToPath(source.split("?", 2)[0] + ".map")
-      return {
-        map: fs.readFileSync(realSource, "utf8"),
-      }
-    } else {
-      return null
-    }
-  },
-})
-
+sourceMapSupport.install(options)
 import path from "path"
-import { PerfTimer } from "./perf"
+import { PerfTimer } from "./util/perf"
 import { rimraf } from "rimraf"
 import { isGitIgnored } from "globby"
 import chalk from "chalk"
@@ -23,16 +9,16 @@
 import { filterContent } from "./processors/filter"
 import { emitContent } from "./processors/emit"
 import cfg from "../quartz.config"
-import { FilePath, ServerSlug, joinSegments, slugifyFilePath } from "./path"
+import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
 import chokidar from "chokidar"
 import { ProcessedContent } from "./plugins/vfile"
-import { Argv, BuildCtx } from "./ctx"
-import { glob, toPosixPath } from "./glob"
-import { trace } from "./trace"
-import { fileURLToPath } from "url"
-import fs from "fs"
+import { Argv, BuildCtx } from "./util/ctx"
+import { glob, toPosixPath } from "./util/glob"
+import { trace } from "./util/trace"
+import { options } from "./util/sourcemap"
+import { Mutex } from "async-mutex"
 
-async function buildQuartz(argv: Argv, clientRefresh: () => void) {
+async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
   const ctx: BuildCtx = {
     argv,
     cfg,
@@ -52,6 +38,7 @@
     console.log(`  Emitters: ${pluginNames("emitters").join(", ")}`)
   }
 
+  const release = await mut.acquire()
   perf.addEvent("clean")
   await rimraf(output)
   console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
@@ -70,15 +57,17 @@
   const filteredContent = filterContent(ctx, parsedFiles)
   await emitContent(ctx, filteredContent)
   console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
+  release()
 
   if (argv.serve) {
-    return startServing(ctx, parsedFiles, clientRefresh)
+    return startServing(ctx, mut, parsedFiles, clientRefresh)
   }
 }
 
 // setup watcher for rebuilds
 async function startServing(
   ctx: BuildCtx,
+  mut: Mutex,
   initialContent: ProcessedContent[],
   clientRefresh: () => void,
 ) {
@@ -92,10 +81,10 @@
   }
 
   const initialSlugs = ctx.allSlugs
-  let timeoutId: ReturnType<typeof setTimeout> | null = null
-  let toRebuild: Set<FilePath> = new Set()
-  let toRemove: Set<FilePath> = new Set()
-  let trackedAssets: Set<FilePath> = new Set()
+  const timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
+  const toRebuild: Set<FilePath> = new Set()
+  const toRemove: Set<FilePath> = new Set()
+  const trackedAssets: Set<FilePath> = new Set()
   async function rebuild(fp: string, action: "add" | "change" | "delete") {
     // don't do anything for gitignored files
     if (ignored(fp)) {
@@ -115,53 +104,56 @@
       return
     }
 
-
     if (action === "add" || action === "change") {
       toRebuild.add(filePath)
     } else if (action === "delete") {
       toRemove.add(filePath)
     }
 
-    if (timeoutId) {
-      clearTimeout(timeoutId)
-    }
-
     // debounce rebuilds every 250ms
-    timeoutId = setTimeout(async () => {
-      const perf = new PerfTimer()
-      console.log(chalk.yellow("Detected change, rebuilding..."))
-      try {
-        const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
+    timeoutIds.add(
+      setTimeout(async () => {
+        const release = await mut.acquire()
+        timeoutIds.forEach((id) => clearTimeout(id))
+        timeoutIds.clear()
 
-        const trackedSlugs =
-          [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
+        const perf = new PerfTimer()
+        console.log(chalk.yellow("Detected change, rebuilding..."))
+        try {
+          const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
+
+          const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
             .filter((fp) => !toRemove.has(fp))
             .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
 
-        ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
-        const parsedContent = await parseMarkdown(ctx, filesToRebuild)
-        for (const content of parsedContent) {
-          const [_tree, vfile] = content
-          contentMap.set(vfile.data.filePath!, content)
+          ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
+          const parsedContent = await parseMarkdown(ctx, filesToRebuild)
+          for (const content of parsedContent) {
+            const [_tree, vfile] = content
+            contentMap.set(vfile.data.filePath!, content)
+          }
+
+          for (const fp of toRemove) {
+            contentMap.delete(fp)
+          }
+
+          // TODO: we can probably traverse the link graph to figure out what's safe to delete here
+          // instead of just deleting everything
+          await rimraf(argv.output)
+          const parsedFiles = [...contentMap.values()]
+          const filteredContent = filterContent(ctx, parsedFiles)
+          await emitContent(ctx, filteredContent)
+          console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
+        } catch {
+          console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
         }
 
-        for (const fp of toRemove) {
-          contentMap.delete(fp)
-        }
-
-        await rimraf(argv.output)
-        const parsedFiles = [...contentMap.values()]
-        const filteredContent = filterContent(ctx, parsedFiles)
-        await emitContent(ctx, filteredContent)
-        console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
-      } catch {
-        console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
-      }
-
-      clientRefresh()
-      toRebuild.clear()
-      toRemove.clear()
-    }, 250)
+        clientRefresh()
+        toRebuild.clear()
+        toRemove.clear()
+        release()
+      }, 250),
+    )
   }
 
   const watcher = chokidar.watch(".", {
@@ -174,11 +166,16 @@
     .on("add", (fp) => rebuild(fp, "add"))
     .on("change", (fp) => rebuild(fp, "change"))
     .on("unlink", (fp) => rebuild(fp, "delete"))
+
+  return async () => {
+    timeoutIds.forEach((id) => clearTimeout(id))
+    await watcher.close()
+  }
 }
 
-export default async (argv: Argv, clientRefresh: () => void) => {
+export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {
   try {
-    return await buildQuartz(argv, clientRefresh)
+    return await buildQuartz(argv, mut, clientRefresh)
   } catch (err) {
     trace("\nExiting Quartz due to a fatal error", err as Error)
   }

--
Gitblit v1.10.0