From 0998bc355e6425e6b2bdf3d2da7124aa7b63b2a2 Mon Sep 17 00:00:00 2001
From: Jacky Zhao <j.zhao2k19@gmail.com>
Date: Thu, 17 Aug 2023 08:58:11 +0000
Subject: [PATCH] fix rebuild debouncing

---
 quartz/build.ts                       |   68 +++++++++++++++++-----------------
 content/features/upcoming features.md |    3 -
 quartz/bootstrap-cli.mjs              |    8 +++
 3 files changed, 42 insertions(+), 37 deletions(-)

diff --git a/content/features/upcoming features.md b/content/features/upcoming features.md
index 7093a5b..28ee48a 100644
--- a/content/features/upcoming features.md
+++ b/content/features/upcoming features.md
@@ -4,9 +4,8 @@
 
 ## todo
 
-- debounce cfg rebuild on large repos
-  - investigate content rebuild triggering multiple times even when debounced, causing an esbuild deadlock
 - dereference symlink for npx quartz sync
+  - prompt the user whether to do it (it's expensive for large vaults)
 
 ## high priority backlog
 
diff --git a/quartz/bootstrap-cli.mjs b/quartz/bootstrap-cli.mjs
index c1c1308..8efa7b0 100755
--- a/quartz/bootstrap-cli.mjs
+++ b/quartz/bootstrap-cli.mjs
@@ -355,6 +355,7 @@
       ],
     })
 
+    const timeoutIds = new Set()
     const build = async (clientRefresh) => {
       const result = await ctx.rebuild().catch((err) => {
         console.error(`${chalk.red("Couldn't parse Quartz configuration:")} ${fp}`)
@@ -380,6 +381,11 @@
       clientRefresh()
     }
 
+    const rebuild = (clientRefresh) => {
+      timeoutIds.forEach((id) => clearTimeout(id))
+      timeoutIds.add(setTimeout(() => build(clientRefresh), 250))
+    }
+
     if (argv.serve) {
       const wss = new WebSocketServer({ port: 3001 })
       const connections = []
@@ -457,7 +463,7 @@
         })
         .on("all", async () => {
           console.log(chalk.yellow("Detected a source code change, doing a hard rebuild..."))
-          await build(clientRefresh)
+          rebuild(clientRefresh)
         })
     } else {
       await build(() => {})
diff --git a/quartz/build.ts b/quartz/build.ts
index 779ab35..b5b1f9e 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -77,7 +77,7 @@
   }
 
   const initialSlugs = ctx.allSlugs
-  let timeoutId: ReturnType<typeof setTimeout> | null = null
+  let timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
   let toRebuild: Set<FilePath> = new Set()
   let toRemove: Set<FilePath> = new Set()
   let trackedAssets: Set<FilePath> = new Set()
@@ -106,45 +106,45 @@
       toRemove.add(filePath)
     }
 
-    if (timeoutId) {
-      clearTimeout(timeoutId)
-    }
+    timeoutIds.forEach((id) => clearTimeout(id))
 
     // debounce rebuilds every 250ms
-    timeoutId = setTimeout(async () => {
-      const perf = new PerfTimer()
-      console.log(chalk.yellow("Detected change, rebuilding..."))
-      try {
-        const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
+    timeoutIds.add(
+      setTimeout(async () => {
+        const perf = new PerfTimer()
+        console.log(chalk.yellow("Detected change, rebuilding..."))
+        try {
+          const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
 
-        const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
-          .filter((fp) => !toRemove.has(fp))
-          .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
+          const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
+            .filter((fp) => !toRemove.has(fp))
+            .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
 
-        ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
-        const parsedContent = await parseMarkdown(ctx, filesToRebuild)
-        for (const content of parsedContent) {
-          const [_tree, vfile] = content
-          contentMap.set(vfile.data.filePath!, content)
+          ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
+          const parsedContent = await parseMarkdown(ctx, filesToRebuild)
+          for (const content of parsedContent) {
+            const [_tree, vfile] = content
+            contentMap.set(vfile.data.filePath!, content)
+          }
+
+          for (const fp of toRemove) {
+            contentMap.delete(fp)
+          }
+
+          await rimraf(argv.output)
+          const parsedFiles = [...contentMap.values()]
+          const filteredContent = filterContent(ctx, parsedFiles)
+          await emitContent(ctx, filteredContent)
+          console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
+        } catch {
+          console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
         }
 
-        for (const fp of toRemove) {
-          contentMap.delete(fp)
-        }
-
-        await rimraf(argv.output)
-        const parsedFiles = [...contentMap.values()]
-        const filteredContent = filterContent(ctx, parsedFiles)
-        await emitContent(ctx, filteredContent)
-        console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
-      } catch {
-        console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
-      }
-
-      clientRefresh()
-      toRebuild.clear()
-      toRemove.clear()
-    }, 250)
+        clientRefresh()
+        toRebuild.clear()
+        toRemove.clear()
+      }, 250),
+    )
   }
 
   const watcher = chokidar.watch(".", {

--
Gitblit v1.10.0