From 8cf3e3001f2cbd18da73fcc92ae5f4b76d3ecf21 Mon Sep 17 00:00:00 2001
From: Patsagorn Y. <49602385+ptsgrn@users.noreply.github.com>
Date: Thu, 16 Jan 2025 20:44:33 +0000
Subject: [PATCH] feat(i18n): Thai translations (#1722)

---
 quartz/build.ts |   87 +++++++++++++++++++++++++++----------------
 1 files changed, 55 insertions(+), 32 deletions(-)

diff --git a/quartz/build.ts b/quartz/build.ts
index ed166bb..64c462b 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -38,8 +38,13 @@
 
 type FileEvent = "add" | "change" | "delete"
 
+function newBuildId() {
+  return Math.random().toString(36).substring(2, 8)
+}
+
 async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
   const ctx: BuildCtx = {
+    buildId: newBuildId(),
     argv,
     cfg,
     allSlugs: [],
@@ -60,7 +65,7 @@
 
   const release = await mut.acquire()
   perf.addEvent("clean")
-  await rimraf(output)
+  await rimraf(path.join(output, "*"), { glob: true })
   console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
 
   perf.addEvent("glob")
@@ -134,9 +139,9 @@
 
   const buildFromEntry = argv.fastRebuild ? partialRebuildFromEntrypoint : rebuildFromEntrypoint
   watcher
-    .on("add", (fp) => buildFromEntry(fp, "add", clientRefresh, buildData))
-    .on("change", (fp) => buildFromEntry(fp, "change", clientRefresh, buildData))
-    .on("unlink", (fp) => buildFromEntry(fp, "delete", clientRefresh, buildData))
+    .on("add", (fp) => buildFromEntry(fp as string, "add", clientRefresh, buildData))
+    .on("change", (fp) => buildFromEntry(fp as string, "change", clientRefresh, buildData))
+    .on("unlink", (fp) => buildFromEntry(fp as string, "delete", clientRefresh, buildData))
 
   return async () => {
     await watcher.close()
@@ -157,10 +162,13 @@
     return
   }
 
-  const buildStart = new Date().getTime()
-  buildData.lastBuildMs = buildStart
+  const buildId = newBuildId()
+  ctx.buildId = buildId
+  buildData.lastBuildMs = new Date().getTime()
   const release = await mut.acquire()
-  if (buildData.lastBuildMs > buildStart) {
+
+  // if there's another build after us, release and let them do it
+  if (ctx.buildId !== buildId) {
     release()
     return
   }
@@ -185,9 +193,14 @@
         const emitterGraph =
           (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
 
-        // emmiter may not define a dependency graph. nothing to update if so
         if (emitterGraph) {
-          dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
+          const existingGraph = dependencies[emitter.name]
+          if (existingGraph !== null) {
+            existingGraph.mergeGraph(emitterGraph)
+          } else {
+            // might be the first time we're adding a markdown file
+            dependencies[emitter.name] = emitterGraph
+          }
         }
       }
       break
@@ -203,8 +216,9 @@
           const emitterGraph =
             (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
 
-          // emmiter may not define a dependency graph. nothing to update if so
-          if (emitterGraph) {
+          // only update the graph if the emitter plugin uses the changed file
+          // eg. Assets plugin ignores md files, so we skip updating the graph
+          if (emitterGraph?.hasNode(fp)) {
             // merge the new dependencies into the dep graph
             dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
           }
@@ -223,7 +237,6 @@
   // EMIT
   perf.addEvent("rebuild")
   let emittedFiles = 0
-  const destinationsToDelete = new Set<FilePath>()
 
   for (const emitter of cfg.plugins.emitters) {
     const depGraph = dependencies[emitter.name]
@@ -263,11 +276,6 @@
       // and supply [a.md, b.md] to the emitter
       const upstreams = [...depGraph.getLeafNodeAncestors(fp)] as FilePath[]
 
-      if (action === "delete" && upstreams.length === 1) {
-        // if there's only one upstream, the destination is solely dependent on this file
-        destinationsToDelete.add(upstreams[0])
-      }
-
       const upstreamContent = upstreams
         // filter out non-markdown files
         .filter((file) => contentMap.has(file))
@@ -290,14 +298,26 @@
   console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
 
   // CLEANUP
-  // delete files that are solely dependent on this file
-  await rimraf([...destinationsToDelete])
+  const destinationsToDelete = new Set<FilePath>()
   for (const file of toRemove) {
     // remove from cache
     contentMap.delete(file)
-    // remove the node from dependency graphs
-    Object.values(dependencies).forEach((depGraph) => depGraph?.removeNode(file))
+    Object.values(dependencies).forEach((depGraph) => {
+      // remove the node from dependency graphs
+      depGraph?.removeNode(file)
+      // remove any orphan nodes. eg if a.md is deleted, a.html is orphaned and should be removed
+      const orphanNodes = depGraph?.removeOrphanNodes()
+      orphanNodes?.forEach((node) => {
+        // only delete files that are in the output directory
+        if (node.startsWith(argv.output)) {
+          destinationsToDelete.add(node)
+        }
+      })
+    })
   }
+  await rimraf([...destinationsToDelete])
+
+  console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
 
   toRemove.clear()
   release()
@@ -339,26 +359,22 @@
     toRemove.add(filePath)
   }
 
-  const buildStart = new Date().getTime()
-  buildData.lastBuildMs = buildStart
+  const buildId = newBuildId()
+  ctx.buildId = buildId
+  buildData.lastBuildMs = new Date().getTime()
   const release = await mut.acquire()
 
   // there's another build after us, release and let them do it
-  if (buildData.lastBuildMs > buildStart) {
+  if (ctx.buildId !== buildId) {
     release()
     return
   }
 
   const perf = new PerfTimer()
   console.log(chalk.yellow("Detected change, rebuilding..."))
+
   try {
     const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
-
-    const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
-      .filter((fp) => !toRemove.has(fp))
-      .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
-
-    ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
     const parsedContent = await parseMarkdown(ctx, filesToRebuild)
     for (const content of parsedContent) {
       const [_tree, vfile] = content
@@ -372,9 +388,16 @@
     const parsedFiles = [...contentMap.values()]
     const filteredContent = filterContent(ctx, parsedFiles)
 
+    // re-update slugs
+    const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
+      .filter((fp) => !toRemove.has(fp))
+      .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
+
+    ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
+
     // TODO: we can probably traverse the link graph to figure out what's safe to delete here
     // instead of just deleting everything
-    await rimraf(argv.output)
+    await rimraf(path.join(argv.output, ".*"), { glob: true })
     await emitContent(ctx, filteredContent)
     console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
   } catch (err) {
@@ -384,10 +407,10 @@
     }
   }
 
-  release()
   clientRefresh()
   toRebuild.clear()
   toRemove.clear()
+  release()
 }
 
 export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {

--
Gitblit v1.10.0