From e47c29d2fd4eae06c511c914e2a5fcb057b44d7b Mon Sep 17 00:00:00 2001
From: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 19 Aug 2024 21:14:29 +0000
Subject: [PATCH] chore(deps): bump rehype-katex from 7.0.0 to 7.0.1 (#1356)
---
quartz/build.ts | 65 +++++++++++++++++++++-----------
 1 file changed, 43 insertions(+), 22 deletions(-)
diff --git a/quartz/build.ts b/quartz/build.ts
index ed166bb..342a27c 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -38,8 +38,13 @@
type FileEvent = "add" | "change" | "delete"
+function newBuildId() {
+ return new Date().toISOString()
+}
+
async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
const ctx: BuildCtx = {
+ buildId: newBuildId(),
argv,
cfg,
allSlugs: [],
@@ -60,7 +65,7 @@
const release = await mut.acquire()
perf.addEvent("clean")
- await rimraf(output)
+ await rimraf(path.join(output, "*"), { glob: true })
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
perf.addEvent("glob")
@@ -167,6 +172,7 @@
const perf = new PerfTimer()
console.log(chalk.yellow("Detected change, rebuilding..."))
+ ctx.buildId = newBuildId()
// UPDATE DEP GRAPH
const fp = joinSegments(argv.directory, toPosixPath(filepath)) as FilePath
@@ -185,9 +191,14 @@
const emitterGraph =
(await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
- // emmiter may not define a dependency graph. nothing to update if so
if (emitterGraph) {
- dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
+ const existingGraph = dependencies[emitter.name]
+ if (existingGraph !== null) {
+ existingGraph.mergeGraph(emitterGraph)
+ } else {
+ // might be the first time we're adding a markdown file
+ dependencies[emitter.name] = emitterGraph
+ }
}
}
break
@@ -203,8 +214,9 @@
const emitterGraph =
(await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
- // emmiter may not define a dependency graph. nothing to update if so
- if (emitterGraph) {
+ // only update the graph if the emitter plugin uses the changed file
+ // eg. Assets plugin ignores md files, so we skip updating the graph
+ if (emitterGraph?.hasNode(fp)) {
// merge the new dependencies into the dep graph
dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
}
@@ -223,7 +235,6 @@
// EMIT
perf.addEvent("rebuild")
let emittedFiles = 0
- const destinationsToDelete = new Set<FilePath>()
for (const emitter of cfg.plugins.emitters) {
const depGraph = dependencies[emitter.name]
@@ -263,11 +274,6 @@
// and supply [a.md, b.md] to the emitter
const upstreams = [...depGraph.getLeafNodeAncestors(fp)] as FilePath[]
- if (action === "delete" && upstreams.length === 1) {
- // if there's only one upstream, the destination is solely dependent on this file
- destinationsToDelete.add(upstreams[0])
- }
-
const upstreamContent = upstreams
// filter out non-markdown files
.filter((file) => contentMap.has(file))
@@ -290,14 +296,26 @@
console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
// CLEANUP
- // delete files that are solely dependent on this file
- await rimraf([...destinationsToDelete])
+ const destinationsToDelete = new Set<FilePath>()
for (const file of toRemove) {
// remove from cache
contentMap.delete(file)
- // remove the node from dependency graphs
- Object.values(dependencies).forEach((depGraph) => depGraph?.removeNode(file))
+ Object.values(dependencies).forEach((depGraph) => {
+ // remove the node from dependency graphs
+ depGraph?.removeNode(file)
+ // remove any orphan nodes. eg if a.md is deleted, a.html is orphaned and should be removed
+ const orphanNodes = depGraph?.removeOrphanNodes()
+ orphanNodes?.forEach((node) => {
+ // only delete files that are in the output directory
+ if (node.startsWith(argv.output)) {
+ destinationsToDelete.add(node)
+ }
+ })
+ })
}
+ await rimraf([...destinationsToDelete])
+
+ console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
toRemove.clear()
release()
@@ -351,14 +369,10 @@
const perf = new PerfTimer()
console.log(chalk.yellow("Detected change, rebuilding..."))
+ ctx.buildId = newBuildId()
+
try {
const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
-
- const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
- .filter((fp) => !toRemove.has(fp))
- .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
-
- ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
const parsedContent = await parseMarkdown(ctx, filesToRebuild)
for (const content of parsedContent) {
const [_tree, vfile] = content
@@ -372,9 +386,16 @@
const parsedFiles = [...contentMap.values()]
const filteredContent = filterContent(ctx, parsedFiles)
+ // re-update slugs
+ const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
+ .filter((fp) => !toRemove.has(fp))
+ .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
+
+ ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
+
// TODO: we can probably traverse the link graph to figure out what's safe to delete here
// instead of just deleting everything
- await rimraf(argv.output)
+ await rimraf(path.join(argv.output, ".*"), { glob: true })
await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
} catch (err) {
--
Gitblit v1.10.0