From fa6c02d3213dfd4e6da8e78bd3a2e7004555fd01 Mon Sep 17 00:00:00 2001
From: Jacky Zhao <j.zhao2k19@gmail.com>
Date: Wed, 24 Jan 2024 01:08:56 +0000
Subject: [PATCH] fix: make search result card block
---
quartz/build.ts | 272 ++++++++++++++++++++++++++++++++++-------------------
 1 file changed, 173 insertions(+), 99 deletions(-)
diff --git a/quartz/build.ts b/quartz/build.ts
index e5bfcaa..b78ff2b 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -1,30 +1,43 @@
-import "source-map-support/register.js"
+import sourceMapSupport from "source-map-support"
+sourceMapSupport.install(options)
import path from "path"
-import { PerfTimer } from "./perf"
+import { PerfTimer } from "./util/perf"
import { rimraf } from "rimraf"
-import { globby, isGitIgnored } from "globby"
+import { GlobbyFilterFunction, isGitIgnored } from "globby"
import chalk from "chalk"
-import http from "http"
-import serveHandler from "serve-handler"
import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
-import { FilePath } from "./path"
+import { FilePath, FullSlug, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
-import WebSocket, { WebSocketServer } from "ws"
+import { Argv, BuildCtx } from "./util/ctx"
+import { glob, toPosixPath } from "./util/glob"
+import { trace } from "./util/trace"
+import { options } from "./util/sourcemap"
+import { Mutex } from "async-mutex"
-interface Argv {
- directory: string
- verbose: boolean
- output: string
- serve: boolean
- port: number
+type BuildData = {
+ ctx: BuildCtx
+ ignored: GlobbyFilterFunction
+ mut: Mutex
+ initialSlugs: FullSlug[]
+ // TODO merge contentMap and trackedAssets
+ contentMap: Map<FilePath, ProcessedContent>
+ trackedAssets: Set<FilePath>
+ toRebuild: Set<FilePath>
+ toRemove: Set<FilePath>
+ lastBuildMs: number
}
-async function buildQuartz(argv: Argv, version: string) {
- console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
+async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
+ const ctx: BuildCtx = {
+ argv,
+ cfg,
+ allSlugs: [],
+ }
+
const perf = new PerfTimer()
const output = argv.output
@@ -38,113 +51,174 @@
console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
}
- // clean
+ const release = await mut.acquire()
perf.addEvent("clean")
await rimraf(output)
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
- // glob
perf.addEvent("glob")
- const fps = await globby("**/*.md", {
- cwd: argv.directory,
- ignore: cfg.configuration.ignorePatterns,
- gitignore: true,
- })
+ const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
+ const fps = allFiles.filter((fp) => fp.endsWith(".md")).sort()
console.log(
`Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
)
- const filePaths = fps.map((fp) => `${argv.directory}${path.sep}${fp}` as FilePath)
- const parsedFiles = await parseMarkdown(
- cfg.plugins.transformers,
- argv.directory,
- filePaths,
- argv.verbose,
- )
- const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
- await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
+ const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
+
+ const parsedFiles = await parseMarkdown(ctx, filePaths)
+ const filteredContent = filterContent(ctx, parsedFiles)
+ await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
+ release()
if (argv.serve) {
- const wss = new WebSocketServer({ port: 3001 })
- const connections: WebSocket[] = []
- wss.on("connection", (ws) => connections.push(ws))
+ return startServing(ctx, mut, parsedFiles, clientRefresh)
+ }
+}
- const ignored = await isGitIgnored()
- const contentMap = new Map<FilePath, ProcessedContent>()
- for (const content of parsedFiles) {
+// setup watcher for rebuilds
+async function startServing(
+ ctx: BuildCtx,
+ mut: Mutex,
+ initialContent: ProcessedContent[],
+ clientRefresh: () => void,
+) {
+ const { argv } = ctx
+
+ const contentMap = new Map<FilePath, ProcessedContent>()
+ for (const content of initialContent) {
+ const [_tree, vfile] = content
+ contentMap.set(vfile.data.filePath!, content)
+ }
+
+ const buildData: BuildData = {
+ ctx,
+ mut,
+ contentMap,
+ ignored: await isGitIgnored(),
+ initialSlugs: ctx.allSlugs,
+ toRebuild: new Set<FilePath>(),
+ toRemove: new Set<FilePath>(),
+ trackedAssets: new Set<FilePath>(),
+ lastBuildMs: 0,
+ }
+
+ const watcher = chokidar.watch(".", {
+ persistent: true,
+ cwd: argv.directory,
+ ignoreInitial: true,
+ })
+
+ watcher
+ .on("add", (fp) => rebuildFromEntrypoint(fp, "add", clientRefresh, buildData))
+ .on("change", (fp) => rebuildFromEntrypoint(fp, "change", clientRefresh, buildData))
+ .on("unlink", (fp) => rebuildFromEntrypoint(fp, "delete", clientRefresh, buildData))
+
+ return async () => {
+ await watcher.close()
+ }
+}
+
+async function rebuildFromEntrypoint(
+ fp: string,
+ action: "add" | "change" | "delete",
+ clientRefresh: () => void,
+ buildData: BuildData, // note: this function mutates buildData
+) {
+ const {
+ ctx,
+ ignored,
+ mut,
+ initialSlugs,
+ contentMap,
+ toRebuild,
+ toRemove,
+ trackedAssets,
+ lastBuildMs,
+ } = buildData
+
+ const { argv } = ctx
+
+ // don't do anything for gitignored files
+ if (ignored(fp)) {
+ return
+ }
+
+ // dont bother rebuilding for non-content files, just track and refresh
+ fp = toPosixPath(fp)
+ const filePath = joinSegments(argv.directory, fp) as FilePath
+ if (path.extname(fp) !== ".md") {
+ if (action === "add" || action === "change") {
+ trackedAssets.add(filePath)
+ } else if (action === "delete") {
+ trackedAssets.delete(filePath)
+ }
+ clientRefresh()
+ return
+ }
+
+ if (action === "add" || action === "change") {
+ toRebuild.add(filePath)
+ } else if (action === "delete") {
+ toRemove.add(filePath)
+ }
+
+ // debounce rebuilds every 250ms
+
+ const buildStart = new Date().getTime()
+ buildData.lastBuildMs = buildStart
+ const release = await mut.acquire()
+ if (lastBuildMs > buildStart) {
+ release()
+ return
+ }
+
+ const perf = new PerfTimer()
+ console.log(chalk.yellow("Detected change, rebuilding..."))
+ try {
+ const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
+
+ const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
+ .filter((fp) => !toRemove.has(fp))
+ .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
+
+ ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
+ const parsedContent = await parseMarkdown(ctx, filesToRebuild)
+ for (const content of parsedContent) {
const [_tree, vfile] = content
contentMap.set(vfile.data.filePath!, content)
}
- async function rebuild(fp: string, action: "add" | "change" | "unlink") {
- perf.addEvent("rebuild")
- if (!ignored(fp)) {
- console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`))
- const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath
-
- try {
- if (action === "add" || action === "change") {
- const [parsedContent] = await parseMarkdown(
- cfg.plugins.transformers,
- argv.directory,
- [fullPath],
- argv.verbose,
- )
- contentMap.set(fullPath, parsedContent)
- } else if (action === "unlink") {
- contentMap.delete(fullPath)
- }
-
- await rimraf(output)
- const parsedFiles = [...contentMap.values()]
- const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
- await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
- console.log(chalk.green(`Done rebuilding in ${perf.timeSince("rebuild")}`))
- } catch {
- console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
- }
-
- connections.forEach((conn) => conn.send("rebuild"))
- }
+ for (const fp of toRemove) {
+ contentMap.delete(fp)
}
- const watcher = chokidar.watch(".", {
- persistent: true,
- cwd: argv.directory,
- ignoreInitial: true,
- })
+ const parsedFiles = [...contentMap.values()]
+ const filteredContent = filterContent(ctx, parsedFiles)
- watcher
- .on("add", (fp) => rebuild(fp, "add"))
- .on("change", (fp) => rebuild(fp, "change"))
- .on("unlink", (fp) => rebuild(fp, "unlink"))
-
- const server = http.createServer(async (req, res) => {
- await serveHandler(req, res, {
- public: output,
- directoryListing: false,
- })
- const status = res.statusCode
- const statusString =
- status >= 200 && status < 300
- ? chalk.green(`[${status}]`)
- : status >= 300 && status < 400
- ? chalk.yellow(`[${status}]`)
- : chalk.red(`[${status}]`)
- console.log(statusString + chalk.grey(` ${req.url}`))
- })
- server.listen(argv.port)
- console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`))
- console.log("hint: exit with ctrl+c")
+ // TODO: we can probably traverse the link graph to figure out what's safe to delete here
+ // instead of just deleting everything
+ await rimraf(argv.output)
+ await emitContent(ctx, filteredContent)
+ console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
+ } catch (err) {
+ console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
+ if (argv.verbose) {
+ console.log(chalk.red(err))
+ }
}
+
+ release()
+ clientRefresh()
+ toRebuild.clear()
+ toRemove.clear()
}
-export default async (argv: Argv, version: string) => {
+export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {
try {
- await buildQuartz(argv, version)
- } catch {
- console.log(chalk.red("\nExiting Quartz due to a fatal error"))
- process.exit(1)
+ return await buildQuartz(argv, mut, clientRefresh)
+ } catch (err) {
+ trace("\nExiting Quartz due to a fatal error", err as Error)
}
}
--
Gitblit v1.10.0