perf: incremental rebuild (--fastRebuild v2 but default) (#1841)
* checkpoint
* incremental all the things
* properly splice changes array
* smol doc update
* update docs
* make fancy logger dumb in ci
2 files deleted
34 files modified
| | |
| | | |
| | | export type QuartzEmitterPluginInstance = { |
| | | name: string |
| | | emit(ctx: BuildCtx, content: ProcessedContent[], resources: StaticResources): Promise<FilePath[]> |
| | | emit( |
| | | ctx: BuildCtx, |
| | | content: ProcessedContent[], |
| | | resources: StaticResources, |
| | | ): Promise<FilePath[]> | AsyncGenerator<FilePath> |
| | | partialEmit?( |
| | | ctx: BuildCtx, |
| | | content: ProcessedContent[], |
| | | resources: StaticResources, |
| | | changeEvents: ChangeEvent[], |
| | | ): Promise<FilePath[]> | AsyncGenerator<FilePath> | null |
| | | getQuartzComponents(ctx: BuildCtx): QuartzComponent[] |
| | | } |
| | | ``` |
| | | |
| | | An emitter plugin must define a `name` field, an `emit` function, and a `getQuartzComponents` function. `emit` is responsible for looking at all the parsed and filtered content and then appropriately creating files and returning a list of paths to files the plugin created. |
| | | An emitter plugin must define a `name` field, an `emit` function, and a `getQuartzComponents` function. It can optionally implement a `partialEmit` function for incremental builds. |
| | | |
| | | - `emit` is responsible for looking at all the parsed and filtered content and then appropriately creating files and returning a list of paths to files the plugin created. |
| | | - `partialEmit` is an optional function that enables incremental builds. It receives information about which files have changed (`changeEvents`) and can selectively rebuild only the necessary files. This is useful for optimizing build times in development mode. If `partialEmit` is undefined, it will default to the `emit` function. |
| | | - `getQuartzComponents` declares which Quartz components the emitter uses to construct its pages. |
| | | |
| | | Creating new files can be done via regular Node [fs module](https://nodejs.org/api/fs.html) (i.e. `fs.cp` or `fs.writeFile`) or via the `write` function in `quartz/plugins/emitters/helpers.ts` if you are creating files that contain text. `write` has the following signature: |
| | | |
| | |
| | | ## 🔧 Features |
| | | |
| | | - [[Obsidian compatibility]], [[full-text search]], [[graph view]], note transclusion, [[wikilinks]], [[backlinks]], [[features/Latex|Latex]], [[syntax highlighting]], [[popover previews]], [[Docker Support]], [[i18n|internationalization]], [[comments]] and [many more](./features/) right out of the box |
| | | - Hot-reload for both configuration and content |
| | | - Hot-reload on configuration edits and incremental rebuilds for content edits |
| | | - Simple JSX layouts and [[creating components|page components]] |
| | | - [[SPA Routing|Ridiculously fast page loads]] and tiny bundle sizes |
| | | - Fully-customizable parsing, filtering, and page generation through [[making plugins|plugins]] |
| | |
| | | { |
| | | "name": "@jackyzha0/quartz", |
| | | "version": "4.4.1", |
| | | "version": "4.5.0", |
| | | "lockfileVersion": 3, |
| | | "requires": true, |
| | | "packages": { |
| | | "": { |
| | | "name": "@jackyzha0/quartz", |
| | | "version": "4.4.1", |
| | | "version": "4.5.0", |
| | | "license": "MIT", |
| | | "dependencies": { |
| | | "@clack/prompts": "^0.10.0", |
| | |
| | | "@myriaddreamin/rehype-typst": "^0.5.4", |
| | | "@napi-rs/simple-git": "0.1.19", |
| | | "@tweenjs/tween.js": "^25.0.0", |
| | | "ansi-truncate": "^1.2.0", |
| | | "async-mutex": "^0.5.0", |
| | | "chalk": "^5.4.1", |
| | | "chokidar": "^4.0.3", |
| | |
| | | "mdast-util-to-hast": "^13.2.0", |
| | | "mdast-util-to-string": "^4.0.0", |
| | | "micromorph": "^0.4.5", |
| | | "minimatch": "^10.0.1", |
| | | "pixi.js": "^8.8.1", |
| | | "preact": "^10.26.4", |
| | | "preact-render-to-string": "^6.5.13", |
| | |
| | | "url": "https://github.com/chalk/ansi-styles?sponsor=1" |
| | | } |
| | | }, |
| | | "node_modules/ansi-truncate": { |
| | | "version": "1.2.0", |
| | | "resolved": "https://registry.npmjs.org/ansi-truncate/-/ansi-truncate-1.2.0.tgz", |
| | | "integrity": "sha512-/SLVrxNIP8o8iRHjdK3K9s2hDqdvb86NEjZOAB6ecWFsOo+9obaby97prnvAPn6j7ExXCpbvtlJFYPkkspg4BQ==", |
| | | "license": "MIT", |
| | | "dependencies": { |
| | | "fast-string-truncated-width": "^1.2.0" |
| | | } |
| | | }, |
| | | "node_modules/argparse": { |
| | | "version": "2.0.1", |
| | | "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", |
| | |
| | | "node": ">=8.6.0" |
| | | } |
| | | }, |
| | | "node_modules/fast-string-truncated-width": { |
| | | "version": "1.2.1", |
| | | "resolved": "https://registry.npmjs.org/fast-string-truncated-width/-/fast-string-truncated-width-1.2.1.tgz", |
| | | "integrity": "sha512-Q9acT/+Uu3GwGj+5w/zsGuQjh9O1TyywhIwAxHudtWrgF09nHOPrvTLhQevPbttcxjr/SNN7mJmfOw/B1bXgow==", |
| | | "license": "MIT" |
| | | }, |
| | | "node_modules/fastq": { |
| | | "version": "1.19.0", |
| | | "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz", |
| | |
| | | "version": "10.0.1", |
| | | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", |
| | | "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", |
| | | "license": "ISC", |
| | | "dependencies": { |
| | | "brace-expansion": "^2.0.1" |
| | | }, |
| | |
| | | "name": "@jackyzha0/quartz", |
| | | "description": "🌱 publish your digital garden and notes as a website", |
| | | "private": true, |
| | | "version": "4.4.1", |
| | | "version": "4.5.0", |
| | | "type": "module", |
| | | "author": "jackyzha0 <j.zhao2k19@gmail.com>", |
| | | "license": "MIT", |
| | |
| | | "@myriaddreamin/rehype-typst": "^0.5.4", |
| | | "@napi-rs/simple-git": "0.1.19", |
| | | "@tweenjs/tween.js": "^25.0.0", |
| | | "ansi-truncate": "^1.2.0", |
| | | "async-mutex": "^0.5.0", |
| | | "chalk": "^5.4.1", |
| | | "chokidar": "^4.0.3", |
| | |
| | | "mdast-util-to-hast": "^13.2.0", |
| | | "mdast-util-to-string": "^4.0.0", |
| | | "micromorph": "^0.4.5", |
| | | "minimatch": "^10.0.1", |
| | | "pixi.js": "^8.8.1", |
| | | "preact": "^10.26.4", |
| | | "preact-render-to-string": "^6.5.13", |
| | |
| | | transformers: [ |
| | | Plugin.FrontMatter(), |
| | | Plugin.CreatedModifiedDate({ |
| | | priority: ["frontmatter", "filesystem"], |
| | | priority: ["git", "frontmatter", "filesystem"], |
| | | }), |
| | | Plugin.SyntaxHighlighting({ |
| | | theme: { |
| | |
| | | import { filterContent } from "./processors/filter" |
| | | import { emitContent } from "./processors/emit" |
| | | import cfg from "../quartz.config" |
| | | import { FilePath, FullSlug, joinSegments, slugifyFilePath } from "./util/path" |
| | | import { FilePath, joinSegments, slugifyFilePath } from "./util/path" |
| | | import chokidar from "chokidar" |
| | | import { ProcessedContent } from "./plugins/vfile" |
| | | import { Argv, BuildCtx } from "./util/ctx" |
| | |
| | | import { trace } from "./util/trace" |
| | | import { options } from "./util/sourcemap" |
| | | import { Mutex } from "async-mutex" |
| | | import DepGraph from "./depgraph" |
| | | import { getStaticResourcesFromPlugins } from "./plugins" |
| | | import { randomIdNonSecure } from "./util/random" |
| | | import { ChangeEvent } from "./plugins/types" |
| | | import { minimatch } from "minimatch" |
| | | |
| | | type Dependencies = Record<string, DepGraph<FilePath> | null> |
| | | type ContentMap = Map< |
| | | FilePath, |
| | | | { |
| | | type: "markdown" |
| | | content: ProcessedContent |
| | | } |
| | | | { |
| | | type: "other" |
| | | } |
| | | > |
| | | |
| | | type BuildData = { |
| | | ctx: BuildCtx |
| | | ignored: GlobbyFilterFunction |
| | | mut: Mutex |
| | | initialSlugs: FullSlug[] |
| | | // TODO merge contentMap and trackedAssets |
| | | contentMap: Map<FilePath, ProcessedContent> |
| | | trackedAssets: Set<FilePath> |
| | | toRebuild: Set<FilePath> |
| | | toRemove: Set<FilePath> |
| | | contentMap: ContentMap |
| | | changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]> |
| | | lastBuildMs: number |
| | | dependencies: Dependencies |
| | | } |
| | | |
| | | type FileEvent = "add" | "change" | "delete" |
| | | |
| | | async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) { |
| | | const ctx: BuildCtx = { |
| | | buildId: randomIdNonSecure(), |
| | | argv, |
| | | cfg, |
| | | allSlugs: [], |
| | | allFiles: [], |
| | | incremental: false, |
| | | } |
| | | |
| | | const perf = new PerfTimer() |
| | |
| | | |
| | | perf.addEvent("glob") |
| | | const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns) |
| | | const fps = allFiles.filter((fp) => fp.endsWith(".md")).sort() |
| | | const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort() |
| | | console.log( |
| | | `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`, |
| | | `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`, |
| | | ) |
| | | |
| | | const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath) |
| | | const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath) |
| | | ctx.allFiles = allFiles |
| | | ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath)) |
| | | |
| | | const parsedFiles = await parseMarkdown(ctx, filePaths) |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | |
| | | const dependencies: Record<string, DepGraph<FilePath> | null> = {} |
| | | |
| | | // Only build dependency graphs if we're doing a fast rebuild |
| | | if (argv.fastRebuild) { |
| | | const staticResources = getStaticResourcesFromPlugins(ctx) |
| | | for (const emitter of cfg.plugins.emitters) { |
| | | dependencies[emitter.name] = |
| | | (await emitter.getDependencyGraph?.(ctx, filteredContent, staticResources)) ?? null |
| | | } |
| | | } |
| | | |
| | | await emitContent(ctx, filteredContent) |
| | | console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`)) |
| | | console.log(chalk.green(`Done processing ${markdownPaths.length} files in ${perf.timeSince()}`)) |
| | | release() |
| | | |
| | | if (argv.serve) { |
| | | return startServing(ctx, mut, parsedFiles, clientRefresh, dependencies) |
| | | if (argv.watch) { |
| | | ctx.incremental = true |
| | | return startWatching(ctx, mut, parsedFiles, clientRefresh) |
| | | } |
| | | } |
| | | |
| | | // setup watcher for rebuilds |
| | | async function startServing( |
| | | async function startWatching( |
| | | ctx: BuildCtx, |
| | | mut: Mutex, |
| | | initialContent: ProcessedContent[], |
| | | clientRefresh: () => void, |
| | | dependencies: Dependencies, // emitter name: dep graph |
| | | ) { |
| | | const { argv } = ctx |
| | | const { argv, allFiles } = ctx |
| | | |
| | | // cache file parse results |
| | | const contentMap = new Map<FilePath, ProcessedContent>() |
| | | for (const content of initialContent) { |
| | | const [_tree, vfile] = content |
| | | contentMap.set(vfile.data.filePath!, content) |
| | | const contentMap: ContentMap = new Map() |
| | | for (const filePath of allFiles) { |
| | | contentMap.set(filePath, { |
| | | type: "other", |
| | | }) |
| | | } |
| | | |
| | | for (const content of initialContent) { |
| | | const [_tree, vfile] = content |
| | | contentMap.set(vfile.data.relativePath!, { |
| | | type: "markdown", |
| | | content, |
| | | }) |
| | | } |
| | | |
| | | const gitIgnoredMatcher = await isGitIgnored() |
| | | const buildData: BuildData = { |
| | | ctx, |
| | | mut, |
| | | dependencies, |
| | | contentMap, |
| | | ignored: await isGitIgnored(), |
| | | initialSlugs: ctx.allSlugs, |
| | | toRebuild: new Set<FilePath>(), |
| | | toRemove: new Set<FilePath>(), |
| | | trackedAssets: new Set<FilePath>(), |
| | | ignored: (path) => { |
| | | if (gitIgnoredMatcher(path)) return true |
| | | const pathStr = path.toString() |
| | | for (const pattern of cfg.configuration.ignorePatterns) { |
| | | if (minimatch(pathStr, pattern)) { |
| | | return true |
| | | } |
| | | } |
| | | |
| | | return false |
| | | }, |
| | | |
| | | changesSinceLastBuild: {}, |
| | | lastBuildMs: 0, |
| | | } |
| | | |
| | |
| | | ignoreInitial: true, |
| | | }) |
| | | |
| | | const buildFromEntry = argv.fastRebuild ? partialRebuildFromEntrypoint : rebuildFromEntrypoint |
| | | const changes: ChangeEvent[] = [] |
| | | watcher |
| | | .on("add", (fp) => buildFromEntry(fp as string, "add", clientRefresh, buildData)) |
| | | .on("change", (fp) => buildFromEntry(fp as string, "change", clientRefresh, buildData)) |
| | | .on("unlink", (fp) => buildFromEntry(fp as string, "delete", clientRefresh, buildData)) |
| | | .on("add", (fp) => { |
| | | if (buildData.ignored(fp)) return |
| | | changes.push({ path: fp as FilePath, type: "add" }) |
| | | void rebuild(changes, clientRefresh, buildData) |
| | | }) |
| | | .on("change", (fp) => { |
| | | if (buildData.ignored(fp)) return |
| | | changes.push({ path: fp as FilePath, type: "change" }) |
| | | void rebuild(changes, clientRefresh, buildData) |
| | | }) |
| | | .on("unlink", (fp) => { |
| | | if (buildData.ignored(fp)) return |
| | | changes.push({ path: fp as FilePath, type: "delete" }) |
| | | void rebuild(changes, clientRefresh, buildData) |
| | | }) |
| | | |
| | | return async () => { |
| | | await watcher.close() |
| | | } |
| | | } |
| | | |
| | | async function partialRebuildFromEntrypoint( |
| | | filepath: string, |
| | | action: FileEvent, |
| | | clientRefresh: () => void, |
| | | buildData: BuildData, // note: this function mutates buildData |
| | | ) { |
| | | const { ctx, ignored, dependencies, contentMap, mut, toRemove } = buildData |
| | | async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) { |
| | | const { ctx, contentMap, mut, changesSinceLastBuild } = buildData |
| | | const { argv, cfg } = ctx |
| | | |
| | | // don't do anything for gitignored files |
| | | if (ignored(filepath)) { |
| | | return |
| | | } |
| | | |
| | | const buildId = randomIdNonSecure() |
| | | ctx.buildId = buildId |
| | | buildData.lastBuildMs = new Date().getTime() |
| | | const numChangesInBuild = changes.length |
| | | const release = await mut.acquire() |
| | | |
| | | // if there's another build after us, release and let them do it |
| | |
| | | } |
| | | |
| | | const perf = new PerfTimer() |
| | | perf.addEvent("rebuild") |
| | | console.log(chalk.yellow("Detected change, rebuilding...")) |
| | | |
| | | // UPDATE DEP GRAPH |
| | | const fp = joinSegments(argv.directory, toPosixPath(filepath)) as FilePath |
| | | // update changesSinceLastBuild |
| | | for (const change of changes) { |
| | | changesSinceLastBuild[change.path] = change.type |
| | | } |
| | | |
| | | const staticResources = getStaticResourcesFromPlugins(ctx) |
| | | let processedFiles: ProcessedContent[] = [] |
| | | |
| | | switch (action) { |
| | | case "add": |
| | | // add to cache when new file is added |
| | | processedFiles = await parseMarkdown(ctx, [fp]) |
| | | processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile])) |
| | | |
| | | // update the dep graph by asking all emitters whether they depend on this file |
| | | for (const emitter of cfg.plugins.emitters) { |
| | | const emitterGraph = |
| | | (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null |
| | | |
| | | if (emitterGraph) { |
| | | const existingGraph = dependencies[emitter.name] |
| | | if (existingGraph !== null) { |
| | | existingGraph.mergeGraph(emitterGraph) |
| | | } else { |
| | | // might be the first time we're adding a mardown file |
| | | dependencies[emitter.name] = emitterGraph |
| | | } |
| | | } |
| | | } |
| | | break |
| | | case "change": |
| | | // invalidate cache when file is changed |
| | | processedFiles = await parseMarkdown(ctx, [fp]) |
| | | processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile])) |
| | | |
| | | // only content files can have added/removed dependencies because of transclusions |
| | | if (path.extname(fp) === ".md") { |
| | | for (const emitter of cfg.plugins.emitters) { |
| | | // get new dependencies from all emitters for this file |
| | | const emitterGraph = |
| | | (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null |
| | | |
| | | // only update the graph if the emitter plugin uses the changed file |
| | | // eg. Assets plugin ignores md files, so we skip updating the graph |
| | | if (emitterGraph?.hasNode(fp)) { |
| | | // merge the new dependencies into the dep graph |
| | | dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp) |
| | | } |
| | | } |
| | | } |
| | | break |
| | | case "delete": |
| | | toRemove.add(fp) |
| | | break |
| | | const pathsToParse: FilePath[] = [] |
| | | for (const [fp, type] of Object.entries(changesSinceLastBuild)) { |
| | | if (type === "delete" || path.extname(fp) !== ".md") continue |
| | | const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath |
| | | pathsToParse.push(fullPath) |
| | | } |
| | | |
| | | if (argv.verbose) { |
| | | console.log(`Updated dependency graphs in ${perf.timeSince()}`) |
| | | const parsed = await parseMarkdown(ctx, pathsToParse) |
| | | for (const content of parsed) { |
| | | contentMap.set(content[1].data.relativePath!, { |
| | | type: "markdown", |
| | | content, |
| | | }) |
| | | } |
| | | |
| | | // EMIT |
| | | perf.addEvent("rebuild") |
| | | // update state using changesSinceLastBuild |
| | | // we do this weird play of add => compute change events => remove |
| | | // so that partialEmitters can do appropriate cleanup based on the content of deleted files |
| | | for (const [file, change] of Object.entries(changesSinceLastBuild)) { |
| | | if (change === "delete") { |
| | | // universal delete case |
| | | contentMap.delete(file as FilePath) |
| | | } |
| | | |
| | | // manually track non-markdown files as processed files only |
| | | // contains markdown files |
| | | if (change === "add" && path.extname(file) !== ".md") { |
| | | contentMap.set(file as FilePath, { |
| | | type: "other", |
| | | }) |
| | | } |
| | | } |
| | | |
| | | const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => { |
| | | const path = fp as FilePath |
| | | const processedContent = contentMap.get(path) |
| | | if (processedContent?.type === "markdown") { |
| | | const [_tree, file] = processedContent.content |
| | | return { |
| | | type, |
| | | path, |
| | | file, |
| | | } |
| | | } |
| | | |
| | | return { |
| | | type, |
| | | path, |
| | | } |
| | | }) |
| | | |
| | | // update allFiles and then allSlugs with the consistent view of content map |
| | | ctx.allFiles = Array.from(contentMap.keys()) |
| | | ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath)) |
| | | const processedFiles = Array.from(contentMap.values()) |
| | | .filter((file) => file.type === "markdown") |
| | | .map((file) => file.content) |
| | | |
| | | let emittedFiles = 0 |
| | | |
| | | for (const emitter of cfg.plugins.emitters) { |
| | | const depGraph = dependencies[emitter.name] |
| | | |
| | | // emitter hasn't defined a dependency graph. call it with all processed files |
| | | if (depGraph === null) { |
| | | if (argv.verbose) { |
| | | console.log( |
| | | `Emitter ${emitter.name} doesn't define a dependency graph. Calling it with all files...`, |
| | | ) |
| | | } |
| | | |
| | | const files = [...contentMap.values()].filter( |
| | | ([_node, vfile]) => !toRemove.has(vfile.data.filePath!), |
| | | ) |
| | | |
| | | const emitted = await emitter.emit(ctx, files, staticResources) |
| | | if (Symbol.asyncIterator in emitted) { |
| | | // Async generator case |
| | | for await (const file of emitted) { |
| | | emittedFiles++ |
| | | if (ctx.argv.verbose) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } |
| | | } |
| | | } else { |
| | | // Array case |
| | | emittedFiles += emitted.length |
| | | if (ctx.argv.verbose) { |
| | | for (const file of emitted) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } |
| | | } |
| | | } |
| | | |
| | | // Try to use partialEmit if available, otherwise assume the output is static |
| | | const emitFn = emitter.partialEmit ?? emitter.emit |
| | | const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents) |
| | | if (emitted === null) { |
| | | continue |
| | | } |
| | | |
| | | // only call the emitter if it uses this file |
| | | if (depGraph.hasNode(fp)) { |
| | | // re-emit using all files that are needed for the downstream of this file |
| | | // eg. for ContentIndex, the dep graph could be: |
| | | // a.md --> contentIndex.json |
| | | // b.md ------^ |
| | | // |
| | | // if a.md changes, we need to re-emit contentIndex.json, |
| | | // and supply [a.md, b.md] to the emitter |
| | | const upstreams = [...depGraph.getLeafNodeAncestors(fp)] as FilePath[] |
| | | |
| | | const upstreamContent = upstreams |
| | | // filter out non-markdown files |
| | | .filter((file) => contentMap.has(file)) |
| | | // if file was deleted, don't give it to the emitter |
| | | .filter((file) => !toRemove.has(file)) |
| | | .map((file) => contentMap.get(file)!) |
| | | |
| | | const emitted = await emitter.emit(ctx, upstreamContent, staticResources) |
| | | if (Symbol.asyncIterator in emitted) { |
| | | // Async generator case |
| | | for await (const file of emitted) { |
| | |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`) |
| | | |
| | | // CLEANUP |
| | | const destinationsToDelete = new Set<FilePath>() |
| | | for (const file of toRemove) { |
| | | // remove from cache |
| | | contentMap.delete(file) |
| | | Object.values(dependencies).forEach((depGraph) => { |
| | | // remove the node from dependency graphs |
| | | depGraph?.removeNode(file) |
| | | // remove any orphan nodes. eg if a.md is deleted, a.html is orphaned and should be removed |
| | | const orphanNodes = depGraph?.removeOrphanNodes() |
| | | orphanNodes?.forEach((node) => { |
| | | // only delete files that are in the output directory |
| | | if (node.startsWith(argv.output)) { |
| | | destinationsToDelete.add(node) |
| | | } |
| | | }) |
| | | }) |
| | | } |
| | | await rimraf([...destinationsToDelete]) |
| | | |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | |
| | | toRemove.clear() |
| | | release() |
| | | changes.splice(0, numChangesInBuild) |
| | | clientRefresh() |
| | | } |
| | | |
| | | async function rebuildFromEntrypoint( |
| | | fp: string, |
| | | action: FileEvent, |
| | | clientRefresh: () => void, |
| | | buildData: BuildData, // note: this function mutates buildData |
| | | ) { |
| | | const { ctx, ignored, mut, initialSlugs, contentMap, toRebuild, toRemove, trackedAssets } = |
| | | buildData |
| | | |
| | | const { argv } = ctx |
| | | |
| | | // don't do anything for gitignored files |
| | | if (ignored(fp)) { |
| | | return |
| | | } |
| | | |
| | | // dont bother rebuilding for non-content files, just track and refresh |
| | | fp = toPosixPath(fp) |
| | | const filePath = joinSegments(argv.directory, fp) as FilePath |
| | | if (path.extname(fp) !== ".md") { |
| | | if (action === "add" || action === "change") { |
| | | trackedAssets.add(filePath) |
| | | } else if (action === "delete") { |
| | | trackedAssets.delete(filePath) |
| | | } |
| | | clientRefresh() |
| | | return |
| | | } |
| | | |
| | | if (action === "add" || action === "change") { |
| | | toRebuild.add(filePath) |
| | | } else if (action === "delete") { |
| | | toRemove.add(filePath) |
| | | } |
| | | |
| | | const buildId = randomIdNonSecure() |
| | | ctx.buildId = buildId |
| | | buildData.lastBuildMs = new Date().getTime() |
| | | const release = await mut.acquire() |
| | | |
| | | // there's another build after us, release and let them do it |
| | | if (ctx.buildId !== buildId) { |
| | | release() |
| | | return |
| | | } |
| | | |
| | | const perf = new PerfTimer() |
| | | console.log(chalk.yellow("Detected change, rebuilding...")) |
| | | |
| | | try { |
| | | const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) |
| | | const parsedContent = await parseMarkdown(ctx, filesToRebuild) |
| | | for (const content of parsedContent) { |
| | | const [_tree, vfile] = content |
| | | contentMap.set(vfile.data.filePath!, content) |
| | | } |
| | | |
| | | for (const fp of toRemove) { |
| | | contentMap.delete(fp) |
| | | } |
| | | |
| | | const parsedFiles = [...contentMap.values()] |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | |
| | | // re-update slugs |
| | | const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] |
| | | .filter((fp) => !toRemove.has(fp)) |
| | | .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) |
| | | |
| | | ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] |
| | | |
| | | // TODO: we can probably traverse the link graph to figure out what's safe to delete here |
| | | // instead of just deleting everything |
| | | await rimraf(path.join(argv.output, ".*"), { glob: true }) |
| | | await emitContent(ctx, filteredContent) |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | } catch (err) { |
| | | console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) |
| | | if (argv.verbose) { |
| | | console.log(chalk.red(err)) |
| | | } |
| | | } |
| | | |
| | | clientRefresh() |
| | | toRebuild.clear() |
| | | toRemove.clear() |
| | | release() |
| | | } |
| | | |
| | |
| | | import { QuartzComponent } from "./components/types" |
| | | import { ValidLocale } from "./i18n" |
| | | import { PluginTypes } from "./plugins/types" |
| | | import { SocialImageOptions } from "./util/og" |
| | | import { Theme } from "./util/theme" |
| | | |
| | | export type Analytics = |
| | |
| | | default: false, |
| | | describe: "run a local server to live-preview your Quartz", |
| | | }, |
| | | fastRebuild: { |
| | | watch: { |
| | | boolean: true, |
| | | default: false, |
| | | describe: "[experimental] rebuild only the changed files", |
| | | describe: "watch for changes and rebuild automatically", |
| | | }, |
| | | baseDir: { |
| | | string: true, |
| | |
| | | * @param {*} argv arguments for `build` |
| | | */ |
| | | export async function handleBuild(argv) { |
| | | if (argv.serve) { |
| | | argv.watch = true |
| | | } |
| | | |
| | | console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`)) |
| | | const ctx = await esbuild.context({ |
| | | entryPoints: [fp], |
| | |
| | | clientRefresh() |
| | | } |
| | | |
| | | let clientRefresh = () => {} |
| | | if (argv.serve) { |
| | | const connections = [] |
| | | const clientRefresh = () => connections.forEach((conn) => conn.send("rebuild")) |
| | | clientRefresh = () => connections.forEach((conn) => conn.send("rebuild")) |
| | | |
| | | if (argv.baseDir !== "" && !argv.baseDir.startsWith("/")) { |
| | | argv.baseDir = "/" + argv.baseDir |
| | |
| | | |
| | | return serve() |
| | | }) |
| | | |
| | | server.listen(argv.port) |
| | | const wss = new WebSocketServer({ port: argv.wsPort }) |
| | | wss.on("connection", (ws) => connections.push(ws)) |
| | |
| | | `Started a Quartz server listening at http://localhost:${argv.port}${argv.baseDir}`, |
| | | ), |
| | | ) |
| | | console.log("hint: exit with ctrl+c") |
| | | const paths = await globby(["**/*.ts", "**/*.tsx", "**/*.scss", "package.json"]) |
| | | } else { |
| | | await build(clientRefresh) |
| | | ctx.dispose() |
| | | } |
| | | |
| | | if (argv.watch) { |
| | | const paths = await globby([ |
| | | "**/*.ts", |
| | | "quartz/cli/*.js", |
| | | "quartz/static/**/*", |
| | | "**/*.tsx", |
| | | "**/*.scss", |
| | | "package.json", |
| | | ]) |
| | | chokidar |
| | | .watch(paths, { ignoreInitial: true }) |
| | | .on("add", () => build(clientRefresh)) |
| | | .on("change", () => build(clientRefresh)) |
| | | .on("unlink", () => build(clientRefresh)) |
| | | } else { |
| | | await build(() => {}) |
| | | ctx.dispose() |
| | | |
| | | console.log(chalk.grey("hint: exit with ctrl+c")) |
| | | } |
| | | } |
| | | |
| | |
| | | import { Root, Element, ElementContent } from "hast" |
| | | import { GlobalConfiguration } from "../cfg" |
| | | import { i18n } from "../i18n" |
| | | import { QuartzPluginData } from "../plugins/vfile" |
| | | |
| | | interface RenderComponents { |
| | | head: QuartzComponent |
| | |
| | | const headerRegex = new RegExp(/h[1-6]/) |
| | | export function pageResources( |
| | | baseDir: FullSlug | RelativeURL, |
| | | fileData: QuartzPluginData, |
| | | staticResources: StaticResources, |
| | | ): StaticResources { |
| | | const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json") |
| | |
| | | return resources |
| | | } |
| | | |
| | | export function renderPage( |
| | | function renderTranscludes( |
| | | root: Root, |
| | | cfg: GlobalConfiguration, |
| | | slug: FullSlug, |
| | | componentData: QuartzComponentProps, |
| | | components: RenderComponents, |
| | | pageResources: StaticResources, |
| | | ): string { |
| | | // make a deep copy of the tree so we don't remove the transclusion references |
| | | // for the file cached in contentMap in build.ts |
| | | const root = clone(componentData.tree) as Root |
| | | |
| | | ) { |
| | | // process transcludes in componentData |
| | | visit(root, "element", (node, _index, _parent) => { |
| | | if (node.tagName === "blockquote") { |
| | |
| | | } |
| | | } |
| | | }) |
| | | } |
| | | |
| | | export function renderPage( |
| | | cfg: GlobalConfiguration, |
| | | slug: FullSlug, |
| | | componentData: QuartzComponentProps, |
| | | components: RenderComponents, |
| | | pageResources: StaticResources, |
| | | ): string { |
| | | // make a deep copy of the tree so we don't remove the transclusion references |
| | | // for the file cached in contentMap in build.ts |
| | | const root = clone(componentData.tree) as Root |
| | | renderTranscludes(root, cfg, slug, componentData) |
| | | |
| | | // set componentData.tree to the edited html that has transclusions rendered |
| | | componentData.tree = root |
| | |
| | | } |
| | | |
| | | document.addEventListener("nav", () => { |
| | | const switchTheme = (e: Event) => { |
| | | const switchTheme = () => { |
| | | const newTheme = |
| | | document.documentElement.getAttribute("saved-theme") === "dark" ? "light" : "dark" |
| | | document.documentElement.setAttribute("saved-theme", newTheme) |
| | |
| | | import BodyConstructor from "../../components/Body" |
| | | import { pageResources, renderPage } from "../../components/renderPage" |
| | | import { FullPageLayout } from "../../cfg" |
| | | import { FilePath, FullSlug } from "../../util/path" |
| | | import { FullSlug } from "../../util/path" |
| | | import { sharedPageComponents } from "../../../quartz.layout" |
| | | import { NotFound } from "../../components" |
| | | import { defaultProcessedContent } from "../vfile" |
| | | import { write } from "./helpers" |
| | | import { i18n } from "../../i18n" |
| | | import DepGraph from "../../depgraph" |
| | | |
| | | export const NotFoundPage: QuartzEmitterPlugin = () => { |
| | | const opts: FullPageLayout = { |
| | |
| | | getQuartzComponents() { |
| | | return [Head, Body, pageBody, Footer] |
| | | }, |
| | | async getDependencyGraph(_ctx, _content, _resources) { |
| | | return new DepGraph<FilePath>() |
| | | }, |
| | | async *emit(ctx, _content, resources) { |
| | | const cfg = ctx.cfg.configuration |
| | | const slug = "404" as FullSlug |
| | |
| | | description: notFound, |
| | | frontmatter: { title: notFound, tags: [] }, |
| | | }) |
| | | const externalResources = pageResources(path, vfile.data, resources) |
| | | const externalResources = pageResources(path, resources) |
| | | const componentData: QuartzComponentProps = { |
| | | ctx, |
| | | fileData: vfile.data, |
| | |
| | | ext: ".html", |
| | | }) |
| | | }, |
| | | async *partialEmit() {}, |
| | | } |
| | | } |
| | |
| | | import { FilePath, joinSegments, resolveRelative, simplifySlug } from "../../util/path" |
| | | import { resolveRelative, simplifySlug } from "../../util/path" |
| | | import { QuartzEmitterPlugin } from "../types" |
| | | import { write } from "./helpers" |
| | | import DepGraph from "../../depgraph" |
| | | import { getAliasSlugs } from "../transformers/frontmatter" |
| | | import { BuildCtx } from "../../util/ctx" |
| | | import { VFile } from "vfile" |
| | | |
| | | export const AliasRedirects: QuartzEmitterPlugin = () => ({ |
| | | name: "AliasRedirects", |
| | | async getDependencyGraph(ctx, content, _resources) { |
| | | const graph = new DepGraph<FilePath>() |
| | | |
| | | const { argv } = ctx |
| | | for (const [_tree, file] of content) { |
| | | for (const slug of getAliasSlugs(file.data.frontmatter?.aliases ?? [], argv, file)) { |
| | | graph.addEdge(file.data.filePath!, joinSegments(argv.output, slug + ".html") as FilePath) |
| | | } |
| | | } |
| | | |
| | | return graph |
| | | }, |
| | | async *emit(ctx, content, _resources) { |
| | | for (const [_tree, file] of content) { |
| | | async function* processFile(ctx: BuildCtx, file: VFile) { |
| | | const ogSlug = simplifySlug(file.data.slug!) |
| | | |
| | | for (const slug of file.data.aliases ?? []) { |
| | |
| | | }) |
| | | } |
| | | } |
| | | |
| | | export const AliasRedirects: QuartzEmitterPlugin = () => ({ |
| | | name: "AliasRedirects", |
| | | async *emit(ctx, content) { |
| | | for (const [_tree, file] of content) { |
| | | yield* processFile(ctx, file) |
| | | } |
| | | }, |
| | | async *partialEmit(ctx, _content, _resources, changeEvents) { |
| | | for (const changeEvent of changeEvents) { |
| | | if (!changeEvent.file) continue |
| | | if (changeEvent.type === "add" || changeEvent.type === "change") { |
| | | // add new ones if this file still exists |
| | | yield* processFile(ctx, changeEvent.file) |
| | | } |
| | | } |
| | | }, |
| | | }) |
| | |
| | | import path from "path" |
| | | import fs from "fs" |
| | | import { glob } from "../../util/glob" |
| | | import DepGraph from "../../depgraph" |
| | | import { Argv } from "../../util/ctx" |
| | | import { QuartzConfig } from "../../cfg" |
| | | |
| | |
| | | return await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns]) |
| | | } |
| | | |
| | | const copyFile = async (argv: Argv, fp: FilePath) => { |
| | | const src = joinSegments(argv.directory, fp) as FilePath |
| | | |
| | | const name = slugifyFilePath(fp) |
| | | const dest = joinSegments(argv.output, name) as FilePath |
| | | |
| | | // ensure dir exists |
| | | const dir = path.dirname(dest) as FilePath |
| | | await fs.promises.mkdir(dir, { recursive: true }) |
| | | |
| | | await fs.promises.copyFile(src, dest) |
| | | return dest |
| | | } |
| | | |
| | | export const Assets: QuartzEmitterPlugin = () => { |
| | | return { |
| | | name: "Assets", |
| | | async getDependencyGraph(ctx, _content, _resources) { |
| | | const { argv, cfg } = ctx |
| | | const graph = new DepGraph<FilePath>() |
| | | |
| | | async *emit({ argv, cfg }) { |
| | | const fps = await filesToCopy(argv, cfg) |
| | | |
| | | for (const fp of fps) { |
| | | const ext = path.extname(fp) |
| | | const src = joinSegments(argv.directory, fp) as FilePath |
| | | const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath |
| | | |
| | | const dest = joinSegments(argv.output, name) as FilePath |
| | | |
| | | graph.addEdge(src, dest) |
| | | yield copyFile(argv, fp) |
| | | } |
| | | |
| | | return graph |
| | | }, |
| | | async *emit({ argv, cfg }, _content, _resources) { |
| | | const assetsPath = argv.output |
| | | const fps = await filesToCopy(argv, cfg) |
| | | for (const fp of fps) { |
| | | const ext = path.extname(fp) |
| | | const src = joinSegments(argv.directory, fp) as FilePath |
| | | const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath |
| | | async *partialEmit(ctx, _content, _resources, changeEvents) { |
| | | for (const changeEvent of changeEvents) { |
| | | const ext = path.extname(changeEvent.path) |
| | | if (ext === ".md") continue |
| | | |
| | | const dest = joinSegments(assetsPath, name) as FilePath |
| | | const dir = path.dirname(dest) as FilePath |
| | | await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists |
| | | await fs.promises.copyFile(src, dest) |
| | | yield dest |
| | | if (changeEvent.type === "add" || changeEvent.type === "change") { |
| | | yield copyFile(ctx.argv, changeEvent.path) |
| | | } else if (changeEvent.type === "delete") { |
| | | const name = slugifyFilePath(changeEvent.path) |
| | | const dest = joinSegments(ctx.argv.output, name) as FilePath |
| | | await fs.promises.unlink(dest) |
| | | } |
| | | } |
| | | }, |
| | | } |
| | |
| | | import { QuartzEmitterPlugin } from "../types" |
| | | import fs from "fs" |
| | | import chalk from "chalk" |
| | | import DepGraph from "../../depgraph" |
| | | |
| | | export function extractDomainFromBaseUrl(baseUrl: string) { |
| | | const url = new URL(`https://${baseUrl}`) |
| | |
| | | |
| | | export const CNAME: QuartzEmitterPlugin = () => ({ |
| | | name: "CNAME", |
| | | async getDependencyGraph(_ctx, _content, _resources) { |
| | | return new DepGraph<FilePath>() |
| | | }, |
| | | async emit({ argv, cfg }, _content, _resources) { |
| | | async emit({ argv, cfg }) { |
| | | if (!cfg.configuration.baseUrl) { |
| | | console.warn(chalk.yellow("CNAME emitter requires `baseUrl` to be set in your configuration")) |
| | | return [] |
| | |
| | | await fs.promises.writeFile(path, content) |
| | | return [path] as FilePath[] |
| | | }, |
| | | async *partialEmit() {}, |
| | | }) |
| | |
| | | import { FilePath, FullSlug, joinSegments } from "../../util/path" |
| | | import { FullSlug, joinSegments } from "../../util/path" |
| | | import { QuartzEmitterPlugin } from "../types" |
| | | |
| | | // @ts-ignore |
| | |
| | | import { Features, transform } from "lightningcss" |
| | | import { transform as transpile } from "esbuild" |
| | | import { write } from "./helpers" |
| | | import DepGraph from "../../depgraph" |
| | | |
| | | type ComponentResources = { |
| | | css: string[] |
| | |
| | | export const ComponentResources: QuartzEmitterPlugin = () => { |
| | | return { |
| | | name: "ComponentResources", |
| | | async getDependencyGraph(_ctx, _content, _resources) { |
| | | return new DepGraph<FilePath>() |
| | | }, |
| | | async *emit(ctx, _content, _resources) { |
| | | const cfg = ctx.cfg.configuration |
| | | // component specific scripts and styles |
| | |
| | | }, |
| | | include: Features.MediaQueries, |
| | | }).code.toString(), |
| | | }), |
| | | }) |
| | | |
| | | yield write({ |
| | | ctx, |
| | | slug: "prescript" as FullSlug, |
| | | ext: ".js", |
| | | content: prescript, |
| | | }), |
| | | }) |
| | | |
| | | yield write({ |
| | | ctx, |
| | | slug: "postscript" as FullSlug, |
| | |
| | | content: postscript, |
| | | }) |
| | | }, |
| | | async *partialEmit() {}, |
| | | } |
| | | } |
| | |
| | | import { toHtml } from "hast-util-to-html" |
| | | import { write } from "./helpers" |
| | | import { i18n } from "../../i18n" |
| | | import DepGraph from "../../depgraph" |
| | | |
| | | export type ContentIndexMap = Map<FullSlug, ContentDetails> |
| | | export type ContentDetails = { |
| | |
| | | opts = { ...defaultOptions, ...opts } |
| | | return { |
| | | name: "ContentIndex", |
| | | async getDependencyGraph(ctx, content, _resources) { |
| | | const graph = new DepGraph<FilePath>() |
| | | |
| | | for (const [_tree, file] of content) { |
| | | const sourcePath = file.data.filePath! |
| | | |
| | | graph.addEdge( |
| | | sourcePath, |
| | | joinSegments(ctx.argv.output, "static/contentIndex.json") as FilePath, |
| | | ) |
| | | if (opts?.enableSiteMap) { |
| | | graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "sitemap.xml") as FilePath) |
| | | } |
| | | if (opts?.enableRSS) { |
| | | graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "index.xml") as FilePath) |
| | | } |
| | | } |
| | | |
| | | return graph |
| | | }, |
| | | async *emit(ctx, content, _resources) { |
| | | async *emit(ctx, content) { |
| | | const cfg = ctx.cfg.configuration |
| | | const linkIndex: ContentIndexMap = new Map() |
| | | for (const [tree, file] of content) { |
| | |
| | | if (opts?.includeEmptyFiles || (file.data.text && file.data.text !== "")) { |
| | | linkIndex.set(slug, { |
| | | slug, |
| | | filePath: file.data.filePath!, |
| | | filePath: file.data.relativePath!, |
| | | title: file.data.frontmatter?.title!, |
| | | links: file.data.links ?? [], |
| | | tags: file.data.frontmatter?.tags ?? [], |
| | |
| | | import path from "path" |
| | | import { visit } from "unist-util-visit" |
| | | import { Root } from "hast" |
| | | import { VFile } from "vfile" |
| | | import { QuartzEmitterPlugin } from "../types" |
| | | import { QuartzComponentProps } from "../../components/types" |
| | | import HeaderConstructor from "../../components/Header" |
| | | import BodyConstructor from "../../components/Body" |
| | | import { pageResources, renderPage } from "../../components/renderPage" |
| | | import { FullPageLayout } from "../../cfg" |
| | | import { Argv } from "../../util/ctx" |
| | | import { FilePath, isRelativeURL, joinSegments, pathToRoot } from "../../util/path" |
| | | import { pathToRoot } from "../../util/path" |
| | | import { defaultContentPageLayout, sharedPageComponents } from "../../../quartz.layout" |
| | | import { Content } from "../../components" |
| | | import chalk from "chalk" |
| | | import { write } from "./helpers" |
| | | import DepGraph from "../../depgraph" |
| | | import { BuildCtx } from "../../util/ctx" |
| | | import { Node } from "unist" |
| | | import { StaticResources } from "../../util/resources" |
| | | import { QuartzPluginData } from "../vfile" |
| | | |
| | | // get all the dependencies for the markdown file |
| | | // eg. images, scripts, stylesheets, transclusions |
| | | const parseDependencies = (argv: Argv, hast: Root, file: VFile): string[] => { |
| | | const dependencies: string[] = [] |
| | | |
| | | visit(hast, "element", (elem): void => { |
| | | let ref: string | null = null |
| | | |
| | | if ( |
| | | ["script", "img", "audio", "video", "source", "iframe"].includes(elem.tagName) && |
| | | elem?.properties?.src |
| | | async function processContent( |
| | | ctx: BuildCtx, |
| | | tree: Node, |
| | | fileData: QuartzPluginData, |
| | | allFiles: QuartzPluginData[], |
| | | opts: FullPageLayout, |
| | | resources: StaticResources, |
| | | ) { |
| | | ref = elem.properties.src.toString() |
| | | } else if (["a", "link"].includes(elem.tagName) && elem?.properties?.href) { |
| | | // transclusions will create a tags with relative hrefs |
| | | ref = elem.properties.href.toString() |
| | | const slug = fileData.slug! |
| | | const cfg = ctx.cfg.configuration |
| | | const externalResources = pageResources(pathToRoot(slug), resources) |
| | | const componentData: QuartzComponentProps = { |
| | | ctx, |
| | | fileData, |
| | | externalResources, |
| | | cfg, |
| | | children: [], |
| | | tree, |
| | | allFiles, |
| | | } |
| | | |
| | | // if it is a relative url, its a local file and we need to add |
| | | // it to the dependency graph. otherwise, ignore |
| | | if (ref === null || !isRelativeURL(ref)) { |
| | | return |
| | | } |
| | | |
| | | let fp = path.join(file.data.filePath!, path.relative(argv.directory, ref)).replace(/\\/g, "/") |
| | | // markdown files have the .md extension stripped in hrefs, add it back here |
| | | if (!fp.split("/").pop()?.includes(".")) { |
| | | fp += ".md" |
| | | } |
| | | dependencies.push(fp) |
| | | const content = renderPage(cfg, slug, componentData, opts, externalResources) |
| | | return write({ |
| | | ctx, |
| | | content, |
| | | slug, |
| | | ext: ".html", |
| | | }) |
| | | |
| | | return dependencies |
| | | } |
| | | |
| | | export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOpts) => { |
| | |
| | | Footer, |
| | | ] |
| | | }, |
| | | async getDependencyGraph(ctx, content, _resources) { |
| | | const graph = new DepGraph<FilePath>() |
| | | |
| | | for (const [tree, file] of content) { |
| | | const sourcePath = file.data.filePath! |
| | | const slug = file.data.slug! |
| | | graph.addEdge(sourcePath, joinSegments(ctx.argv.output, slug + ".html") as FilePath) |
| | | |
| | | parseDependencies(ctx.argv, tree as Root, file).forEach((dep) => { |
| | | graph.addEdge(dep as FilePath, sourcePath) |
| | | }) |
| | | } |
| | | |
| | | return graph |
| | | }, |
| | | async *emit(ctx, content, resources) { |
| | | const cfg = ctx.cfg.configuration |
| | | const allFiles = content.map((c) => c[1].data) |
| | | |
| | | let containsIndex = false |
| | | |
| | | for (const [tree, file] of content) { |
| | | const slug = file.data.slug! |
| | | if (slug === "index") { |
| | | containsIndex = true |
| | | } |
| | | |
| | | if (file.data.slug?.endsWith("/index")) { |
| | | continue |
| | | // only process home page, non-tag pages, and non-index pages |
| | | if (slug.endsWith("/index") || slug.startsWith("tags/")) continue |
| | | yield processContent(ctx, tree, file.data, allFiles, opts, resources) |
| | | } |
| | | |
| | | const externalResources = pageResources(pathToRoot(slug), file.data, resources) |
| | | const componentData: QuartzComponentProps = { |
| | | ctx, |
| | | fileData: file.data, |
| | | externalResources, |
| | | cfg, |
| | | children: [], |
| | | tree, |
| | | allFiles, |
| | | } |
| | | |
| | | const content = renderPage(cfg, slug, componentData, opts, externalResources) |
| | | yield write({ |
| | | ctx, |
| | | content, |
| | | slug, |
| | | ext: ".html", |
| | | }) |
| | | } |
| | | |
| | | if (!containsIndex && !ctx.argv.fastRebuild) { |
| | | if (!containsIndex) { |
| | | console.log( |
| | | chalk.yellow( |
| | | `\nWarning: you seem to be missing an \`index.md\` home page file at the root of your \`${ctx.argv.directory}\` folder (\`${path.join(ctx.argv.directory, "index.md")} does not exist\`). This may cause errors when deploying.`, |
| | |
| | | ) |
| | | } |
| | | }, |
async *partialEmit(ctx, content, resources, changeEvents) {
  const allFiles = content.map((c) => c[1].data)

  // collect the slugs of files that were added or modified in this rebuild
  const dirtySlugs = new Set<string>()
  for (const evt of changeEvents) {
    if (!evt.file) continue
    if (evt.type === "add" || evt.type === "change") {
      dirtySlugs.add(evt.file.data.slug!)
    }
  }

  // re-render only the dirty, non-index, non-tag pages
  for (const [tree, file] of content) {
    const slug = file.data.slug!
    if (!dirtySlugs.has(slug)) continue
    if (slug.endsWith("/index") || slug.startsWith("tags/")) continue

    yield processContent(ctx, tree, file.data, allFiles, opts, resources)
  }
},
| | | } |
| | | } |
| | |
| | | import { FullPageLayout } from "../../cfg" |
| | | import path from "path" |
| | | import { |
| | | FilePath, |
| | | FullSlug, |
| | | SimpleSlug, |
| | | stripSlashes, |
| | |
| | | import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout" |
| | | import { FolderContent } from "../../components" |
| | | import { write } from "./helpers" |
| | | import { i18n } from "../../i18n" |
| | | import DepGraph from "../../depgraph" |
| | | |
| | | import { i18n, TRANSLATIONS } from "../../i18n" |
| | | import { BuildCtx } from "../../util/ctx" |
| | | import { StaticResources } from "../../util/resources" |
| | | interface FolderPageOptions extends FullPageLayout { |
| | | sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number |
| | | } |
| | | |
| | | async function* processFolderInfo( |
| | | ctx: BuildCtx, |
| | | folderInfo: Record<SimpleSlug, ProcessedContent>, |
| | | allFiles: QuartzPluginData[], |
| | | opts: FullPageLayout, |
| | | resources: StaticResources, |
| | | ) { |
| | | for (const [folder, folderContent] of Object.entries(folderInfo) as [ |
| | | SimpleSlug, |
| | | ProcessedContent, |
| | | ][]) { |
| | | const slug = joinSegments(folder, "index") as FullSlug |
| | | const [tree, file] = folderContent |
| | | const cfg = ctx.cfg.configuration |
| | | const externalResources = pageResources(pathToRoot(slug), resources) |
| | | const componentData: QuartzComponentProps = { |
| | | ctx, |
| | | fileData: file.data, |
| | | externalResources, |
| | | cfg, |
| | | children: [], |
| | | tree, |
| | | allFiles, |
| | | } |
| | | |
| | | const content = renderPage(cfg, slug, componentData, opts, externalResources) |
| | | yield write({ |
| | | ctx, |
| | | content, |
| | | slug, |
| | | ext: ".html", |
| | | }) |
| | | } |
| | | } |
| | | |
| | | function computeFolderInfo( |
| | | folders: Set<SimpleSlug>, |
| | | content: ProcessedContent[], |
| | | locale: keyof typeof TRANSLATIONS, |
| | | ): Record<SimpleSlug, ProcessedContent> { |
| | | // Create default folder descriptions |
| | | const folderInfo: Record<SimpleSlug, ProcessedContent> = Object.fromEntries( |
| | | [...folders].map((folder) => [ |
| | | folder, |
| | | defaultProcessedContent({ |
| | | slug: joinSegments(folder, "index") as FullSlug, |
| | | frontmatter: { |
| | | title: `${i18n(locale).pages.folderContent.folder}: ${folder}`, |
| | | tags: [], |
| | | }, |
| | | }), |
| | | ]), |
| | | ) |
| | | |
| | | // Update with actual content if available |
| | | for (const [tree, file] of content) { |
| | | const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug |
| | | if (folders.has(slug)) { |
| | | folderInfo[slug] = [tree, file] |
| | | } |
| | | } |
| | | |
| | | return folderInfo |
| | | } |
| | | |
| | | function _getFolders(slug: FullSlug): SimpleSlug[] { |
| | | var folderName = path.dirname(slug ?? "") as SimpleSlug |
| | | const parentFolderNames = [folderName] |
| | | |
| | | while (folderName !== ".") { |
| | | folderName = path.dirname(folderName ?? "") as SimpleSlug |
| | | parentFolderNames.push(folderName) |
| | | } |
| | | return parentFolderNames |
| | | } |
| | | |
| | | export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (userOpts) => { |
| | | const opts: FullPageLayout = { |
| | | ...sharedPageComponents, |
| | |
| | | Footer, |
| | | ] |
| | | }, |
| | | async getDependencyGraph(_ctx, content, _resources) { |
| | | // Example graph: |
| | | // nested/file.md --> nested/index.html |
| | | // nested/file2.md ------^ |
| | | const graph = new DepGraph<FilePath>() |
| | | |
| | | content.map(([_tree, vfile]) => { |
| | | const slug = vfile.data.slug |
| | | const folderName = path.dirname(slug ?? "") as SimpleSlug |
| | | if (slug && folderName !== "." && folderName !== "tags") { |
| | | graph.addEdge(vfile.data.filePath!, joinSegments(folderName, "index.html") as FilePath) |
| | | } |
| | | }) |
| | | |
| | | return graph |
| | | }, |
| | | async *emit(ctx, content, resources) { |
| | | const allFiles = content.map((c) => c[1].data) |
| | | const cfg = ctx.cfg.configuration |
| | |
| | | }), |
| | | ) |
| | | |
| | | const folderDescriptions: Record<string, ProcessedContent> = Object.fromEntries( |
| | | [...folders].map((folder) => [ |
| | | folder, |
| | | defaultProcessedContent({ |
| | | slug: joinSegments(folder, "index") as FullSlug, |
| | | frontmatter: { |
| | | title: `${i18n(cfg.locale).pages.folderContent.folder}: ${folder}`, |
| | | tags: [], |
| | | const folderInfo = computeFolderInfo(folders, content, cfg.locale) |
| | | yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources) |
| | | }, |
| | | }), |
| | | ]), |
| | | async *partialEmit(ctx, content, resources, changeEvents) { |
| | | const allFiles = content.map((c) => c[1].data) |
| | | const cfg = ctx.cfg.configuration |
| | | |
| | | // Find all folders that need to be updated based on changed files |
| | | const affectedFolders: Set<SimpleSlug> = new Set() |
| | | for (const changeEvent of changeEvents) { |
| | | if (!changeEvent.file) continue |
| | | const slug = changeEvent.file.data.slug! |
| | | const folders = _getFolders(slug).filter( |
| | | (folderName) => folderName !== "." && folderName !== "tags", |
| | | ) |
| | | |
| | | for (const [tree, file] of content) { |
| | | const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug |
| | | if (folders.has(slug)) { |
| | | folderDescriptions[slug] = [tree, file] |
| | | } |
| | | folders.forEach((folder) => affectedFolders.add(folder)) |
| | | } |
| | | |
| | | for (const folder of folders) { |
| | | const slug = joinSegments(folder, "index") as FullSlug |
| | | const [tree, file] = folderDescriptions[folder] |
| | | const externalResources = pageResources(pathToRoot(slug), file.data, resources) |
| | | const componentData: QuartzComponentProps = { |
| | | ctx, |
| | | fileData: file.data, |
| | | externalResources, |
| | | cfg, |
| | | children: [], |
| | | tree, |
| | | allFiles, |
| | | } |
| | | |
| | | const content = renderPage(cfg, slug, componentData, opts, externalResources) |
| | | yield write({ |
| | | ctx, |
| | | content, |
| | | slug, |
| | | ext: ".html", |
| | | }) |
| | | // If there are affected folders, rebuild their pages |
| | | if (affectedFolders.size > 0) { |
| | | const folderInfo = computeFolderInfo(affectedFolders, content, cfg.locale) |
| | | yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources) |
| | | } |
| | | }, |
| | | } |
| | | } |
| | | |
| | | function _getFolders(slug: FullSlug): SimpleSlug[] { |
| | | var folderName = path.dirname(slug ?? "") as SimpleSlug |
| | | const parentFolderNames = [folderName] |
| | | |
| | | while (folderName !== ".") { |
| | | folderName = path.dirname(folderName ?? "") as SimpleSlug |
| | | parentFolderNames.push(folderName) |
| | | } |
| | | return parentFolderNames |
| | | } |
| | |
| | | import { FullSlug, getFileExtension } from "../../util/path" |
| | | import { ImageOptions, SocialImageOptions, defaultImage, getSatoriFonts } from "../../util/og" |
| | | import sharp from "sharp" |
| | | import satori from "satori" |
| | | import satori, { SatoriOptions } from "satori" |
| | | import { loadEmoji, getIconCode } from "../../util/emoji" |
| | | import { Readable } from "stream" |
| | | import { write } from "./helpers" |
| | | import { BuildCtx } from "../../util/ctx" |
| | | import { QuartzPluginData } from "../vfile" |
| | | |
| | | const defaultOptions: SocialImageOptions = { |
| | | colorScheme: "lightMode", |
| | |
| | | return sharp(Buffer.from(svg)).webp({ quality: 40 }) |
| | | } |
| | | |
/**
 * Generate the social preview (og) image for a single file and write it to
 * `<slug>-og-image.webp`. Returns the path of the emitted file.
 *
 * @param ctx build context (configuration, argv)
 * @param fileData parsed plugin data for the source file
 * @param fonts satori font set used to render the image
 * @param fullOptions resolved social-image options
 */
async function processOgImage(
  ctx: BuildCtx,
  fileData: QuartzPluginData,
  fonts: SatoriOptions["fonts"],
  fullOptions: SocialImageOptions,
) {
  const cfg = ctx.cfg.configuration
  const slug = fileData.slug!
  const titleSuffix = cfg.pageTitleSuffix ?? ""
  // fall back to the locale's default title; `??` keeps the i18n lookup lazy
  const title =
    (fileData.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
  // precedence: explicit socialDescription > description > derived/default text
  const description =
    fileData.frontmatter?.socialDescription ??
    fileData.frontmatter?.description ??
    unescapeHTML(fileData.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description)

  const stream = await generateSocialImage(
    {
      title,
      description,
      fonts,
      cfg,
      fileData,
    },
    fullOptions,
  )

  return write({
    ctx,
    content: stream,
    slug: `${slug}-og-image` as FullSlug,
    ext: ".webp",
  })
}
| | | |
| | | export const CustomOgImagesEmitterName = "CustomOgImages" |
| | | export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> = (userOpts) => { |
| | | const fullOptions = { ...defaultOptions, ...userOpts } |
| | |
| | | const fonts = await getSatoriFonts(headerFont, bodyFont) |
| | | |
| | | for (const [_tree, vfile] of content) { |
| | | // if this file defines socialImage, we can skip |
| | | if (vfile.data.frontmatter?.socialImage !== undefined) { |
| | | continue |
| | | if (vfile.data.frontmatter?.socialImage !== undefined) continue |
| | | yield processOgImage(ctx, vfile.data, fonts, fullOptions) |
| | | } |
| | | |
| | | const slug = vfile.data.slug! |
| | | const titleSuffix = cfg.pageTitleSuffix ?? "" |
| | | const title = |
| | | (vfile.data.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix |
| | | const description = |
| | | vfile.data.frontmatter?.socialDescription ?? |
| | | vfile.data.frontmatter?.description ?? |
| | | unescapeHTML( |
| | | vfile.data.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description, |
| | | ) |
| | | |
| | | const stream = await generateSocialImage( |
| | | { |
| | | title, |
| | | description, |
| | | fonts, |
| | | cfg, |
| | | fileData: vfile.data, |
| | | }, |
| | | fullOptions, |
| | | ) |
| | | async *partialEmit(ctx, _content, _resources, changeEvents) { |
| | | const cfg = ctx.cfg.configuration |
| | | const headerFont = cfg.theme.typography.header |
| | | const bodyFont = cfg.theme.typography.body |
| | | const fonts = await getSatoriFonts(headerFont, bodyFont) |
| | | |
| | | yield write({ |
| | | ctx, |
| | | content: stream, |
| | | slug: `${slug}-og-image` as FullSlug, |
| | | ext: ".webp", |
| | | }) |
| | | // regenerate og images for files that were added or changed
| | | for (const changeEvent of changeEvents) { |
| | | if (!changeEvent.file) continue |
| | | if (changeEvent.file.data.frontmatter?.socialImage !== undefined) continue |
| | | if (changeEvent.type === "add" || changeEvent.type === "change") { |
| | | yield processOgImage(ctx, changeEvent.file.data, fonts, fullOptions) |
| | | } |
| | | } |
| | | }, |
| | | externalResources: (ctx) => { |
| | |
| | | import { QuartzEmitterPlugin } from "../types" |
| | | import fs from "fs" |
| | | import { glob } from "../../util/glob" |
| | | import DepGraph from "../../depgraph" |
| | | import { dirname } from "path" |
| | | |
| | | export const Static: QuartzEmitterPlugin = () => ({ |
| | | name: "Static", |
| | | async getDependencyGraph({ argv, cfg }, _content, _resources) { |
| | | const graph = new DepGraph<FilePath>() |
| | | |
| | | const staticPath = joinSegments(QUARTZ, "static") |
| | | const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns) |
| | | for (const fp of fps) { |
| | | graph.addEdge( |
| | | joinSegments("static", fp) as FilePath, |
| | | joinSegments(argv.output, "static", fp) as FilePath, |
| | | ) |
| | | } |
| | | |
| | | return graph |
| | | }, |
| | | async *emit({ argv, cfg }, _content) { |
| | | async *emit({ argv, cfg }) { |
| | | const staticPath = joinSegments(QUARTZ, "static") |
| | | const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns) |
| | | const outputStaticPath = joinSegments(argv.output, "static") |
| | |
| | | yield dest |
| | | } |
| | | }, |
| | | async *partialEmit() {}, |
| | | }) |
| | |
| | | import { pageResources, renderPage } from "../../components/renderPage" |
| | | import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../vfile" |
| | | import { FullPageLayout } from "../../cfg" |
| | | import { |
| | | FilePath, |
| | | FullSlug, |
| | | getAllSegmentPrefixes, |
| | | joinSegments, |
| | | pathToRoot, |
| | | } from "../../util/path" |
| | | import { FullSlug, getAllSegmentPrefixes, joinSegments, pathToRoot } from "../../util/path" |
| | | import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout" |
| | | import { TagContent } from "../../components" |
| | | import { write } from "./helpers" |
| | | import { i18n } from "../../i18n" |
| | | import DepGraph from "../../depgraph" |
| | | import { i18n, TRANSLATIONS } from "../../i18n" |
| | | import { BuildCtx } from "../../util/ctx" |
| | | import { StaticResources } from "../../util/resources" |
| | | |
| | | interface TagPageOptions extends FullPageLayout { |
| | | sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number |
| | | } |
| | | |
| | | function computeTagInfo( |
| | | allFiles: QuartzPluginData[], |
| | | content: ProcessedContent[], |
| | | locale: keyof typeof TRANSLATIONS, |
| | | ): [Set<string>, Record<string, ProcessedContent>] { |
| | | const tags: Set<string> = new Set( |
| | | allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes), |
| | | ) |
| | | |
| | | // add base tag |
| | | tags.add("index") |
| | | |
| | | const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries( |
| | | [...tags].map((tag) => { |
| | | const title = |
| | | tag === "index" |
| | | ? i18n(locale).pages.tagContent.tagIndex |
| | | : `${i18n(locale).pages.tagContent.tag}: ${tag}` |
| | | return [ |
| | | tag, |
| | | defaultProcessedContent({ |
| | | slug: joinSegments("tags", tag) as FullSlug, |
| | | frontmatter: { title, tags: [] }, |
| | | }), |
| | | ] |
| | | }), |
| | | ) |
| | | |
| | | // Update with actual content if available |
| | | for (const [tree, file] of content) { |
| | | const slug = file.data.slug! |
| | | if (slug.startsWith("tags/")) { |
| | | const tag = slug.slice("tags/".length) |
| | | if (tags.has(tag)) { |
| | | tagDescriptions[tag] = [tree, file] |
| | | if (file.data.frontmatter?.title === tag) { |
| | | file.data.frontmatter.title = `${i18n(locale).pages.tagContent.tag}: ${tag}` |
| | | } |
| | | } |
| | | } |
| | | } |
| | | |
| | | return [tags, tagDescriptions] |
| | | } |
| | | |
| | | async function processTagPage( |
| | | ctx: BuildCtx, |
| | | tag: string, |
| | | tagContent: ProcessedContent, |
| | | allFiles: QuartzPluginData[], |
| | | opts: FullPageLayout, |
| | | resources: StaticResources, |
| | | ) { |
| | | const slug = joinSegments("tags", tag) as FullSlug |
| | | const [tree, file] = tagContent |
| | | const cfg = ctx.cfg.configuration |
| | | const externalResources = pageResources(pathToRoot(slug), resources) |
| | | const componentData: QuartzComponentProps = { |
| | | ctx, |
| | | fileData: file.data, |
| | | externalResources, |
| | | cfg, |
| | | children: [], |
| | | tree, |
| | | allFiles, |
| | | } |
| | | |
| | | const content = renderPage(cfg, slug, componentData, opts, externalResources) |
| | | return write({ |
| | | ctx, |
| | | content, |
| | | slug: file.data.slug!, |
| | | ext: ".html", |
| | | }) |
| | | } |
| | | |
| | | export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts) => { |
| | | const opts: FullPageLayout = { |
| | | ...sharedPageComponents, |
| | |
| | | Footer, |
| | | ] |
| | | }, |
| | | async getDependencyGraph(ctx, content, _resources) { |
| | | const graph = new DepGraph<FilePath>() |
| | | |
| | | for (const [_tree, file] of content) { |
| | | const sourcePath = file.data.filePath! |
| | | const tags = (file.data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes) |
| | | // if the file has at least one tag, it is used in the tag index page |
| | | if (tags.length > 0) { |
| | | tags.push("index") |
| | | } |
| | | |
| | | for (const tag of tags) { |
| | | graph.addEdge( |
| | | sourcePath, |
| | | joinSegments(ctx.argv.output, "tags", tag + ".html") as FilePath, |
| | | ) |
| | | } |
| | | } |
| | | |
| | | return graph |
| | | }, |
| | | async *emit(ctx, content, resources) { |
| | | const allFiles = content.map((c) => c[1].data) |
| | | const cfg = ctx.cfg.configuration |
| | | |
| | | const tags: Set<string> = new Set( |
| | | allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes), |
| | | ) |
| | | |
| | | // add base tag |
| | | tags.add("index") |
| | | |
| | | const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries( |
| | | [...tags].map((tag) => { |
| | | const title = |
| | | tag === "index" |
| | | ? i18n(cfg.locale).pages.tagContent.tagIndex |
| | | : `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}` |
| | | return [ |
| | | tag, |
| | | defaultProcessedContent({ |
| | | slug: joinSegments("tags", tag) as FullSlug, |
| | | frontmatter: { title, tags: [] }, |
| | | }), |
| | | ] |
| | | }), |
| | | ) |
| | | |
| | | for (const [tree, file] of content) { |
| | | const slug = file.data.slug! |
| | | if (slug.startsWith("tags/")) { |
| | | const tag = slug.slice("tags/".length) |
| | | if (tags.has(tag)) { |
| | | tagDescriptions[tag] = [tree, file] |
| | | if (file.data.frontmatter?.title === tag) { |
| | | file.data.frontmatter.title = `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}` |
| | | } |
| | | } |
| | | } |
| | | } |
| | | const [tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale) |
| | | |
| | | for (const tag of tags) { |
| | | const slug = joinSegments("tags", tag) as FullSlug |
| | | const [tree, file] = tagDescriptions[tag] |
| | | const externalResources = pageResources(pathToRoot(slug), file.data, resources) |
| | | const componentData: QuartzComponentProps = { |
| | | ctx, |
| | | fileData: file.data, |
| | | externalResources, |
| | | cfg, |
| | | children: [], |
| | | tree, |
| | | allFiles, |
| | | yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources) |
| | | } |
| | | }, |
| | | async *partialEmit(ctx, content, resources, changeEvents) { |
| | | const allFiles = content.map((c) => c[1].data) |
| | | const cfg = ctx.cfg.configuration |
| | | |
| | | // Find all tags that need to be updated based on changed files |
| | | const affectedTags: Set<string> = new Set() |
| | | for (const changeEvent of changeEvents) { |
| | | if (!changeEvent.file) continue |
| | | const slug = changeEvent.file.data.slug! |
| | | |
| | | // If it's a tag page itself that changed |
| | | if (slug.startsWith("tags/")) { |
| | | const tag = slug.slice("tags/".length) |
| | | affectedTags.add(tag) |
| | | } |
| | | |
| | | const content = renderPage(cfg, slug, componentData, opts, externalResources) |
| | | yield write({ |
| | | ctx, |
| | | content, |
| | | slug: file.data.slug!, |
| | | ext: ".html", |
| | | }) |
| | | // If a file with tags changed, we need to update those tag pages |
| | | const fileTags = changeEvent.file.data.frontmatter?.tags ?? [] |
| | | fileTags.flatMap(getAllSegmentPrefixes).forEach((tag) => affectedTags.add(tag)) |
| | | |
| | | // Always update the index tag page if any file changes |
| | | affectedTags.add("index") |
| | | } |
| | | |
| | | // If there are affected tags, rebuild their pages |
| | | if (affectedTags.size > 0) { |
| | | // We still need to compute all tags because tag pages show all tags |
| | | const [_tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale) |
| | | |
| | | for (const tag of affectedTags) { |
| | | if (tagDescriptions[tag]) { |
| | | yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources) |
| | | } |
| | | } |
| | | } |
| | | }, |
| | | } |
| | |
| | | import { QuartzTransformerPlugin } from "../types" |
| | | import yaml from "js-yaml" |
| | | import toml from "toml" |
| | | import { FilePath, FullSlug, joinSegments, slugifyFilePath, slugTag } from "../../util/path" |
| | | import { FilePath, FullSlug, getFileExtension, slugifyFilePath, slugTag } from "../../util/path" |
| | | import { QuartzPluginData } from "../vfile" |
| | | import { i18n } from "../../i18n" |
| | | import { Argv } from "../../util/ctx" |
| | | import { VFile } from "vfile" |
| | | import path from "path" |
| | | |
| | | export interface Options { |
| | | delimiters: string | [string, string] |
| | |
| | | .map((tag: string | number) => tag.toString()) |
| | | } |
| | | |
| | | export function getAliasSlugs(aliases: string[], argv: Argv, file: VFile): FullSlug[] { |
| | | const dir = path.posix.relative(argv.directory, path.dirname(file.data.filePath!)) |
| | | const slugs: FullSlug[] = aliases.map( |
| | | (alias) => path.posix.join(dir, slugifyFilePath(alias as FilePath)) as FullSlug, |
| | | ) |
| | | const permalink = file.data.frontmatter?.permalink |
| | | if (typeof permalink === "string") { |
| | | slugs.push(permalink as FullSlug) |
| | | function getAliasSlugs(aliases: string[]): FullSlug[] { |
| | | const res: FullSlug[] = [] |
| | | for (const alias of aliases) { |
| | | const isMd = getFileExtension(alias) === "md" |
| | | const mockFp = isMd ? alias : alias + ".md" |
| | | const slug = slugifyFilePath(mockFp as FilePath) |
| | | res.push(slug) |
| | | } |
| | | // fix any slugs that have trailing slash |
| | | return slugs.map((slug) => |
| | | slug.endsWith("/") ? (joinSegments(slug, "index") as FullSlug) : slug, |
| | | ) |
| | | |
| | | return res |
| | | } |
| | | |
| | | export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts) => { |
| | | const opts = { ...defaultOptions, ...userOpts } |
| | | return { |
| | | name: "FrontMatter", |
| | | markdownPlugins({ cfg, allSlugs, argv }) { |
| | | markdownPlugins(ctx) { |
| | | const { cfg, allSlugs } = ctx |
| | | return [ |
| | | [remarkFrontmatter, ["yaml", "toml"]], |
| | | () => { |
| | |
| | | const aliases = coerceToArray(coalesceAliases(data, ["aliases", "alias"])) |
| | | if (aliases) { |
| | | data.aliases = aliases // frontmatter |
| | | const slugs = (file.data.aliases = getAliasSlugs(aliases, argv, file)) |
| | | allSlugs.push(...slugs) |
| | | file.data.aliases = getAliasSlugs(aliases) |
| | | allSlugs.push(...file.data.aliases) |
| | | } |
| | | |
| | | if (data.permalink != null && data.permalink.toString() !== "") { |
| | | data.permalink = data.permalink.toString() as FullSlug |
| | | const aliases = file.data.aliases ?? [] |
| | | aliases.push(data.permalink) |
| | | file.data.aliases = aliases |
| | | allSlugs.push(data.permalink) |
| | | } |
| | | |
| | | const cssclasses = coerceToArray(coalesceAliases(data, ["cssclasses", "cssclass"])) |
| | | if (cssclasses) data.cssclasses = cssclasses |
| | | |
| | |
| | | const opts = { ...defaultOptions, ...userOpts } |
| | | return { |
| | | name: "CreatedModifiedDate", |
| | | markdownPlugins() { |
| | | markdownPlugins(ctx) { |
| | | return [ |
| | | () => { |
| | | let repo: Repository | undefined = undefined |
| | |
| | | let modified: MaybeDate = undefined |
| | | let published: MaybeDate = undefined |
| | | |
| | | const fp = file.data.filePath! |
| | | const fullFp = path.isAbsolute(fp) ? fp : path.posix.join(file.cwd, fp) |
| | | const fp = file.data.relativePath! |
| | | const fullFp = path.posix.join(ctx.argv.directory, fp) |
| | | for (const source of opts.priority) { |
| | | if (source === "filesystem") { |
| | | const st = await fs.promises.stat(fullFp) |
| | |
| | | // Get a reference to the main git repo. |
| | | // It's either the same as the workdir, |
| | | // or 1+ level higher in case of a submodule/subtree setup |
| | | repo = Repository.discover(file.cwd) |
| | | repo = Repository.discover(ctx.argv.directory) |
| | | } |
| | | |
| | | try { |
| | | modified ||= await repo.getFileLatestModifiedDateAsync(file.data.filePath!) |
| | | modified ||= await repo.getFileLatestModifiedDateAsync(fullFp) |
| | | } catch { |
| | | console.log( |
| | | chalk.yellow( |
| | |
| | | textTransform(_ctx, src) { |
| | | if (opts.wikilinks) { |
| | | src = src.toString() |
| | | src = src.replaceAll(relrefRegex, (value, ...capture) => { |
| | | src = src.replaceAll(relrefRegex, (_value, ...capture) => { |
| | | const [text, link] = capture |
| | | return `[${text}](${link})` |
| | | }) |
| | |
| | | |
| | | if (opts.removePredefinedAnchor) { |
| | | src = src.toString() |
| | | src = src.replaceAll(predefinedHeadingIdRegex, (value, ...capture) => { |
| | | src = src.replaceAll(predefinedHeadingIdRegex, (_value, ...capture) => { |
| | | const [headingText] = capture |
| | | return headingText |
| | | }) |
| | |
| | | |
| | | if (opts.removeHugoShortcode) { |
| | | src = src.toString() |
| | | src = src.replaceAll(hugoShortcodeRegex, (value, ...capture) => { |
| | | src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => { |
| | | const [scContent] = capture |
| | | return scContent |
| | | }) |
| | |
| | | |
| | | if (opts.replaceFigureWithMdImg) { |
| | | src = src.toString() |
| | | src = src.replaceAll(figureTagRegex, (value, ...capture) => { |
| | | src = src.replaceAll(figureTagRegex, (_value, ...capture) => { |
| | | const [src] = capture |
| | | return `` |
| | | }) |
| | |
| | | |
| | | if (opts.replaceOrgLatex) { |
| | | src = src.toString() |
| | | src = src.replaceAll(inlineLatexRegex, (value, ...capture) => { |
| | | src = src.replaceAll(inlineLatexRegex, (_value, ...capture) => { |
| | | const [eqn] = capture |
| | | return `$${eqn}$` |
| | | }) |
| | | src = src.replaceAll(blockLatexRegex, (value, ...capture) => { |
| | | src = src.replaceAll(blockLatexRegex, (_value, ...capture) => { |
| | | const [eqn] = capture |
| | | return `$$${eqn}$$` |
| | | }) |
| | |
| | | import { QuartzTransformerPlugin } from "../types" |
| | | import { PluggableList } from "unified" |
| | | import { SKIP, visit } from "unist-util-visit" |
| | | import { visit } from "unist-util-visit" |
| | | import { ReplaceFunction, findAndReplace as mdastFindReplace } from "mdast-util-find-and-replace" |
| | | import { Root, Html, Paragraph, Text, Link, Parent } from "mdast" |
| | | import { Node } from "unist" |
| | | import { VFile } from "vfile" |
| | | import { BuildVisitor } from "unist-util-visit" |
| | | |
| | | export interface Options { |
| | |
// Regexes for Roam Research markup constructs embedded in markdown.
// {{or:a|b|c}} choice blocks
const orRegex = new RegExp(/{{or:(.*?)}}/, "g")
// {{…TODO…}} / {{…DONE…}} task markers
const TODORegex = new RegExp(/{{.*?\bTODO\b.*?}}/, "g")
const DONERegex = new RegExp(/{{.*?\bDONE\b.*?}}/, "g")
// {{[[video]]: url}} embeds; capture group 1 is the url after the colon
const videoRegex = new RegExp(/{{.*?\[\[video\]\].*?\:(.*?)}}/, "g")
// video embeds pointing at YouTube; group 2 captures the video id
const youtubeRegex = new RegExp(
  /{{.*?\[\[video\]\].*?(https?:\/\/(?:www\.)?youtu(?:be\.com\/watch\?v=|\.be\/)([\w\-\_]*)(&(amp;)?[\w\?=]*)?)}}/,
  "g",
)

// const multimediaRegex = new RegExp(/{{.*?\b(video|audio)\b.*?\:(.*?)}}/, "g")

// {{[[audio]]: url}} and {{[[pdf]]: url}} embeds; group 1 is the url
const audioRegex = new RegExp(/{{.*?\[\[audio\]\].*?\:(.*?)}}/, "g")
const pdfRegex = new RegExp(/{{.*?\[\[pdf\]\].*?\:(.*?)}}/, "g")
// [[>]] blockquote marker followed by the quoted text (group 2)
const blockquoteRegex = new RegExp(/(\[\[>\]\])\s*(.*)/, "g")
// ^^highlight^^ and __italic__ inline styles (greedy: spans the outermost pair)
const roamHighlightRegex = new RegExp(/\^\^(.+)\^\^/, "g")
const roamItalicRegex = new RegExp(/__(.+)__/, "g")
// not yet handled, matched for future work
const tableRegex = new RegExp(/- {{.*?\btable\b.*?}}/, "g") /* TODO */
// `attribute::` key prefixes (word sequence ending in ::)
const attributeRegex = new RegExp(/\b\w+(?:\s+\w+)*::/, "g") /* TODO */
| | | |
| | | function isSpecialEmbed(node: Paragraph): boolean { |
| | | if (node.children.length !== 2) return false |
| | |
| | | const plugins: PluggableList = [] |
| | | |
| | | plugins.push(() => { |
| | | return (tree: Root, file: VFile) => { |
| | | return (tree: Root) => { |
| | | const replacements: [RegExp, ReplaceFunction][] = [] |
| | | |
| | | // Handle special embeds (audio, video, PDF) |
| | |
| | | import { QuartzComponent } from "../components/types" |
| | | import { FilePath } from "../util/path" |
| | | import { BuildCtx } from "../util/ctx" |
| | | import DepGraph from "../depgraph" |
| | | import { VFile } from "vfile" |
| | | |
| | | export interface PluginTypes { |
| | | transformers: QuartzTransformerPluginInstance[] |
| | |
| | | shouldPublish(ctx: BuildCtx, content: ProcessedContent): boolean |
| | | } |
| | | |
// A single file-level change observed between builds; consumed by emitter
// plugins' `partialEmit` to decide which outputs to rebuild incrementally.
export type ChangeEvent = {
  type: "add" | "change" | "delete"
  path: FilePath
  // parsed content of the changed file; optional — consumers skip events
  // without it (presumably deletes have no parsed file — TODO confirm)
  file?: VFile
}
| | | |
| | | export type QuartzEmitterPlugin<Options extends OptionType = undefined> = ( |
| | | opts?: Options, |
| | | ) => QuartzEmitterPluginInstance |
| | | export type QuartzEmitterPluginInstance = { |
| | | name: string |
| | | emit( |
| | | emit: ( |
| | | ctx: BuildCtx, |
| | | content: ProcessedContent[], |
| | | resources: StaticResources, |
| | | ): Promise<FilePath[]> | AsyncGenerator<FilePath> |
| | | ) => Promise<FilePath[]> | AsyncGenerator<FilePath> |
| | | partialEmit?: ( |
| | | ctx: BuildCtx, |
| | | content: ProcessedContent[], |
| | | resources: StaticResources, |
| | | changeEvents: ChangeEvent[], |
| | | ) => Promise<FilePath[]> | AsyncGenerator<FilePath> | null |
| | | /** |
| | | * Returns the components (if any) that are used in rendering the page. |
| | | * This helps Quartz optimize the page by only including necessary resources |
| | | * for components that are actually used. |
| | | */ |
| | | getQuartzComponents?: (ctx: BuildCtx) => QuartzComponent[] |
| | | getDependencyGraph?( |
| | | ctx: BuildCtx, |
| | | content: ProcessedContent[], |
| | | resources: StaticResources, |
| | | ): Promise<DepGraph<FilePath>> |
| | | externalResources?: ExternalResourcesFn |
| | | } |
| | |
| | | const perf = new PerfTimer() |
| | | const log = new QuartzLogger(ctx.argv.verbose) |
| | | |
| | | log.start(`Emitting output files`) |
| | | log.start(`Emitting files`) |
| | | |
| | | let emittedFiles = 0 |
| | | const staticResources = getStaticResourcesFromPlugins(ctx) |
| | |
| | | if (ctx.argv.verbose) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } else { |
| | | log.updateText(`Emitting output files: ${emitter.name} -> ${chalk.gray(file)}`) |
| | | log.updateText(`${emitter.name} -> ${chalk.gray(file)}`) |
| | | } |
| | | } |
| | | } else { |
| | |
| | | if (ctx.argv.verbose) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } else { |
| | | log.updateText(`Emitting output files: ${emitter.name} -> ${chalk.gray(file)}`) |
| | | log.updateText(`${emitter.name} -> ${chalk.gray(file)}`) |
| | | } |
| | | } |
| | | } |
| | |
| | | import { MarkdownContent, ProcessedContent } from "../plugins/vfile" |
| | | import { PerfTimer } from "../util/perf" |
| | | import { read } from "to-vfile" |
| | | import { FilePath, FullSlug, QUARTZ, slugifyFilePath } from "../util/path" |
| | | import { FilePath, QUARTZ, slugifyFilePath } from "../util/path" |
| | | import path from "path" |
| | | import workerpool, { Promise as WorkerPromise } from "workerpool" |
| | | import { QuartzLogger } from "../util/log" |
| | | import { trace } from "../util/trace" |
| | | import { BuildCtx } from "../util/ctx" |
| | | import { BuildCtx, WorkerSerializableBuildCtx } from "../util/ctx" |
| | | import chalk from "chalk" |
| | | |
| | | export type QuartzMdProcessor = Processor<MDRoot, MDRoot, MDRoot> |
| | | export type QuartzHtmlProcessor = Processor<undefined, MDRoot, HTMLRoot> |
| | |
| | | process.exit(1) |
| | | } |
| | | |
| | | const mdPromises: WorkerPromise<[MarkdownContent[], FullSlug[]]>[] = [] |
| | | for (const chunk of chunks(fps, CHUNK_SIZE)) { |
| | | mdPromises.push(pool.exec("parseMarkdown", [ctx.buildId, argv, chunk])) |
| | | const serializableCtx: WorkerSerializableBuildCtx = { |
| | | buildId: ctx.buildId, |
| | | argv: ctx.argv, |
| | | allSlugs: ctx.allSlugs, |
| | | allFiles: ctx.allFiles, |
| | | incremental: ctx.incremental, |
| | | } |
| | | const mdResults: [MarkdownContent[], FullSlug[]][] = |
| | | await WorkerPromise.all(mdPromises).catch(errorHandler) |
| | | |
| | | const childPromises: WorkerPromise<ProcessedContent[]>[] = [] |
| | | for (const [_, extraSlugs] of mdResults) { |
| | | ctx.allSlugs.push(...extraSlugs) |
| | | const textToMarkdownPromises: WorkerPromise<MarkdownContent[]>[] = [] |
| | | let processedFiles = 0 |
| | | for (const chunk of chunks(fps, CHUNK_SIZE)) { |
| | | textToMarkdownPromises.push(pool.exec("parseMarkdown", [serializableCtx, chunk])) |
| | | } |
| | | |
| | | const mdResults: Array<MarkdownContent[]> = await Promise.all( |
| | | textToMarkdownPromises.map(async (promise) => { |
| | | const result = await promise |
| | | processedFiles += result.length |
| | | log.updateText(`text->markdown ${chalk.gray(`${processedFiles}/${fps.length}`)}`) |
| | | return result |
| | | }), |
| | | ).catch(errorHandler) |
| | | |
| | | const markdownToHtmlPromises: WorkerPromise<ProcessedContent[]>[] = [] |
| | | processedFiles = 0 |
| | | for (const [mdChunk, _] of mdResults) { |
| | | childPromises.push(pool.exec("processHtml", [ctx.buildId, argv, mdChunk, ctx.allSlugs])) |
| | | markdownToHtmlPromises.push(pool.exec("processHtml", [serializableCtx, mdChunk])) |
| | | } |
| | | const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch(errorHandler) |
| | | const results: ProcessedContent[][] = await Promise.all( |
| | | markdownToHtmlPromises.map(async (promise) => { |
| | | const result = await promise |
| | | processedFiles += result.length |
| | | log.updateText(`markdown->html ${chalk.gray(`${processedFiles}/${fps.length}`)}`) |
| | | return result |
| | | }), |
| | | ).catch(errorHandler) |
| | | |
| | | res = results.flat() |
| | | await pool.terminate() |
| | |
| | | import { QuartzConfig } from "../cfg" |
| | | import { FullSlug } from "./path" |
| | | import { FilePath, FullSlug } from "./path" |
| | | |
| | | export interface Argv { |
| | | directory: string |
| | | verbose: boolean |
| | | output: string |
| | | serve: boolean |
| | | fastRebuild: boolean |
| | | watch: boolean |
| | | port: number |
| | | wsPort: number |
| | | remoteDevHost?: string |
| | |
| | | argv: Argv |
| | | cfg: QuartzConfig |
| | | allSlugs: FullSlug[] |
| | | allFiles: FilePath[] |
| | | incremental: boolean |
| | | } |
| | | |
// BuildCtx minus `cfg`: the loaded Quartz config is not sent across the
// workerpool boundary — workers import it themselves and re-attach it when
// reconstructing a full BuildCtx (see parseMarkdown/processHtml).
export type WorkerSerializableBuildCtx = Omit<BuildCtx, "cfg">
| | |
| | | import truncate from "ansi-truncate" |
| | | import readline from "readline" |
| | | |
| | | export class QuartzLogger { |
  // when true, log plain lines instead of driving the ANSI spinner
  verbose: boolean
  private spinnerInterval: NodeJS.Timeout | undefined
  // main line shown next to the spinner (set by start())
  private spinnerText: string = ""
  // transient detail appended after the spinner text (set by updateText())
  private updateSuffix: string = ""
  private spinnerIndex: number = 0
  private readonly spinnerChars = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
| | | |
| | | constructor(verbose: boolean) { |
| | | this.verbose = verbose |
| | | const isInteractiveTerminal = |
| | | process.stdout.isTTY && process.env.TERM !== "dumb" && !process.env.CI |
| | | this.verbose = verbose || !isInteractiveTerminal |
| | | } |
| | | |
| | | start(text: string) { |
| | | this.spinnerText = text |
| | | |
| | | if (this.verbose) { |
| | | console.log(text) |
| | | } else { |
| | |
| | | this.spinnerInterval = setInterval(() => { |
| | | readline.clearLine(process.stdout, 0) |
| | | readline.cursorTo(process.stdout, 0) |
| | | process.stdout.write(`${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`) |
| | | |
| | | const columns = process.stdout.columns || 80 |
| | | let output = `${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}` |
| | | if (this.updateSuffix) { |
| | | output += `: ${this.updateSuffix}` |
| | | } |
| | | |
| | | const truncated = truncate(output, columns) |
| | | process.stdout.write(truncated) |
| | | this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length |
| | | }, 20) |
| | | } |
| | | } |
| | | |
| | | updateText(text: string) { |
| | | this.spinnerText = text |
| | | this.updateSuffix = text |
| | | } |
| | | |
| | | end(text?: string) { |
| | |
| | | return s === suffix || s.endsWith("/" + suffix) |
| | | } |
| | | |
| | | function trimSuffix(s: string, suffix: string): string { |
| | | export function trimSuffix(s: string, suffix: string): string { |
| | | if (endsWith(s, suffix)) { |
| | | s = s.slice(0, -suffix.length) |
| | | } |
| | |
| | | import sourceMapSupport from "source-map-support" |
| | | sourceMapSupport.install(options) |
| | | import cfg from "../quartz.config" |
| | | import { Argv, BuildCtx } from "./util/ctx" |
| | | import { FilePath, FullSlug } from "./util/path" |
| | | import { BuildCtx, WorkerSerializableBuildCtx } from "./util/ctx" |
| | | import { FilePath } from "./util/path" |
| | | import { |
| | | createFileParser, |
| | | createHtmlProcessor, |
| | |
| | | |
| | | // only called from worker thread |
| | | export async function parseMarkdown( |
| | | buildId: string, |
| | | argv: Argv, |
| | | partialCtx: WorkerSerializableBuildCtx, |
| | | fps: FilePath[], |
| | | ): Promise<[MarkdownContent[], FullSlug[]]> { |
| | | // this is a hack |
| | | // we assume markdown parsers can add to `allSlugs`, |
| | | // but don't actually use them |
| | | const allSlugs: FullSlug[] = [] |
| | | ): Promise<MarkdownContent[]> { |
| | | const ctx: BuildCtx = { |
| | | buildId, |
| | | ...partialCtx, |
| | | cfg, |
| | | argv, |
| | | allSlugs, |
| | | } |
| | | return [await createFileParser(ctx, fps)(createMdProcessor(ctx)), allSlugs] |
| | | return await createFileParser(ctx, fps)(createMdProcessor(ctx)) |
| | | } |
| | | |
| | | // only called from worker thread |
| | | export function processHtml( |
| | | buildId: string, |
| | | argv: Argv, |
| | | partialCtx: WorkerSerializableBuildCtx, |
| | | mds: MarkdownContent[], |
| | | allSlugs: FullSlug[], |
| | | ): Promise<ProcessedContent[]> { |
| | | const ctx: BuildCtx = { |
| | | buildId, |
| | | ...partialCtx, |
| | | cfg, |
| | | argv, |
| | | allSlugs, |
| | | } |
| | | return createMarkdownParser(ctx, mds)(createHtmlProcessor(ctx)) |
| | | } |
| | |
| | | "skipLibCheck": true, |
| | | "allowSyntheticDefaultImports": true, |
| | | "forceConsistentCasingInFileNames": true, |
| | | "noUnusedLocals": true, |
| | | "noUnusedParameters": true, |
| | | "esModuleInterop": true, |
| | | "jsx": "react-jsx", |
| | | "jsxImportSource": "preact" |