From 9e83af04a78d5988bd517bcc61c48998bbfa17ef Mon Sep 17 00:00:00 2001
From: Jacky Zhao <j.zhao2k19@gmail.com>
Date: Mon, 24 Jul 2023 00:07:19 +0000
Subject: [PATCH] refactor static and asset emission to be actual emitter plugins
---
quartz/plugins/types.ts | 4
quartz/processors/filter.ts | 8
quartz/plugins/emitters/static.ts | 21 +++
quartz/worker.ts | 19 +-
quartz/processors/parse.ts | 33 +---
quartz/build.ts | 156 ++++++++++-----------
content/features/upcoming features.md | 4
quartz/plugins/emitters/contentIndex.ts | 3
quartz/plugins/emitters/tagPage.tsx | 3
quartz/plugins/transformers/ofm.ts | 9
quartz/ctx.ts | 1
quartz/processors/emit.ts | 59 ++------
quartz/plugins/emitters/assets.ts | 36 +++++
quartz/plugins/emitters/folderPage.tsx | 3
quartz/plugins/emitters/index.ts | 2
quartz/plugins/emitters/contentPage.tsx | 3
quartz/plugins/emitters/aliases.ts | 4
quartz.config.ts | 2
18 files changed, 197 insertions(+), 173 deletions(-)
diff --git a/content/features/upcoming features.md b/content/features/upcoming features.md
index 351eae4..65fb56d 100644
--- a/content/features/upcoming features.md
+++ b/content/features/upcoming features.md
@@ -3,6 +3,7 @@
---
## high priority
+
- attachments path
- https://help.obsidian.md/Editing+and+formatting/Tags#Nested+tags nested tags??
- watch mode for config/source code
@@ -13,6 +14,7 @@
- note/header/block transcludes: https://help.obsidian.md/Linking+notes+and+files/Embedding+files
## misc
+
- breadcrumbs component
- filetree component
- recent notes component
@@ -25,6 +27,6 @@
- audio/video embed styling
- Canvas
- mermaid styling: https://mermaid.js.org/config/theming.html#theme-variables-reference-table
- - https://github.com/jackyzha0/quartz/issues/331
+ - https://github.com/jackyzha0/quartz/issues/331
- parse all images in page: use this for page lists if applicable?
- CV mode? with print stylesheet
diff --git a/quartz.config.ts b/quartz.config.ts
index d6aed2f..6ead8a0 100644
--- a/quartz.config.ts
+++ b/quartz.config.ts
@@ -114,6 +114,8 @@
enableSiteMap: true,
enableRSS: true,
}),
+ Plugin.Assets(),
+ Plugin.Static(),
],
},
}
diff --git a/quartz/build.ts b/quartz/build.ts
index e5bfcaa..553bd8c 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -14,16 +14,14 @@
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
import WebSocket, { WebSocketServer } from "ws"
-
-interface Argv {
- directory: string
- verbose: boolean
- output: string
- serve: boolean
- port: number
-}
+import { Argv, BuildCtx } from "./ctx"
async function buildQuartz(argv: Argv, version: string) {
+ const ctx: BuildCtx = {
+ argv,
+ cfg,
+ }
+
console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
const perf = new PerfTimer()
const output = argv.output
@@ -38,12 +36,10 @@
console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
}
- // clean
perf.addEvent("clean")
await rimraf(output)
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
- // glob
perf.addEvent("glob")
const fps = await globby("**/*.md", {
cwd: argv.directory,
@@ -55,89 +51,87 @@
)
const filePaths = fps.map((fp) => `${argv.directory}${path.sep}${fp}` as FilePath)
- const parsedFiles = await parseMarkdown(
- cfg.plugins.transformers,
- argv.directory,
- filePaths,
- argv.verbose,
- )
- const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
- await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
+ const parsedFiles = await parseMarkdown(ctx, filePaths)
+ const filteredContent = filterContent(ctx, parsedFiles)
+ await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
if (argv.serve) {
- const wss = new WebSocketServer({ port: 3001 })
- const connections: WebSocket[] = []
- wss.on("connection", (ws) => connections.push(ws))
+ await startServing(ctx, parsedFiles)
+ }
+}
- const ignored = await isGitIgnored()
- const contentMap = new Map<FilePath, ProcessedContent>()
- for (const content of parsedFiles) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
- }
+async function startServing(ctx: BuildCtx, initialContent: ProcessedContent[]) {
+ const { argv } = ctx
+ const wss = new WebSocketServer({ port: 3001 })
+ const connections: WebSocket[] = []
+ wss.on("connection", (ws) => connections.push(ws))
- async function rebuild(fp: string, action: "add" | "change" | "unlink") {
- perf.addEvent("rebuild")
- if (!ignored(fp)) {
- console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`))
- const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath
+ const ignored = await isGitIgnored()
+ const contentMap = new Map<FilePath, ProcessedContent>()
+ for (const content of initialContent) {
+ const [_tree, vfile] = content
+ contentMap.set(vfile.data.filePath!, content)
+ }
- try {
- if (action === "add" || action === "change") {
- const [parsedContent] = await parseMarkdown(
- cfg.plugins.transformers,
- argv.directory,
- [fullPath],
- argv.verbose,
- )
- contentMap.set(fullPath, parsedContent)
- } else if (action === "unlink") {
- contentMap.delete(fullPath)
- }
+ async function rebuild(fp: string, action: "add" | "change" | "unlink") {
+ const perf = new PerfTimer()
+ if (!ignored(fp)) {
+ console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`))
+ const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath
- await rimraf(output)
- const parsedFiles = [...contentMap.values()]
- const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
- await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
- console.log(chalk.green(`Done rebuilding in ${perf.timeSince("rebuild")}`))
- } catch {
- console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
+ try {
+ if (action === "add" || action === "change") {
+ const [parsedContent] = await parseMarkdown(ctx, [fullPath])
+ contentMap.set(fullPath, parsedContent)
+ } else if (action === "unlink") {
+ contentMap.delete(fullPath)
}
- connections.forEach((conn) => conn.send("rebuild"))
+ await rimraf(argv.output)
+ const parsedFiles = [...contentMap.values()]
+ const filteredContent = filterContent(ctx, parsedFiles)
+ await emitContent(
+ ctx,
+ filteredContent,
+ )
+ console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
+ } catch {
+ console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
}
+
+ connections.forEach((conn) => conn.send("rebuild"))
}
-
- const watcher = chokidar.watch(".", {
- persistent: true,
- cwd: argv.directory,
- ignoreInitial: true,
- })
-
- watcher
- .on("add", (fp) => rebuild(fp, "add"))
- .on("change", (fp) => rebuild(fp, "change"))
- .on("unlink", (fp) => rebuild(fp, "unlink"))
-
- const server = http.createServer(async (req, res) => {
- await serveHandler(req, res, {
- public: output,
- directoryListing: false,
- })
- const status = res.statusCode
- const statusString =
- status >= 200 && status < 300
- ? chalk.green(`[${status}]`)
- : status >= 300 && status < 400
- ? chalk.yellow(`[${status}]`)
- : chalk.red(`[${status}]`)
- console.log(statusString + chalk.grey(` ${req.url}`))
- })
- server.listen(argv.port)
- console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`))
- console.log("hint: exit with ctrl+c")
}
+
+ const watcher = chokidar.watch(".", {
+ persistent: true,
+ cwd: argv.directory,
+ ignoreInitial: true,
+ })
+
+ watcher
+ .on("add", (fp) => rebuild(fp, "add"))
+ .on("change", (fp) => rebuild(fp, "change"))
+ .on("unlink", (fp) => rebuild(fp, "unlink"))
+
+ const server = http.createServer(async (req, res) => {
+ await serveHandler(req, res, {
+ public: argv.output,
+ directoryListing: false,
+ })
+ const status = res.statusCode
+ const statusString =
+ status >= 200 && status < 300
+ ? chalk.green(`[${status}]`)
+ : status >= 300 && status < 400
+ ? chalk.yellow(`[${status}]`)
+ : chalk.red(`[${status}]`)
+ console.log(statusString + chalk.grey(` ${req.url}`))
+ })
+ server.listen(argv.port)
+ console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`))
+ console.log("hint: exit with ctrl+c")
}
export default async (argv: Argv, version: string) => {
diff --git a/quartz/ctx.ts b/quartz/ctx.ts
index 011a262..355b4cb 100644
--- a/quartz/ctx.ts
+++ b/quartz/ctx.ts
@@ -10,6 +10,5 @@
export interface BuildCtx {
argv: Argv
- version: string
cfg: QuartzConfig
}
diff --git a/quartz/plugins/emitters/aliases.ts b/quartz/plugins/emitters/aliases.ts
index 2350cb7..0a992ba 100644
--- a/quartz/plugins/emitters/aliases.ts
+++ b/quartz/plugins/emitters/aliases.ts
@@ -13,12 +13,12 @@
getQuartzComponents() {
return []
},
- async emit(contentFolder, _cfg, content, _resources, emit): Promise<FilePath[]> {
+ async emit({argv}, content, _resources, emit): Promise<FilePath[]> {
const fps: FilePath[] = []
for (const [_tree, file] of content) {
const ogSlug = canonicalizeServer(file.data.slug!)
- const dir = path.relative(contentFolder, file.dirname ?? contentFolder)
+ const dir = path.relative(argv.directory, file.dirname ?? argv.directory)
let aliases: CanonicalSlug[] = []
if (file.data.frontmatter?.aliases) {
diff --git a/quartz/plugins/emitters/assets.ts b/quartz/plugins/emitters/assets.ts
index e69de29..d0913c4 100644
--- a/quartz/plugins/emitters/assets.ts
+++ b/quartz/plugins/emitters/assets.ts
@@ -0,0 +1,36 @@
+import { globbyStream } from "globby"
+import {
+ FilePath, slugifyFilePath,
+} from "../../path"
+import { QuartzEmitterPlugin } from "../types"
+import path from "path"
+import fs from "fs"
+
+export const Assets: QuartzEmitterPlugin = () => ({
+ name: "Assets",
+ getQuartzComponents() {
+ return []
+ },
+ async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> {
+ // glob all non MD/MDX/HTML files in content folder and copy it over
+ const assetsPath = path.join(argv.output, "assets")
+
+ const fps: FilePath[] = []
+ for await (const rawFp of globbyStream("**", {
+ ignore: ["**/*.md"],
+ cwd: argv.directory,
+ })) {
+ const fp = rawFp as FilePath
+ const ext = path.extname(fp)
+ const src = path.join(argv.directory, fp) as FilePath
+ const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath
+ const dest = path.join(assetsPath, name) as FilePath
+ const dir = path.dirname(dest) as FilePath
+ await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
+ await fs.promises.copyFile(src, dest)
+ fps.push(path.join("assets", fp) as FilePath)
+ }
+
+ return fps
+ },
+})
diff --git a/quartz/plugins/emitters/contentIndex.ts b/quartz/plugins/emitters/contentIndex.ts
index 0b1fec7..66cf137 100644
--- a/quartz/plugins/emitters/contentIndex.ts
+++ b/quartz/plugins/emitters/contentIndex.ts
@@ -68,7 +68,8 @@
opts = { ...defaultOptions, ...opts }
return {
name: "ContentIndex",
- async emit(_contentDir, cfg, content, _resources, emit) {
+ async emit(ctx, content, _resources, emit) {
+ const cfg = ctx.cfg.configuration
const emitted: FilePath[] = []
const linkIndex: ContentIndex = new Map()
for (const [_tree, file] of content) {
diff --git a/quartz/plugins/emitters/contentPage.tsx b/quartz/plugins/emitters/contentPage.tsx
index 59a24f2..97198da 100644
--- a/quartz/plugins/emitters/contentPage.tsx
+++ b/quartz/plugins/emitters/contentPage.tsx
@@ -22,7 +22,8 @@
getQuartzComponents() {
return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
},
- async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
+ async emit(ctx, content, resources, emit): Promise<FilePath[]> {
+ const cfg = ctx.cfg.configuration
const fps: FilePath[] = []
const allFiles = content.map((c) => c[1].data)
for (const [tree, file] of content) {
diff --git a/quartz/plugins/emitters/folderPage.tsx b/quartz/plugins/emitters/folderPage.tsx
index f58ac39..8c6ae02 100644
--- a/quartz/plugins/emitters/folderPage.tsx
+++ b/quartz/plugins/emitters/folderPage.tsx
@@ -22,9 +22,10 @@
getQuartzComponents() {
return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
},
- async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
+ async emit(ctx, content, resources, emit): Promise<FilePath[]> {
const fps: FilePath[] = []
const allFiles = content.map((c) => c[1].data)
+ const cfg = ctx.cfg.configuration
const folders: Set<CanonicalSlug> = new Set(
allFiles.flatMap((data) => {
diff --git a/quartz/plugins/emitters/index.ts b/quartz/plugins/emitters/index.ts
index 0017e82..6f1e143 100644
--- a/quartz/plugins/emitters/index.ts
+++ b/quartz/plugins/emitters/index.ts
@@ -3,3 +3,5 @@
export { FolderPage } from "./folderPage"
export { ContentIndex } from "./contentIndex"
export { AliasRedirects } from "./aliases"
+export { Assets } from "./assets"
+export { Static } from "./static"
\ No newline at end of file
diff --git a/quartz/plugins/emitters/static.ts b/quartz/plugins/emitters/static.ts
new file mode 100644
index 0000000..7e4af15
--- /dev/null
+++ b/quartz/plugins/emitters/static.ts
@@ -0,0 +1,21 @@
+import { globby } from "globby"
+import {
+ FilePath, QUARTZ
+} from "../../path"
+import { QuartzEmitterPlugin } from "../types"
+import path from "path"
+import fs from "fs"
+
+
+export const Static: QuartzEmitterPlugin = () => ({
+ name: "Static",
+ getQuartzComponents() {
+ return []
+ },
+ async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> {
+ const staticPath = path.join(QUARTZ, "static")
+ const fps = await globby("*", { cwd: staticPath })
+ await fs.promises.cp(staticPath, path.join(argv.output, "static"), { recursive: true })
+ return fps.map(fp => path.join("static", fp)) as FilePath[]
+ },
+})
diff --git a/quartz/plugins/emitters/tagPage.tsx b/quartz/plugins/emitters/tagPage.tsx
index 2903bb4..69b0180 100644
--- a/quartz/plugins/emitters/tagPage.tsx
+++ b/quartz/plugins/emitters/tagPage.tsx
@@ -21,9 +21,10 @@
getQuartzComponents() {
return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
},
- async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
+ async emit(ctx, content, resources, emit): Promise<FilePath[]> {
const fps: FilePath[] = []
const allFiles = content.map((c) => c[1].data)
+ const cfg = ctx.cfg.configuration
const tags: Set<string> = new Set(allFiles.flatMap((data) => data.frontmatter?.tags ?? []))
const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
diff --git a/quartz/plugins/transformers/ofm.ts b/quartz/plugins/transformers/ofm.ts
index 6b68fcc..3f58d0f 100644
--- a/quartz/plugins/transformers/ofm.ts
+++ b/quartz/plugins/transformers/ofm.ts
@@ -110,7 +110,7 @@
// from https://github.com/escwxyz/remark-obsidian-callout/blob/main/src/index.ts
const calloutRegex = new RegExp(/^\[\!(\w+)\]([+-]?)/)
// (?:^| ) -> non-capturing group, tag should start be separated by a space or be the start of the line
-// #(\w+) -> tag itself is # followed by a string of alpha-numeric characters
+// #(\w+) -> tag itself is # followed by a string of alpha-numeric characters
const tagRegex = new RegExp(/(?:^| )#(\w+)/, "g")
export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options> | undefined> = (
@@ -225,7 +225,7 @@
findAndReplace(tree, commentRegex, (_value: string, ..._capture: string[]) => {
return {
type: "text",
- value: ""
+ value: "",
}
})
}
@@ -296,8 +296,9 @@
node.data = {
hProperties: {
...(node.data?.hProperties ?? {}),
- className: `callout ${collapse ? "is-collapsible" : ""} ${defaultState === "collapsed" ? "is-collapsed" : ""
- }`,
+ className: `callout ${collapse ? "is-collapsible" : ""} ${
+ defaultState === "collapsed" ? "is-collapsed" : ""
+ }`,
"data-callout": calloutType,
"data-callout-fold": collapse,
},
diff --git a/quartz/plugins/types.ts b/quartz/plugins/types.ts
index 52dd833..4145e8f 100644
--- a/quartz/plugins/types.ts
+++ b/quartz/plugins/types.ts
@@ -4,6 +4,7 @@
import { GlobalConfiguration } from "../cfg"
import { QuartzComponent } from "../components/types"
import { FilePath, ServerSlug } from "../path"
+import { BuildCtx } from "../ctx"
export interface PluginTypes {
transformers: QuartzTransformerPluginInstance[]
@@ -37,8 +38,7 @@
export type QuartzEmitterPluginInstance = {
name: string
emit(
- contentDir: string,
- cfg: GlobalConfiguration,
+ ctx: BuildCtx,
content: ProcessedContent[],
resources: StaticResources,
emitCallback: EmitCallback,
diff --git a/quartz/processors/emit.ts b/quartz/processors/emit.ts
index 6ff9a21..004bb18 100644
--- a/quartz/processors/emit.ts
+++ b/quartz/processors/emit.ts
@@ -1,6 +1,5 @@
import path from "path"
import fs from "fs"
-import { GlobalConfiguration, QuartzConfig } from "../cfg"
import { PerfTimer } from "../perf"
import {
ComponentResources,
@@ -10,8 +9,7 @@
} from "../plugins"
import { EmitCallback } from "../plugins/types"
import { ProcessedContent } from "../plugins/vfile"
-import { FilePath, QUARTZ, slugifyFilePath } from "../path"
-import { globbyStream } from "globby"
+import { FilePath } from "../path"
// @ts-ignore
import spaRouterScript from "../components/scripts/spa.inline"
@@ -24,13 +22,15 @@
import { QuartzLogger } from "../log"
import { googleFontHref } from "../theme"
import { trace } from "../trace"
+import { BuildCtx } from "../ctx"
function addGlobalPageResources(
- cfg: GlobalConfiguration,
- reloadScript: boolean,
+ ctx: BuildCtx,
staticResources: StaticResources,
componentResources: ComponentResources,
) {
+ const cfg = ctx.cfg.configuration
+ const reloadScript = ctx.argv.serve
staticResources.css.push(googleFontHref(cfg.theme))
// popovers
@@ -85,19 +85,17 @@
}
export async function emitContent(
- contentFolder: string,
- output: string,
- cfg: QuartzConfig,
+ ctx: BuildCtx,
content: ProcessedContent[],
- reloadScript: boolean,
- verbose: boolean,
) {
+ const { argv, cfg } = ctx
+ const contentFolder = argv.directory
const perf = new PerfTimer()
- const log = new QuartzLogger(verbose)
+ const log = new QuartzLogger(ctx.argv.verbose)
log.start(`Emitting output files`)
const emit: EmitCallback = async ({ slug, ext, content }) => {
- const pathToPage = path.join(output, slug + ext) as FilePath
+ const pathToPage = path.join(argv.output, slug + ext) as FilePath
const dir = path.dirname(pathToPage)
await fs.promises.mkdir(dir, { recursive: true })
await fs.promises.writeFile(pathToPage, content)
@@ -113,11 +111,11 @@
// important that this goes *after* component scripts
// as the "nav" event gets triggered here and we should make sure
// that everyone else had the chance to register a listener for it
- addGlobalPageResources(cfg.configuration, reloadScript, staticResources, componentResources)
+ addGlobalPageResources(ctx, staticResources, componentResources)
let emittedFiles = 0
const emittedResources = await emitComponentResources(cfg.configuration, componentResources, emit)
- if (verbose) {
+ if (argv.verbose) {
for (const file of emittedResources) {
emittedFiles += 1
console.log(`[emit:Resources] ${file}`)
@@ -128,15 +126,14 @@
for (const emitter of cfg.plugins.emitters) {
try {
const emitted = await emitter.emit(
- contentFolder,
- cfg.configuration,
+ ctx,
content,
staticResources,
emit,
)
emittedFiles += emitted.length
- if (verbose) {
+ if (ctx.argv.verbose) {
for (const file of emitted) {
console.log(`[emit:${emitter.name}] ${file}`)
}
@@ -147,31 +144,5 @@
}
}
- const staticPath = path.join(QUARTZ, "static")
- await fs.promises.cp(staticPath, path.join(output, "static"), { recursive: true })
- if (verbose) {
- console.log(`[emit:Static] ${path.join("static", "**")}`)
- }
-
- // glob all non MD/MDX/HTML files in content folder and copy it over
- const assetsPath = path.join(output, "assets")
- for await (const rawFp of globbyStream("**", {
- ignore: ["**/*.md"],
- cwd: contentFolder,
- })) {
- const fp = rawFp as FilePath
- const ext = path.extname(fp)
- const src = path.join(contentFolder, fp) as FilePath
- const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath
- const dest = path.join(assetsPath, name) as FilePath
- const dir = path.dirname(dest) as FilePath
- await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
- await fs.promises.copyFile(src, dest)
- emittedFiles += 1
- if (verbose) {
- console.log(`[emit:Assets] ${path.join("assets", name)}`)
- }
- }
-
- log.end(`Emitted ${emittedFiles} files to \`${output}\` in ${perf.timeSince()}`)
+ log.end(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince()}`)
}
diff --git a/quartz/processors/filter.ts b/quartz/processors/filter.ts
index 1f4496d..12c5b48 100644
--- a/quartz/processors/filter.ts
+++ b/quartz/processors/filter.ts
@@ -1,18 +1,18 @@
+import { BuildCtx } from "../ctx"
import { PerfTimer } from "../perf"
import { QuartzFilterPluginInstance } from "../plugins/types"
import { ProcessedContent } from "../plugins/vfile"
export function filterContent(
- plugins: QuartzFilterPluginInstance[],
+ { cfg, argv }: BuildCtx,
content: ProcessedContent[],
- verbose: boolean,
): ProcessedContent[] {
const perf = new PerfTimer()
const initialLength = content.length
- for (const plugin of plugins) {
+ for (const plugin of cfg.plugins.filters) {
const updatedContent = content.filter(plugin.shouldPublish)
- if (verbose) {
+ if (argv.verbose) {
const diff = content.filter((x) => !updatedContent.includes(x))
for (const file of diff) {
console.log(`[filter:${plugin.name}] ${file[1].data.slug}`)
diff --git a/quartz/processors/parse.ts b/quartz/processors/parse.ts
index 55783dc..aec2276 100644
--- a/quartz/processors/parse.ts
+++ b/quartz/processors/parse.ts
@@ -14,6 +14,7 @@
import { QuartzTransformerPluginInstance } from "../plugins/types"
import { QuartzLogger } from "../log"
import { trace } from "../trace"
+import { BuildCtx } from "../ctx"
export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor {
@@ -72,13 +73,7 @@
})
}
-export function createFileParser(
- transformers: QuartzTransformerPluginInstance[],
- baseDir: string,
- fps: FilePath[],
- allSlugs: ServerSlug[],
- verbose: boolean,
-) {
+export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSlugs: ServerSlug[]) {
return async (processor: QuartzProcessor) => {
const res: ProcessedContent[] = []
for (const fp of fps) {
@@ -89,12 +84,12 @@
file.value = file.value.toString().trim()
// Text -> Text transforms
- for (const plugin of transformers.filter((p) => p.textTransform)) {
+ for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) {
file.value = plugin.textTransform!(file.value)
}
// base data properties that plugins may use
- file.data.slug = slugifyFilePath(path.relative(baseDir, file.path) as FilePath)
+ file.data.slug = slugifyFilePath(path.relative(argv.directory, file.path) as FilePath)
file.data.allSlugs = allSlugs
file.data.filePath = fp
@@ -102,7 +97,7 @@
const newAst = await processor.run(ast, file)
res.push([newAst, file])
- if (verbose) {
+ if (argv.verbose) {
console.log(`[process] ${fp} -> ${file.data.slug}`)
}
} catch (err) {
@@ -115,29 +110,25 @@
}
}
-export async function parseMarkdown(
- transformers: QuartzTransformerPluginInstance[],
- baseDir: string,
- fps: FilePath[],
- verbose: boolean,
-): Promise<ProcessedContent[]> {
+export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
+ const { argv, cfg } = ctx
const perf = new PerfTimer()
- const log = new QuartzLogger(verbose)
+ const log = new QuartzLogger(argv.verbose)
const CHUNK_SIZE = 128
let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
// get all slugs ahead of time as each thread needs a copy
const allSlugs = fps.map((fp) =>
- slugifyFilePath(path.relative(baseDir, path.resolve(fp)) as FilePath),
+ slugifyFilePath(path.relative(argv.directory, path.resolve(fp)) as FilePath),
)
let res: ProcessedContent[] = []
log.start(`Parsing input files using ${concurrency} threads`)
if (concurrency === 1) {
try {
- const processor = createProcessor(transformers)
- const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
+ const processor = createProcessor(cfg.plugins.transformers)
+ const parse = createFileParser(ctx, fps, allSlugs)
res = await parse(processor)
} catch (error) {
log.end()
@@ -153,7 +144,7 @@
const childPromises: WorkerPromise<ProcessedContent[]>[] = []
for (const chunk of chunks(fps, CHUNK_SIZE)) {
- childPromises.push(pool.exec("parseFiles", [baseDir, chunk, allSlugs, verbose]))
+ childPromises.push(pool.exec("parseFiles", [argv, chunk, allSlugs]))
}
const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
diff --git a/quartz/worker.ts b/quartz/worker.ts
index de5868a..eef4907 100644
--- a/quartz/worker.ts
+++ b/quartz/worker.ts
@@ -1,17 +1,18 @@
-import config from "../quartz.config"
+import cfg from "../quartz.config"
+import { Argv, BuildCtx } from "./ctx"
import { FilePath, ServerSlug } from "./path"
import { createFileParser, createProcessor } from "./processors/parse"
-const transformers = config.plugins.transformers
+const transformers = cfg.plugins.transformers
const processor = createProcessor(transformers)
// only called from worker thread
-export async function parseFiles(
- baseDir: string,
- fps: FilePath[],
- allSlugs: ServerSlug[],
- verbose: boolean,
-) {
- const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
+export async function parseFiles(argv: Argv, fps: FilePath[], allSlugs: ServerSlug[]) {
+ const ctx: BuildCtx = {
+ cfg,
+ argv,
+ }
+
+ const parse = createFileParser(ctx, fps, allSlugs)
return parse(processor)
}
--
Gitblit v1.10.0