From 3ac6b42e16dca5a44ed3fec2c0314f1dbbc2322b Mon Sep 17 00:00:00 2001
From: Jacky Zhao <j.zhao2k19@gmail.com>
Date: Sun, 16 Jul 2023 06:02:12 +0000
Subject: [PATCH] finish path refactoring, add sourcemap + better trace support

---
 quartz/processors/parse.ts |   98 ++++++++++++++++++++++++++++++++-----------------
 1 file changed, 64 insertions(+), 34 deletions(-)

diff --git a/quartz/processors/parse.ts b/quartz/processors/parse.ts
index 715a4e9..a782475 100644
--- a/quartz/processors/parse.ts
+++ b/quartz/processors/parse.ts
@@ -7,20 +7,23 @@
 import { ProcessedContent } from '../plugins/vfile'
 import { PerfTimer } from '../perf'
 import { read } from 'to-vfile'
-import { slugify } from '../path'
+import { FilePath, ServerSlug, slugifyFilePath } from '../path'
 import path from 'path'
 import os from 'os'
 import workerpool, { Promise as WorkerPromise } from 'workerpool'
-import { QuartzTransformerPlugin } from '../plugins/types'
+import { QuartzTransformerPluginInstance } from '../plugins/types'
+import { QuartzLogger } from '../log'
+import chalk from 'chalk'
+import { trace } from '../trace'
 
 export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
-export function createProcessor(transformers: QuartzTransformerPlugin[]): any {
+export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor {
   // base Markdown -> MD AST
   let processor = unified().use(remarkParse)
 
   // MD AST -> MD AST transforms
-  for (const plugin of transformers) {
-    processor = processor.use(plugin.markdownPlugins())
+  for (const plugin of transformers.filter(p => p.markdownPlugins)) {
+    processor = processor.use(plugin.markdownPlugins!())
   }
 
   // MD AST -> HTML AST
@@ -28,8 +31,8 @@
 
 
   // HTML AST -> HTML AST transforms
-  for (const plugin of transformers) {
-    processor = processor.use(plugin.htmlPlugins())
+  for (const plugin of transformers.filter(p => p.htmlPlugins)) {
+    processor = processor.use(plugin.htmlPlugins!())
   }
 
   return processor
@@ -41,15 +44,11 @@
   }
 }
 
-async function transpileWorkerScript(verbose: boolean) {
+async function transpileWorkerScript() {
   // transpile worker script
   const cacheFile = "./.quartz-cache/transpiled-worker.mjs"
   const fp = "./quartz/worker.ts"
-  if (verbose) {
-    console.log("Transpiling worker script")
-  }
-
-  await esbuild.build({
+  return esbuild.build({
     entryPoints: [fp],
     outfile: path.join("quartz", cacheFile),
     bundle: true,
@@ -75,31 +74,61 @@
   })
 }
 
-export async function parseMarkdown(transformers: QuartzTransformerPlugin[], baseDir: string, fps: string[], verbose: boolean): Promise<ProcessedContent[]> {
+export function createFileParser(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: FilePath[], allSlugs: ServerSlug[], verbose: boolean) {
+  return async (processor: QuartzProcessor) => {
+    const res: ProcessedContent[] = []
+    for (const fp of fps) {
+      try {
+        const file = await read(fp)
+
+        // strip leading and trailing whitespace
+        file.value = file.value.toString().trim()
+
+        // Text -> Text transforms
+        for (const plugin of transformers.filter(p => p.textTransform)) {
+          file.value = plugin.textTransform!(file.value)
+        }
+
+        // base data properties that plugins may use
+        file.data.slug = slugifyFilePath(path.relative(baseDir, file.path) as FilePath)
+        file.data.allSlugs = allSlugs
+        file.data.filePath = fp
+
+        const ast = processor.parse(file)
+        const newAst = await processor.run(ast, file)
+        res.push([newAst, file])
+
+        if (verbose) {
+          console.log(`[process] ${fp} -> ${file.data.slug}`)
+        }
+      } catch (err) {
+        trace(`\nFailed to process \`${fp}\``, err as Error)
+        process.exit(1)
+      }
+    }
+
+    return res
+  }
+}
+
+export async function parseMarkdown(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: FilePath[], verbose: boolean): Promise<ProcessedContent[]> {
   const perf = new PerfTimer()
+  const log = new QuartzLogger(verbose)
 
   const CHUNK_SIZE = 128
   let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
-  const res: ProcessedContent[] = []
+
+  // get all slugs ahead of time as each thread needs a copy
+  const allSlugs = fps.map(fp => slugifyFilePath(path.relative(baseDir, path.resolve(fp)) as FilePath))
+
+  let res: ProcessedContent[] = []
+  log.start(`Parsing input files using ${concurrency} threads`)
   if (concurrency === 1) {
-    // single-thread
     const processor = createProcessor(transformers)
-    for (const fp of fps) {
-      const file = await read(fp)
-
-      // base data properties that plugins may use
-      file.data.slug = slugify(path.relative(baseDir, file.path))
-      file.data.filePath = fp
-
-      const ast = processor.parse(file)
-      res.push([await processor.run(ast, file), file])
-
-      if (verbose) {
-        console.log(`[process] ${fp} -> ${file.data.slug}`)
-      }
-    }
+    const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
+    res = await parse(processor)
   } else {
-    await transpileWorkerScript(verbose)
+    await transpileWorkerScript()
     const pool = workerpool.pool(
       './quartz/bootstrap-worker.mjs',
       {
@@ -111,13 +140,14 @@
 
     const childPromises: WorkerPromise<ProcessedContent[]>[] = []
     for (const chunk of chunks(fps, CHUNK_SIZE)) {
-      childPromises.push(pool.exec('parseFiles', [baseDir, chunk, verbose]))
+      childPromises.push(pool.exec('parseFiles', [baseDir, chunk, allSlugs, verbose]))
     }
+
     const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
-    res.push(...results.flat())
+    res = results.flat()
     await pool.terminate()
   }
 
-  console.log(`Parsed and transformed ${res.length} Markdown files with ${concurrency} cores in ${perf.timeSince()}`)
+  log.success(`Parsed ${res.length} Markdown files in ${perf.timeSince()}`)
   return res
 }

--
Gitblit v1.10.0