From 906f91f8eed5e91a7afae95c7002a3e4553d6aae Mon Sep 17 00:00:00 2001
From: Jacky Zhao <j.zhao2k19@gmail.com>
Date: Thu, 13 Jul 2023 07:19:35 +0000
Subject: [PATCH] base path refactor, more docs

---
 quartz/processors/parse.ts |   18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/quartz/processors/parse.ts b/quartz/processors/parse.ts
index 170e964..6d3d3d9 100644
--- a/quartz/processors/parse.ts
+++ b/quartz/processors/parse.ts
@@ -7,7 +7,7 @@
 import { ProcessedContent } from '../plugins/vfile'
 import { PerfTimer } from '../perf'
 import { read } from 'to-vfile'
-import { slugify } from '../path'
+import { FilePath, ServerSlug, slugifyFilePath } from '../path'
 import path from 'path'
 import os from 'os'
 import workerpool, { Promise as WorkerPromise } from 'workerpool'
@@ -73,7 +73,7 @@
   })
 }
 
-export function createFileParser(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: string[], verbose: boolean) {
+export function createFileParser(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: FilePath[], allSlugs: ServerSlug[], verbose: boolean) {
   return async (processor: QuartzProcessor) => {
     const res: ProcessedContent[] = []
     for (const fp of fps) {
@@ -89,7 +89,8 @@
         }
 
         // base data properties that plugins may use
-        file.data.slug = slugify(path.relative(baseDir, file.path))
+        file.data.slug = slugifyFilePath(path.relative(baseDir, file.path) as FilePath)
+        file.data.allSlugs = allSlugs
         file.data.filePath = fp
 
         const ast = processor.parse(file)
@@ -109,18 +110,21 @@
   }
 }
 
-export async function parseMarkdown(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: string[], verbose: boolean): Promise<ProcessedContent[]> {
+export async function parseMarkdown(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: FilePath[], verbose: boolean): Promise<ProcessedContent[]> {
   const perf = new PerfTimer()
   const log = new QuartzLogger(verbose)
 
   const CHUNK_SIZE = 128
   let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
-  let res: ProcessedContent[] = []
 
+  // get all slugs ahead of time as each thread needs a copy
+  const allSlugs = fps.map(fp => slugifyFilePath(path.relative(baseDir, path.resolve(fp)) as FilePath))
+
+  let res: ProcessedContent[] = []
   log.start(`Parsing input files using ${concurrency} threads`)
   if (concurrency === 1) {
     const processor = createProcessor(transformers)
-    const parse = createFileParser(transformers, baseDir, fps, verbose)
+    const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
     res = await parse(processor)
   } else {
     await transpileWorkerScript()
@@ -135,7 +139,7 @@
 
     const childPromises: WorkerPromise<ProcessedContent[]>[] = []
     for (const chunk of chunks(fps, CHUNK_SIZE)) {
-      childPromises.push(pool.exec('parseFiles', [baseDir, chunk, verbose]))
+      childPromises.push(pool.exec('parseFiles', [baseDir, chunk, allSlugs, verbose]))
     }
 
     const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)

--
Gitblit v1.10.0