@@ ... @@
   })
 }
 
-export function createFileParser(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: string[], verbose: boolean) {
+export function createFileParser(transformers: QuartzTransformerPluginInstance[], baseDir: string, fps: string[], allSlugs: string[], verbose: boolean) {
   return async (processor: QuartzProcessor) => {
     const res: ProcessedContent[] = []
     for (const fp of fps) {
@@ ... @@
         // base data properties that plugins may use
         file.data.slug = slugify(path.relative(baseDir, file.path))
+        file.data.allSlugs = allSlugs
         file.data.filePath = fp
 
         const ast = processor.parse(file)
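With `allSlugs` attached to every parsed file, a downstream transformer can check a link target against the whole site without touching the filesystem again. As a rough illustration (the `resolves` helper below is hypothetical, not part of this diff):

```ts
// Hypothetical consumer, not part of this change: test a link target
// against the precomputed slug list carried on the vfile's data bag.
function resolves(target: string, file: { data: Record<string, unknown> }): boolean {
  const slugs = (file.data.allSlugs as string[] | undefined) ?? []
  return slugs.includes(target)
}
```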
@@ ... @@
   const CHUNK_SIZE = 128
   let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
-  let res: ProcessedContent[] = []
+
+  // get all slugs ahead of time as each thread needs a copy
+  const allSlugs = fps.map(fp => slugify(path.relative(baseDir, path.resolve(fp))))
+
+  let res: ProcessedContent[] = []
   log.start(`Parsing input files using ${concurrency} threads`)
   if (concurrency === 1) {
     const processor = createProcessor(transformers)
-    const parse = createFileParser(transformers, baseDir, fps, verbose)
+    const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
     res = await parse(processor)
   } else {
     await transpileWorkerScript()
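The slug list is derived purely from file paths, so it can be computed once on the main thread and handed to every worker. Assuming, purely for illustration, a `slugify` that drops the extension and replaces spaces (Quartz's real `slugify` lives in its path utilities and may differ in detail), the mapping behaves like this:

```ts
import path from 'path'

// Stand-in for Quartz's slugify; behavior assumed for this example only.
const slugifyExample = (fp: string): string =>
  fp.replace(/\.(md|html)$/, '').replace(/\s/g, '-')

const baseDir = 'content'
const fps = ['content/index.md', 'content/notes/My Note.md']
const allSlugs = fps.map(fp => slugifyExample(path.relative(baseDir, path.resolve(fp))))
// → ['index', 'notes/My-Note'] on POSIX
```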
@@ ... @@
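The last hunk threads `allSlugs` into each worker batch. For reference, a `chunks` helper with the shape used below can be as small as this sketch (Quartz's actual utility may differ):

```ts
// Minimal generator matching the call shape chunks(fps, CHUNK_SIZE);
// a sketch, not necessarily Quartz's implementation.
function* chunks<T>(arr: T[], size: number): Generator<T[]> {
  for (let i = 0; i < arr.length; i += size) {
    yield arr.slice(i, i + size)
  }
}
```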
     const childPromises: WorkerPromise<ProcessedContent[]>[] = []
     for (const chunk of chunks(fps, CHUNK_SIZE)) {
-      childPromises.push(pool.exec('parseFiles', [baseDir, chunk, verbose]))
+      childPromises.push(pool.exec('parseFiles', [baseDir, chunk, allSlugs, verbose]))
     }
 
     const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
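On the receiving end of `pool.exec`, the worker's exported `parseFiles` has to accept the extra argument in the same position. A sketch of what that export might look like after this change, with names taken from the call site above and the imports assumed:

```ts
// Worker-side sketch: argument order must mirror
// pool.exec('parseFiles', [baseDir, chunk, allSlugs, verbose]).
// createProcessor, createFileParser, transformers, and ProcessedContent
// are assumed to be imported from the module shown in this diff.
export async function parseFiles(
  baseDir: string,
  fps: string[],
  allSlugs: string[],
  verbose: boolean,
): Promise<ProcessedContent[]> {
  const processor = createProcessor(transformers)
  const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
  return parse(processor)
}
```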