| | |
import esbuild from "esbuild"
import remarkParse from "remark-parse"
import remarkRehype from "remark-rehype"
import { Processor, unified } from "unified"
import { Root as MDRoot } from "remark-parse/lib"
import { Root as HTMLRoot } from "hast"
import chalk from "chalk"
import workerpool, { Promise as WorkerPromise } from "workerpool"
import { read } from "to-vfile"
import path from "path"
import os from "os"
import { ProcessedContent } from "../plugins/vfile"
import { QuartzTransformerPlugin } from "../plugins/types"
import { PerfTimer } from "../perf"
import { FilePath, QUARTZ, slugifyFilePath } from "../path"
import { QuartzLogger } from "../log"
import { trace } from "../trace"
import { BuildCtx } from "../ctx"
| | | |
| | | export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void> |
| | | export function createProcessor(transformers: QuartzTransformerPlugin[]): QuartzProcessor { |
| | | export function createProcessor(ctx: BuildCtx): QuartzProcessor { |
| | | const transformers = ctx.cfg.plugins.transformers |
| | | |
| | | // base Markdown -> MD AST |
| | | let processor = unified().use(remarkParse) |
| | | |
| | | // MD AST -> MD AST transforms |
| | | for (const plugin of transformers) { |
| | | processor = processor.use(plugin.markdownPlugins()) |
| | | for (const plugin of transformers.filter((p) => p.markdownPlugins)) { |
| | | processor = processor.use(plugin.markdownPlugins!(ctx)) |
| | | } |
| | | |
| | | // MD AST -> HTML AST |
| | | processor = processor.use(remarkRehype, { allowDangerousHtml: true }) |
| | | |
| | | |
| | | // HTML AST -> HTML AST transforms |
| | | for (const plugin of transformers) { |
| | | processor = processor.use(plugin.htmlPlugins()) |
| | | for (const plugin of transformers.filter((p) => p.htmlPlugins)) { |
| | | processor = processor.use(plugin.htmlPlugins!(ctx)) |
| | | } |
| | | |
| | | return processor |
| | |
| | | const fp = "./quartz/worker.ts" |
| | | return esbuild.build({ |
| | | entryPoints: [fp], |
| | | outfile: path.join("quartz", cacheFile), |
| | | outfile: path.join(QUARTZ, cacheFile), |
| | | bundle: true, |
| | | keepNames: true, |
| | | platform: "node", |
| | | format: "esm", |
| | | packages: "external", |
| | | sourcemap: true, |
| | | sourcesContent: false, |
| | | plugins: [ |
| | | { |
| | | name: 'css-and-scripts-as-text', |
| | | name: "css-and-scripts-as-text", |
| | | setup(build) { |
| | | build.onLoad({ filter: /\.scss$/ }, (_) => ({ |
| | | contents: '', |
| | | loader: 'text' |
| | | contents: "", |
| | | loader: "text", |
| | | })) |
| | | build.onLoad({ filter: /\.inline\.(ts|js)$/ }, (_) => ({ |
| | | contents: '', |
| | | loader: 'text' |
| | | contents: "", |
| | | loader: "text", |
| | | })) |
| | | } |
| | | } |
| | | ] |
| | | }, |
| | | }, |
| | | ], |
| | | }) |
| | | } |
| | | |
| | | export function createFileParser(baseDir: string, fps: string[], verbose: boolean) { |
| | | export function createFileParser(ctx: BuildCtx, fps: FilePath[]) { |
| | | const { argv, cfg } = ctx |
| | | return async (processor: QuartzProcessor) => { |
| | | const res: ProcessedContent[] = [] |
| | | for (const fp of fps) { |
| | | try { |
| | | const perf = new PerfTimer() |
| | | const file = await read(fp) |
| | | |
| | | // strip leading and trailing whitespace |
| | | file.value = file.value.toString().trim() |
| | | |
| | | // Text -> Text transforms |
| | | for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) { |
| | | file.value = plugin.textTransform!(ctx, file.value) |
| | | } |
| | | |
| | | // base data properties that plugins may use |
| | | file.data.slug = slugify(path.relative(baseDir, file.path)) |
| | | file.data.slug = slugifyFilePath(path.posix.relative(argv.directory, file.path) as FilePath) |
| | | file.data.filePath = fp |
| | | |
| | | const ast = processor.parse(file) |
| | | const newAst = await processor.run(ast, file) |
| | | res.push([newAst, file]) |
| | | |
| | | if (verbose) { |
| | | console.log(`[process] ${fp} -> ${file.data.slug}`) |
| | | if (argv.verbose) { |
| | | console.log(`[process] ${fp} -> ${file.data.slug} (${perf.timeSince()})`) |
| | | } |
| | | } catch (err) { |
| | | console.log(chalk.red(`\nFailed to process \`${fp}\`: `) + err) |
| | | process.exit(1) |
| | | trace(`\nFailed to process \`${fp}\``, err as Error) |
| | | } |
| | | } |
| | | |
| | |
| | | } |
| | | } |
| | | |
| | | export async function parseMarkdown(transformers: QuartzTransformerPlugin[], baseDir: string, fps: string[], verbose: boolean): Promise<ProcessedContent[]> { |
| | | const clamp = (num: number, min: number, max: number) => Math.min(Math.max(Math.round(num), min), max); |
| | | export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> { |
| | | const { argv } = ctx |
| | | const perf = new PerfTimer() |
| | | const log = new QuartzLogger(verbose) |
| | | const log = new QuartzLogger(argv.verbose) |
| | | |
| | | // rough heuristics: 128 gives enough time for v8 to JIT and optimize parsing code paths |
| | | const CHUNK_SIZE = 128 |
| | | let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism() |
| | | let res: ProcessedContent[] = [] |
| | | const concurrency = ctx.argv.concurrency ?? clamp(fps.length / CHUNK_SIZE, 1, 4) |
| | | |
| | | let res: ProcessedContent[] = [] |
| | | log.start(`Parsing input files using ${concurrency} threads`) |
| | | if (concurrency === 1) { |
| | | // single-thread |
| | | const processor = createProcessor(transformers) |
| | | const parse = createFileParser(baseDir, fps, verbose) |
| | | res = await parse(processor) |
| | | try { |
| | | const processor = createProcessor(ctx) |
| | | const parse = createFileParser(ctx, fps) |
| | | res = await parse(processor) |
| | | } catch (error) { |
| | | log.end() |
| | | throw error |
| | | } |
| | | } else { |
| | | await transpileWorkerScript() |
| | | const pool = workerpool.pool( |
| | | './quartz/bootstrap-worker.mjs', |
| | | { |
| | | minWorkers: 'max', |
| | | maxWorkers: concurrency, |
| | | workerType: 'thread' |
| | | } |
| | | ) |
| | | const pool = workerpool.pool("./quartz/bootstrap-worker.mjs", { |
| | | minWorkers: "max", |
| | | maxWorkers: concurrency, |
| | | workerType: "thread", |
| | | }) |
| | | |
| | | const childPromises: WorkerPromise<ProcessedContent[]>[] = [] |
| | | for (const chunk of chunks(fps, CHUNK_SIZE)) { |
| | | childPromises.push(pool.exec('parseFiles', [baseDir, chunk, verbose])) |
| | | childPromises.push(pool.exec("parseFiles", [argv, chunk, ctx.allSlugs])) |
| | | } |
| | | |
| | | const results: ProcessedContent[][] = await WorkerPromise.all(childPromises) |
| | | const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch((err) => { |
| | | const errString = err.toString().slice("Error:".length) |
| | | console.error(errString) |
| | | process.exit(1) |
| | | }) |
| | | res = results.flat() |
| | | await pool.terminate() |
| | | } |
| | | |
| | | log.success(`Parsed ${res.length} Markdown files in ${perf.timeSince()}`) |
| | | log.end(`Parsed ${res.length} Markdown files in ${perf.timeSince()}`) |
| | | return res |
| | | } |