refactor static and asset emission to be actual emitter plugins
1 file added
17 files modified
| | |
| | | --- |
| | | |
| | | ## high priority |
| | | |
| | | - attachments path |
| | | - https://help.obsidian.md/Editing+and+formatting/Tags#Nested+tags nested tags?? |
| | | - watch mode for config/source code |
| | |
| | | - note/header/block transcludes: https://help.obsidian.md/Linking+notes+and+files/Embedding+files |
| | | |
| | | ## misc |
| | | |
| | | - breadcrumbs component |
| | | - filetree component |
| | | - recent notes component |
| | |
| | | enableSiteMap: true, |
| | | enableRSS: true, |
| | | }), |
| | | Plugin.Assets(), |
| | | Plugin.Static(), |
| | | ], |
| | | }, |
| | | } |
| | |
| | | import chokidar from "chokidar" |
| | | import { ProcessedContent } from "./plugins/vfile" |
| | | import WebSocket, { WebSocketServer } from "ws" |
| | | |
// CLI arguments shared by the build/serve entry points.
interface Argv {
  directory: string // content folder that markdown files are globbed from
  verbose: boolean // when true, per-file progress is logged to the console
  output: string // output folder the built site is emitted into (and cleaned)
  serve: boolean // when true, a local dev server is started after the build
  port: number // port the local dev server listens on
}
| | | import { Argv, BuildCtx } from "./ctx" |
| | | |
| | | async function buildQuartz(argv: Argv, version: string) { |
| | | const ctx: BuildCtx = { |
| | | argv, |
| | | cfg, |
| | | } |
| | | |
| | | console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`)) |
| | | const perf = new PerfTimer() |
| | | const output = argv.output |
| | |
| | | console.log(` Emitters: ${pluginNames("emitters").join(", ")}`) |
| | | } |
| | | |
| | | // clean |
| | | perf.addEvent("clean") |
| | | await rimraf(output) |
| | | console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`) |
| | | |
| | | // glob |
| | | perf.addEvent("glob") |
| | | const fps = await globby("**/*.md", { |
| | | cwd: argv.directory, |
| | |
| | | ) |
| | | |
| | | const filePaths = fps.map((fp) => `${argv.directory}${path.sep}${fp}` as FilePath) |
| | | const parsedFiles = await parseMarkdown( |
| | | cfg.plugins.transformers, |
| | | argv.directory, |
| | | filePaths, |
| | | argv.verbose, |
| | | ) |
| | | const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose) |
| | | await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose) |
| | | const parsedFiles = await parseMarkdown(ctx, filePaths) |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | await emitContent(ctx, filteredContent) |
| | | console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`)) |
| | | |
| | | if (argv.serve) { |
| | | await startServing(ctx, parsedFiles) |
| | | } |
| | | } |
| | | |
| | | async function startServing(ctx: BuildCtx, initialContent: ProcessedContent[]) { |
| | | const { argv } = ctx |
| | | const wss = new WebSocketServer({ port: 3001 }) |
| | | const connections: WebSocket[] = [] |
| | | wss.on("connection", (ws) => connections.push(ws)) |
| | | |
| | | const ignored = await isGitIgnored() |
| | | const contentMap = new Map<FilePath, ProcessedContent>() |
| | | for (const content of parsedFiles) { |
| | | for (const content of initialContent) { |
| | | const [_tree, vfile] = content |
| | | contentMap.set(vfile.data.filePath!, content) |
| | | } |
| | | |
| | | async function rebuild(fp: string, action: "add" | "change" | "unlink") { |
| | | perf.addEvent("rebuild") |
| | | const perf = new PerfTimer() |
| | | if (!ignored(fp)) { |
| | | console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`)) |
| | | const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath |
| | | |
| | | try { |
| | | if (action === "add" || action === "change") { |
| | | const [parsedContent] = await parseMarkdown( |
| | | cfg.plugins.transformers, |
| | | argv.directory, |
| | | [fullPath], |
| | | argv.verbose, |
| | | ) |
| | | const [parsedContent] = await parseMarkdown(ctx, [fullPath]) |
| | | contentMap.set(fullPath, parsedContent) |
| | | } else if (action === "unlink") { |
| | | contentMap.delete(fullPath) |
| | | } |
| | | |
| | | await rimraf(output) |
| | | await rimraf(argv.output) |
| | | const parsedFiles = [...contentMap.values()] |
| | | const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose) |
| | | await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose) |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince("rebuild")}`)) |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | await emitContent( |
| | | ctx, |
| | | filteredContent, |
| | | ) |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | } catch { |
| | | console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) |
| | | } |
| | |
| | | |
| | | const server = http.createServer(async (req, res) => { |
| | | await serveHandler(req, res, { |
| | | public: output, |
| | | public: argv.output, |
| | | directoryListing: false, |
| | | }) |
| | | const status = res.statusCode |
| | |
| | | console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`)) |
| | | console.log("hint: exit with ctrl+c") |
| | | } |
| | | } |
| | | |
| | | export default async (argv: Argv, version: string) => { |
| | | try { |
| | |
| | | |
// Shared context threaded through the parse/filter/emit pipeline,
// replacing the long positional argument lists previously passed around.
export interface BuildCtx {
  argv: Argv // parsed CLI arguments
  version: string // quartz version string (printed in the build banner)
  cfg: QuartzConfig // user configuration, including transformer/filter/emitter plugin lists
}
| | |
| | | getQuartzComponents() { |
| | | return [] |
| | | }, |
| | | async emit(contentFolder, _cfg, content, _resources, emit): Promise<FilePath[]> { |
| | | async emit({argv}, content, _resources, emit): Promise<FilePath[]> { |
| | | const fps: FilePath[] = [] |
| | | |
| | | for (const [_tree, file] of content) { |
| | | const ogSlug = canonicalizeServer(file.data.slug!) |
| | | const dir = path.relative(contentFolder, file.dirname ?? contentFolder) |
| | | const dir = path.relative(argv.directory, file.dirname ?? argv.directory) |
| | | |
| | | let aliases: CanonicalSlug[] = [] |
| | | if (file.data.frontmatter?.aliases) { |
| | |
| | | import { globbyStream } from "globby" |
| | | import { |
| | | FilePath, slugifyFilePath, |
| | | } from "../../path" |
| | | import { QuartzEmitterPlugin } from "../types" |
| | | import path from "path" |
| | | import fs from "fs" |
| | | |
| | | export const Assets: QuartzEmitterPlugin = () => ({ |
| | | name: "Assets", |
| | | getQuartzComponents() { |
| | | return [] |
| | | }, |
| | | async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> { |
| | | // glob all non MD/MDX/HTML files in content folder and copy it over |
| | | const assetsPath = path.join(argv.output, "assets") |
| | | |
| | | const fps: FilePath[] = [] |
| | | for await (const rawFp of globbyStream("**", { |
| | | ignore: ["**/*.md"], |
| | | cwd: argv.directory, |
| | | })) { |
| | | const fp = rawFp as FilePath |
| | | const ext = path.extname(fp) |
| | | const src = path.join(argv.directory, fp) as FilePath |
| | | const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath |
| | | const dest = path.join(assetsPath, name) as FilePath |
| | | const dir = path.dirname(dest) as FilePath |
| | | await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists |
| | | await fs.promises.copyFile(src, dest) |
| | | fps.push(path.join("assets", fp) as FilePath) |
| | | } |
| | | |
| | | return fps |
| | | }, |
| | | }) |
| | |
| | | opts = { ...defaultOptions, ...opts } |
| | | return { |
| | | name: "ContentIndex", |
| | | async emit(_contentDir, cfg, content, _resources, emit) { |
| | | async emit(ctx, content, _resources, emit) { |
| | | const cfg = ctx.cfg.configuration |
| | | const emitted: FilePath[] = [] |
| | | const linkIndex: ContentIndex = new Map() |
| | | for (const [_tree, file] of content) { |
| | |
| | | getQuartzComponents() { |
| | | return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer] |
| | | }, |
| | | async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> { |
| | | async emit(ctx, content, resources, emit): Promise<FilePath[]> { |
| | | const cfg = ctx.cfg.configuration |
| | | const fps: FilePath[] = [] |
| | | const allFiles = content.map((c) => c[1].data) |
| | | for (const [tree, file] of content) { |
| | |
| | | getQuartzComponents() { |
| | | return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer] |
| | | }, |
| | | async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> { |
| | | async emit(ctx, content, resources, emit): Promise<FilePath[]> { |
| | | const fps: FilePath[] = [] |
| | | const allFiles = content.map((c) => c[1].data) |
| | | const cfg = ctx.cfg.configuration |
| | | |
| | | const folders: Set<CanonicalSlug> = new Set( |
| | | allFiles.flatMap((data) => { |
| | |
| | | export { FolderPage } from "./folderPage" |
| | | export { ContentIndex } from "./contentIndex" |
| | | export { AliasRedirects } from "./aliases" |
| | | export { Assets } from "./assets" |
| | | export { Static } from "./static" |
| New file |
| | |
| | | import { globby } from "globby" |
| | | import { |
| | | FilePath, QUARTZ |
| | | } from "../../path" |
| | | import { QuartzEmitterPlugin } from "../types" |
| | | import path from "path" |
| | | import fs from "fs" |
| | | |
| | | |
| | | export const Static: QuartzEmitterPlugin = () => ({ |
| | | name: "Static", |
| | | getQuartzComponents() { |
| | | return [] |
| | | }, |
| | | async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> { |
| | | const staticPath = path.join(QUARTZ, "static") |
| | | const fps = await globby("*", { cwd: staticPath }) |
| | | await fs.promises.cp(staticPath, path.join(argv.output, "static"), { recursive: true }) |
| | | return fps.map(fp => path.join("static", fp)) as FilePath[] |
| | | }, |
| | | }) |
| | |
| | | getQuartzComponents() { |
| | | return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer] |
| | | }, |
| | | async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> { |
| | | async emit(ctx, content, resources, emit): Promise<FilePath[]> { |
| | | const fps: FilePath[] = [] |
| | | const allFiles = content.map((c) => c[1].data) |
| | | const cfg = ctx.cfg.configuration |
| | | |
| | | const tags: Set<string> = new Set(allFiles.flatMap((data) => data.frontmatter?.tags ?? [])) |
| | | const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries( |
| | |
| | | findAndReplace(tree, commentRegex, (_value: string, ..._capture: string[]) => { |
| | | return { |
| | | type: "text", |
| | | value: "" |
| | | value: "", |
| | | } |
| | | }) |
| | | } |
| | |
| | | node.data = { |
| | | hProperties: { |
| | | ...(node.data?.hProperties ?? {}), |
| | | className: `callout ${collapse ? "is-collapsible" : ""} ${defaultState === "collapsed" ? "is-collapsed" : "" |
| | | className: `callout ${collapse ? "is-collapsible" : ""} ${ |
| | | defaultState === "collapsed" ? "is-collapsed" : "" |
| | | }`, |
| | | "data-callout": calloutType, |
| | | "data-callout-fold": collapse, |
| | |
| | | import { GlobalConfiguration } from "../cfg" |
| | | import { QuartzComponent } from "../components/types" |
| | | import { FilePath, ServerSlug } from "../path" |
| | | import { BuildCtx } from "../ctx" |
| | | |
| | | export interface PluginTypes { |
| | | transformers: QuartzTransformerPluginInstance[] |
| | |
| | | export type QuartzEmitterPluginInstance = { |
| | | name: string |
| | | emit( |
| | | contentDir: string, |
| | | cfg: GlobalConfiguration, |
| | | ctx: BuildCtx, |
| | | content: ProcessedContent[], |
| | | resources: StaticResources, |
| | | emitCallback: EmitCallback, |
| | |
| | | import path from "path" |
| | | import fs from "fs" |
| | | import { GlobalConfiguration, QuartzConfig } from "../cfg" |
| | | import { PerfTimer } from "../perf" |
| | | import { |
| | | ComponentResources, |
| | |
| | | } from "../plugins" |
| | | import { EmitCallback } from "../plugins/types" |
| | | import { ProcessedContent } from "../plugins/vfile" |
| | | import { FilePath, QUARTZ, slugifyFilePath } from "../path" |
| | | import { globbyStream } from "globby" |
| | | import { FilePath } from "../path" |
| | | |
| | | // @ts-ignore |
| | | import spaRouterScript from "../components/scripts/spa.inline" |
| | |
| | | import { QuartzLogger } from "../log" |
| | | import { googleFontHref } from "../theme" |
| | | import { trace } from "../trace" |
| | | import { BuildCtx } from "../ctx" |
| | | |
| | | function addGlobalPageResources( |
| | | cfg: GlobalConfiguration, |
| | | reloadScript: boolean, |
| | | ctx: BuildCtx, |
| | | staticResources: StaticResources, |
| | | componentResources: ComponentResources, |
| | | ) { |
| | | const cfg = ctx.cfg.configuration |
| | | const reloadScript = ctx.argv.serve |
| | | staticResources.css.push(googleFontHref(cfg.theme)) |
| | | |
| | | // popovers |
| | |
| | | } |
| | | |
| | | export async function emitContent( |
| | | contentFolder: string, |
| | | output: string, |
| | | cfg: QuartzConfig, |
| | | ctx: BuildCtx, |
| | | content: ProcessedContent[], |
| | | reloadScript: boolean, |
| | | verbose: boolean, |
| | | ) { |
| | | const { argv, cfg }= ctx |
| | | const contentFolder = argv.directory |
| | | const perf = new PerfTimer() |
| | | const log = new QuartzLogger(verbose) |
| | | const log = new QuartzLogger(ctx.argv.verbose) |
| | | |
| | | log.start(`Emitting output files`) |
| | | const emit: EmitCallback = async ({ slug, ext, content }) => { |
| | | const pathToPage = path.join(output, slug + ext) as FilePath |
| | | const pathToPage = path.join(argv.output, slug + ext) as FilePath |
| | | const dir = path.dirname(pathToPage) |
| | | await fs.promises.mkdir(dir, { recursive: true }) |
| | | await fs.promises.writeFile(pathToPage, content) |
| | |
| | | // important that this goes *after* component scripts |
| | | // as the "nav" event gets triggered here and we should make sure |
| | | // that everyone else had the chance to register a listener for it |
| | | addGlobalPageResources(cfg.configuration, reloadScript, staticResources, componentResources) |
| | | addGlobalPageResources(ctx, staticResources, componentResources) |
| | | |
| | | let emittedFiles = 0 |
| | | const emittedResources = await emitComponentResources(cfg.configuration, componentResources, emit) |
| | | if (verbose) { |
| | | if (argv.verbose) { |
| | | for (const file of emittedResources) { |
| | | emittedFiles += 1 |
| | | console.log(`[emit:Resources] ${file}`) |
| | |
| | | for (const emitter of cfg.plugins.emitters) { |
| | | try { |
| | | const emitted = await emitter.emit( |
| | | contentFolder, |
| | | cfg.configuration, |
| | | ctx, |
| | | content, |
| | | staticResources, |
| | | emit, |
| | | ) |
| | | emittedFiles += emitted.length |
| | | |
| | | if (verbose) { |
| | | if (ctx.argv.verbose) { |
| | | for (const file of emitted) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } |
| | |
| | | } |
| | | } |
| | | |
| | | const staticPath = path.join(QUARTZ, "static") |
| | | await fs.promises.cp(staticPath, path.join(output, "static"), { recursive: true }) |
| | | if (verbose) { |
| | | console.log(`[emit:Static] ${path.join("static", "**")}`) |
| | | } |
| | | |
| | | // glob all non MD/MDX/HTML files in content folder and copy it over |
| | | const assetsPath = path.join(output, "assets") |
| | | for await (const rawFp of globbyStream("**", { |
| | | ignore: ["**/*.md"], |
| | | cwd: contentFolder, |
| | | })) { |
| | | const fp = rawFp as FilePath |
| | | const ext = path.extname(fp) |
| | | const src = path.join(contentFolder, fp) as FilePath |
| | | const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath |
| | | const dest = path.join(assetsPath, name) as FilePath |
| | | const dir = path.dirname(dest) as FilePath |
| | | await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists |
| | | await fs.promises.copyFile(src, dest) |
| | | emittedFiles += 1 |
| | | if (verbose) { |
| | | console.log(`[emit:Assets] ${path.join("assets", name)}`) |
| | | } |
| | | } |
| | | |
| | | log.end(`Emitted ${emittedFiles} files to \`${output}\` in ${perf.timeSince()}`) |
| | | log.end(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince()}`) |
| | | } |
| | |
| | | import { BuildCtx } from "../ctx" |
| | | import { PerfTimer } from "../perf" |
| | | import { QuartzFilterPluginInstance } from "../plugins/types" |
| | | import { ProcessedContent } from "../plugins/vfile" |
| | | |
| | | export function filterContent( |
| | | plugins: QuartzFilterPluginInstance[], |
| | | { cfg, argv }: BuildCtx, |
| | | content: ProcessedContent[], |
| | | verbose: boolean, |
| | | ): ProcessedContent[] { |
| | | const perf = new PerfTimer() |
| | | const initialLength = content.length |
| | | for (const plugin of plugins) { |
| | | for (const plugin of cfg.plugins.filters) { |
| | | const updatedContent = content.filter(plugin.shouldPublish) |
| | | |
| | | if (verbose) { |
| | | if (argv.verbose) { |
| | | const diff = content.filter((x) => !updatedContent.includes(x)) |
| | | for (const file of diff) { |
| | | console.log(`[filter:${plugin.name}] ${file[1].data.slug}`) |
| | |
| | | import { QuartzTransformerPluginInstance } from "../plugins/types" |
| | | import { QuartzLogger } from "../log" |
| | | import { trace } from "../trace" |
| | | import { BuildCtx } from "../ctx" |
| | | |
| | | export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void> |
| | | export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor { |
| | |
| | | }) |
| | | } |
| | | |
| | | export function createFileParser( |
| | | transformers: QuartzTransformerPluginInstance[], |
| | | baseDir: string, |
| | | fps: FilePath[], |
| | | allSlugs: ServerSlug[], |
| | | verbose: boolean, |
| | | ) { |
| | | export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSlugs: ServerSlug[]) { |
| | | return async (processor: QuartzProcessor) => { |
| | | const res: ProcessedContent[] = [] |
| | | for (const fp of fps) { |
| | |
| | | file.value = file.value.toString().trim() |
| | | |
| | | // Text -> Text transforms |
| | | for (const plugin of transformers.filter((p) => p.textTransform)) { |
| | | for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) { |
| | | file.value = plugin.textTransform!(file.value) |
| | | } |
| | | |
| | | // base data properties that plugins may use |
| | | file.data.slug = slugifyFilePath(path.relative(baseDir, file.path) as FilePath) |
| | | file.data.slug = slugifyFilePath(path.relative(argv.directory, file.path) as FilePath) |
| | | file.data.allSlugs = allSlugs |
| | | file.data.filePath = fp |
| | | |
| | |
| | | const newAst = await processor.run(ast, file) |
| | | res.push([newAst, file]) |
| | | |
| | | if (verbose) { |
| | | if (argv.verbose) { |
| | | console.log(`[process] ${fp} -> ${file.data.slug}`) |
| | | } |
| | | } catch (err) { |
| | |
| | | } |
| | | } |
| | | |
| | | export async function parseMarkdown( |
| | | transformers: QuartzTransformerPluginInstance[], |
| | | baseDir: string, |
| | | fps: FilePath[], |
| | | verbose: boolean, |
| | | ): Promise<ProcessedContent[]> { |
| | | export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> { |
| | | const { argv, cfg } = ctx |
| | | const perf = new PerfTimer() |
| | | const log = new QuartzLogger(verbose) |
| | | const log = new QuartzLogger(argv.verbose) |
| | | |
| | | const CHUNK_SIZE = 128 |
| | | let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism() |
| | | |
| | | // get all slugs ahead of time as each thread needs a copy |
| | | const allSlugs = fps.map((fp) => |
| | | slugifyFilePath(path.relative(baseDir, path.resolve(fp)) as FilePath), |
| | | slugifyFilePath(path.relative(argv.directory, path.resolve(fp)) as FilePath), |
| | | ) |
| | | |
| | | let res: ProcessedContent[] = [] |
| | | log.start(`Parsing input files using ${concurrency} threads`) |
| | | if (concurrency === 1) { |
| | | try { |
| | | const processor = createProcessor(transformers) |
| | | const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose) |
| | | const processor = createProcessor(cfg.plugins.transformers) |
| | | const parse = createFileParser(ctx, fps, allSlugs) |
| | | res = await parse(processor) |
| | | } catch (error) { |
| | | log.end() |
| | |
| | | |
| | | const childPromises: WorkerPromise<ProcessedContent[]>[] = [] |
| | | for (const chunk of chunks(fps, CHUNK_SIZE)) { |
| | | childPromises.push(pool.exec("parseFiles", [baseDir, chunk, allSlugs, verbose])) |
| | | childPromises.push(pool.exec("parseFiles", [argv, chunk, allSlugs])) |
| | | } |
| | | |
| | | const results: ProcessedContent[][] = await WorkerPromise.all(childPromises) |
| | |
| | | import config from "../quartz.config" |
| | | import cfg from "../quartz.config" |
| | | import { Argv, BuildCtx } from "./ctx" |
| | | import { FilePath, ServerSlug } from "./path" |
| | | import { createFileParser, createProcessor } from "./processors/parse" |
| | | |
| | | const transformers = config.plugins.transformers |
| | | const transformers = cfg.plugins.transformers |
| | | const processor = createProcessor(transformers) |
| | | |
| | | // only called from worker thread |
| | | export async function parseFiles( |
| | | baseDir: string, |
| | | fps: FilePath[], |
| | | allSlugs: ServerSlug[], |
| | | verbose: boolean, |
| | | ) { |
| | | const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose) |
| | | export async function parseFiles(argv: Argv, fps: FilePath[], allSlugs: ServerSlug[]) { |
| | | const ctx: BuildCtx = { |
| | | cfg, |
| | | argv, |
| | | } |
| | | |
| | | const parse = createFileParser(ctx, fps, allSlugs) |
| | | return parse(processor) |
| | | } |