| | |
| | | |
| | | ## high priority |
| | | |
| | | - back button doesn't work sometimes |
| | | - images in same folder are broken on shortest path mode |
| | | - support nested tags (https://help.obsidian.md/Editing+and+formatting/Tags#Nested+tags) and a full tag-listing page |
| | | - watch mode for config/source code |
| | |
| | | import { filterContent } from "./processors/filter" |
| | | import { emitContent } from "./processors/emit" |
| | | import cfg from "../quartz.config" |
| | | import { FilePath } from "./path" |
| | | import { FilePath, slugifyFilePath } from "./path" |
| | | import chokidar from "chokidar" |
| | | import { ProcessedContent } from "./plugins/vfile" |
| | | import WebSocket, { WebSocketServer } from "ws" |
| | |
// Build context shared by every pipeline stage (parse -> filter -> emit).
// allSlugs starts empty and is populated once the content files have been
// discovered. NOTE(review): `argv` is not defined in this chunk — presumably
// parsed from the CLI earlier in the file; confirm.
const ctx: BuildCtx = {
  argv,
  cfg,
  allSlugs: [],
}
| | | |
| | | console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`)) |
| | |
| | | ) |
| | | |
| | | const filePaths = fps.map((fp) => `${argv.directory}${path.sep}${fp}` as FilePath) |
| | | ctx.allSlugs = fps.map((fp) => slugifyFilePath(fp as FilePath)) |
| | | |
| | | const parsedFiles = await parseMarkdown(ctx, filePaths) |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | await emitContent(ctx, filteredContent) |
| | |
| | | contentMap.set(vfile.data.filePath!, content) |
| | | } |
| | | |
| | | async function rebuild(fp: string, action: "add" | "change" | "unlink") { |
| | | const perf = new PerfTimer() |
| | | let timeoutId: ReturnType<typeof setTimeout> | null = null |
| | | let toRebuild: Set<FilePath> = new Set() |
| | | let toRemove: Set<FilePath> = new Set() |
| | | async function rebuild(fp: string, action: "add" | "change" | "delete") { |
| | | if (!ignored(fp)) { |
| | | console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`)) |
| | | const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath |
| | | |
| | | try { |
| | | if (action === "add" || action === "change") { |
| | | const [parsedContent] = await parseMarkdown(ctx, [fullPath]) |
| | | contentMap.set(fullPath, parsedContent) |
| | | } else if (action === "unlink") { |
| | | contentMap.delete(fullPath) |
| | | } |
| | | |
| | | await rimraf(argv.output) |
| | | const parsedFiles = [...contentMap.values()] |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | await emitContent(ctx, filteredContent) |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | } catch { |
| | | console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) |
| | | const filePath = `${argv.directory}${path.sep}${fp}` as FilePath |
| | | if (action === "add" || action === "change") { |
| | | toRebuild.add(filePath) |
| | | } else if (action === "delete") { |
| | | toRemove.add(filePath) |
| | | } |
| | | |
| | | connections.forEach((conn) => conn.send("rebuild")) |
| | | if (timeoutId) { |
| | | clearTimeout(timeoutId) |
| | | } |
| | | |
| | | timeoutId = setTimeout(async () => { |
| | | const perf = new PerfTimer() |
| | | console.log(chalk.yellow("Detected change, rebuilding...")) |
| | | try { |
| | | const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) |
| | | |
| | | ctx.allSlugs = [...new Set([...contentMap.keys(), ...toRebuild])] |
| | | .filter((fp) => !toRemove.has(fp)) |
| | | .map((fp) => slugifyFilePath(path.relative(argv.directory, fp) as FilePath)) |
| | | |
| | | const parsedContent = await parseMarkdown(ctx, filesToRebuild) |
| | | for (const content of parsedContent) { |
| | | const [_tree, vfile] = content |
| | | contentMap.set(vfile.data.filePath!, content) |
| | | } |
| | | |
| | | for (const fp of toRemove) { |
| | | contentMap.delete(fp) |
| | | } |
| | | |
| | | await rimraf(argv.output) |
| | | const parsedFiles = [...contentMap.values()] |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | await emitContent(ctx, filteredContent) |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | } catch { |
| | | console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) |
| | | } |
| | | connections.forEach((conn) => conn.send("rebuild")) |
| | | toRebuild.clear() |
| | | toRemove.clear() |
| | | }, 250) |
| | | } |
| | | } |
| | | |
| | |
| | | watcher |
| | | .on("add", (fp) => rebuild(fp, "add")) |
| | | .on("change", (fp) => rebuild(fp, "change")) |
| | | .on("unlink", (fp) => rebuild(fp, "unlink")) |
| | | .on("unlink", (fp) => rebuild(fp, "delete")) |
| | | |
| | | const server = http.createServer(async (req, res) => { |
| | | await serveHandler(req, res, { |
| | |
| | | import { QuartzConfig } from "./cfg" |
| | | import { ServerSlug } from "./path" |
| | | |
| | | export interface Argv { |
| | | directory: string |
| | |
// Context object threaded through the build pipeline so that plugins
// receive configuration and CLI arguments explicitly instead of importing
// module-level singletons.
export interface BuildCtx {
  argv: Argv // parsed command-line arguments
  cfg: QuartzConfig // resolved Quartz configuration, including plugin lists
  allSlugs: ServerSlug[] // slugs of every discovered content file; filled in after discovery
}
| | |
| | | afterDOMLoaded: string[] |
| | | } |
| | | |
| | | function getComponentResources(plugins: PluginTypes): ComponentResources { |
| | | function getComponentResources(ctx: BuildCtx): ComponentResources { |
| | | const allComponents: Set<QuartzComponent> = new Set() |
| | | for (const emitter of plugins.emitters) { |
| | | const components = emitter.getQuartzComponents() |
| | | for (const emitter of ctx.cfg.plugins.emitters) { |
| | | const components = emitter.getQuartzComponents(ctx) |
| | | for (const component of components) { |
| | | allComponents.add(component) |
| | | } |
| | |
| | | }, |
| | | async emit(ctx, _content, resources, emit): Promise<FilePath[]> { |
| | | // component specific scripts and styles |
| | | const componentResources = getComponentResources(ctx.cfg.plugins) |
| | | const componentResources = getComponentResources(ctx) |
| | | // important that this goes *after* component scripts |
| | | // as the "nav" event gets triggered here and we should make sure |
| | | // that everyone else had the chance to register a listener for it |
| | |
| | | |
| | | export const RemoveDrafts: QuartzFilterPlugin<{}> = () => ({ |
| | | name: "RemoveDrafts", |
| | | shouldPublish([_tree, vfile]) { |
| | | shouldPublish(_ctx, [_tree, vfile]) { |
| | | const draftFlag: boolean = vfile.data?.frontmatter?.draft ?? false |
| | | return !draftFlag |
| | | }, |
| | |
| | | |
| | | export const ExplicitPublish: QuartzFilterPlugin = () => ({ |
| | | name: "ExplicitPublish", |
| | | shouldPublish([_tree, vfile]) { |
| | | shouldPublish(_ctx, [_tree, vfile]) { |
| | | const publishFlag: boolean = vfile.data?.frontmatter?.publish ?? false |
| | | return publishFlag |
| | | }, |
| | |
| | | import { StaticResources } from "../resources" |
| | | import { PluginTypes } from "./types" |
| | | import { FilePath, ServerSlug } from "../path" |
| | | import { BuildCtx } from "../ctx" |
| | | |
| | | export function getStaticResourcesFromPlugins(plugins: PluginTypes) { |
| | | export function getStaticResourcesFromPlugins(ctx: BuildCtx) { |
| | | const staticResources: StaticResources = { |
| | | css: [], |
| | | js: [], |
| | | } |
| | | |
| | | for (const transformer of plugins.transformers) { |
| | | const res = transformer.externalResources ? transformer.externalResources() : {} |
| | | for (const transformer of ctx.cfg.plugins.transformers) { |
| | | const res = transformer.externalResources ? transformer.externalResources(ctx) : {} |
| | | if (res?.js) { |
| | | staticResources.js.push(...res.js) |
| | | } |
| | |
// inserted in processors.ts
// Augments vfile's DataMap so `file.data.slug` / `.allSlugs` / `.filePath`
// type-check inside plugins. NOTE(review): the enclosing `declare module`
// header is outside this chunk — confirm which module is being augmented.
interface DataMap {
  slug: ServerSlug // slug of this file relative to the content root
  allSlugs: ServerSlug[] // slugs of every file in the build
  filePath: FilePath // original path of the file on disk
}
| | | } |
| | |
| | | const opts = { ...defaultOptions, ...userOpts } |
| | | return { |
| | | name: "LinkProcessing", |
| | | htmlPlugins() { |
| | | htmlPlugins(ctx) { |
| | | return [ |
| | | () => { |
| | | return (tree, file) => { |
| | |
| | | if (opts.markdownLinkResolution === "relative") { |
| | | return targetSlug as RelativeURL |
| | | } else if (opts.markdownLinkResolution === "shortest") { |
| | | // https://forum.obsidian.md/t/settings-new-link-format-what-is-shortest-path-when-possible/6748/5 |
| | | const allSlugs = file.data.allSlugs! |
| | | |
| | | // if the file name is unique, then it's just the filename |
| | | const matchingFileNames = allSlugs.filter((slug) => { |
| | | const matchingFileNames = ctx.allSlugs.filter((slug) => { |
| | | const parts = slug.split(path.posix.sep) |
| | | const fileName = parts.at(-1) |
| | | return targetCanonical === fileName |
| | |
| | | const opts = { ...defaultOptions, ...userOpts } |
| | | return { |
| | | name: "ObsidianFlavoredMarkdown", |
| | | textTransform(src) { |
| | | textTransform(_ctx, src) { |
| | | // pre-transform wikilinks (fix anchors to things that may contain illegal syntax e.g. codeblocks, latex) |
| | | if (opts.wikilinks) { |
| | | src = src.toString() |
| | |
| | | import { PluggableList } from "unified" |
| | | import { StaticResources } from "../resources" |
| | | import { ProcessedContent } from "./vfile" |
| | | import { GlobalConfiguration } from "../cfg" |
| | | import { QuartzComponent } from "../components/types" |
| | | import { FilePath, ServerSlug } from "../path" |
| | | import { BuildCtx } from "../ctx" |
| | |
| | | ) => QuartzTransformerPluginInstance |
| | | export type QuartzTransformerPluginInstance = { |
| | | name: string |
| | | textTransform?: (src: string | Buffer) => string | Buffer |
| | | markdownPlugins?: () => PluggableList |
| | | htmlPlugins?: () => PluggableList |
| | | externalResources?: () => Partial<StaticResources> |
| | | textTransform?: (ctx: BuildCtx, src: string | Buffer) => string | Buffer |
| | | markdownPlugins?: (ctx: BuildCtx) => PluggableList |
| | | htmlPlugins?: (ctx: BuildCtx) => PluggableList |
| | | externalResources?: (ctx: BuildCtx) => Partial<StaticResources> |
| | | } |
| | | |
| | | export type QuartzFilterPlugin<Options extends OptionType = undefined> = ( |
| | |
| | | ) => QuartzFilterPluginInstance |
| | | export type QuartzFilterPluginInstance = { |
| | | name: string |
| | | shouldPublish(content: ProcessedContent): boolean |
| | | shouldPublish(ctx: BuildCtx, content: ProcessedContent): boolean |
| | | } |
| | | |
| | | export type QuartzEmitterPlugin<Options extends OptionType = undefined> = ( |
| | |
| | | resources: StaticResources, |
| | | emitCallback: EmitCallback, |
| | | ): Promise<FilePath[]> |
| | | getQuartzComponents(): QuartzComponent[] |
| | | getQuartzComponents(ctx: BuildCtx): QuartzComponent[] |
| | | } |
| | | |
| | | export interface EmitOptions { |
| | |
| | | } |
| | | |
| | | let emittedFiles = 0 |
| | | const staticResources = getStaticResourcesFromPlugins(cfg.plugins) |
| | | const staticResources = getStaticResourcesFromPlugins(ctx) |
| | | for (const emitter of cfg.plugins.emitters) { |
| | | try { |
| | | const emitted = await emitter.emit(ctx, content, staticResources, emit) |
| | |
| | | import { BuildCtx } from "../ctx" |
| | | import { PerfTimer } from "../perf" |
| | | import { QuartzFilterPluginInstance } from "../plugins/types" |
| | | import { ProcessedContent } from "../plugins/vfile" |
| | | |
| | | export function filterContent( |
| | | { cfg, argv }: BuildCtx, |
| | | content: ProcessedContent[], |
| | | ): ProcessedContent[] { |
| | | export function filterContent(ctx: BuildCtx, content: ProcessedContent[]): ProcessedContent[] { |
| | | const { cfg, argv } = ctx |
| | | const perf = new PerfTimer() |
| | | const initialLength = content.length |
| | | for (const plugin of cfg.plugins.filters) { |
| | | const updatedContent = content.filter(plugin.shouldPublish) |
| | | const updatedContent = content.filter((item) => plugin.shouldPublish(ctx, item)) |
| | | |
| | | if (argv.verbose) { |
| | | const diff = content.filter((x) => !updatedContent.includes(x)) |
| | |
| | | import { ProcessedContent } from "../plugins/vfile" |
| | | import { PerfTimer } from "../perf" |
| | | import { read } from "to-vfile" |
| | | import { FilePath, QUARTZ, ServerSlug, slugifyFilePath } from "../path" |
| | | import { FilePath, QUARTZ, slugifyFilePath } from "../path" |
| | | import path from "path" |
| | | import os from "os" |
| | | import workerpool, { Promise as WorkerPromise } from "workerpool" |
| | | import { QuartzTransformerPluginInstance } from "../plugins/types" |
| | | import { QuartzLogger } from "../log" |
| | | import { trace } from "../trace" |
| | | import { BuildCtx } from "../ctx" |
| | | |
| | | export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void> |
| | | export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor { |
| | | export function createProcessor(ctx: BuildCtx): QuartzProcessor { |
| | | const transformers = ctx.cfg.plugins.transformers |
| | | |
| | | // base Markdown -> MD AST |
| | | let processor = unified().use(remarkParse) |
| | | |
| | | // MD AST -> MD AST transforms |
| | | for (const plugin of transformers.filter((p) => p.markdownPlugins)) { |
| | | processor = processor.use(plugin.markdownPlugins!()) |
| | | processor = processor.use(plugin.markdownPlugins!(ctx)) |
| | | } |
| | | |
| | | // MD AST -> HTML AST |
| | |
| | | |
| | | // HTML AST -> HTML AST transforms |
| | | for (const plugin of transformers.filter((p) => p.htmlPlugins)) { |
| | | processor = processor.use(plugin.htmlPlugins!()) |
| | | processor = processor.use(plugin.htmlPlugins!(ctx)) |
| | | } |
| | | |
| | | return processor |
| | |
| | | }) |
| | | } |
| | | |
| | | export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSlugs: ServerSlug[]) { |
| | | export function createFileParser(ctx: BuildCtx, fps: FilePath[]) { |
| | | const { argv, cfg } = ctx |
| | | return async (processor: QuartzProcessor) => { |
| | | const res: ProcessedContent[] = [] |
| | | for (const fp of fps) { |
| | |
| | | |
| | | // Text -> Text transforms |
| | | for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) { |
| | | file.value = plugin.textTransform!(file.value) |
| | | file.value = plugin.textTransform!(ctx, file.value) |
| | | } |
| | | |
| | | // base data properties that plugins may use |
| | | file.data.slug = slugifyFilePath(path.relative(argv.directory, file.path) as FilePath) |
| | | file.data.allSlugs = allSlugs |
| | | file.data.filePath = fp |
| | | |
| | | const ast = processor.parse(file) |
| | |
| | | } |
| | | |
| | | export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> { |
| | | const { argv, cfg } = ctx |
| | | const { argv } = ctx |
| | | const perf = new PerfTimer() |
| | | const log = new QuartzLogger(argv.verbose) |
| | | |
| | | const CHUNK_SIZE = 128 |
| | | let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism() |
| | | |
| | | // get all slugs ahead of time as each thread needs a copy |
| | | const allSlugs = fps.map((fp) => |
| | | slugifyFilePath(path.relative(argv.directory, path.resolve(fp)) as FilePath), |
| | | ) |
| | | |
| | | let res: ProcessedContent[] = [] |
| | | log.start(`Parsing input files using ${concurrency} threads`) |
| | | if (concurrency === 1) { |
| | | try { |
| | | const processor = createProcessor(cfg.plugins.transformers) |
| | | const parse = createFileParser(ctx, fps, allSlugs) |
| | | const processor = createProcessor(ctx) |
| | | const parse = createFileParser(ctx, fps) |
| | | res = await parse(processor) |
| | | } catch (error) { |
| | | log.end() |
| | |
| | | |
| | | const childPromises: WorkerPromise<ProcessedContent[]>[] = [] |
| | | for (const chunk of chunks(fps, CHUNK_SIZE)) { |
| | | childPromises.push(pool.exec("parseFiles", [argv, chunk, allSlugs])) |
| | | childPromises.push(pool.exec("parseFiles", [argv, chunk, ctx.allSlugs])) |
| | | } |
| | | |
| | | const results: ProcessedContent[][] = await WorkerPromise.all(childPromises) |
| | |
| | | import { FilePath, ServerSlug } from "./path" |
| | | import { createFileParser, createProcessor } from "./processors/parse" |
| | | |
| | | const transformers = cfg.plugins.transformers |
| | | const processor = createProcessor(transformers) |
| | | |
| | | // only called from worker thread |
| | | export async function parseFiles(argv: Argv, fps: FilePath[], allSlugs: ServerSlug[]) { |
| | | const ctx: BuildCtx = { |
| | | cfg, |
| | | argv, |
| | | allSlugs, |
| | | } |
| | | |
| | | const parse = createFileParser(ctx, fps, allSlugs) |
| | | const processor = createProcessor(ctx) |
| | | const parse = createFileParser(ctx, fps) |
| | | return parse(processor) |
| | | } |