| | |
| | | import { Mutex } from "async-mutex" |
| | | import DepGraph from "./depgraph" |
| | | import { getStaticResourcesFromPlugins } from "./plugins" |
| | | import { randomIdNonSecure } from "./util/random" |
| | | |
// Per-plugin dependency graphs, keyed by plugin name.
// NOTE(review): a null entry appears to mean "no dependency graph tracked for
// this plugin" — confirm against the code that populates this map.
type Dependencies = Record<string, DepGraph<FilePath> | null>
| | | |
| | |
| | | |
| | | async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) { |
| | | const ctx: BuildCtx = { |
| | | buildId: randomIdNonSecure(), |
| | | argv, |
| | | cfg, |
| | | allSlugs: [], |
| | |
| | | |
| | | const buildFromEntry = argv.fastRebuild ? partialRebuildFromEntrypoint : rebuildFromEntrypoint |
| | | watcher |
| | | .on("add", (fp) => buildFromEntry(fp, "add", clientRefresh, buildData)) |
| | | .on("change", (fp) => buildFromEntry(fp, "change", clientRefresh, buildData)) |
| | | .on("unlink", (fp) => buildFromEntry(fp, "delete", clientRefresh, buildData)) |
| | | .on("add", (fp) => buildFromEntry(fp as string, "add", clientRefresh, buildData)) |
| | | .on("change", (fp) => buildFromEntry(fp as string, "change", clientRefresh, buildData)) |
| | | .on("unlink", (fp) => buildFromEntry(fp as string, "delete", clientRefresh, buildData)) |
| | | |
| | | return async () => { |
| | | await watcher.close() |
| | |
| | | return |
| | | } |
| | | |
| | | const buildStart = new Date().getTime() |
| | | buildData.lastBuildMs = buildStart |
| | | const buildId = randomIdNonSecure() |
| | | ctx.buildId = buildId |
| | | buildData.lastBuildMs = new Date().getTime() |
| | | const release = await mut.acquire() |
| | | if (buildData.lastBuildMs > buildStart) { |
| | | |
| | | // if there's another build after us, release and let them do it |
| | | if (ctx.buildId !== buildId) { |
| | | release() |
| | | return |
| | | } |
| | |
| | | ([_node, vfile]) => !toRemove.has(vfile.data.filePath!), |
| | | ) |
| | | |
| | | const emittedFps = await emitter.emit(ctx, files, staticResources) |
| | | |
| | | if (ctx.argv.verbose) { |
| | | for (const file of emittedFps) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | const emitted = await emitter.emit(ctx, files, staticResources) |
| | | if (Symbol.asyncIterator in emitted) { |
| | | // Async generator case |
| | | for await (const file of emitted) { |
| | | emittedFiles++ |
| | | if (ctx.argv.verbose) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } |
| | | } |
| | | } else { |
| | | // Array case |
| | | emittedFiles += emitted.length |
| | | if (ctx.argv.verbose) { |
| | | for (const file of emitted) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } |
| | | } |
| | | } |
| | | |
| | | emittedFiles += emittedFps.length |
| | | continue |
| | | } |
| | | |
| | |
| | | .filter((file) => !toRemove.has(file)) |
| | | .map((file) => contentMap.get(file)!) |
| | | |
| | | const emittedFps = await emitter.emit(ctx, upstreamContent, staticResources) |
| | | |
| | | if (ctx.argv.verbose) { |
| | | for (const file of emittedFps) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | const emitted = await emitter.emit(ctx, upstreamContent, staticResources) |
| | | if (Symbol.asyncIterator in emitted) { |
| | | // Async generator case |
| | | for await (const file of emitted) { |
| | | emittedFiles++ |
| | | if (ctx.argv.verbose) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } |
| | | } |
| | | } else { |
| | | // Array case |
| | | emittedFiles += emitted.length |
| | | if (ctx.argv.verbose) { |
| | | for (const file of emitted) { |
| | | console.log(`[emit:${emitter.name}] ${file}`) |
| | | } |
| | | } |
| | | } |
| | | |
| | | emittedFiles += emittedFps.length |
| | | } |
| | | } |
| | | |
| | |
| | | } |
| | | await rimraf([...destinationsToDelete]) |
| | | |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | |
| | | toRemove.clear() |
| | | release() |
| | | clientRefresh() |
| | |
| | | toRemove.add(filePath) |
| | | } |
| | | |
| | | const buildStart = new Date().getTime() |
| | | buildData.lastBuildMs = buildStart |
| | | const buildId = randomIdNonSecure() |
| | | ctx.buildId = buildId |
| | | buildData.lastBuildMs = new Date().getTime() |
| | | const release = await mut.acquire() |
| | | |
| | | // there's another build after us, release and let them do it |
| | | if (buildData.lastBuildMs > buildStart) { |
| | | if (ctx.buildId !== buildId) { |
| | | release() |
| | | return |
| | | } |
| | | |
| | | const perf = new PerfTimer() |
| | | console.log(chalk.yellow("Detected change, rebuilding...")) |
| | | |
| | | try { |
| | | const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) |
| | | |
| | | const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] |
| | | .filter((fp) => !toRemove.has(fp)) |
| | | .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) |
| | | |
| | | ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] |
| | | const parsedContent = await parseMarkdown(ctx, filesToRebuild) |
| | | for (const content of parsedContent) { |
| | | const [_tree, vfile] = content |
| | |
| | | const parsedFiles = [...contentMap.values()] |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | |
| | | // re-update slugs |
| | | const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] |
| | | .filter((fp) => !toRemove.has(fp)) |
| | | .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) |
| | | |
| | | ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] |
| | | |
| | | // TODO: we can probably traverse the link graph to figure out what's safe to delete here |
| | | // instead of just deleting everything |
| | | await rimraf(path.join(argv.output, ".*"), { glob: true }) |
| | |
| | | } |
| | | } |
| | | |
| | | release() |
| | | clientRefresh() |
| | | toRebuild.clear() |
| | | toRemove.clear() |
| | | release() |
| | | } |
| | | |
| | | export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => { |