| | |
| | | } |
| | | |
  // Slug set captured at initial build time; each rebuild unions these with the
  // freshly tracked slugs before re-parsing.
  const initialSlugs = ctx.allSlugs
  // Pending debounce timers; all are cancelled when one fires or on shutdown.
  const timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
  // Start time (ms) of the most recent rebuild; used to drop superseded builds
  // that were waiting on the mutex.
  let lastBuildMs = 0
  // Changed files queued for re-parsing on the next rebuild.
  const toRebuild: Set<FilePath> = new Set()
  // Files deleted since the last rebuild; filtered out of the rebuild/slug sets
  // and evicted from the parse cache.
  const toRemove: Set<FilePath> = new Set()
  // Tracked non-content files; contribute to the slug set alongside parsed content.
  const trackedAssets: Set<FilePath> = new Set()
| | |
| | | } |
| | | |
| | | // debounce rebuilds every 250ms |
| | | timeoutIds.add( |
| | | setTimeout(async () => { |
| | | const release = await mut.acquire() |
| | | timeoutIds.forEach((id) => clearTimeout(id)) |
| | | timeoutIds.clear() |
| | | |
| | | const perf = new PerfTimer() |
| | | console.log(chalk.yellow("Detected change, rebuilding...")) |
| | | try { |
| | | const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) |
| | | const buildStart = new Date().getTime() |
| | | lastBuildMs = buildStart |
| | | const release = await mut.acquire() |
| | | if (lastBuildMs > buildStart) { |
| | | release() |
| | | return |
| | | } |
| | | |
| | | const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] |
| | | .filter((fp) => !toRemove.has(fp)) |
| | | .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) |
| | | const perf = new PerfTimer() |
| | | console.log(chalk.yellow("Detected change, rebuilding...")) |
| | | try { |
| | | const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp)) |
| | | |
| | | ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] |
| | | const parsedContent = await parseMarkdown(ctx, filesToRebuild) |
| | | for (const content of parsedContent) { |
| | | const [_tree, vfile] = content |
| | | contentMap.set(vfile.data.filePath!, content) |
| | | } |
| | | const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])] |
| | | .filter((fp) => !toRemove.has(fp)) |
| | | .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath)) |
| | | |
| | | for (const fp of toRemove) { |
| | | contentMap.delete(fp) |
| | | } |
| | | ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])] |
| | | const parsedContent = await parseMarkdown(ctx, filesToRebuild) |
| | | for (const content of parsedContent) { |
| | | const [_tree, vfile] = content |
| | | contentMap.set(vfile.data.filePath!, content) |
| | | } |
| | | |
| | | // TODO: we can probably traverse the link graph to figure out what's safe to delete here |
| | | // instead of just deleting everything |
| | | await rimraf(argv.output) |
| | | const parsedFiles = [...contentMap.values()] |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | await emitContent(ctx, filteredContent) |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | } catch { |
| | | console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) |
| | | } |
| | | for (const fp of toRemove) { |
| | | contentMap.delete(fp) |
| | | } |
| | | |
| | | clientRefresh() |
| | | toRebuild.clear() |
| | | toRemove.clear() |
| | | release() |
| | | }, 250), |
| | | ) |
| | | const parsedFiles = [...contentMap.values()] |
| | | const filteredContent = filterContent(ctx, parsedFiles) |
| | | // TODO: we can probably traverse the link graph to figure out what's safe to delete here |
| | | // instead of just deleting everything |
| | | await rimraf(argv.output) |
| | | await emitContent(ctx, filteredContent) |
| | | console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) |
| | | } catch { |
| | | console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`)) |
| | | } |
| | | |
| | | clientRefresh() |
| | | toRebuild.clear() |
| | | toRemove.clear() |
| | | release() |
| | | } |
| | | |
  const watcher = chokidar.watch(".", {
    // NOTE(review): the watch options object and the "add"/"change" handlers are
    // missing from this chunk — only the delete handler is visible here.
    // Confirm against the full file before editing this statement.
    .on("unlink", (fp) => rebuild(fp, "delete"))
| | | |
| | | return async () => { |
| | | timeoutIds.forEach((id) => clearTimeout(id)) |
| | | await watcher.close() |
| | | } |
| | | } |