roadmap/quartz/build.ts

import sourceMapSupport from "source-map-support"
sourceMapSupport.install(options)
import path from "path"
import { PerfTimer } from "./util/perf"
import { rimraf } from "rimraf"
import { isGitIgnored } from "globby"
import chalk from "chalk"
import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
import { Argv, BuildCtx } from "./util/ctx"
import { glob, toPosixPath } from "./util/glob"
import { trace } from "./util/trace"
import { options } from "./util/sourcemap"
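
// buildQuartz runs a full one-shot build: clean the output directory, glob the
// content folder, then parse, filter, and emit through the configured plugins.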
async function buildQuartz(argv: Argv, clientRefresh: () => void) {
  const ctx: BuildCtx = {
    argv,
    cfg,
    allSlugs: [],
  }

  const perf = new PerfTimer()
  const output = argv.output
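
  // summarize the configured plugins when argv.verbose is set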
  const pluginCount = Object.values(cfg.plugins).flat().length
  const pluginNames = (key: "transformers" | "filters" | "emitters") =>
    cfg.plugins[key].map((plugin) => plugin.name)
  if (argv.verbose) {
    console.log(`Loaded ${pluginCount} plugins`)
    console.log(` Transformers: ${pluginNames("transformers").join(", ")}`)
    console.log(` Filters: ${pluginNames("filters").join(", ")}`)
    console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
  }
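
  // wipe whatever a previous build left in the output directory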
  perf.addEvent("clean")
  await rimraf(output)
  console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)
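
  // glob everything under the content directory; only markdown files are parsed as content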
  perf.addEvent("glob")
  const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
  const fps = allFiles.filter((fp) => fp.endsWith(".md"))
  console.log(
    `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
  )
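
  // build full input paths for the parser and record a slug for every discovered file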
  const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
  ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
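
  // run the three plugin phases in order: transformers (parse), filters, emitters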
  const parsedFiles = await parseMarkdown(ctx, filePaths)
  const filteredContent = filterContent(ctx, parsedFiles)
  await emitContent(ctx, filteredContent)
  console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
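
  // in serve mode, stay alive and rebuild incrementally as files change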
  if (argv.serve) {
    return startServing(ctx, parsedFiles, clientRefresh)
  }
}
// setup watcher for rebuilds
async function startServing(
  ctx: BuildCtx,
  initialContent: ProcessedContent[],
  clientRefresh: () => void,
) {
  const { argv } = ctx
  const ignored = await isGitIgnored()
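
  // map each source file path to its parsed content, kept up to date as files change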
  const contentMap = new Map<FilePath, ProcessedContent>()
  for (const content of initialContent) {
    const [_tree, vfile] = content
    contentMap.set(vfile.data.filePath!, content)
  }

  const initialSlugs = ctx.allSlugs
  let timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
  let toRebuild: Set<FilePath> = new Set()
  let toRemove: Set<FilePath> = new Set()
  let trackedAssets: Set<FilePath> = new Set()
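
  // classify a single file event, queue it, and schedule a debounced rebuild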
  async function rebuild(fp: string, action: "add" | "change" | "delete") {
    // don't do anything for gitignored files
    if (ignored(fp)) {
      return
    }

    // don't bother rebuilding for non-content files, just track and refresh
    fp = toPosixPath(fp)
    const filePath = joinSegments(argv.directory, fp) as FilePath
    if (path.extname(fp) !== ".md") {
      if (action === "add" || action === "change") {
        trackedAssets.add(filePath)
      } else if (action === "delete") {
        trackedAssets.delete(filePath)
      }
      clientRefresh()
      return
    }

    if (action === "add" || action === "change") {
      toRebuild.add(filePath)
    } else if (action === "delete") {
      toRemove.add(filePath)
    }

    timeoutIds.forEach((id) => clearTimeout(id))
    // debounce rebuilds every 250ms
    timeoutIds.add(
      setTimeout(async () => {
        const perf = new PerfTimer()
        console.log(chalk.yellow("Detected change, rebuilding..."))
        try {
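          // figure out what to rebuild and refresh the slug list (everything still tracked, minus pending deletions)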
          const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
          const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
            .filter((fp) => !toRemove.has(fp))
            .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
          ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
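
          // reparse only the files that actually changed and fold them into the content map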
          const parsedContent = await parseMarkdown(ctx, filesToRebuild)
          for (const content of parsedContent) {
            const [_tree, vfile] = content
            contentMap.set(vfile.data.filePath!, content)
          }
          for (const fp of toRemove) {
            contentMap.delete(fp)
          }
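
          // clear the output directory and re-emit the entire site from the updated content map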
          await rimraf(argv.output)
          const parsedFiles = [...contentMap.values()]
          const filteredContent = filterContent(ctx, parsedFiles)
          await emitContent(ctx, filteredContent)
          console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
        } catch {
          console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
        }

        clientRefresh()
        toRebuild.clear()
        toRemove.clear()
      }, 250),
    )
  }
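
  // watch the content directory and translate filesystem events into rebuild actions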
  const watcher = chokidar.watch(".", {
    persistent: true,
    cwd: argv.directory,
    ignoreInitial: true,
  })

  watcher
    .on("add", (fp) => rebuild(fp, "add"))
    .on("change", (fp) => rebuild(fp, "change"))
    .on("unlink", (fp) => rebuild(fp, "delete"))
}
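
// default entry point: run the build and report any fatal error via trace()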
export default async (argv: Argv, clientRefresh: () => void) => {
  try {
    return await buildQuartz(argv, clientRefresh)
  } catch (err) {
    trace("\nExiting Quartz due to a fatal error", err as Error)
  }
}