import sourceMapSupport from "source-map-support"
sourceMapSupport.install(options)
import path from "path"
import { PerfTimer } from "./util/perf"
import { rm } from "fs/promises"
import { GlobbyFilterFunction, isGitIgnored } from "globby"
import { styleText } from "util"
import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
import { FilePath, FullSlug, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
import { Argv, MutableBuildCtx } from "./util/ctx"
import { glob, toPosixPath } from "./util/glob"
import { trace } from "./util/trace"
import { options } from "./util/sourcemap"
import { Mutex } from "async-mutex"
import { getStaticResourcesFromPlugins } from "./plugins"
import { randomIdNonSecure } from "./util/random"
import { ChangeEvent } from "./plugins/types"
import { minimatch } from "minimatch"
import { createPluginUtilities } from "./plugins/plugin-context"

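// ContentMap tracks every file discovered in the content directory: markdown
// entries carry their parsed content, while all other files are tracked by
// path only so rebuilds can reconcile additions and deletions.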
type ContentMap = Map<
  FilePath,
  | {
      type: "markdown"
      content: ProcessedContent
    }
  | {
      type: "other"
    }
>

type BuildData = {
  ctx: MutableBuildCtx
  ignored: GlobbyFilterFunction
  mut: Mutex
  contentMap: ContentMap
  changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
  lastBuildMs: number
}

/**
 * Collect all aliases from parsed content files.
 * This is used to update ctx.allSlugs after parsing without mutating it during plugin execution.
 */
function collectAliases(parsedFiles: ProcessedContent[]): FullSlug[] {
  return parsedFiles
    .filter(([_, file]) => file.data.aliases)
    .flatMap(([_, file]) => file.data.aliases!)
}

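// Illustrative example (hypothetical data): a file whose frontmatter declares
// `aliases: ["old-slug"]` contributes "old-slug" to the returned list; files
// with no aliases are dropped by the filter before the flatMap runs.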
async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
  const ctx: MutableBuildCtx = {
    buildId: randomIdNonSecure(),
    argv,
    cfg,
    allSlugs: [],
    allFiles: [],
    incremental: false,
    utils: createPluginUtilities(),
  }

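  // This ctx is threaded through parse, filter, and emit. Only the build
  // orchestration here reassigns its fields (hence MutableBuildCtx); plugins
  // receive it read-only.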
  const perf = new PerfTimer()
  const output = argv.output

  const pluginCount = Object.values(cfg.plugins).flat().length
  const pluginNames = (key: "transformers" | "filters" | "emitters") =>
    cfg.plugins[key].map((plugin) => plugin.name)
  if (argv.verbose) {
    console.log(`Loaded ${pluginCount} plugins`)
    console.log(`  Transformers: ${pluginNames("transformers").join(", ")}`)
    console.log(`  Filters: ${pluginNames("filters").join(", ")}`)
    console.log(`  Emitters: ${pluginNames("emitters").join(", ")}`)
  }

  const release = await mut.acquire()
  perf.addEvent("clean")
  await rm(output, { recursive: true, force: true })
  console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)

  perf.addEvent("glob")
  const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
  const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
  console.log(
    `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
  )

  const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
  ctx.allFiles = allFiles
  ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))

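  // At this point allSlugs is a 1:1 mapping of file paths; aliases discovered
  // during parsing are merged in below.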
  const parsedFiles = await parseMarkdown(ctx, filePaths)

  // Collect aliases from parsed files and update context immutably
  const discoveredAliases = collectAliases(parsedFiles)
  ctx.allSlugs = [...new Set([...ctx.allSlugs, ...discoveredAliases])]

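  // Filtering runs after alias collection so that files removed by filter
  // plugins still contribute their aliases to ctx.allSlugs.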
  const filteredContent = filterContent(ctx, parsedFiles)

  await emitContent(ctx, filteredContent)
  console.log(
    styleText("green", `Done processing ${markdownPaths.length} files in ${perf.timeSince()}`),
  )
  release()

  if (argv.watch) {
    ctx.incremental = true
    return startWatching(ctx, mut, parsedFiles, clientRefresh)
  }
}

// setup watcher for rebuilds
async function startWatching(
  ctx: MutableBuildCtx,
  mut: Mutex,
  initialContent: ProcessedContent[],
  clientRefresh: () => void,
) {
  const { argv, allFiles } = ctx

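  // Seed the content map: every discovered file starts as "other", then parsed
  // markdown entries overwrite their paths with full content.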
  const contentMap: ContentMap = new Map()
  for (const filePath of allFiles) {
    contentMap.set(filePath, {
      type: "other",
    })
  }

  for (const content of initialContent) {
    const [_tree, vfile] = content
    contentMap.set(vfile.data.relativePath!, {
      type: "markdown",
      content,
    })
  }

  const gitIgnoredMatcher = await isGitIgnored()
  const buildData: BuildData = {
    ctx,
    mut,
    contentMap,
    ignored: (fp) => {
      const pathStr = toPosixPath(fp.toString())
      if (pathStr.startsWith(".git/")) return true
      if (gitIgnoredMatcher(pathStr)) return true
      for (const pattern of cfg.configuration.ignorePatterns) {
        if (minimatch(pathStr, pattern)) {
          return true
        }
      }

      return false
    },

    changesSinceLastBuild: {},
    lastBuildMs: 0,
  }

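  // Watch the content directory itself (cwd: argv.directory); ignoreInitial
  // skips events for files the full build above already processed.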
  const watcher = chokidar.watch(".", {
    persistent: true,
    cwd: argv.directory,
    ignoreInitial: true,
  })

  const changes: ChangeEvent[] = []
  watcher
    .on("add", (fp) => {
      fp = toPosixPath(fp)
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "add" })
      void rebuild(changes, clientRefresh, buildData)
    })
    .on("change", (fp) => {
      fp = toPosixPath(fp)
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "change" })
      void rebuild(changes, clientRefresh, buildData)
    })
    .on("unlink", (fp) => {
      fp = toPosixPath(fp)
      if (buildData.ignored(fp)) return
      changes.push({ path: fp as FilePath, type: "delete" })
      void rebuild(changes, clientRefresh, buildData)
    })

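  // Hand back a teardown function so the caller can stop the watcher.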
  return async () => {
    await watcher.close()
  }
}

async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
  const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
  const { argv, cfg } = ctx

  const buildId = randomIdNonSecure()
  ctx.buildId = buildId
  buildData.lastBuildMs = new Date().getTime()
  const numChangesInBuild = changes.length
  const release = await mut.acquire()

  // if there's another build after us, release and let them do it
  if (ctx.buildId !== buildId) {
    release()
    return
  }

  const perf = new PerfTimer()
  perf.addEvent("rebuild")
  console.log(styleText("yellow", "Detected change, rebuilding..."))

  // update changesSinceLastBuild
  for (const change of changes) {
    changesSinceLastBuild[change.path] = change.type
  }

  const staticResources = getStaticResourcesFromPlugins(ctx)
  const pathsToParse: FilePath[] = []
  for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
    if (type === "delete" || path.extname(fp) !== ".md") continue
    const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
    pathsToParse.push(fullPath)
  }

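  // Re-parse only the changed markdown that still exists; deletions and
  // non-markdown changes are reconciled against contentMap below.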
  const parsed = await parseMarkdown(ctx, pathsToParse)
  for (const content of parsed) {
    contentMap.set(content[1].data.relativePath!, {
      type: "markdown",
      content,
    })
  }

  // update state using changesSinceLastBuild
  // we do this weird play of add => compute change events => remove
  // so that partialEmitters can do appropriate cleanup based on the content of deleted files
  for (const [file, change] of Object.entries(changesSinceLastBuild)) {
    if (change === "delete") {
      // universal delete case
      contentMap.delete(file as FilePath)
    }

    // manually track added non-markdown files here, as the parsed content
    // above only contains markdown files
    if (change === "add" && path.extname(file) !== ".md") {
      contentMap.set(file as FilePath, {
        type: "other",
      })
    }
  }

  const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
    const path = fp as FilePath
    const processedContent = contentMap.get(path)
    if (processedContent?.type === "markdown") {
      const [_tree, file] = processedContent.content
      return {
        type,
        path,
        file,
      }
    }

    return {
      type,
      path,
    }
  })

  // update allFiles and then allSlugs with the consistent view of content map
  ctx.allFiles = Array.from(contentMap.keys())
  ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))

  // Collect aliases from all markdown files before filtering for consistency
  const allMarkdownFiles = Array.from(contentMap.values())
    .filter((file) => file.type === "markdown")
    .map((file) => file.content)

  const discoveredAliases = collectAliases(allMarkdownFiles)
  ctx.allSlugs = [...new Set([...ctx.allSlugs, ...discoveredAliases])]

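  // This mirrors the initial build flow: aliases are collected before
  // filterContent so both paths produce the same ctx.allSlugs.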
  const processedFiles = filterContent(ctx, allMarkdownFiles)

  let emittedFiles = 0
  for (const emitter of cfg.plugins.emitters) {
    // Try to use partialEmit if available, otherwise assume the output is static
    const emitFn = emitter.partialEmit ?? emitter.emit
    const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
    if (emitted === null) {
      continue
    }

    if (Symbol.asyncIterator in emitted) {
      // Async generator case
      for await (const file of emitted) {
        emittedFiles++
        if (ctx.argv.verbose) {
          console.log(`[emit:${emitter.name}] ${file}`)
        }
      }
    } else {
      // Array case
      emittedFiles += emitted.length
      if (ctx.argv.verbose) {
        for (const file of emitted) {
          console.log(`[emit:${emitter.name}] ${file}`)
        }
      }
    }
  }

  console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
  console.log(styleText("green", `Done rebuilding in ${perf.timeSince()}`))
  changes.splice(0, numChangesInBuild)
  clientRefresh()
  release()
}

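// Entry point: wrap the full build so a fatal error is reported via trace
// instead of surfacing as an unhandled rejection.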
export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {
  try {
    return await buildQuartz(argv, mut, clientRefresh)
  } catch (err) {
    trace("\nExiting Quartz due to a fatal error", err as Error)
  }
}