->     <div
->       style={{
->         position: "relative",
->         display: "flex",
->         flexDirection: "row",
->         alignItems: "flex-start",
->         height: "100%",
->         width: "100%",
->         backgroundImage: `url("https://${cfg.baseUrl}/static/og-image.jpeg")`,
->         backgroundSize: "100% 100%",
->       }}
->     >
->       <div
->         style={{
->           position: "absolute",
->           top: 0,
->           left: 0,
->           right: 0,
->           bottom: 0,
->           background: "radial-gradient(circle at center, transparent, rgba(0, 0, 0, 0.4) 70%)",
->         }}
->       />
->       <div
->         style={{
->           display: "flex",
->           height: "100%",
->           width: "100%",
->           flexDirection: "column",
->           justifyContent: "flex-start",
->           alignItems: "flex-start",
->           gap: "1.5rem",
->           paddingTop: "4rem",
->           paddingBottom: "4rem",
->           marginLeft: "4rem",
->         }}
->       >
->         <img
->           src={`"https://${cfg.baseUrl}/static/icon.jpeg"`}
->           style={{
->             position: "relative",
->             backgroundClip: "border-box",
->             borderRadius: "6rem",
->           }}
->           width={80}
->         />
->         <div
->           style={{
->             display: "flex",
->             flexDirection: "column",
->             textAlign: "left",
->             fontFamily: fonts[0].name,
->           }}
->         >
->           <h2
->             style={{
->               color: cfg.theme.colors[colorScheme].light,
->               fontSize: "3rem",
->               fontWeight: 700,
->               marginRight: "4rem",
->               fontFamily: fonts[0].name,
->             }}
->           >
->             {title}
->           </h2>
->           <ul
->             style={{
->               color: cfg.theme.colors[colorScheme].gray,
->               gap: "1rem",
->               fontSize: "1.5rem",
->               fontFamily: fonts[1].name,
->             }}
->           >
->             {Li.map((item, index) => {
->               if (item) {
->                 return <li key={index}>{item}</li>
->               }
->             })}
->           </ul>
->         </div>
->         <p
->           style={{
->             color: cfg.theme.colors[colorScheme].light,
->             fontSize: "1.5rem",
->             overflow: "hidden",
->             marginRight: "8rem",
->             textOverflow: "ellipsis",
->             display: "-webkit-box",
->             WebkitLineClamp: 7,
->             WebkitBoxOrient: "vertical",
->             lineClamp: 7,
->             fontFamily: fonts[1].name,
->           }}
->         >
->           {description}
->         </p>
->       </div>
->     </div>
->   )
-> }
-> ```
+This functionality is provided by the [[CustomOgImages]] plugin. See the plugin page for customization options.
diff --git a/docs/index.md b/docs/index.md
index d4a751a84..bd8c8969a 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -32,7 +32,7 @@ If you prefer instructions in a video format you can try following Nicole van de
## 🔧 Features
- [[Obsidian compatibility]], [[full-text search]], [[graph view]], note transclusion, [[wikilinks]], [[backlinks]], [[features/Latex|Latex]], [[syntax highlighting]], [[popover previews]], [[Docker Support]], [[i18n|internationalization]], [[comments]] and [many more](./features/) right out of the box
-- Hot-reload for both configuration and content
+- Hot-reload on configuration edits and incremental rebuilds for content edits
- Simple JSX layouts and [[creating components|page components]]
- [[SPA Routing|Ridiculously fast page loads]] and tiny bundle sizes
- Fully-customizable parsing, filtering, and page generation through [[making plugins|plugins]]
diff --git a/docs/plugins/CustomOgImages.md b/docs/plugins/CustomOgImages.md
new file mode 100644
index 000000000..5d47c419c
--- /dev/null
+++ b/docs/plugins/CustomOgImages.md
@@ -0,0 +1,360 @@
+---
+title: Custom OG Images
+tags:
+ - feature/emitter
+---
+
+The Custom OG Images emitter plugin generates social media preview images for your pages. It uses [satori](https://github.com/vercel/satori) to convert HTML/CSS into images, allowing you to create beautiful and consistent social media preview cards for your content.
+
+> [!note]
+> For information on how to add, remove or configure plugins, see the [[configuration#Plugins|Configuration]] page.
+
+## Features
+
+- Automatically generates social media preview images for each page
+- Supports both light and dark mode themes
+- Customizable through frontmatter properties
+- Fallback to default image when needed
+- Full control over image design through custom components
+
+## Configuration
+
+> [!info] Info
+>
+> The `baseUrl` property in your [[configuration]] must be set properly for social images to work correctly, as they require absolute paths.
+
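+A minimal sketch of where this value lives (shown with this site's own `baseUrl`; substitute your domain):
+
+```typescript title="quartz.config.ts"
+const config: QuartzConfig = {
+  configuration: {
+    // absolute links to generated social images are built from this value
+    baseUrl: "quartz.jzhao.xyz",
+    // ... rest of your configuration
+  },
+  // ...
+}
+```
+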
+This plugin accepts the following configuration options:
+
+```typescript title="quartz.config.ts"
+import { CustomOgImages } from "./quartz/plugins/emitters/ogImage"
+
+const config: QuartzConfig = {
+ plugins: {
+ emitters: [
+ CustomOgImages({
+        colorScheme: "lightMode", // color scheme to use for generated images; matches the theme colors from the config, valid values are "darkMode" and "lightMode"
+        width: 1200, // width of the generated image (in pixels)
+        height: 630, // height of the generated image (in pixels)
+        excludeRoot: false, // whether to exclude the root "/" index page from auto-generated images (false = auto-generate, true = use the default og image)
+ imageStructure: defaultImage, // custom image component to use
+ }),
+ ],
+ },
+}
+```
+
+### Configuration Options
+
+| Option | Type | Default | Description |
+| ---------------- | --------- | ------------ | ----------------------------------------------------------------- |
+| `colorScheme` | string | "lightMode" | Theme to use for generating images ("darkMode" or "lightMode") |
+| `width` | number | 1200 | Width of the generated image in pixels |
+| `height` | number | 630 | Height of the generated image in pixels |
+| `excludeRoot` | boolean | false | Whether to exclude the root index page from auto-generated images |
+| `imageStructure` | component | defaultImage | Custom component to use for image generation |
+
+## Frontmatter Properties
+
+The following properties can be used to customize your link previews:
+
+| Property | Alias | Summary |
+| ------------------- | ---------------- | ----------------------------------- |
+| `socialDescription` | `description` | Description to be used for preview. |
+| `socialImage` | `image`, `cover` | Link to preview image. |
+
+The `socialImage` property should contain a link to an image relative to `quartz/static`. If you have a folder for all your images in `quartz/static/my-images`, an example for `socialImage` could be `"my-images/cover.png"`.
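+
+For example, a page could set both properties in its frontmatter like this (the title, description, and image path below are placeholders):
+
+```yaml
+---
+title: Example Page
+socialDescription: A short summary used for link previews.
+socialImage: "my-images/cover.png"
+---
+```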
+
+> [!info] Info
+>
+> The priority for the cover image is as follows: `frontmatter property > generated image (if enabled) > default image`.
+>
+> The default image (`quartz/static/og-image.png`) is only used as a fallback if nothing else is set. If the Custom OG Images emitter plugin is enabled, its generated image becomes the new per-page default, but it can still be overridden by setting the `socialImage` frontmatter property for that page.
+
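+Conceptually, the resolution order described above boils down to something like the following (a sketch of the precedence, not the actual implementation; the names are illustrative):
+
+```typescript
+// precedence sketch only — not Quartz's real code
+const ogImage =
+  frontmatter.socialImage ?? // explicit frontmatter property always wins
+  generatedImage ?? // produced by this plugin, if enabled
+  `https://${cfg.baseUrl}/static/og-image.png` // global fallback
+```
+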
+## Customization
+
+You can fully customize how the generated images look by passing your own component to `imageStructure`. This component receives JSX plus some page metadata/config options and converts it to an image using [satori](https://github.com/vercel/satori). Vercel provides an [online playground](https://og-playground.vercel.app/) that lets you preview how your JSX will look as a picture, which is ideal for prototyping your custom design.
+
+### Fonts
+
+You will also be passed an array containing a header font and a body font (the first entry is the header font, the second is the body font). These match the fonts selected in `theme.typography.header` and `theme.typography.body` in `quartz.config.ts` and are passed in the format required by [`satori`](https://github.com/vercel/satori). To use them in CSS, use the `.name` property (e.g. `fontFamily: fonts[1].name` to use the "body" font family).
+
+An example of a component using the header font could look like this:
+
+```tsx title="socialImage.tsx"
+export const myImage: SocialImageOptions["imageStructure"] = (...) => {
+  return <h1 style={{ fontFamily: fonts[0].name }}>Cool Header!</h1>
+}
+```
+
+> [!example]- Local fonts
+>
+> If you use a local font under the `static` folder, make sure to set the correct `@font-face` in `custom.scss`:
+>
+> ```scss title="custom.scss"
+> @font-face {
+> font-family: "Newsreader";
+> font-style: normal;
+> font-weight: normal;
+> font-display: swap;
+> src: url("/static/Newsreader.woff2") format("woff2");
+> }
+> ```
+>
+> Then in `quartz/util/og.tsx`, you can load the Satori fonts like so:
+>
+> ```tsx title="quartz/util/og.tsx"
+> import { joinSegments, QUARTZ } from "../path"
+> import fs from "fs"
+> import path from "path"
+>
+> const newsreaderFontPath = joinSegments(QUARTZ, "static", "Newsreader.woff2")
+> export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: FontSpecification) {
+> // ... rest of implementation remains same
+> const fonts: SatoriOptions["fonts"] = [
+> ...headerFontData.map((data, idx) => ({
+> name: headerFontName,
+> data,
+> weight: headerWeights[idx],
+> style: "normal" as const,
+> })),
+> ...bodyFontData.map((data, idx) => ({
+> name: bodyFontName,
+> data,
+> weight: bodyWeights[idx],
+> style: "normal" as const,
+> })),
+> {
+> name: "Newsreader",
+> data: await fs.promises.readFile(path.resolve(newsreaderFontPath)),
+> weight: 400,
+> style: "normal" as const,
+> },
+> ]
+>
+> return fonts
+> }
+> ```
+>
+> This font can then be used in your custom image structure.
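+>
+> For instance, a sketch of a structure that uses the local font by name (the component body here is purely illustrative; any satori-compatible JSX works):
+>
+> ```tsx
+> export const myImage: SocialImageOptions["imageStructure"] = (...) => {
+>   // "Newsreader" refers to the font registered in getSatoriFonts above
+>   return <p style={{ fontFamily: "Newsreader", fontSize: "2rem" }}>Hello from a local font!</p>
+> }
+> ```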
+
+## Examples
+
+Here are some example image components you can use as a starting point:
+
+### Basic Example
+
+This example will generate images that look as follows:
+
+| Light | Dark |
+| ------------------------------------------ | ----------------------------------------- |
+| ![[custom-social-image-preview-light.png]] | ![[custom-social-image-preview-dark.png]] |
+
+```tsx
+import { SatoriOptions } from "satori/wasm"
+import { GlobalConfiguration } from "../cfg"
+import { SocialImageOptions, UserOpts } from "./imageHelper"
+import { QuartzPluginData } from "../plugins/vfile"
+
+export const customImage: SocialImageOptions["imageStructure"] = (
+ cfg: GlobalConfiguration,
+ userOpts: UserOpts,
+ title: string,
+ description: string,
+ fonts: SatoriOptions["fonts"],
+ fileData: QuartzPluginData,
+) => {
+ // How many characters are allowed before switching to smaller font
+ const fontBreakPoint = 22
+ const useSmallerFont = title.length > fontBreakPoint
+
+ const { colorScheme } = userOpts
+  return (
+    <div
+      style={{
+        display: "flex",
+        alignItems: "center",
+        height: "100%",
+        width: "100%",
+        backgroundColor: cfg.theme.colors[colorScheme].light,
+      }}
+    >
+      <div style={{ display: "flex", flexDirection: "column", marginLeft: "4rem" }}>
+        <h1
+          style={{
+            color: cfg.theme.colors[colorScheme].dark,
+            fontSize: useSmallerFont ? "3rem" : "4rem",
+            fontFamily: fonts[0].name,
+          }}
+        >
+          {title}
+        </h1>
+        <p style={{ color: cfg.theme.colors[colorScheme].darkgray, fontSize: "1.5rem", fontFamily: fonts[1].name }}>
+          {description}
+        </p>
+      </div>
+    </div>
+  )
+}
+```
+
+### Advanced Example
+
+The following example includes a customized social image with a custom background and formatted date:
+
+```typescript title="custom-og.tsx"
+export const og: SocialImageOptions["Component"] = (
+ cfg: GlobalConfiguration,
+ fileData: QuartzPluginData,
+ { colorScheme }: Options,
+ title: string,
+ description: string,
+ fonts: SatoriOptions["fonts"],
+) => {
+ let created: string | undefined
+ let reading: string | undefined
+ if (fileData.dates) {
+ created = formatDate(getDate(cfg, fileData)!, cfg.locale)
+ }
+ const { minutes, text: _timeTaken, words: _words } = readingTime(fileData.text!)
+ reading = i18n(cfg.locale).components.contentMeta.readingTime({
+ minutes: Math.ceil(minutes),
+ })
+
+ const Li = [created, reading]
+
+  return (
+    <div
+      style={{
+        position: "relative",
+        display: "flex",
+        flexDirection: "row",
+        alignItems: "flex-start",
+        height: "100%",
+        width: "100%",
+        backgroundImage: `url("https://${cfg.baseUrl}/static/og-image.jpeg")`,
+        backgroundSize: "100% 100%",
+      }}
+    >
+      <div
+        style={{
+          position: "absolute",
+          top: 0,
+          left: 0,
+          right: 0,
+          bottom: 0,
+          background: "radial-gradient(circle at center, transparent, rgba(0, 0, 0, 0.4) 70%)",
+        }}
+      />
+      <div
+        style={{
+          display: "flex",
+          height: "100%",
+          width: "100%",
+          flexDirection: "column",
+          justifyContent: "flex-start",
+          alignItems: "flex-start",
+          gap: "1.5rem",
+          paddingTop: "4rem",
+          paddingBottom: "4rem",
+          marginLeft: "4rem",
+        }}
+      >
+        <img
+          src={`"https://${cfg.baseUrl}/static/icon.jpeg"`}
+          style={{
+            position: "relative",
+            backgroundClip: "border-box",
+            borderRadius: "6rem",
+          }}
+          width={80}
+        />
+        <div
+          style={{
+            display: "flex",
+            flexDirection: "column",
+            textAlign: "left",
+            fontFamily: fonts[0].name,
+          }}
+        >
+          <h2
+            style={{
+              color: cfg.theme.colors[colorScheme].light,
+              fontSize: "3rem",
+              fontWeight: 700,
+              marginRight: "4rem",
+              fontFamily: fonts[0].name,
+            }}
+          >
+            {title}
+          </h2>
+          <ul
+            style={{
+              color: cfg.theme.colors[colorScheme].gray,
+              gap: "1rem",
+              fontSize: "1.5rem",
+              fontFamily: fonts[1].name,
+            }}
+          >
+            {Li.map((item, index) => {
+              if (item) {
+                return <li key={index}>{item}</li>
+              }
+            })}
+          </ul>
+        </div>
+        <p
+          style={{
+            color: cfg.theme.colors[colorScheme].light,
+            fontSize: "1.5rem",
+            overflow: "hidden",
+            marginRight: "8rem",
+            textOverflow: "ellipsis",
+            display: "-webkit-box",
+            WebkitLineClamp: 7,
+            WebkitBoxOrient: "vertical",
+            lineClamp: 7,
+            fontFamily: fonts[1].name,
+          }}
+        >
+          {description}
+        </p>
+      </div>
+    </div>
+  )
+}
+```
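+
+To actually use a custom component like the one above, export it and pass it to the plugin via the `imageStructure` option (a sketch; the import path assumes you saved the component as `quartz/util/custom-og.tsx`, which is not part of Quartz itself):
+
+```typescript title="quartz.config.ts"
+import { CustomOgImages } from "./quartz/plugins/emitters/ogImage"
+import { og } from "./quartz/util/custom-og" // hypothetical location of your component
+
+const config: QuartzConfig = {
+  plugins: {
+    emitters: [
+      CustomOgImages({
+        imageStructure: og, // use the custom component instead of defaultImage
+      }),
+    ],
+  },
+}
+```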
diff --git a/package-lock.json b/package-lock.json
index b887f5bdc..db2e37391 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "@jackyzha0/quartz",
- "version": "4.4.0",
+ "version": "4.5.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@jackyzha0/quartz",
- "version": "4.4.0",
+ "version": "4.5.0",
"license": "MIT",
"dependencies": {
"@clack/prompts": "^0.10.0",
@@ -14,6 +14,7 @@
"@myriaddreamin/rehype-typst": "^0.5.4",
"@napi-rs/simple-git": "0.1.19",
"@tweenjs/tween.js": "^25.0.0",
+ "ansi-truncate": "^1.2.0",
"async-mutex": "^0.5.0",
"chalk": "^5.4.1",
"chokidar": "^4.0.3",
@@ -34,6 +35,7 @@
"mdast-util-to-hast": "^13.2.0",
"mdast-util-to-string": "^4.0.0",
"micromorph": "^0.4.5",
+ "minimatch": "^10.0.1",
"pixi.js": "^8.8.1",
"preact": "^10.26.4",
"preact-render-to-string": "^6.5.13",
@@ -75,7 +77,6 @@
"quartz": "quartz/bootstrap-cli.mjs"
},
"devDependencies": {
- "@types/cli-spinner": "^0.2.3",
"@types/d3": "^7.4.3",
"@types/hast": "^3.0.4",
"@types/js-yaml": "^4.0.9",
@@ -1585,15 +1586,6 @@
"integrity": "sha512-XKLA6syeBUaPzx4j3qwMqzzq+V4uo72BnlbOjmuljLrRqdsd3qnzvZZoxvMHZ23ndsRS4aufU6JOZYpCbU6T1A==",
"license": "MIT"
},
- "node_modules/@types/cli-spinner": {
- "version": "0.2.3",
- "resolved": "https://registry.npmjs.org/@types/cli-spinner/-/cli-spinner-0.2.3.tgz",
- "integrity": "sha512-TMO6mWltW0lCu1de8DMRq9+59OP/tEjghS+rs8ZEQ2EgYP5yV3bGw0tS14TMyJGqFaoVChNvhkVzv9RC1UgX+w==",
- "dev": true,
- "dependencies": {
- "@types/node": "*"
- }
- },
"node_modules/@types/css-font-loading-module": {
"version": "0.0.12",
"resolved": "https://registry.npmjs.org/@types/css-font-loading-module/-/css-font-loading-module-0.0.12.tgz",
@@ -2042,6 +2034,15 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
+ "node_modules/ansi-truncate": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/ansi-truncate/-/ansi-truncate-1.2.0.tgz",
+ "integrity": "sha512-/SLVrxNIP8o8iRHjdK3K9s2hDqdvb86NEjZOAB6ecWFsOo+9obaby97prnvAPn6j7ExXCpbvtlJFYPkkspg4BQ==",
+ "license": "MIT",
+ "dependencies": {
+ "fast-string-truncated-width": "^1.2.0"
+ }
+ },
"node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
@@ -3068,6 +3069,12 @@
"node": ">=8.6.0"
}
},
+ "node_modules/fast-string-truncated-width": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/fast-string-truncated-width/-/fast-string-truncated-width-1.2.1.tgz",
+ "integrity": "sha512-Q9acT/+Uu3GwGj+5w/zsGuQjh9O1TyywhIwAxHudtWrgF09nHOPrvTLhQevPbttcxjr/SNN7mJmfOw/B1bXgow==",
+ "license": "MIT"
+ },
"node_modules/fastq": {
"version": "1.19.0",
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz",
@@ -5248,6 +5255,7 @@
"version": "10.0.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz",
"integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==",
+ "license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
},
diff --git a/package.json b/package.json
index 4ee8cfc72..0b0b9d9cd 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
"name": "@jackyzha0/quartz",
"description": "🌱 publish your digital garden and notes as a website",
"private": true,
- "version": "4.4.0",
+ "version": "4.5.0",
"type": "module",
"author": "jackyzha0
",
"license": "MIT",
@@ -40,6 +40,7 @@
"@myriaddreamin/rehype-typst": "^0.5.4",
"@napi-rs/simple-git": "0.1.19",
"@tweenjs/tween.js": "^25.0.0",
+ "ansi-truncate": "^1.2.0",
"async-mutex": "^0.5.0",
"chalk": "^5.4.1",
"chokidar": "^4.0.3",
@@ -60,6 +61,7 @@
"mdast-util-to-hast": "^13.2.0",
"mdast-util-to-string": "^4.0.0",
"micromorph": "^0.4.5",
+ "minimatch": "^10.0.1",
"pixi.js": "^8.8.1",
"preact": "^10.26.4",
"preact-render-to-string": "^6.5.13",
@@ -98,7 +100,6 @@
"yargs": "^17.7.2"
},
"devDependencies": {
- "@types/cli-spinner": "^0.2.3",
"@types/d3": "^7.4.3",
"@types/hast": "^3.0.4",
"@types/js-yaml": "^4.0.9",
diff --git a/quartz.config.ts b/quartz.config.ts
index 51a75515d..03ef0d7f8 100644
--- a/quartz.config.ts
+++ b/quartz.config.ts
@@ -19,7 +19,6 @@ const config: QuartzConfig = {
baseUrl: "quartz.jzhao.xyz",
ignorePatterns: ["private", "templates", ".obsidian"],
defaultDateType: "created",
- generateSocialImages: true,
theme: {
fontOrigin: "googleFonts",
cdnCaching: true,
@@ -58,7 +57,7 @@ const config: QuartzConfig = {
transformers: [
Plugin.FrontMatter(),
Plugin.CreatedModifiedDate({
- priority: ["frontmatter", "filesystem"],
+ priority: ["git", "frontmatter", "filesystem"],
}),
Plugin.SyntaxHighlighting({
theme: {
@@ -88,6 +87,8 @@ const config: QuartzConfig = {
Plugin.Assets(),
Plugin.Static(),
Plugin.NotFoundPage(),
+ // Comment out CustomOgImages to speed up build time
+ Plugin.CustomOgImages(),
],
},
}
diff --git a/quartz/build.ts b/quartz/build.ts
index 81558f950..7cf440569 100644
--- a/quartz/build.ts
+++ b/quartz/build.ts
@@ -9,7 +9,7 @@ import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
-import { FilePath, FullSlug, joinSegments, slugifyFilePath } from "./util/path"
+import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
import { Argv, BuildCtx } from "./util/ctx"
@@ -17,34 +17,39 @@ import { glob, toPosixPath } from "./util/glob"
import { trace } from "./util/trace"
import { options } from "./util/sourcemap"
import { Mutex } from "async-mutex"
-import DepGraph from "./depgraph"
import { getStaticResourcesFromPlugins } from "./plugins"
import { randomIdNonSecure } from "./util/random"
+import { ChangeEvent } from "./plugins/types"
+import { minimatch } from "minimatch"
-type Dependencies = Record<string, DepGraph<FilePath> | null>
+type ContentMap = Map<
+ FilePath,
+ | {
+ type: "markdown"
+ content: ProcessedContent
+ }
+ | {
+ type: "other"
+ }
+>
type BuildData = {
ctx: BuildCtx
ignored: GlobbyFilterFunction
mut: Mutex
-  initialSlugs: FullSlug[]
-  // TODO merge contentMap and trackedAssets
-  contentMap: Map<FilePath, ProcessedContent>
-  trackedAssets: Set<FilePath>
-  toRebuild: Set<FilePath>
-  toRemove: Set<FilePath>
+  contentMap: ContentMap
+  changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
lastBuildMs: number
- dependencies: Dependencies
}
-type FileEvent = "add" | "change" | "delete"
-
async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
const ctx: BuildCtx = {
buildId: randomIdNonSecure(),
argv,
cfg,
allSlugs: [],
+ allFiles: [],
+ incremental: false,
}
const perf = new PerfTimer()
@@ -67,64 +72,70 @@ async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
perf.addEvent("glob")
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
- const fps = allFiles.filter((fp) => fp.endsWith(".md")).sort()
+ const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
console.log(
- `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
+ `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
)
- const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
+ ctx.allFiles = allFiles
ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
const parsedFiles = await parseMarkdown(ctx, filePaths)
const filteredContent = filterContent(ctx, parsedFiles)
-  const dependencies: Record<string, DepGraph<FilePath> | null> = {}
-
- // Only build dependency graphs if we're doing a fast rebuild
- if (argv.fastRebuild) {
- const staticResources = getStaticResourcesFromPlugins(ctx)
- for (const emitter of cfg.plugins.emitters) {
- dependencies[emitter.name] =
- (await emitter.getDependencyGraph?.(ctx, filteredContent, staticResources)) ?? null
- }
- }
-
await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
+ console.log(chalk.green(`Done processing ${markdownPaths.length} files in ${perf.timeSince()}`))
release()
- if (argv.serve) {
- return startServing(ctx, mut, parsedFiles, clientRefresh, dependencies)
+ if (argv.watch) {
+ ctx.incremental = true
+ return startWatching(ctx, mut, parsedFiles, clientRefresh)
}
}
// setup watcher for rebuilds
-async function startServing(
+async function startWatching(
ctx: BuildCtx,
mut: Mutex,
initialContent: ProcessedContent[],
clientRefresh: () => void,
- dependencies: Dependencies, // emitter name: dep graph
) {
- const { argv } = ctx
+ const { argv, allFiles } = ctx
- // cache file parse results
-  const contentMap = new Map<FilePath, ProcessedContent>()
- for (const content of initialContent) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
+ const contentMap: ContentMap = new Map()
+ for (const filePath of allFiles) {
+ contentMap.set(filePath, {
+ type: "other",
+ })
}
+ for (const content of initialContent) {
+ const [_tree, vfile] = content
+ contentMap.set(vfile.data.relativePath!, {
+ type: "markdown",
+ content,
+ })
+ }
+
+ const gitIgnoredMatcher = await isGitIgnored()
const buildData: BuildData = {
ctx,
mut,
- dependencies,
contentMap,
- ignored: await isGitIgnored(),
- initialSlugs: ctx.allSlugs,
- toRebuild: new Set(),
- toRemove: new Set(),
- trackedAssets: new Set(),
+ ignored: (path) => {
+ if (gitIgnoredMatcher(path)) return true
+ const pathStr = path.toString()
+ for (const pattern of cfg.configuration.ignorePatterns) {
+ if (minimatch(pathStr, pattern)) {
+ return true
+ }
+ }
+
+ return false
+ },
+
+ changesSinceLastBuild: {},
lastBuildMs: 0,
}
@@ -134,34 +145,37 @@ async function startServing(
ignoreInitial: true,
})
- const buildFromEntry = argv.fastRebuild ? partialRebuildFromEntrypoint : rebuildFromEntrypoint
+ const changes: ChangeEvent[] = []
watcher
- .on("add", (fp) => buildFromEntry(fp as string, "add", clientRefresh, buildData))
- .on("change", (fp) => buildFromEntry(fp as string, "change", clientRefresh, buildData))
- .on("unlink", (fp) => buildFromEntry(fp as string, "delete", clientRefresh, buildData))
+ .on("add", (fp) => {
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "add" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+ .on("change", (fp) => {
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "change" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
+ .on("unlink", (fp) => {
+ if (buildData.ignored(fp)) return
+ changes.push({ path: fp as FilePath, type: "delete" })
+ void rebuild(changes, clientRefresh, buildData)
+ })
return async () => {
await watcher.close()
}
}
-async function partialRebuildFromEntrypoint(
- filepath: string,
- action: FileEvent,
- clientRefresh: () => void,
- buildData: BuildData, // note: this function mutates buildData
-) {
- const { ctx, ignored, dependencies, contentMap, mut, toRemove } = buildData
+async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
+ const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
const { argv, cfg } = ctx
- // don't do anything for gitignored files
- if (ignored(filepath)) {
- return
- }
-
const buildId = randomIdNonSecure()
ctx.buildId = buildId
buildData.lastBuildMs = new Date().getTime()
+ const numChangesInBuild = changes.length
const release = await mut.acquire()
// if there's another build after us, release and let them do it
@@ -171,242 +185,105 @@ async function partialRebuildFromEntrypoint(
}
const perf = new PerfTimer()
+ perf.addEvent("rebuild")
console.log(chalk.yellow("Detected change, rebuilding..."))
- // UPDATE DEP GRAPH
- const fp = joinSegments(argv.directory, toPosixPath(filepath)) as FilePath
+ // update changesSinceLastBuild
+ for (const change of changes) {
+ changesSinceLastBuild[change.path] = change.type
+ }
const staticResources = getStaticResourcesFromPlugins(ctx)
- let processedFiles: ProcessedContent[] = []
-
- switch (action) {
- case "add":
- // add to cache when new file is added
- processedFiles = await parseMarkdown(ctx, [fp])
- processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile]))
-
- // update the dep graph by asking all emitters whether they depend on this file
- for (const emitter of cfg.plugins.emitters) {
- const emitterGraph =
- (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
-
- if (emitterGraph) {
- const existingGraph = dependencies[emitter.name]
- if (existingGraph !== null) {
- existingGraph.mergeGraph(emitterGraph)
- } else {
- // might be the first time we're adding a mardown file
- dependencies[emitter.name] = emitterGraph
- }
- }
- }
- break
- case "change":
- // invalidate cache when file is changed
- processedFiles = await parseMarkdown(ctx, [fp])
- processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile]))
-
- // only content files can have added/removed dependencies because of transclusions
- if (path.extname(fp) === ".md") {
- for (const emitter of cfg.plugins.emitters) {
- // get new dependencies from all emitters for this file
- const emitterGraph =
- (await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
-
- // only update the graph if the emitter plugin uses the changed file
- // eg. Assets plugin ignores md files, so we skip updating the graph
- if (emitterGraph?.hasNode(fp)) {
- // merge the new dependencies into the dep graph
- dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
- }
- }
- }
- break
- case "delete":
- toRemove.add(fp)
- break
+ const pathsToParse: FilePath[] = []
+ for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
+ if (type === "delete" || path.extname(fp) !== ".md") continue
+ const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
+ pathsToParse.push(fullPath)
}
- if (argv.verbose) {
- console.log(`Updated dependency graphs in ${perf.timeSince()}`)
+ const parsed = await parseMarkdown(ctx, pathsToParse)
+ for (const content of parsed) {
+ contentMap.set(content[1].data.relativePath!, {
+ type: "markdown",
+ content,
+ })
}
- // EMIT
- perf.addEvent("rebuild")
+ // update state using changesSinceLastBuild
+ // we do this weird play of add => compute change events => remove
+ // so that partialEmitters can do appropriate cleanup based on the content of deleted files
+ for (const [file, change] of Object.entries(changesSinceLastBuild)) {
+ if (change === "delete") {
+ // universal delete case
+ contentMap.delete(file as FilePath)
+ }
+
+ // manually track non-markdown files as processed files only
+ // contains markdown files
+ if (change === "add" && path.extname(file) !== ".md") {
+ contentMap.set(file as FilePath, {
+ type: "other",
+ })
+ }
+ }
+
+ const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
+ const path = fp as FilePath
+ const processedContent = contentMap.get(path)
+ if (processedContent?.type === "markdown") {
+ const [_tree, file] = processedContent.content
+ return {
+ type,
+ path,
+ file,
+ }
+ }
+
+ return {
+ type,
+ path,
+ }
+ })
+
+ // update allFiles and then allSlugs with the consistent view of content map
+ ctx.allFiles = Array.from(contentMap.keys())
+ ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))
+ const processedFiles = Array.from(contentMap.values())
+ .filter((file) => file.type === "markdown")
+ .map((file) => file.content)
+
let emittedFiles = 0
-
for (const emitter of cfg.plugins.emitters) {
- const depGraph = dependencies[emitter.name]
-
- // emitter hasn't defined a dependency graph. call it with all processed files
- if (depGraph === null) {
- if (argv.verbose) {
- console.log(
- `Emitter ${emitter.name} doesn't define a dependency graph. Calling it with all files...`,
- )
- }
-
- const files = [...contentMap.values()].filter(
- ([_node, vfile]) => !toRemove.has(vfile.data.filePath!),
- )
-
- const emittedFps = await emitter.emit(ctx, files, staticResources)
-
- if (ctx.argv.verbose) {
- for (const file of emittedFps) {
- console.log(`[emit:${emitter.name}] ${file}`)
- }
- }
-
- emittedFiles += emittedFps.length
+ // Try to use partialEmit if available, otherwise assume the output is static
+ const emitFn = emitter.partialEmit ?? emitter.emit
+ const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
+ if (emitted === null) {
continue
}
- // only call the emitter if it uses this file
- if (depGraph.hasNode(fp)) {
- // re-emit using all files that are needed for the downstream of this file
- // eg. for ContentIndex, the dep graph could be:
- // a.md --> contentIndex.json
- // b.md ------^
- //
- // if a.md changes, we need to re-emit contentIndex.json,
- // and supply [a.md, b.md] to the emitter
- const upstreams = [...depGraph.getLeafNodeAncestors(fp)] as FilePath[]
-
- const upstreamContent = upstreams
- // filter out non-markdown files
- .filter((file) => contentMap.has(file))
- // if file was deleted, don't give it to the emitter
- .filter((file) => !toRemove.has(file))
- .map((file) => contentMap.get(file)!)
-
- const emittedFps = await emitter.emit(ctx, upstreamContent, staticResources)
-
- if (ctx.argv.verbose) {
- for (const file of emittedFps) {
+ if (Symbol.asyncIterator in emitted) {
+ // Async generator case
+ for await (const file of emitted) {
+ emittedFiles++
+ if (ctx.argv.verbose) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ }
+ }
+ } else {
+ // Array case
+ emittedFiles += emitted.length
+ if (ctx.argv.verbose) {
+ for (const file of emitted) {
console.log(`[emit:${emitter.name}] ${file}`)
}
}
-
- emittedFiles += emittedFps.length
}
}
console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
-
- // CLEANUP
-  const destinationsToDelete = new Set<FilePath>()
- for (const file of toRemove) {
- // remove from cache
- contentMap.delete(file)
- Object.values(dependencies).forEach((depGraph) => {
- // remove the node from dependency graphs
- depGraph?.removeNode(file)
- // remove any orphan nodes. eg if a.md is deleted, a.html is orphaned and should be removed
- const orphanNodes = depGraph?.removeOrphanNodes()
- orphanNodes?.forEach((node) => {
- // only delete files that are in the output directory
- if (node.startsWith(argv.output)) {
- destinationsToDelete.add(node)
- }
- })
- })
- }
- await rimraf([...destinationsToDelete])
-
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
-
- toRemove.clear()
- release()
+ changes.splice(0, numChangesInBuild)
clientRefresh()
-}
-
-async function rebuildFromEntrypoint(
- fp: string,
- action: FileEvent,
- clientRefresh: () => void,
- buildData: BuildData, // note: this function mutates buildData
-) {
- const { ctx, ignored, mut, initialSlugs, contentMap, toRebuild, toRemove, trackedAssets } =
- buildData
-
- const { argv } = ctx
-
- // don't do anything for gitignored files
- if (ignored(fp)) {
- return
- }
-
- // dont bother rebuilding for non-content files, just track and refresh
- fp = toPosixPath(fp)
- const filePath = joinSegments(argv.directory, fp) as FilePath
- if (path.extname(fp) !== ".md") {
- if (action === "add" || action === "change") {
- trackedAssets.add(filePath)
- } else if (action === "delete") {
- trackedAssets.delete(filePath)
- }
- clientRefresh()
- return
- }
-
- if (action === "add" || action === "change") {
- toRebuild.add(filePath)
- } else if (action === "delete") {
- toRemove.add(filePath)
- }
-
- const buildId = randomIdNonSecure()
- ctx.buildId = buildId
- buildData.lastBuildMs = new Date().getTime()
- const release = await mut.acquire()
-
- // there's another build after us, release and let them do it
- if (ctx.buildId !== buildId) {
- release()
- return
- }
-
- const perf = new PerfTimer()
- console.log(chalk.yellow("Detected change, rebuilding..."))
-
- try {
- const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
- const parsedContent = await parseMarkdown(ctx, filesToRebuild)
- for (const content of parsedContent) {
- const [_tree, vfile] = content
- contentMap.set(vfile.data.filePath!, content)
- }
-
- for (const fp of toRemove) {
- contentMap.delete(fp)
- }
-
- const parsedFiles = [...contentMap.values()]
- const filteredContent = filterContent(ctx, parsedFiles)
-
- // re-update slugs
- const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
- .filter((fp) => !toRemove.has(fp))
- .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
-
- ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
-
- // TODO: we can probably traverse the link graph to figure out what's safe to delete here
- // instead of just deleting everything
- await rimraf(path.join(argv.output, ".*"), { glob: true })
- await emitContent(ctx, filteredContent)
- console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
- } catch (err) {
- console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
- if (argv.verbose) {
- console.log(chalk.red(err))
- }
- }
-
- clientRefresh()
- toRebuild.clear()
- toRemove.clear()
release()
}
diff --git a/quartz/cfg.ts b/quartz/cfg.ts
index 135f58499..b5de75dd7 100644
--- a/quartz/cfg.ts
+++ b/quartz/cfg.ts
@@ -2,7 +2,6 @@ import { ValidDateType } from "./components/Date"
import { QuartzComponent } from "./components/types"
import { ValidLocale } from "./i18n"
import { PluginTypes } from "./plugins/types"
-import { SocialImageOptions } from "./util/og"
import { Theme } from "./util/theme"
export type Analytics =
@@ -61,10 +60,6 @@ export interface GlobalConfiguration {
* Quartz will avoid using this as much as possible and use relative URLs most of the time
*/
baseUrl?: string
- /**
- * Whether to generate social images (Open Graph and Twitter standard) for link previews
- */
-  generateSocialImages: boolean | Partial<SocialImageOptions>
theme: Theme
/**
* Allow to translate the date in the language of your choice.
diff --git a/quartz/cli/args.js b/quartz/cli/args.js
index 123d0ac55..d2408e94b 100644
--- a/quartz/cli/args.js
+++ b/quartz/cli/args.js
@@ -71,10 +71,10 @@ export const BuildArgv = {
default: false,
describe: "run a local server to live-preview your Quartz",
},
- fastRebuild: {
+ watch: {
boolean: true,
default: false,
- describe: "[experimental] rebuild only the changed files",
+ describe: "watch for changes and rebuild automatically",
},
baseDir: {
string: true,
diff --git a/quartz/cli/handlers.js b/quartz/cli/handlers.js
index 6ef380596..c41bafc31 100644
--- a/quartz/cli/handlers.js
+++ b/quartz/cli/handlers.js
@@ -225,6 +225,10 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
* @param {*} argv arguments for `build`
*/
export async function handleBuild(argv) {
+ if (argv.serve) {
+ argv.watch = true
+ }
+
console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
const ctx = await esbuild.context({
entryPoints: [fp],
@@ -331,9 +335,10 @@ export async function handleBuild(argv) {
clientRefresh()
}
+ let clientRefresh = () => {}
if (argv.serve) {
const connections = []
- const clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))
+ clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))
if (argv.baseDir !== "" && !argv.baseDir.startsWith("/")) {
argv.baseDir = "/" + argv.baseDir
@@ -433,6 +438,7 @@ export async function handleBuild(argv) {
return serve()
})
+
server.listen(argv.port)
const wss = new WebSocketServer({ port: argv.wsPort })
wss.on("connection", (ws) => connections.push(ws))
@@ -441,16 +447,27 @@ export async function handleBuild(argv) {
`Started a Quartz server listening at http://localhost:${argv.port}${argv.baseDir}`,
),
)
- console.log("hint: exit with ctrl+c")
- const paths = await globby(["**/*.ts", "**/*.tsx", "**/*.scss", "package.json"])
+ } else {
+ await build(clientRefresh)
+ ctx.dispose()
+ }
+
+ if (argv.watch) {
+ const paths = await globby([
+ "**/*.ts",
+ "quartz/cli/*.js",
+ "quartz/static/**/*",
+ "**/*.tsx",
+ "**/*.scss",
+ "package.json",
+ ])
chokidar
.watch(paths, { ignoreInitial: true })
.on("add", () => build(clientRefresh))
.on("change", () => build(clientRefresh))
.on("unlink", () => build(clientRefresh))
- } else {
- await build(() => {})
- ctx.dispose()
+
+ console.log(chalk.grey("hint: exit with ctrl+c"))
}
}
diff --git a/quartz/components/Breadcrumbs.tsx b/quartz/components/Breadcrumbs.tsx
index 9ccfb9a6a..d0faeabb8 100644
--- a/quartz/components/Breadcrumbs.tsx
+++ b/quartz/components/Breadcrumbs.tsx
@@ -102,7 +102,7 @@ export default ((opts?: Partial) => {
// Add current slug to full path
currentPath = joinSegments(currentPath, slugParts[i])
- const includeTrailingSlash = !isTagPath || i < 1
+ const includeTrailingSlash = !isTagPath || i < slugParts.length - 1
// Format and add current crumb
const crumb = formatCrumb(
diff --git a/quartz/components/Head.tsx b/quartz/components/Head.tsx
index b6a7e8d07..60dce6edd 100644
--- a/quartz/components/Head.tsx
+++ b/quartz/components/Head.tsx
@@ -1,173 +1,41 @@
import { i18n } from "../i18n"
-import { FullSlug, joinSegments, pathToRoot } from "../util/path"
+import { FullSlug, getFileExtension, joinSegments, pathToRoot } from "../util/path"
import { CSSResourceToStyleElement, JSResourceToScriptElement } from "../util/resources"
-import { getFontSpecificationName, googleFontHref } from "../util/theme"
+import { googleFontHref } from "../util/theme"
import { QuartzComponent, QuartzComponentConstructor, QuartzComponentProps } from "./types"
-import satori, { SatoriOptions } from "satori"
-import { loadEmoji, getIconCode } from "../util/emoji"
-import fs from "fs"
-import sharp from "sharp"
-import { ImageOptions, SocialImageOptions, getSatoriFont, defaultImage } from "../util/og"
import { unescapeHTML } from "../util/escape"
-
-/**
- * Generates social image (OG/twitter standard) and saves it as `.webp` inside the public folder
- * @param opts options for generating image
- */
-async function generateSocialImage(
- { cfg, description, fileName, fontsPromise, title, fileData }: ImageOptions,
- userOpts: SocialImageOptions,
- imageDir: string,
-) {
- const fonts = await fontsPromise
- const { width, height } = userOpts
-
- // JSX that will be used to generate satori svg
- const imageComponent = userOpts.imageStructure(cfg, userOpts, title, description, fonts, fileData)
-
- const svg = await satori(imageComponent, {
- width,
- height,
- fonts,
- loadAdditionalAsset: async (languageCode: string, segment: string) => {
- if (languageCode === "emoji") {
- return `data:image/svg+xml;base64,${btoa(await loadEmoji(getIconCode(segment)))}`
- }
-
- return languageCode
- },
- })
-
- // Convert svg directly to webp (with additional compression)
- const compressed = await sharp(Buffer.from(svg)).webp({ quality: 40 }).toBuffer()
-
- // Write to file system
- const filePath = joinSegments(imageDir, `${fileName}.${extension}`)
- fs.writeFileSync(filePath, compressed)
-}
-
-const extension = "webp"
-
-const defaultOptions: SocialImageOptions = {
- colorScheme: "lightMode",
- width: 1200,
- height: 630,
- imageStructure: defaultImage,
- excludeRoot: false,
-}
-
+import { CustomOgImagesEmitterName } from "../plugins/emitters/ogImage"
export default (() => {
-  let fontsPromise: Promise<SatoriOptions["fonts"]>
-
- let fullOptions: SocialImageOptions
const Head: QuartzComponent = ({
cfg,
fileData,
externalResources,
ctx,
}: QuartzComponentProps) => {
- // Initialize options if not set
- if (!fullOptions) {
- if (typeof cfg.generateSocialImages !== "boolean") {
- fullOptions = { ...defaultOptions, ...cfg.generateSocialImages }
- } else {
- fullOptions = defaultOptions
- }
- }
-
- // Memoize google fonts
- if (!fontsPromise && cfg.generateSocialImages) {
- const headerFont = getFontSpecificationName(cfg.theme.typography.header)
- const bodyFont = getFontSpecificationName(cfg.theme.typography.body)
- fontsPromise = getSatoriFont(headerFont, bodyFont)
- }
-
- const slug = fileData.filePath
- // since "/" is not a valid character in file names, replace with "-"
- const fileName = slug?.replaceAll("/", "-")
-
- // Get file description (priority: frontmatter > fileData > default)
- const fdDescription =
- fileData.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description
const titleSuffix = cfg.pageTitleSuffix ?? ""
const title =
(fileData.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
- let description = ""
- if (fdDescription) {
- description = unescapeHTML(fdDescription)
- }
-
- if (fileData.frontmatter?.socialDescription) {
- description = fileData.frontmatter?.socialDescription as string
- } else if (fileData.frontmatter?.description) {
- description = fileData.frontmatter?.description
- }
-
- const fileDir = joinSegments(ctx.argv.output, "static", "social-images")
- if (cfg.generateSocialImages) {
- // Generate folders for social images (if they dont exist yet)
- if (!fs.existsSync(fileDir)) {
- fs.mkdirSync(fileDir, { recursive: true })
- }
-
- if (fileName) {
- // Generate social image (happens async)
- void generateSocialImage(
- {
- title,
- description,
- fileName,
- fileDir,
- fileExt: extension,
- fontsPromise,
- cfg,
- fileData,
- },
- fullOptions,
- fileDir,
- )
- }
- }
+ const description =
+ fileData.frontmatter?.socialDescription ??
+ fileData.frontmatter?.description ??
+ unescapeHTML(fileData.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description)
const { css, js, additionalHead } = externalResources
const url = new URL(`https://${cfg.baseUrl ?? "example.com"}`)
const path = url.pathname as FullSlug
const baseDir = fileData.slug === "404" ? path : pathToRoot(fileData.slug!)
-
const iconPath = joinSegments(baseDir, "static/icon.png")
- const ogImageDefaultPath = `https://${cfg.baseUrl}/static/og-image.png`
- // "static/social-images/slug-filename.md.webp"
- const ogImageGeneratedPath = `https://${cfg.baseUrl}/${fileDir.replace(
- `${ctx.argv.output}/`,
- "",
- )}/${fileName}.${extension}`
-
- // Use default og image if filePath doesnt exist (for autogenerated paths with no .md file)
- const useDefaultOgImage = fileName === undefined || !cfg.generateSocialImages
-
- // Path to og/social image (priority: frontmatter > generated image (if enabled) > default image)
- let ogImagePath = useDefaultOgImage ? ogImageDefaultPath : ogImageGeneratedPath
-
- // TODO: could be improved to support external images in the future
- // Aliases for image and cover handled in `frontmatter.ts`
- const frontmatterImgUrl = fileData.frontmatter?.socialImage
-
- // Override with default og image if config option is set
- if (fileData.slug === "index") {
- ogImagePath = ogImageDefaultPath
- }
-
- // Override with frontmatter url if existing
- if (frontmatterImgUrl) {
- ogImagePath = `https://${cfg.baseUrl}/static/${frontmatterImgUrl}`
- }
-
// Url of current page
const socialUrl =
fileData.slug === "404" ? url.toString() : joinSegments(url.toString(), fileData.slug!)
+ const usesCustomOgImage = ctx.cfg.plugins.emitters.some(
+ (e) => e.name === CustomOgImagesEmitterName,
+ )
+ const ogImageDefaultPath = `https://${cfg.baseUrl}/static/og-image.png`
+
return (
{title}
@@ -181,7 +49,7 @@ export default (() => {
)}
- {/* OG/Twitter meta tags */}
+
@@ -189,28 +57,32 @@ export default (() => {
-
- {/* Dont set width and height if unknown (when using custom frontmatter image) */}
- {!frontmatterImgUrl && (
+
+ {!usesCustomOgImage && (
<>
-
-
+
+
+
+
>
)}
-
+
{cfg.baseUrl && (
<>
-
-
>
)}
+
+
{css.map((resource) => CSSResourceToStyleElement(resource, true))}
{js
.filter((resource) => resource.loadTime === "beforeDOMReady")
diff --git a/quartz/components/PageList.tsx b/quartz/components/PageList.tsx
index c0538f5fa..2a5f0e055 100644
--- a/quartz/components/PageList.tsx
+++ b/quartz/components/PageList.tsx
@@ -1,4 +1,4 @@
-import { FullSlug, resolveRelative } from "../util/path"
+import { FullSlug, isFolderPath, resolveRelative } from "../util/path"
import { QuartzPluginData } from "../plugins/vfile"
import { Date, getDate } from "./Date"
import { QuartzComponent, QuartzComponentProps } from "./types"
@@ -8,6 +8,13 @@ export type SortFn = (f1: QuartzPluginData, f2: QuartzPluginData) => number
export function byDateAndAlphabetical(cfg: GlobalConfiguration): SortFn {
return (f1, f2) => {
+ // Sort folders first
+ const f1IsFolder = isFolderPath(f1.slug ?? "")
+ const f2IsFolder = isFolderPath(f2.slug ?? "")
+ if (f1IsFolder && !f2IsFolder) return -1
+ if (!f1IsFolder && f2IsFolder) return 1
+
+ // If both are folders or both are files, sort by date/alphabetical
if (f1.dates && f2.dates) {
// sort descending
return getDate(cfg, f2)!.getTime() - getDate(cfg, f1)!.getTime()
diff --git a/quartz/components/pages/FolderContent.tsx b/quartz/components/pages/FolderContent.tsx
index 2a727c009..9621f4f1a 100644
--- a/quartz/components/pages/FolderContent.tsx
+++ b/quartz/components/pages/FolderContent.tsx
@@ -1,16 +1,14 @@
import { QuartzComponent, QuartzComponentConstructor, QuartzComponentProps } from "../types"
-import path from "path"
import style from "../styles/listPage.scss"
-import { byDateAndAlphabetical, PageList, SortFn } from "../PageList"
-import { stripSlashes, simplifySlug, joinSegments, FullSlug } from "../../util/path"
+import { PageList, SortFn } from "../PageList"
import { Root } from "hast"
import { htmlToJsx } from "../../util/jsx"
import { i18n } from "../../i18n"
import { QuartzPluginData } from "../../plugins/vfile"
import { ComponentChildren } from "preact"
import { concatenateResources } from "../../util/resources"
-
+import { FileTrieNode } from "../../util/fileTrie"
interface FolderContentOptions {
/**
* Whether to display number of folders
@@ -27,51 +25,88 @@ const defaultOptions: FolderContentOptions = {
export default ((opts?: Partial) => {
const options: FolderContentOptions = { ...defaultOptions, ...opts }
+ let trie: FileTrieNode<
+ QuartzPluginData & {
+ slug: string
+ title: string
+ filePath: string
+ }
+ >
const FolderContent: QuartzComponent = (props: QuartzComponentProps) => {
const { tree, fileData, allFiles, cfg } = props
- const folderSlug = stripSlashes(simplifySlug(fileData.slug!))
- const folderParts = folderSlug.split(path.posix.sep)
- const allPagesInFolder: QuartzPluginData[] = []
-    const allPagesInSubfolders: Map<FullSlug, QuartzPluginData[]> = new Map()
+ if (!trie) {
+ trie = new FileTrieNode([])
+ allFiles.forEach((file) => {
+ if (file.frontmatter) {
+ trie.add({
+ ...file,
+ slug: file.slug!,
+ title: file.frontmatter.title,
+ filePath: file.filePath!,
+ })
+ }
+ })
+ }
- allFiles.forEach((file) => {
- const fileSlug = stripSlashes(simplifySlug(file.slug!))
- const prefixed = fileSlug.startsWith(folderSlug) && fileSlug !== folderSlug
- const fileParts = fileSlug.split(path.posix.sep)
- const isDirectChild = fileParts.length === folderParts.length + 1
+ const folder = trie.findNode(fileData.slug!.split("/"))
+ if (!folder) {
+ return null
+ }
- if (!prefixed) {
- return
- }
+ const allPagesInFolder: QuartzPluginData[] =
+ folder.children
+ .map((node) => {
+ // regular file, proceed
+ if (node.data) {
+ return node.data
+ }
- if (isDirectChild) {
- allPagesInFolder.push(file)
- } else if (options.showSubfolders) {
- const subfolderSlug = joinSegments(
- ...fileParts.slice(0, folderParts.length + 1),
- ) as FullSlug
- const pagesInFolder = allPagesInSubfolders.get(subfolderSlug) || []
- allPagesInSubfolders.set(subfolderSlug, [...pagesInFolder, file])
- }
- })
+ if (node.isFolder && options.showSubfolders) {
+ // folders that dont have data need synthetic files
+ const getMostRecentDates = (): QuartzPluginData["dates"] => {
+ let maybeDates: QuartzPluginData["dates"] | undefined = undefined
+ for (const child of node.children) {
+ if (child.data?.dates) {
+ // compare all dates and assign to maybeDates if its more recent or its not set
+ if (!maybeDates) {
+ maybeDates = { ...child.data.dates }
+ } else {
+ if (child.data.dates.created > maybeDates.created) {
+ maybeDates.created = child.data.dates.created
+ }
- allPagesInSubfolders.forEach((files, subfolderSlug) => {
- const hasIndex = allPagesInFolder.some(
- (file) => subfolderSlug === stripSlashes(simplifySlug(file.slug!)),
- )
- if (!hasIndex) {
- const subfolderDates = files.sort(byDateAndAlphabetical(cfg))[0].dates
- const subfolderTitle = subfolderSlug.split(path.posix.sep).at(-1)!
- allPagesInFolder.push({
- slug: subfolderSlug,
- dates: subfolderDates,
- frontmatter: { title: subfolderTitle, tags: ["folder"] },
+ if (child.data.dates.modified > maybeDates.modified) {
+ maybeDates.modified = child.data.dates.modified
+ }
+
+ if (child.data.dates.published > maybeDates.published) {
+ maybeDates.published = child.data.dates.published
+ }
+ }
+ }
+ }
+ return (
+ maybeDates ?? {
+ created: new Date(),
+ modified: new Date(),
+ published: new Date(),
+ }
+ )
+ }
+
+ return {
+ slug: node.slug,
+ dates: getMostRecentDates(),
+ frontmatter: {
+ title: node.displayName,
+ tags: [],
+ },
+ }
+ }
})
- }
- })
-
+ .filter((page) => page !== undefined) ?? []
const cssClasses: string[] = fileData.frontmatter?.cssclasses ?? []
const classes = cssClasses.join(" ")
const listProps = {
diff --git a/quartz/components/renderPage.tsx b/quartz/components/renderPage.tsx
index a43b66cb7..19324f51e 100644
--- a/quartz/components/renderPage.tsx
+++ b/quartz/components/renderPage.tsx
@@ -9,7 +9,6 @@ import { visit } from "unist-util-visit"
import { Root, Element, ElementContent } from "hast"
import { GlobalConfiguration } from "../cfg"
import { i18n } from "../i18n"
-import { QuartzPluginData } from "../plugins/vfile"
interface RenderComponents {
head: QuartzComponent
@@ -25,7 +24,6 @@ interface RenderComponents {
const headerRegex = new RegExp(/h[1-6]/)
export function pageResources(
baseDir: FullSlug | RelativeURL,
- fileData: QuartzPluginData,
staticResources: StaticResources,
): StaticResources {
const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json")
@@ -65,17 +63,12 @@ export function pageResources(
return resources
}
-export function renderPage(
+function renderTranscludes(
+ root: Root,
cfg: GlobalConfiguration,
slug: FullSlug,
componentData: QuartzComponentProps,
- components: RenderComponents,
- pageResources: StaticResources,
-): string {
- // make a deep copy of the tree so we don't remove the transclusion references
- // for the file cached in contentMap in build.ts
- const root = clone(componentData.tree) as Root
-
+) {
// process transcludes in componentData
visit(root, "element", (node, _index, _parent) => {
if (node.tagName === "blockquote") {
@@ -191,6 +184,19 @@ export function renderPage(
}
}
})
+}
+
+export function renderPage(
+ cfg: GlobalConfiguration,
+ slug: FullSlug,
+ componentData: QuartzComponentProps,
+ components: RenderComponents,
+ pageResources: StaticResources,
+): string {
+ // make a deep copy of the tree so we don't remove the transclusion references
+ // for the file cached in contentMap in build.ts
+ const root = clone(componentData.tree) as Root
+ renderTranscludes(root, cfg, slug, componentData)
// set componentData.tree to the edited html that has transclusions rendered
componentData.tree = root
diff --git a/quartz/components/scripts/darkmode.inline.ts b/quartz/components/scripts/darkmode.inline.ts
index 871eb24d0..d8dfee964 100644
--- a/quartz/components/scripts/darkmode.inline.ts
+++ b/quartz/components/scripts/darkmode.inline.ts
@@ -10,7 +10,7 @@ const emitThemeChangeEvent = (theme: "light" | "dark") => {
}
document.addEventListener("nav", () => {
- const switchTheme = (e: Event) => {
+ const switchTheme = () => {
const newTheme =
document.documentElement.getAttribute("saved-theme") === "dark" ? "light" : "dark"
document.documentElement.setAttribute("saved-theme", newTheme)
diff --git a/quartz/components/scripts/explorer.inline.ts b/quartz/components/scripts/explorer.inline.ts
index e9c0a09fe..b431c53c5 100644
--- a/quartz/components/scripts/explorer.inline.ts
+++ b/quartz/components/scripts/explorer.inline.ts
@@ -134,9 +134,9 @@ function createFolderNode(
}
for (const child of node.children) {
- const childNode = child.data
- ? createFileNode(currentSlug, child)
- : createFolderNode(currentSlug, child, opts)
+ const childNode = child.isFolder
+ ? createFolderNode(currentSlug, child, opts)
+ : createFileNode(currentSlug, child)
ul.appendChild(childNode)
}
diff --git a/quartz/components/scripts/graph.inline.ts b/quartz/components/scripts/graph.inline.ts
index fca7bd21c..a669b0547 100644
--- a/quartz/components/scripts/graph.inline.ts
+++ b/quartz/components/scripts/graph.inline.ts
@@ -400,7 +400,6 @@ async function renderGraph(graph: HTMLElement, fullSlug: FullSlug) {
})
.circle(0, 0, nodeRadius(n))
.fill({ color: isTagNode ? computedStyleMap["--light"] : color(n) })
- .stroke({ width: isTagNode ? 2 : 0, color: color(n) })
.on("pointerover", (e) => {
updateHoverInfo(e.target.label)
oldLabelOpacity = label.alpha
@@ -416,6 +415,10 @@ async function renderGraph(graph: HTMLElement, fullSlug: FullSlug) {
}
})
+ if (isTagNode) {
+ gfx.stroke({ width: 2, color: computedStyleMap["--tertiary"] })
+ }
+
nodesContainer.addChild(gfx)
labelsContainer.addChild(label)
diff --git a/quartz/components/styles/explorer.scss b/quartz/components/styles/explorer.scss
index c284cb292..22c86db69 100644
--- a/quartz/components/styles/explorer.scss
+++ b/quartz/components/styles/explorer.scss
@@ -52,6 +52,8 @@
overflow: hidden;
flex-shrink: 0;
align-self: flex-start;
+ margin-top: auto;
+ margin-bottom: auto;
}
button.mobile-explorer {
diff --git a/quartz/depgraph.test.ts b/quartz/depgraph.test.ts
deleted file mode 100644
index 062f13e35..000000000
--- a/quartz/depgraph.test.ts
+++ /dev/null
@@ -1,118 +0,0 @@
-import test, { describe } from "node:test"
-import DepGraph from "./depgraph"
-import assert from "node:assert"
-
-describe("DepGraph", () => {
- test("getLeafNodes", () => {
-    const graph = new DepGraph<string>()
- graph.addEdge("A", "B")
- graph.addEdge("B", "C")
- graph.addEdge("D", "C")
- assert.deepStrictEqual(graph.getLeafNodes("A"), new Set(["C"]))
- assert.deepStrictEqual(graph.getLeafNodes("B"), new Set(["C"]))
- assert.deepStrictEqual(graph.getLeafNodes("C"), new Set(["C"]))
- assert.deepStrictEqual(graph.getLeafNodes("D"), new Set(["C"]))
- })
-
- describe("getLeafNodeAncestors", () => {
- test("gets correct ancestors in a graph without cycles", () => {
-      const graph = new DepGraph<string>()
- graph.addEdge("A", "B")
- graph.addEdge("B", "C")
- graph.addEdge("D", "B")
- assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "D"]))
- assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "D"]))
- assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "D"]))
- assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "D"]))
- })
-
- test("gets correct ancestors in a graph with cycles", () => {
-      const graph = new DepGraph<string>()
- graph.addEdge("A", "B")
- graph.addEdge("B", "C")
- graph.addEdge("C", "A")
- graph.addEdge("C", "D")
- assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "C"]))
- assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "C"]))
- assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "C"]))
- assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "C"]))
- })
- })
-
- describe("mergeGraph", () => {
- test("merges two graphs", () => {
-      const graph = new DepGraph<string>()
-      graph.addEdge("A.md", "A.html")
-
-      const other = new DepGraph<string>()
- other.addEdge("B.md", "B.html")
-
- graph.mergeGraph(other)
-
- const expected = {
- nodes: ["A.md", "A.html", "B.md", "B.html"],
- edges: [
- ["A.md", "A.html"],
- ["B.md", "B.html"],
- ],
- }
-
- assert.deepStrictEqual(graph.export(), expected)
- })
- })
-
- describe("updateIncomingEdgesForNode", () => {
- test("merges when node exists", () => {
- // A.md -> B.md -> B.html
-      const graph = new DepGraph<string>()
- graph.addEdge("A.md", "B.md")
- graph.addEdge("B.md", "B.html")
-
- // B.md is edited so it removes the A.md transclusion
- // and adds C.md transclusion
- // C.md -> B.md
-      const other = new DepGraph<string>()
- other.addEdge("C.md", "B.md")
- other.addEdge("B.md", "B.html")
-
- // A.md -> B.md removed, C.md -> B.md added
- // C.md -> B.md -> B.html
- graph.updateIncomingEdgesForNode(other, "B.md")
-
- const expected = {
- nodes: ["A.md", "B.md", "B.html", "C.md"],
- edges: [
- ["B.md", "B.html"],
- ["C.md", "B.md"],
- ],
- }
-
- assert.deepStrictEqual(graph.export(), expected)
- })
-
- test("adds node if it does not exist", () => {
- // A.md -> B.md
-      const graph = new DepGraph<string>()
- graph.addEdge("A.md", "B.md")
-
- // Add a new file C.md that transcludes B.md
- // B.md -> C.md
-      const other = new DepGraph<string>()
- other.addEdge("B.md", "C.md")
-
- // B.md -> C.md added
- // A.md -> B.md -> C.md
- graph.updateIncomingEdgesForNode(other, "C.md")
-
- const expected = {
- nodes: ["A.md", "B.md", "C.md"],
- edges: [
- ["A.md", "B.md"],
- ["B.md", "C.md"],
- ],
- }
-
- assert.deepStrictEqual(graph.export(), expected)
- })
- })
-})
diff --git a/quartz/depgraph.ts b/quartz/depgraph.ts
deleted file mode 100644
index 3d048cd83..000000000
--- a/quartz/depgraph.ts
+++ /dev/null
@@ -1,228 +0,0 @@
-export default class DepGraph<T> {
- // node: incoming and outgoing edges
-  _graph = new Map<T, { incoming: Set<T>; outgoing: Set<T> }>()
-
- constructor() {
- this._graph = new Map()
- }
-
- export(): Object {
- return {
- nodes: this.nodes,
- edges: this.edges,
- }
- }
-
- toString(): string {
- return JSON.stringify(this.export(), null, 2)
- }
-
- // BASIC GRAPH OPERATIONS
-
- get nodes(): T[] {
- return Array.from(this._graph.keys())
- }
-
- get edges(): [T, T][] {
- let edges: [T, T][] = []
- this.forEachEdge((edge) => edges.push(edge))
- return edges
- }
-
- hasNode(node: T): boolean {
- return this._graph.has(node)
- }
-
- addNode(node: T): void {
- if (!this._graph.has(node)) {
- this._graph.set(node, { incoming: new Set(), outgoing: new Set() })
- }
- }
-
- // Remove node and all edges connected to it
- removeNode(node: T): void {
- if (this._graph.has(node)) {
- // first remove all edges so other nodes don't have references to this node
- for (const target of this._graph.get(node)!.outgoing) {
- this.removeEdge(node, target)
- }
- for (const source of this._graph.get(node)!.incoming) {
- this.removeEdge(source, node)
- }
- this._graph.delete(node)
- }
- }
-
- forEachNode(callback: (node: T) => void): void {
- for (const node of this._graph.keys()) {
- callback(node)
- }
- }
-
- hasEdge(from: T, to: T): boolean {
- return Boolean(this._graph.get(from)?.outgoing.has(to))
- }
-
- addEdge(from: T, to: T): void {
- this.addNode(from)
- this.addNode(to)
-
- this._graph.get(from)!.outgoing.add(to)
- this._graph.get(to)!.incoming.add(from)
- }
-
- removeEdge(from: T, to: T): void {
- if (this._graph.has(from) && this._graph.has(to)) {
- this._graph.get(from)!.outgoing.delete(to)
- this._graph.get(to)!.incoming.delete(from)
- }
- }
-
- // returns -1 if node does not exist
- outDegree(node: T): number {
- return this.hasNode(node) ? this._graph.get(node)!.outgoing.size : -1
- }
-
- // returns -1 if node does not exist
- inDegree(node: T): number {
- return this.hasNode(node) ? this._graph.get(node)!.incoming.size : -1
- }
-
- forEachOutNeighbor(node: T, callback: (neighbor: T) => void): void {
- this._graph.get(node)?.outgoing.forEach(callback)
- }
-
- forEachInNeighbor(node: T, callback: (neighbor: T) => void): void {
- this._graph.get(node)?.incoming.forEach(callback)
- }
-
- forEachEdge(callback: (edge: [T, T]) => void): void {
- for (const [source, { outgoing }] of this._graph.entries()) {
- for (const target of outgoing) {
- callback([source, target])
- }
- }
- }
-
- // DEPENDENCY ALGORITHMS
-
- // Add all nodes and edges from other graph to this graph
- mergeGraph(other: DepGraph<T>): void {
- other.forEachEdge(([source, target]) => {
- this.addNode(source)
- this.addNode(target)
- this.addEdge(source, target)
- })
- }
-
- // For the node provided:
- // If node does not exist, add it
- // If an incoming edge was added in other, it is added in this graph
- // If an incoming edge was deleted in other, it is deleted in this graph
- updateIncomingEdgesForNode(other: DepGraph<T>, node: T): void {
- this.addNode(node)
-
- // Add edge if it is present in other
- other.forEachInNeighbor(node, (neighbor) => {
- this.addEdge(neighbor, node)
- })
-
- // For node provided, remove incoming edge if it is absent in other
- this.forEachEdge(([source, target]) => {
- if (target === node && !other.hasEdge(source, target)) {
- this.removeEdge(source, target)
- }
- })
- }
-
- // Remove all nodes that do not have any incoming or outgoing edges
- // A node may be orphaned if the only node pointing to it was removed
- removeOrphanNodes(): Set<T> {
- let orphanNodes = new Set<T>()
-
- this.forEachNode((node) => {
- if (this.inDegree(node) === 0 && this.outDegree(node) === 0) {
- orphanNodes.add(node)
- }
- })
-
- orphanNodes.forEach((node) => {
- this.removeNode(node)
- })
-
- return orphanNodes
- }
-
- // Get all leaf nodes (i.e. destination paths) reachable from the node provided
- // Eg. if the graph is A -> B -> C
- // D ---^
- // and the node is B, this function returns [C]
- getLeafNodes(node: T): Set<T> {
- let stack: T[] = [node]
- let visited = new Set<T>()
- let leafNodes = new Set<T>()
-
- // DFS
- while (stack.length > 0) {
- let node = stack.pop()!
-
- // If the node is already visited, skip it
- if (visited.has(node)) {
- continue
- }
- visited.add(node)
-
- // Check if the node is a leaf node (i.e. destination path)
- if (this.outDegree(node) === 0) {
- leafNodes.add(node)
- }
-
- // Add all unvisited neighbors to the stack
- this.forEachOutNeighbor(node, (neighbor) => {
- if (!visited.has(neighbor)) {
- stack.push(neighbor)
- }
- })
- }
-
- return leafNodes
- }
-
- // Get all ancestors of the leaf nodes reachable from the node provided
- // Eg. if the graph is A -> B -> C
- // D ---^
- // and the node is B, this function returns [A, B, D]
- getLeafNodeAncestors(node: T): Set<T> {
- const leafNodes = this.getLeafNodes(node)
- let visited = new Set<T>()
- let upstreamNodes = new Set<T>()
-
- // Backwards DFS for each leaf node
- leafNodes.forEach((leafNode) => {
- let stack: T[] = [leafNode]
-
- while (stack.length > 0) {
- let node = stack.pop()!
-
- if (visited.has(node)) {
- continue
- }
- visited.add(node)
- // Add node if it's not a leaf node (i.e. destination path)
- // Assumes destination file cannot depend on another destination file
- if (this.outDegree(node) !== 0) {
- upstreamNodes.add(node)
- }
-
- // Add all unvisited parents to the stack
- this.forEachInNeighbor(node, (parentNode) => {
- if (!visited.has(parentNode)) {
- stack.push(parentNode)
- }
- })
- }
- })
-
- return upstreamNodes
- }
-}
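The remainder of the patch migrates every emitter away from `getDependencyGraph` and the array-returning `emit` to async generators plus an optional `partialEmit` driven by file change events. The sketch below is illustrative only, with placeholder types standing in for the real ones in `quartz/util/ctx.ts`, `quartz/util/path.ts`, and `quartz/plugins/types.ts`; the exact `ChangeEvent` shape is an assumption inferred from how the emitters below consume it.

```ts
// Illustrative sketch, not part of the patch. Placeholder types stand in for the real ones.
type FilePath = string
type BuildCtx = unknown
type ProcessedContent = unknown
type StaticResources = unknown

// Assumed shape of the change events the watcher feeds to partialEmit.
type ChangeEvent = {
  type: "add" | "change" | "delete"
  path: FilePath
  file?: { data: Record<string, unknown> } // present when the changed file was parsed as markdown
}

interface QuartzEmitterSketch {
  name: string
  // full builds: yield each output file as it is written
  emit(
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
  ): AsyncGenerator<FilePath>
  // incremental rebuilds: re-emit only what the change events affect
  partialEmit?(
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
    changeEvents: ChangeEvent[],
  ): AsyncGenerator<FilePath>
}
```

Yielding paths as files are written lets the build surface progress incrementally instead of collecting every output path into an array before returning.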
diff --git a/quartz/plugins/emitters/404.tsx b/quartz/plugins/emitters/404.tsx
index 2d518b675..04a006dd1 100644
--- a/quartz/plugins/emitters/404.tsx
+++ b/quartz/plugins/emitters/404.tsx
@@ -3,13 +3,12 @@ import { QuartzComponentProps } from "../../components/types"
import BodyConstructor from "../../components/Body"
import { pageResources, renderPage } from "../../components/renderPage"
import { FullPageLayout } from "../../cfg"
-import { FilePath, FullSlug } from "../../util/path"
+import { FullSlug } from "../../util/path"
import { sharedPageComponents } from "../../../quartz.layout"
import { NotFound } from "../../components"
import { defaultProcessedContent } from "../vfile"
import { write } from "./helpers"
import { i18n } from "../../i18n"
-import DepGraph from "../../depgraph"
export const NotFoundPage: QuartzEmitterPlugin = () => {
const opts: FullPageLayout = {
@@ -28,10 +27,7 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
getQuartzComponents() {
return [Head, Body, pageBody, Footer]
},
- async getDependencyGraph(_ctx, _content, _resources) {
- return new DepGraph<FilePath>()
- },
- async emit(ctx, _content, resources): Promise<FilePath[]> {
+ async *emit(ctx, _content, resources) {
const cfg = ctx.cfg.configuration
const slug = "404" as FullSlug
@@ -44,7 +40,7 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
description: notFound,
frontmatter: { title: notFound, tags: [] },
})
- const externalResources = pageResources(path, vfile.data, resources)
+ const externalResources = pageResources(path, resources)
const componentData: QuartzComponentProps = {
ctx,
fileData: vfile.data,
@@ -55,14 +51,13 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
allFiles: [],
}
- return [
- await write({
- ctx,
- content: renderPage(cfg, slug, componentData, opts, externalResources),
- slug,
- ext: ".html",
- }),
- ]
+ yield write({
+ ctx,
+ content: renderPage(cfg, slug, componentData, opts, externalResources),
+ slug,
+ ext: ".html",
+ })
},
+ async *partialEmit() {},
}
}
diff --git a/quartz/plugins/emitters/aliases.ts b/quartz/plugins/emitters/aliases.ts
index 9d12a990c..327cde85b 100644
--- a/quartz/plugins/emitters/aliases.ts
+++ b/quartz/plugins/emitters/aliases.ts
@@ -1,52 +1,48 @@
-import { FilePath, joinSegments, resolveRelative, simplifySlug } from "../../util/path"
+import { resolveRelative, simplifySlug } from "../../util/path"
import { QuartzEmitterPlugin } from "../types"
import { write } from "./helpers"
-import DepGraph from "../../depgraph"
-import { getAliasSlugs } from "../transformers/frontmatter"
+import { BuildCtx } from "../../util/ctx"
+import { VFile } from "vfile"
+
+async function* processFile(ctx: BuildCtx, file: VFile) {
+ const ogSlug = simplifySlug(file.data.slug!)
+
+ for (const slug of file.data.aliases ?? []) {
+ const redirUrl = resolveRelative(slug, file.data.slug!)
+ yield write({
+ ctx,
+ content: `
+ <!DOCTYPE html>
+ <html lang="en-us">
+ <head>
+ <title>${ogSlug}</title>
+ <link rel="canonical" href="${redirUrl}">
+ <meta name="robots" content="noindex">
+ <meta charset="utf-8">
+ <meta http-equiv="refresh" content="0; url=${redirUrl}">
+ </head>
+ </html>
+ `,
+ slug,
+ ext: ".html",
+ })
+ }
+}
export const AliasRedirects: QuartzEmitterPlugin = () => ({
name: "AliasRedirects",
- async getDependencyGraph(ctx, content, _resources) {
- const graph = new DepGraph<FilePath>()
-
- const { argv } = ctx
+ async *emit(ctx, content) {
for (const [_tree, file] of content) {
- for (const slug of getAliasSlugs(file.data.frontmatter?.aliases ?? [], argv, file)) {
- graph.addEdge(file.data.filePath!, joinSegments(argv.output, slug + ".html") as FilePath)
- }
+ yield* processFile(ctx, file)
}
-
- return graph
},
- async emit(ctx, content, _resources): Promise<FilePath[]> {
- const fps: FilePath[] = []
-
- for (const [_tree, file] of content) {
- const ogSlug = simplifySlug(file.data.slug!)
-
- for (const slug of file.data.aliases ?? []) {
- const redirUrl = resolveRelative(slug, file.data.slug!)
- const fp = await write({
- ctx,
- content: `
- <!DOCTYPE html>
- <html lang="en-us">
- <head>
- <title>${ogSlug}</title>
- <link rel="canonical" href="${redirUrl}">
- <meta name="robots" content="noindex">
- <meta charset="utf-8">
- <meta http-equiv="refresh" content="0; url=${redirUrl}">
- </head>
- </html>
- `,
- slug,
- ext: ".html",
- })
-
- fps.push(fp)
+ async *partialEmit(ctx, _content, _resources, changeEvents) {
+ for (const changeEvent of changeEvents) {
+ if (!changeEvent.file) continue
+ if (changeEvent.type === "add" || changeEvent.type === "change") {
+ // add new ones if this file still exists
+ yield* processFile(ctx, changeEvent.file)
}
}
- return fps
},
})
diff --git a/quartz/plugins/emitters/assets.ts b/quartz/plugins/emitters/assets.ts
index bb85080c4..d0da66ace 100644
--- a/quartz/plugins/emitters/assets.ts
+++ b/quartz/plugins/emitters/assets.ts
@@ -3,7 +3,6 @@ import { QuartzEmitterPlugin } from "../types"
import path from "path"
import fs from "fs"
import { glob } from "../../util/glob"
-import DepGraph from "../../depgraph"
import { Argv } from "../../util/ctx"
import { QuartzConfig } from "../../cfg"
@@ -12,44 +11,42 @@ const filesToCopy = async (argv: Argv, cfg: QuartzConfig) => {
return await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns])
}
+const copyFile = async (argv: Argv, fp: FilePath) => {
+ const src = joinSegments(argv.directory, fp) as FilePath
+
+ const name = slugifyFilePath(fp)
+ const dest = joinSegments(argv.output, name) as FilePath
+
+ // ensure dir exists
+ const dir = path.dirname(dest) as FilePath
+ await fs.promises.mkdir(dir, { recursive: true })
+
+ await fs.promises.copyFile(src, dest)
+ return dest
+}
+
export const Assets: QuartzEmitterPlugin = () => {
return {
name: "Assets",
- async getDependencyGraph(ctx, _content, _resources) {
- const { argv, cfg } = ctx
- const graph = new DepGraph<FilePath>()
-
+ async *emit({ argv, cfg }) {
const fps = await filesToCopy(argv, cfg)
-
for (const fp of fps) {
- const ext = path.extname(fp)
- const src = joinSegments(argv.directory, fp) as FilePath
- const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
-
- const dest = joinSegments(argv.output, name) as FilePath
-
- graph.addEdge(src, dest)
+ yield copyFile(argv, fp)
}
-
- return graph
},
- async emit({ argv, cfg }, _content, _resources): Promise<FilePath[]> {
- const assetsPath = argv.output
- const fps = await filesToCopy(argv, cfg)
- const res: FilePath[] = []
- for (const fp of fps) {
- const ext = path.extname(fp)
- const src = joinSegments(argv.directory, fp) as FilePath
- const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
+ async *partialEmit(ctx, _content, _resources, changeEvents) {
+ for (const changeEvent of changeEvents) {
+ const ext = path.extname(changeEvent.path)
+ if (ext === ".md") continue
- const dest = joinSegments(assetsPath, name) as FilePath
- const dir = path.dirname(dest) as FilePath
- await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
- await fs.promises.copyFile(src, dest)
- res.push(dest)
+ if (changeEvent.type === "add" || changeEvent.type === "change") {
+ yield copyFile(ctx.argv, changeEvent.path)
+ } else if (changeEvent.type === "delete") {
+ const name = slugifyFilePath(changeEvent.path)
+ const dest = joinSegments(ctx.argv.output, name) as FilePath
+ await fs.promises.unlink(dest)
+ }
}
-
- return res
},
}
}
diff --git a/quartz/plugins/emitters/cname.ts b/quartz/plugins/emitters/cname.ts
index 380212dd4..10781dbbc 100644
--- a/quartz/plugins/emitters/cname.ts
+++ b/quartz/plugins/emitters/cname.ts
@@ -2,7 +2,6 @@ import { FilePath, joinSegments } from "../../util/path"
import { QuartzEmitterPlugin } from "../types"
import fs from "fs"
import chalk from "chalk"
-import DepGraph from "../../depgraph"
export function extractDomainFromBaseUrl(baseUrl: string) {
const url = new URL(`https://${baseUrl}`)
@@ -11,10 +10,7 @@ export function extractDomainFromBaseUrl(baseUrl: string) {
export const CNAME: QuartzEmitterPlugin = () => ({
name: "CNAME",
- async getDependencyGraph(_ctx, _content, _resources) {
- return new DepGraph<FilePath>()
- },
- async emit({ argv, cfg }, _content, _resources): Promise<FilePath[]> {
+ async emit({ argv, cfg }) {
if (!cfg.configuration.baseUrl) {
console.warn(chalk.yellow("CNAME emitter requires `baseUrl` to be set in your configuration"))
return []
@@ -24,7 +20,8 @@ export const CNAME: QuartzEmitterPlugin = () => ({
if (!content) {
return []
}
- fs.writeFileSync(path, content)
+ await fs.promises.writeFile(path, content)
return [path] as FilePath[]
},
+ async *partialEmit() {},
})
diff --git a/quartz/plugins/emitters/componentResources.ts b/quartz/plugins/emitters/componentResources.ts
index 7584fddbd..540a3738c 100644
--- a/quartz/plugins/emitters/componentResources.ts
+++ b/quartz/plugins/emitters/componentResources.ts
@@ -1,4 +1,4 @@
-import { FilePath, FullSlug, joinSegments } from "../../util/path"
+import { FullSlug, joinSegments } from "../../util/path"
import { QuartzEmitterPlugin } from "../types"
// @ts-ignore
@@ -9,11 +9,10 @@ import styles from "../../styles/custom.scss"
import popoverStyle from "../../components/styles/popover.scss"
import { BuildCtx } from "../../util/ctx"
import { QuartzComponent } from "../../components/types"
-import { googleFontHref, joinStyles } from "../../util/theme"
+import { googleFontHref, joinStyles, processGoogleFonts } from "../../util/theme"
import { Features, transform } from "lightningcss"
import { transform as transpile } from "esbuild"
import { write } from "./helpers"
-import DepGraph from "../../depgraph"
type ComponentResources = {
css: string[]
@@ -86,7 +85,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
componentResources.afterDOMLoaded.push(`
const gtagScript = document.createElement("script")
gtagScript.src = "https://www.googletagmanager.com/gtag/js?id=${tagId}"
- gtagScript.async = true
+ gtagScript.defer = true
document.head.appendChild(gtagScript)
window.dataLayer = window.dataLayer || [];
@@ -121,7 +120,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
umamiScript.src = "${cfg.analytics.host ?? "https://analytics.umami.is"}/script.js"
umamiScript.setAttribute("data-website-id", "${cfg.analytics.websiteId}")
umamiScript.setAttribute("data-auto-track", "false")
- umamiScript.async = true
+ umamiScript.defer = true
document.head.appendChild(umamiScript)
document.addEventListener("nav", () => {
@@ -132,7 +131,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
componentResources.afterDOMLoaded.push(`
const goatcounterScript = document.createElement("script")
goatcounterScript.src = "${cfg.analytics.scriptSrc ?? "https://gc.zgo.at/count.js"}"
- goatcounterScript.async = true
+ goatcounterScript.defer = true
goatcounterScript.setAttribute("data-goatcounter",
"https://${cfg.analytics.websiteId}.${cfg.analytics.host ?? "goatcounter.com"}/count")
document.head.appendChild(goatcounterScript)
@@ -173,14 +172,13 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
const cabinScript = document.createElement("script")
cabinScript.src = "${cfg.analytics.host ?? "https://scripts.withcabin.com"}/hello.js"
cabinScript.defer = true
- cabinScript.async = true
document.head.appendChild(cabinScript)
`)
} else if (cfg.analytics?.provider === "clarity") {
componentResources.afterDOMLoaded.push(`
const clarityScript = document.createElement("script")
clarityScript.innerHTML= \`(function(c,l,a,r,i,t,y){c[a]=c[a]||function(){(c[a].q=c[a].q||[]).push(arguments)};
- t=l.createElement(r);t.async=1;t.src="https://www.clarity.ms/tag/"+i;
+ t=l.createElement(r);t.defer=1;t.src="https://www.clarity.ms/tag/"+i;
y=l.getElementsByTagName(r)[0];y.parentNode.insertBefore(t,y);
})(window, document, "clarity", "script", "${cfg.analytics.projectId}");\`
document.head.appendChild(clarityScript)
@@ -204,11 +202,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
export const ComponentResources: QuartzEmitterPlugin = () => {
return {
name: "ComponentResources",
- async getDependencyGraph(_ctx, _content, _resources) {
- return new DepGraph<FilePath>()
- },
- async emit(ctx, _content, _resources): Promise<FilePath[]> {
- const promises: Promise<FilePath>[] = []
+ async *emit(ctx, _content, _resources) {
const cfg = ctx.cfg.configuration
// component specific scripts and styles
const componentResources = getComponentResources(ctx)
@@ -217,42 +211,35 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
// let the user do it themselves in css
} else if (cfg.theme.fontOrigin === "googleFonts" && !cfg.theme.cdnCaching) {
// when cdnCaching is true, we link to google fonts in Head.tsx
- let match
+ const response = await fetch(googleFontHref(ctx.cfg.configuration.theme))
+ googleFontsStyleSheet = await response.text()
- const fontSourceRegex = /url\((https:\/\/fonts.gstatic.com\/s\/[^)]+\.(woff2|ttf))\)/g
-
- googleFontsStyleSheet = await (
- await fetch(googleFontHref(ctx.cfg.configuration.theme))
- ).text()
-
- while ((match = fontSourceRegex.exec(googleFontsStyleSheet)) !== null) {
- // match[0] is the `url(path)`, match[1] is the `path`
- const url = match[1]
- // the static name of this file.
- const [filename, ext] = url.split("/").pop()!.split(".")
-
- googleFontsStyleSheet = googleFontsStyleSheet.replace(
- url,
- `https://${cfg.baseUrl}/static/fonts/${filename}.ttf`,
+ if (!cfg.baseUrl) {
+ throw new Error(
+ "baseUrl must be defined when using Google Fonts without cfg.theme.cdnCaching",
)
+ }
- promises.push(
- fetch(url)
- .then((res) => {
- if (!res.ok) {
- throw new Error(`Failed to fetch font`)
- }
- return res.arrayBuffer()
- })
- .then((buf) =>
- write({
- ctx,
- slug: joinSegments("static", "fonts", filename) as FullSlug,
- ext: `.${ext}`,
- content: Buffer.from(buf),
- }),
- ),
- )
+ const { processedStylesheet, fontFiles } = await processGoogleFonts(
+ googleFontsStyleSheet,
+ cfg.baseUrl,
+ )
+ googleFontsStyleSheet = processedStylesheet
+
+ // Download and save font files
+ for (const fontFile of fontFiles) {
+ const res = await fetch(fontFile.url)
+ if (!res.ok) {
+ throw new Error(`Failed to fetch font ${fontFile.filename}`)
+ }
+
+ const buf = await res.arrayBuffer()
+ yield write({
+ ctx,
+ slug: joinSegments("static", "fonts", fontFile.filename) as FullSlug,
+ ext: `.${fontFile.extension}`,
+ content: Buffer.from(buf),
+ })
}
}
@@ -267,45 +254,45 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
...componentResources.css,
styles,
)
+
const [prescript, postscript] = await Promise.all([
joinScripts(componentResources.beforeDOMLoaded),
joinScripts(componentResources.afterDOMLoaded),
])
- promises.push(
- write({
- ctx,
- slug: "index" as FullSlug,
- ext: ".css",
- content: transform({
- filename: "index.css",
- code: Buffer.from(stylesheet),
- minify: true,
- targets: {
- safari: (15 << 16) | (6 << 8), // 15.6
- ios_saf: (15 << 16) | (6 << 8), // 15.6
- edge: 115 << 16,
- firefox: 102 << 16,
- chrome: 109 << 16,
- },
- include: Features.MediaQueries,
- }).code.toString(),
- }),
- write({
- ctx,
- slug: "prescript" as FullSlug,
- ext: ".js",
- content: prescript,
- }),
- write({
- ctx,
- slug: "postscript" as FullSlug,
- ext: ".js",
- content: postscript,
- }),
- )
+ yield write({
+ ctx,
+ slug: "index" as FullSlug,
+ ext: ".css",
+ content: transform({
+ filename: "index.css",
+ code: Buffer.from(stylesheet),
+ minify: true,
+ targets: {
+ safari: (15 << 16) | (6 << 8), // 15.6
+ ios_saf: (15 << 16) | (6 << 8), // 15.6
+ edge: 115 << 16,
+ firefox: 102 << 16,
+ chrome: 109 << 16,
+ },
+ include: Features.MediaQueries,
+ }).code.toString(),
+ })
- return await Promise.all(promises)
+ yield write({
+ ctx,
+ slug: "prescript" as FullSlug,
+ ext: ".js",
+ content: prescript,
+ })
+
+ yield write({
+ ctx,
+ slug: "postscript" as FullSlug,
+ ext: ".js",
+ content: postscript,
+ })
},
+ async *partialEmit() {},
}
}
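The emitter above now delegates to `processGoogleFonts` from `quartz/util/theme`, which this diff does not include. The sketch below shows the behavior it presumably has, inferred from the inline regex logic being deleted; the function name, return shape, and extension handling are assumptions rather than the actual implementation.

```ts
// Sketch of the assumed behavior of processGoogleFonts (quartz/util/theme),
// inferred from the inline logic it replaces; names and return shape are assumptions.
type FontFile = { url: string; filename: string; extension: string }

function processGoogleFontsSketch(
  stylesheet: string,
  baseUrl: string,
): { processedStylesheet: string; fontFiles: FontFile[] } {
  const fontSourceRegex = /url\((https:\/\/fonts\.gstatic\.com\/s\/[^)]+\.(woff2|ttf))\)/g
  const fontFiles: FontFile[] = []
  let processedStylesheet = stylesheet

  for (const match of stylesheet.matchAll(fontSourceRegex)) {
    const url = match[1]
    const [filename, extension] = url.split("/").pop()!.split(".")
    // point the stylesheet at the self-hosted copy and remember which file to download
    processedStylesheet = processedStylesheet.replace(
      url,
      `https://${baseUrl}/static/fonts/${filename}.${extension}`,
    )
    fontFiles.push({ url, filename, extension })
  }

  return { processedStylesheet, fontFiles }
}
```

Note that the deleted inline version rewrote every font URL to `.ttf`; whether the new helper preserves the original extension, as sketched here, cannot be confirmed from this diff alone.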
diff --git a/quartz/plugins/emitters/contentIndex.tsx b/quartz/plugins/emitters/contentIndex.tsx
index be460e53a..01d2e0034 100644
--- a/quartz/plugins/emitters/contentIndex.tsx
+++ b/quartz/plugins/emitters/contentIndex.tsx
@@ -7,7 +7,6 @@ import { QuartzEmitterPlugin } from "../types"
import { toHtml } from "hast-util-to-html"
import { write } from "./helpers"
import { i18n } from "../../i18n"
-import DepGraph from "../../depgraph"
export type ContentIndexMap = Map<FullSlug, ContentDetails>
export type ContentDetails = {
@@ -97,29 +96,8 @@ export const ContentIndex: QuartzEmitterPlugin> = (opts) => {
opts = { ...defaultOptions, ...opts }
return {
name: "ContentIndex",
- async getDependencyGraph(ctx, content, _resources) {
- const graph = new DepGraph<FilePath>()
-
- for (const [_tree, file] of content) {
- const sourcePath = file.data.filePath!
-
- graph.addEdge(
- sourcePath,
- joinSegments(ctx.argv.output, "static/contentIndex.json") as FilePath,
- )
- if (opts?.enableSiteMap) {
- graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "sitemap.xml") as FilePath)
- }
- if (opts?.enableRSS) {
- graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "index.xml") as FilePath)
- }
- }
-
- return graph
- },
- async emit(ctx, content, _resources) {
+ async *emit(ctx, content) {
const cfg = ctx.cfg.configuration
- const emitted: FilePath[] = []
const linkIndex: ContentIndexMap = new Map()
for (const [tree, file] of content) {
const slug = file.data.slug!
@@ -127,7 +105,7 @@ export const ContentIndex: QuartzEmitterPlugin> = (opts) => {
if (opts?.includeEmptyFiles || (file.data.text && file.data.text !== "")) {
linkIndex.set(slug, {
slug,
- filePath: file.data.filePath!,
+ filePath: file.data.relativePath!,
title: file.data.frontmatter?.title!,
links: file.data.links ?? [],
tags: file.data.frontmatter?.tags ?? [],
@@ -142,25 +120,21 @@ export const ContentIndex: QuartzEmitterPlugin> = (opts) => {
}
if (opts?.enableSiteMap) {
- emitted.push(
- await write({
- ctx,
- content: generateSiteMap(cfg, linkIndex),
- slug: "sitemap" as FullSlug,
- ext: ".xml",
- }),
- )
+ yield write({
+ ctx,
+ content: generateSiteMap(cfg, linkIndex),
+ slug: "sitemap" as FullSlug,
+ ext: ".xml",
+ })
}
if (opts?.enableRSS) {
- emitted.push(
- await write({
- ctx,
- content: generateRSSFeed(cfg, linkIndex, opts.rssLimit),
- slug: (opts?.rssSlug ?? "index") as FullSlug,
- ext: ".xml",
- }),
- )
+ yield write({
+ ctx,
+ content: generateRSSFeed(cfg, linkIndex, opts.rssLimit),
+ slug: (opts?.rssSlug ?? "index") as FullSlug,
+ ext: ".xml",
+ })
}
const fp = joinSegments("static", "contentIndex") as FullSlug
@@ -175,16 +149,12 @@ export const ContentIndex: QuartzEmitterPlugin> = (opts) => {
}),
)
- emitted.push(
- await write({
- ctx,
- content: JSON.stringify(simplifiedIndex),
- slug: fp,
- ext: ".json",
- }),
- )
-
- return emitted
+ yield write({
+ ctx,
+ content: JSON.stringify(simplifiedIndex),
+ slug: fp,
+ ext: ".json",
+ })
},
externalResources: (ctx) => {
if (opts?.enableRSS) {
diff --git a/quartz/plugins/emitters/contentPage.tsx b/quartz/plugins/emitters/contentPage.tsx
index f59ff6bf5..d3f54e912 100644
--- a/quartz/plugins/emitters/contentPage.tsx
+++ b/quartz/plugins/emitters/contentPage.tsx
@@ -1,54 +1,48 @@
import path from "path"
-import { visit } from "unist-util-visit"
-import { Root } from "hast"
-import { VFile } from "vfile"
import { QuartzEmitterPlugin } from "../types"
import { QuartzComponentProps } from "../../components/types"
import HeaderConstructor from "../../components/Header"
import BodyConstructor from "../../components/Body"
import { pageResources, renderPage } from "../../components/renderPage"
import { FullPageLayout } from "../../cfg"
-import { Argv } from "../../util/ctx"
-import { FilePath, isRelativeURL, joinSegments, pathToRoot } from "../../util/path"
+import { pathToRoot } from "../../util/path"
import { defaultContentPageLayout, sharedPageComponents } from "../../../quartz.layout"
import { Content } from "../../components"
import chalk from "chalk"
import { write } from "./helpers"
-import DepGraph from "../../depgraph"
+import { BuildCtx } from "../../util/ctx"
+import { Node } from "unist"
+import { StaticResources } from "../../util/resources"
+import { QuartzPluginData } from "../vfile"
-// get all the dependencies for the markdown file
-// eg. images, scripts, stylesheets, transclusions
-const parseDependencies = (argv: Argv, hast: Root, file: VFile): string[] => {
- const dependencies: string[] = []
+async function processContent(
+ ctx: BuildCtx,
+ tree: Node,
+ fileData: QuartzPluginData,
+ allFiles: QuartzPluginData[],
+ opts: FullPageLayout,
+ resources: StaticResources,
+) {
+ const slug = fileData.slug!
+ const cfg = ctx.cfg.configuration
+ const externalResources = pageResources(pathToRoot(slug), resources)
+ const componentData: QuartzComponentProps = {
+ ctx,
+ fileData,
+ externalResources,
+ cfg,
+ children: [],
+ tree,
+ allFiles,
+ }
- visit(hast, "element", (elem): void => {
- let ref: string | null = null
-
- if (
- ["script", "img", "audio", "video", "source", "iframe"].includes(elem.tagName) &&
- elem?.properties?.src
- ) {
- ref = elem.properties.src.toString()
- } else if (["a", "link"].includes(elem.tagName) && elem?.properties?.href) {
- // transclusions will create a tags with relative hrefs
- ref = elem.properties.href.toString()
- }
-
- // if it is a relative url, its a local file and we need to add
- // it to the dependency graph. otherwise, ignore
- if (ref === null || !isRelativeURL(ref)) {
- return
- }
-
- let fp = path.join(file.data.filePath!, path.relative(argv.directory, ref)).replace(/\\/g, "/")
- // markdown files have the .md extension stripped in hrefs, add it back here
- if (!fp.split("/").pop()?.includes(".")) {
- fp += ".md"
- }
- dependencies.push(fp)
+ const content = renderPage(cfg, slug, componentData, opts, externalResources)
+ return write({
+ ctx,
+ content,
+ slug,
+ ext: ".html",
})
-
- return dependencies
}
export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOpts) => {
@@ -79,64 +73,48 @@ export const ContentPage: QuartzEmitterPlugin> = (userOp
Footer,
]
},
- async getDependencyGraph(ctx, content, _resources) {
- const graph = new DepGraph<FilePath>()
-
- for (const [tree, file] of content) {
- const sourcePath = file.data.filePath!
- const slug = file.data.slug!
- graph.addEdge(sourcePath, joinSegments(ctx.argv.output, slug + ".html") as FilePath)
-
- parseDependencies(ctx.argv, tree as Root, file).forEach((dep) => {
- graph.addEdge(dep as FilePath, sourcePath)
- })
- }
-
- return graph
- },
- async emit(ctx, content, resources): Promise<FilePath[]> {
- const cfg = ctx.cfg.configuration
- const fps: FilePath[] = []
+ async *emit(ctx, content, resources) {
const allFiles = content.map((c) => c[1].data)
-
let containsIndex = false
+
for (const [tree, file] of content) {
const slug = file.data.slug!
if (slug === "index") {
containsIndex = true
}
- const externalResources = pageResources(pathToRoot(slug), file.data, resources)
- const componentData: QuartzComponentProps = {
- ctx,
- fileData: file.data,
- externalResources,
- cfg,
- children: [],
- tree,
- allFiles,
- }
-
- const content = renderPage(cfg, slug, componentData, opts, externalResources)
- const fp = await write({
- ctx,
- content,
- slug,
- ext: ".html",
- })
-
- fps.push(fp)
+ // only process home page, non-tag pages, and non-index pages
+ if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
+ yield processContent(ctx, tree, file.data, allFiles, opts, resources)
}
- if (!containsIndex && !ctx.argv.fastRebuild) {
+ if (!containsIndex) {
console.log(
chalk.yellow(
`\nWarning: you seem to be missing an \`index.md\` home page file at the root of your \`${ctx.argv.directory}\` folder (\`${path.join(ctx.argv.directory, "index.md")} does not exist\`). This may cause errors when deploying.`,
),
)
}
+ },
+ async *partialEmit(ctx, content, resources, changeEvents) {
+ const allFiles = content.map((c) => c[1].data)
- return fps
+ // find all slugs that changed or were added
+ const changedSlugs = new Set()
+ for (const changeEvent of changeEvents) {
+ if (!changeEvent.file) continue
+ if (changeEvent.type === "add" || changeEvent.type === "change") {
+ changedSlugs.add(changeEvent.file.data.slug!)
+ }
+ }
+
+ for (const [tree, file] of content) {
+ const slug = file.data.slug!
+ if (!changedSlugs.has(slug)) continue
+ if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
+
+ yield processContent(ctx, tree, file.data, allFiles, opts, resources)
+ }
},
}
}
diff --git a/quartz/plugins/emitters/folderPage.tsx b/quartz/plugins/emitters/folderPage.tsx
index bafaec916..f9b181dff 100644
--- a/quartz/plugins/emitters/folderPage.tsx
+++ b/quartz/plugins/emitters/folderPage.tsx
@@ -7,7 +7,6 @@ import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../
import { FullPageLayout } from "../../cfg"
import path from "path"
import {
- FilePath,
FullSlug,
SimpleSlug,
stripSlashes,
@@ -18,13 +17,89 @@ import {
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
import { FolderContent } from "../../components"
import { write } from "./helpers"
-import { i18n } from "../../i18n"
-import DepGraph from "../../depgraph"
-
+import { i18n, TRANSLATIONS } from "../../i18n"
+import { BuildCtx } from "../../util/ctx"
+import { StaticResources } from "../../util/resources"
interface FolderPageOptions extends FullPageLayout {
sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
}
+async function* processFolderInfo(
+ ctx: BuildCtx,
+ folderInfo: Record,
+ allFiles: QuartzPluginData[],
+ opts: FullPageLayout,
+ resources: StaticResources,
+) {
+ for (const [folder, folderContent] of Object.entries(folderInfo) as [
+ SimpleSlug,
+ ProcessedContent,
+ ][]) {
+ const slug = joinSegments(folder, "index") as FullSlug
+ const [tree, file] = folderContent
+ const cfg = ctx.cfg.configuration
+ const externalResources = pageResources(pathToRoot(slug), resources)
+ const componentData: QuartzComponentProps = {
+ ctx,
+ fileData: file.data,
+ externalResources,
+ cfg,
+ children: [],
+ tree,
+ allFiles,
+ }
+
+ const content = renderPage(cfg, slug, componentData, opts, externalResources)
+ yield write({
+ ctx,
+ content,
+ slug,
+ ext: ".html",
+ })
+ }
+}
+
+function computeFolderInfo(
+ folders: Set<SimpleSlug>,
+ content: ProcessedContent[],
+ locale: keyof typeof TRANSLATIONS,
+): Record<SimpleSlug, ProcessedContent> {
+ // Create default folder descriptions
+ const folderInfo: Record<SimpleSlug, ProcessedContent> = Object.fromEntries(
+ [...folders].map((folder) => [
+ folder,
+ defaultProcessedContent({
+ slug: joinSegments(folder, "index") as FullSlug,
+ frontmatter: {
+ title: `${i18n(locale).pages.folderContent.folder}: ${folder}`,
+ tags: [],
+ },
+ }),
+ ]),
+ )
+
+ // Update with actual content if available
+ for (const [tree, file] of content) {
+ const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug
+ if (folders.has(slug)) {
+ folderInfo[slug] = [tree, file]
+ }
+ }
+
+ return folderInfo
+}
+
+function _getFolders(slug: FullSlug): SimpleSlug[] {
+ var folderName = path.dirname(slug ?? "") as SimpleSlug
+ const parentFolderNames = [folderName]
+
+ while (folderName !== ".") {
+ folderName = path.dirname(folderName ?? "") as SimpleSlug
+ parentFolderNames.push(folderName)
+ }
+ return parentFolderNames
+}
+
export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (userOpts) => {
const opts: FullPageLayout = {
...sharedPageComponents,
@@ -53,24 +128,7 @@ export const FolderPage: QuartzEmitterPlugin> = (user
Footer,
]
},
- async getDependencyGraph(_ctx, content, _resources) {
- // Example graph:
- // nested/file.md --> nested/index.html
- // nested/file2.md ------^
- const graph = new DepGraph<FilePath>()
-
- content.map(([_tree, vfile]) => {
- const slug = vfile.data.slug
- const folderName = path.dirname(slug ?? "") as SimpleSlug
- if (slug && folderName !== "." && folderName !== "tags") {
- graph.addEdge(vfile.data.filePath!, joinSegments(folderName, "index.html") as FilePath)
- }
- })
-
- return graph
- },
- async emit(ctx, content, resources): Promise<FilePath[]> {
- const fps: FilePath[] = []
+ async *emit(ctx, content, resources) {
const allFiles = content.map((c) => c[1].data)
const cfg = ctx.cfg.configuration
@@ -84,62 +142,29 @@ export const FolderPage: QuartzEmitterPlugin> = (user
}),
)
- const folderDescriptions: Record<SimpleSlug, ProcessedContent> = Object.fromEntries(
- [...folders].map((folder) => [
- folder,
- defaultProcessedContent({
- slug: joinSegments(folder, "index") as FullSlug,
- frontmatter: {
- title: `${i18n(cfg.locale).pages.folderContent.folder}: ${folder}`,
- tags: [],
- },
- }),
- ]),
- )
+ const folderInfo = computeFolderInfo(folders, content, cfg.locale)
+ yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
+ },
+ async *partialEmit(ctx, content, resources, changeEvents) {
+ const allFiles = content.map((c) => c[1].data)
+ const cfg = ctx.cfg.configuration
- for (const [tree, file] of content) {
- const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug
- if (folders.has(slug)) {
- folderDescriptions[slug] = [tree, file]
- }
+ // Find all folders that need to be updated based on changed files
+ const affectedFolders: Set<SimpleSlug> = new Set()
+ for (const changeEvent of changeEvents) {
+ if (!changeEvent.file) continue
+ const slug = changeEvent.file.data.slug!
+ const folders = _getFolders(slug).filter(
+ (folderName) => folderName !== "." && folderName !== "tags",
+ )
+ folders.forEach((folder) => affectedFolders.add(folder))
}
- for (const folder of folders) {
- const slug = joinSegments(folder, "index") as FullSlug
- const [tree, file] = folderDescriptions[folder]
- const externalResources = pageResources(pathToRoot(slug), file.data, resources)
- const componentData: QuartzComponentProps = {
- ctx,
- fileData: file.data,
- externalResources,
- cfg,
- children: [],
- tree,
- allFiles,
- }
-
- const content = renderPage(cfg, slug, componentData, opts, externalResources)
- const fp = await write({
- ctx,
- content,
- slug,
- ext: ".html",
- })
-
- fps.push(fp)
+ // If there are affected folders, rebuild their pages
+ if (affectedFolders.size > 0) {
+ const folderInfo = computeFolderInfo(affectedFolders, content, cfg.locale)
+ yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
}
- return fps
},
}
}
-
-function _getFolders(slug: FullSlug): SimpleSlug[] {
- var folderName = path.dirname(slug ?? "") as SimpleSlug
- const parentFolderNames = [folderName]
-
- while (folderName !== ".") {
- folderName = path.dirname(folderName ?? "") as SimpleSlug
- parentFolderNames.push(folderName)
- }
- return parentFolderNames
-}
diff --git a/quartz/plugins/emitters/helpers.ts b/quartz/plugins/emitters/helpers.ts
index 523151c2c..6218178a4 100644
--- a/quartz/plugins/emitters/helpers.ts
+++ b/quartz/plugins/emitters/helpers.ts
@@ -2,12 +2,13 @@ import path from "path"
import fs from "fs"
import { BuildCtx } from "../../util/ctx"
import { FilePath, FullSlug, joinSegments } from "../../util/path"
+import { Readable } from "stream"
type WriteOptions = {
ctx: BuildCtx
slug: FullSlug
ext: `.${string}` | ""
- content: string | Buffer
+ content: string | Buffer | Readable
}
export const write = async ({ ctx, slug, ext, content }: WriteOptions): Promise => {
diff --git a/quartz/plugins/emitters/index.ts b/quartz/plugins/emitters/index.ts
index 60f47fe01..842ffb083 100644
--- a/quartz/plugins/emitters/index.ts
+++ b/quartz/plugins/emitters/index.ts
@@ -8,3 +8,4 @@ export { Static } from "./static"
export { ComponentResources } from "./componentResources"
export { NotFoundPage } from "./404"
export { CNAME } from "./cname"
+export { CustomOgImages } from "./ogImage"
diff --git a/quartz/plugins/emitters/ogImage.tsx b/quartz/plugins/emitters/ogImage.tsx
new file mode 100644
index 000000000..0b786955c
--- /dev/null
+++ b/quartz/plugins/emitters/ogImage.tsx
@@ -0,0 +1,175 @@
+import { QuartzEmitterPlugin } from "../types"
+import { i18n } from "../../i18n"
+import { unescapeHTML } from "../../util/escape"
+import { FullSlug, getFileExtension, joinSegments, QUARTZ } from "../../util/path"
+import { ImageOptions, SocialImageOptions, defaultImage, getSatoriFonts } from "../../util/og"
+import sharp from "sharp"
+import satori, { SatoriOptions } from "satori"
+import { loadEmoji, getIconCode } from "../../util/emoji"
+import { Readable } from "stream"
+import { write } from "./helpers"
+import { BuildCtx } from "../../util/ctx"
+import { QuartzPluginData } from "../vfile"
+import fs from "node:fs/promises"
+import chalk from "chalk"
+
+const defaultOptions: SocialImageOptions = {
+ colorScheme: "lightMode",
+ width: 1200,
+ height: 630,
+ imageStructure: defaultImage,
+ excludeRoot: false,
+}
+
+/**
+ * Generates social image (OG/twitter standard) and saves it as `.webp` inside the public folder
+ * @param opts options for generating image
+ */
+async function generateSocialImage(
+ { cfg, description, fonts, title, fileData }: ImageOptions,
+ userOpts: SocialImageOptions,
+): Promise<Readable> {
+ const { width, height } = userOpts
+ const iconPath = joinSegments(QUARTZ, "static", "icon.png")
+ let iconBase64: string | undefined = undefined
+ try {
+ const iconData = await fs.readFile(iconPath)
+ iconBase64 = `data:image/png;base64,${iconData.toString("base64")}`
+ } catch (err) {
+ console.warn(chalk.yellow(`Warning: Could not find icon at ${iconPath}`))
+ }
+
+ const imageComponent = userOpts.imageStructure({
+ cfg,
+ userOpts,
+ title,
+ description,
+ fonts,
+ fileData,
+ iconBase64,
+ })
+
+ const svg = await satori(imageComponent, {
+ width,
+ height,
+ fonts,
+ loadAdditionalAsset: async (languageCode: string, segment: string) => {
+ if (languageCode === "emoji") {
+ return `data:image/svg+xml;base64,${btoa(await loadEmoji(getIconCode(segment)))}`
+ }
+ return languageCode
+ },
+ })
+
+ return sharp(Buffer.from(svg)).webp({ quality: 40 })
+}
+
+async function processOgImage(
+ ctx: BuildCtx,
+ fileData: QuartzPluginData,
+ fonts: SatoriOptions["fonts"],
+ fullOptions: SocialImageOptions,
+) {
+ const cfg = ctx.cfg.configuration
+ const slug = fileData.slug!
+ const titleSuffix = cfg.pageTitleSuffix ?? ""
+ const title =
+ (fileData.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
+ const description =
+ fileData.frontmatter?.socialDescription ??
+ fileData.frontmatter?.description ??
+ unescapeHTML(fileData.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description)
+
+ const stream = await generateSocialImage(
+ {
+ title,
+ description,
+ fonts,
+ cfg,
+ fileData,
+ },
+ fullOptions,
+ )
+
+ return write({
+ ctx,
+ content: stream,
+ slug: `${slug}-og-image` as FullSlug,
+ ext: ".webp",
+ })
+}
+
+export const CustomOgImagesEmitterName = "CustomOgImages"
+export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> = (userOpts) => {
+ const fullOptions = { ...defaultOptions, ...userOpts }
+
+ return {
+ name: CustomOgImagesEmitterName,
+ getQuartzComponents() {
+ return []
+ },
+ async *emit(ctx, content, _resources) {
+ const cfg = ctx.cfg.configuration
+ const headerFont = cfg.theme.typography.header
+ const bodyFont = cfg.theme.typography.body
+ const fonts = await getSatoriFonts(headerFont, bodyFont)
+
+ for (const [_tree, vfile] of content) {
+ if (vfile.data.frontmatter?.socialImage !== undefined) continue
+ yield processOgImage(ctx, vfile.data, fonts, fullOptions)
+ }
+ },
+ async *partialEmit(ctx, _content, _resources, changeEvents) {
+ const cfg = ctx.cfg.configuration
+ const headerFont = cfg.theme.typography.header
+ const bodyFont = cfg.theme.typography.body
+ const fonts = await getSatoriFonts(headerFont, bodyFont)
+
+ // find all slugs that changed or were added
+ for (const changeEvent of changeEvents) {
+ if (!changeEvent.file) continue
+ if (changeEvent.file.data.frontmatter?.socialImage !== undefined) continue
+ if (changeEvent.type === "add" || changeEvent.type === "change") {
+ yield processOgImage(ctx, changeEvent.file.data, fonts, fullOptions)
+ }
+ }
+ },
+ externalResources: (ctx) => {
+ if (!ctx.cfg.configuration.baseUrl) {
+ return {}
+ }
+
+ const baseUrl = ctx.cfg.configuration.baseUrl
+ return {
+ additionalHead: [
+ (pageData) => {
+ const isRealFile = pageData.filePath !== undefined
+ const userDefinedOgImagePath = pageData.frontmatter?.socialImage
+ const generatedOgImagePath = isRealFile
+ ? `https://${baseUrl}/${pageData.slug!}-og-image.webp`
+ : undefined
+ const defaultOgImagePath = `https://${baseUrl}/static/og-image.png`
+ const ogImagePath = userDefinedOgImagePath ?? generatedOgImagePath ?? defaultOgImagePath
+
+ const ogImageMimeType = `image/${getFileExtension(ogImagePath) ?? "png"}`
+ return (
+ <>
+ {!userDefinedOgImagePath && (
+ <>
+ <meta property="og:image:width" content={fullOptions.width.toString()} />
+ <meta property="og:image:height" content={fullOptions.height.toString()} />
+ </>
+ )}
+
+ <meta property="og:image" content={ogImagePath} />
+ <meta property="og:image:type" content={ogImageMimeType} />
+ <meta name="twitter:card" content="summary_large_image" />
+ <meta name="twitter:image" content={ogImagePath} />
+ </>
+ )
+ },
+ ],
+ }
+ },
+ }
+}
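Since `CustomOgImages` is exported as a regular emitter, it is enabled by listing it in the emitters section of `quartz.config.ts`. A minimal sketch follows, assuming the stock config layout; the option values shown are the defaults from `defaultOptions` above.

```ts
import * as Plugin from "./quartz/plugins"

// quartz.config.ts (excerpt): register the emitter alongside the existing ones
const emitters = [
  // ...other emitters (ContentPage, FolderPage, TagPage, ...)
  Plugin.CustomOgImages({
    colorScheme: "lightMode", // palette used when rendering the card
    width: 1200, // output width in px
    height: 630, // output height in px
    excludeRoot: false, // true keeps the static og-image for the root index page
  }),
]
```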
diff --git a/quartz/plugins/emitters/static.ts b/quartz/plugins/emitters/static.ts
index 5545d2ccb..0b4529083 100644
--- a/quartz/plugins/emitters/static.ts
+++ b/quartz/plugins/emitters/static.ts
@@ -2,31 +2,22 @@ import { FilePath, QUARTZ, joinSegments } from "../../util/path"
import { QuartzEmitterPlugin } from "../types"
import fs from "fs"
import { glob } from "../../util/glob"
-import DepGraph from "../../depgraph"
+import { dirname } from "path"
export const Static: QuartzEmitterPlugin = () => ({
name: "Static",
- async getDependencyGraph({ argv, cfg }, _content, _resources) {
- const graph = new DepGraph<FilePath>()
-
+ async *emit({ argv, cfg }) {
const staticPath = joinSegments(QUARTZ, "static")
const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
+ const outputStaticPath = joinSegments(argv.output, "static")
+ await fs.promises.mkdir(outputStaticPath, { recursive: true })
for (const fp of fps) {
- graph.addEdge(
- joinSegments("static", fp) as FilePath,
- joinSegments(argv.output, "static", fp) as FilePath,
- )
+ const src = joinSegments(staticPath, fp) as FilePath
+ const dest = joinSegments(outputStaticPath, fp) as FilePath
+ await fs.promises.mkdir(dirname(dest), { recursive: true })
+ await fs.promises.copyFile(src, dest)
+ yield dest
}
-
- return graph
- },
- async emit({ argv, cfg }, _content, _resources): Promise<FilePath[]> {
- const staticPath = joinSegments(QUARTZ, "static")
- const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
- await fs.promises.cp(staticPath, joinSegments(argv.output, "static"), {
- recursive: true,
- dereference: true,
- })
- return fps.map((fp) => joinSegments(argv.output, "static", fp)) as FilePath[]
},
+ async *partialEmit() {},
})
diff --git a/quartz/plugins/emitters/tagPage.tsx b/quartz/plugins/emitters/tagPage.tsx
index 9913e7d82..5f238932d 100644
--- a/quartz/plugins/emitters/tagPage.tsx
+++ b/quartz/plugins/emitters/tagPage.tsx
@@ -5,23 +5,94 @@ import BodyConstructor from "../../components/Body"
import { pageResources, renderPage } from "../../components/renderPage"
import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../vfile"
import { FullPageLayout } from "../../cfg"
-import {
- FilePath,
- FullSlug,
- getAllSegmentPrefixes,
- joinSegments,
- pathToRoot,
-} from "../../util/path"
+import { FullSlug, getAllSegmentPrefixes, joinSegments, pathToRoot } from "../../util/path"
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
import { TagContent } from "../../components"
import { write } from "./helpers"
-import { i18n } from "../../i18n"
-import DepGraph from "../../depgraph"
+import { i18n, TRANSLATIONS } from "../../i18n"
+import { BuildCtx } from "../../util/ctx"
+import { StaticResources } from "../../util/resources"
interface TagPageOptions extends FullPageLayout {
sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
}
+function computeTagInfo(
+ allFiles: QuartzPluginData[],
+ content: ProcessedContent[],
+ locale: keyof typeof TRANSLATIONS,
+): [Set<string>, Record<string, ProcessedContent>] {
+ const tags: Set<string> = new Set(
+ allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
+ )
+
+ // add base tag
+ tags.add("index")
+
+ const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
+ [...tags].map((tag) => {
+ const title =
+ tag === "index"
+ ? i18n(locale).pages.tagContent.tagIndex
+ : `${i18n(locale).pages.tagContent.tag}: ${tag}`
+ return [
+ tag,
+ defaultProcessedContent({
+ slug: joinSegments("tags", tag) as FullSlug,
+ frontmatter: { title, tags: [] },
+ }),
+ ]
+ }),
+ )
+
+ // Update with actual content if available
+ for (const [tree, file] of content) {
+ const slug = file.data.slug!
+ if (slug.startsWith("tags/")) {
+ const tag = slug.slice("tags/".length)
+ if (tags.has(tag)) {
+ tagDescriptions[tag] = [tree, file]
+ if (file.data.frontmatter?.title === tag) {
+ file.data.frontmatter.title = `${i18n(locale).pages.tagContent.tag}: ${tag}`
+ }
+ }
+ }
+ }
+
+ return [tags, tagDescriptions]
+}
+
+async function processTagPage(
+ ctx: BuildCtx,
+ tag: string,
+ tagContent: ProcessedContent,
+ allFiles: QuartzPluginData[],
+ opts: FullPageLayout,
+ resources: StaticResources,
+) {
+ const slug = joinSegments("tags", tag) as FullSlug
+ const [tree, file] = tagContent
+ const cfg = ctx.cfg.configuration
+ const externalResources = pageResources(pathToRoot(slug), resources)
+ const componentData: QuartzComponentProps = {
+ ctx,
+ fileData: file.data,
+ externalResources,
+ cfg,
+ children: [],
+ tree,
+ allFiles,
+ }
+
+ const content = renderPage(cfg, slug, componentData, opts, externalResources)
+ return write({
+ ctx,
+ content,
+ slug: file.data.slug!,
+ ext: ".html",
+ })
+}
+
export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts) => {
const opts: FullPageLayout = {
...sharedPageComponents,
@@ -50,93 +121,50 @@ export const TagPage: QuartzEmitterPlugin> = (userOpts)
Footer,
]
},
- async getDependencyGraph(ctx, content, _resources) {
- const graph = new DepGraph<FilePath>()
+ async *emit(ctx, content, resources) {
+ const allFiles = content.map((c) => c[1].data)
+ const cfg = ctx.cfg.configuration
+ const [tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
- for (const [_tree, file] of content) {
- const sourcePath = file.data.filePath!
- const tags = (file.data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes)
- // if the file has at least one tag, it is used in the tag index page
- if (tags.length > 0) {
- tags.push("index")
- }
-
- for (const tag of tags) {
- graph.addEdge(
- sourcePath,
- joinSegments(ctx.argv.output, "tags", tag + ".html") as FilePath,
- )
- }
+ for (const tag of tags) {
+ yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
}
-
- return graph
},
- async emit(ctx, content, resources): Promise<FilePath[]> {
- const fps: FilePath[] = []
+ async *partialEmit(ctx, content, resources, changeEvents) {
const allFiles = content.map((c) => c[1].data)
const cfg = ctx.cfg.configuration
- const tags: Set<string> = new Set(
- allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
- )
+ // Find all tags that need to be updated based on changed files
+ const affectedTags: Set<string> = new Set()
+ for (const changeEvent of changeEvents) {
+ if (!changeEvent.file) continue
+ const slug = changeEvent.file.data.slug!
- // add base tag
- tags.add("index")
-
- const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
- [...tags].map((tag) => {
- const title =
- tag === "index"
- ? i18n(cfg.locale).pages.tagContent.tagIndex
- : `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
- return [
- tag,
- defaultProcessedContent({
- slug: joinSegments("tags", tag) as FullSlug,
- frontmatter: { title, tags: [] },
- }),
- ]
- }),
- )
-
- for (const [tree, file] of content) {
- const slug = file.data.slug!
+ // If it's a tag page itself that changed
if (slug.startsWith("tags/")) {
const tag = slug.slice("tags/".length)
- if (tags.has(tag)) {
- tagDescriptions[tag] = [tree, file]
- if (file.data.frontmatter?.title === tag) {
- file.data.frontmatter.title = `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
- }
+ affectedTags.add(tag)
+ }
+
+ // If a file with tags changed, we need to update those tag pages
+ const fileTags = changeEvent.file.data.frontmatter?.tags ?? []
+ fileTags.flatMap(getAllSegmentPrefixes).forEach((tag) => affectedTags.add(tag))
+
+ // Always update the index tag page if any file changes
+ affectedTags.add("index")
+ }
+
+ // If there are affected tags, rebuild their pages
+ if (affectedTags.size > 0) {
+ // We still need to compute all tags because tag pages show all tags
+ const [_tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
+
+ for (const tag of affectedTags) {
+ if (tagDescriptions[tag]) {
+ yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
}
}
}
-
- for (const tag of tags) {
- const slug = joinSegments("tags", tag) as FullSlug
- const [tree, file] = tagDescriptions[tag]
- const externalResources = pageResources(pathToRoot(slug), file.data, resources)
- const componentData: QuartzComponentProps = {
- ctx,
- fileData: file.data,
- externalResources,
- cfg,
- children: [],
- tree,
- allFiles,
- }
-
- const content = renderPage(cfg, slug, componentData, opts, externalResources)
- const fp = await write({
- ctx,
- content,
- slug: file.data.slug!,
- ext: ".html",
- })
-
- fps.push(fp)
- }
- return fps
},
}
}
diff --git a/quartz/plugins/transformers/description.ts b/quartz/plugins/transformers/description.ts
index c7e592ee9..3f8519b32 100644
--- a/quartz/plugins/transformers/description.ts
+++ b/quartz/plugins/transformers/description.ts
@@ -5,11 +5,13 @@ import { escapeHTML } from "../../util/escape"
export interface Options {
descriptionLength: number
+ maxDescriptionLength: number
replaceExternalLinks: boolean
}
const defaultOptions: Options = {
descriptionLength: 150,
+ maxDescriptionLength: 300,
replaceExternalLinks: true,
}
@@ -37,35 +39,41 @@ export const Description: QuartzTransformerPlugin> = (userOpts)
text = text.replace(urlRegex, "$<domain>" + "$<path>")
}
- const desc = frontMatterDescription ?? text
- const sentences = desc.replace(/\s+/g, " ").split(/\.\s/)
- const finalDesc: string[] = []
- const len = opts.descriptionLength
- let sentenceIdx = 0
- let currentDescriptionLength = 0
+ if (frontMatterDescription) {
+ file.data.description = frontMatterDescription
+ file.data.text = text
+ return
+ }
- if (sentences[0] !== undefined && sentences[0].length >= len) {
- const firstSentence = sentences[0].split(" ")
- while (currentDescriptionLength < len) {
- const sentence = firstSentence[sentenceIdx]
- if (!sentence) break
- finalDesc.push(sentence)
- currentDescriptionLength += sentence.length
- sentenceIdx++
- }
- finalDesc.push("...")
- } else {
- while (currentDescriptionLength < len) {
- const sentence = sentences[sentenceIdx]
- if (!sentence) break
- const currentSentence = sentence.endsWith(".") ? sentence : sentence + "."
- finalDesc.push(currentSentence)
- currentDescriptionLength += currentSentence.length
+ // otherwise, use the text content
+ const desc = text
+ const sentences = desc.replace(/\s+/g, " ").split(/\.\s/)
+ let finalDesc = ""
+ let sentenceIdx = 0
+
+ // Add full sentences until we exceed the guideline length
+ while (sentenceIdx < sentences.length) {
+ const sentence = sentences[sentenceIdx]
+ if (!sentence) break
+
+ const currentSentence = sentence.endsWith(".") ? sentence : sentence + "."
+ const nextLength = finalDesc.length + currentSentence.length + (finalDesc ? 1 : 0)
+
+ // Add the sentence if we're under the guideline length
+ // or if this is the first sentence (always include at least one)
+ if (nextLength <= opts.descriptionLength || sentenceIdx === 0) {
+ finalDesc += (finalDesc ? " " : "") + currentSentence
sentenceIdx++
+ } else {
+ break
}
}
- file.data.description = finalDesc.join(" ")
+ // truncate to max length if necessary
+ file.data.description =
+ finalDesc.length > opts.maxDescriptionLength
+ ? finalDesc.slice(0, opts.maxDescriptionLength) + "..."
+ : finalDesc
file.data.text = text
}
},
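With this change, `descriptionLength` acts as a soft guideline (whole sentences are appended until it would be exceeded, and the first sentence is always kept), while the new `maxDescriptionLength` is a hard cap applied afterwards. A self-contained sketch of that rule, for illustration only:

```ts
// Sketch of the truncation rule introduced above (not the plugin code itself).
function sketchDescription(text: string, guideline = 150, max = 300): string {
  const sentences = text.replace(/\s+/g, " ").split(/\.\s/)
  let desc = ""
  for (let i = 0; i < sentences.length; i++) {
    const sentence = sentences[i].endsWith(".") ? sentences[i] : sentences[i] + "."
    // append whole sentences while under the guideline; always keep the first one
    if (desc.length + sentence.length + (desc ? 1 : 0) <= guideline || i === 0) {
      desc += (desc ? " " : "") + sentence
    } else {
      break
    }
  }
  // hard cap with an ellipsis only if the result is still too long
  return desc.length > max ? desc.slice(0, max) + "..." : desc
}

// e.g. sketchDescription("One. Two. Three.") keeps whole sentences and only ever
// cuts mid-sentence when the 300-character cap is reached.
```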
diff --git a/quartz/plugins/transformers/frontmatter.ts b/quartz/plugins/transformers/frontmatter.ts
index 9679bd1ec..c04c52a24 100644
--- a/quartz/plugins/transformers/frontmatter.ts
+++ b/quartz/plugins/transformers/frontmatter.ts
@@ -3,12 +3,9 @@ import remarkFrontmatter from "remark-frontmatter"
import { QuartzTransformerPlugin } from "../types"
import yaml from "js-yaml"
import toml from "toml"
-import { FilePath, FullSlug, joinSegments, slugifyFilePath, slugTag } from "../../util/path"
+import { FilePath, FullSlug, getFileExtension, slugifyFilePath, slugTag } from "../../util/path"
import { QuartzPluginData } from "../vfile"
import { i18n } from "../../i18n"
-import { Argv } from "../../util/ctx"
-import { VFile } from "vfile"
-import path from "path"
export interface Options {
delimiters: string | [string, string]
@@ -43,26 +40,24 @@ function coerceToArray(input: string | string[]): string[] | undefined {
.map((tag: string | number) => tag.toString())
}
-export function getAliasSlugs(aliases: string[], argv: Argv, file: VFile): FullSlug[] {
- const dir = path.posix.relative(argv.directory, path.dirname(file.data.filePath!))
- const slugs: FullSlug[] = aliases.map(
- (alias) => path.posix.join(dir, slugifyFilePath(alias as FilePath)) as FullSlug,
- )
- const permalink = file.data.frontmatter?.permalink
- if (typeof permalink === "string") {
- slugs.push(permalink as FullSlug)
+function getAliasSlugs(aliases: string[]): FullSlug[] {
+ const res: FullSlug[] = []
+ for (const alias of aliases) {
+ const isMd = getFileExtension(alias) === "md"
+ const mockFp = isMd ? alias : alias + ".md"
+ const slug = slugifyFilePath(mockFp as FilePath)
+ res.push(slug)
}
- // fix any slugs that have trailing slash
- return slugs.map((slug) =>
- slug.endsWith("/") ? (joinSegments(slug, "index") as FullSlug) : slug,
- )
+
+ return res
}
export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts) => {
const opts = { ...defaultOptions, ...userOpts }
return {
name: "FrontMatter",
- markdownPlugins({ cfg, allSlugs, argv }) {
+ markdownPlugins(ctx) {
+ const { cfg, allSlugs } = ctx
return [
[remarkFrontmatter, ["yaml", "toml"]],
() => {
@@ -88,9 +83,18 @@ export const FrontMatter: QuartzTransformerPlugin> = (userOpts)
const aliases = coerceToArray(coalesceAliases(data, ["aliases", "alias"]))
if (aliases) {
data.aliases = aliases // frontmatter
- const slugs = (file.data.aliases = getAliasSlugs(aliases, argv, file))
- allSlugs.push(...slugs)
+ file.data.aliases = getAliasSlugs(aliases)
+ allSlugs.push(...file.data.aliases)
}
+
+ if (data.permalink != null && data.permalink.toString() !== "") {
+ data.permalink = data.permalink.toString() as FullSlug
+ const aliases = file.data.aliases ?? []
+ aliases.push(data.permalink)
+ file.data.aliases = aliases
+ allSlugs.push(data.permalink)
+ }
+
const cssclasses = coerceToArray(coalesceAliases(data, ["cssclasses", "cssclass"]))
if (cssclasses) data.cssclasses = cssclasses
@@ -131,6 +135,7 @@ declare module "vfile" {
created: string
published: string
description: string
+ socialDescription: string
publish: boolean | string
draft: boolean | string
lang: string
diff --git a/quartz/plugins/transformers/lastmod.ts b/quartz/plugins/transformers/lastmod.ts
index 02278037d..fb057f705 100644
--- a/quartz/plugins/transformers/lastmod.ts
+++ b/quartz/plugins/transformers/lastmod.ts
@@ -31,7 +31,7 @@ export const CreatedModifiedDate: QuartzTransformerPlugin> = (u
const opts = { ...defaultOptions, ...userOpts }
return {
name: "CreatedModifiedDate",
- markdownPlugins() {
+ markdownPlugins(ctx) {
return [
() => {
let repo: Repository | undefined = undefined
@@ -40,8 +40,8 @@ export const CreatedModifiedDate: QuartzTransformerPlugin> = (u
let modified: MaybeDate = undefined
let published: MaybeDate = undefined
- const fp = file.data.filePath!
- const fullFp = path.isAbsolute(fp) ? fp : path.posix.join(file.cwd, fp)
+ const fp = file.data.relativePath!
+ const fullFp = path.posix.join(ctx.argv.directory, fp)
for (const source of opts.priority) {
if (source === "filesystem") {
const st = await fs.promises.stat(fullFp)
diff --git a/quartz/plugins/transformers/oxhugofm.ts b/quartz/plugins/transformers/oxhugofm.ts
index cdbffcffd..0612c7a9d 100644
--- a/quartz/plugins/transformers/oxhugofm.ts
+++ b/quartz/plugins/transformers/oxhugofm.ts
@@ -54,7 +54,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin>
textTransform(_ctx, src) {
if (opts.wikilinks) {
src = src.toString()
- src = src.replaceAll(relrefRegex, (value, ...capture) => {
+ src = src.replaceAll(relrefRegex, (_value, ...capture) => {
const [text, link] = capture
return `[${text}](${link})`
})
@@ -62,7 +62,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin>
if (opts.removePredefinedAnchor) {
src = src.toString()
- src = src.replaceAll(predefinedHeadingIdRegex, (value, ...capture) => {
+ src = src.replaceAll(predefinedHeadingIdRegex, (_value, ...capture) => {
const [headingText] = capture
return headingText
})
@@ -70,7 +70,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
if (opts.removeHugoShortcode) {
src = src.toString()
- src = src.replaceAll(hugoShortcodeRegex, (value, ...capture) => {
+ src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => {
const [scContent] = capture
return scContent
})
@@ -78,7 +78,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
if (opts.replaceFigureWithMdImg) {
src = src.toString()
- src = src.replaceAll(figureTagRegex, (value, ...capture) => {
+ src = src.replaceAll(figureTagRegex, (_value, ...capture) => {
const [src] = capture
return `![](${src})`
})
@@ -86,11 +86,11 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
if (opts.replaceOrgLatex) {
src = src.toString()
- src = src.replaceAll(inlineLatexRegex, (value, ...capture) => {
+ src = src.replaceAll(inlineLatexRegex, (_value, ...capture) => {
const [eqn] = capture
return `$${eqn}$`
})
- src = src.replaceAll(blockLatexRegex, (value, ...capture) => {
+ src = src.replaceAll(blockLatexRegex, (_value, ...capture) => {
const [eqn] = capture
return `$$${eqn}$$`
})
diff --git a/quartz/plugins/transformers/roam.ts b/quartz/plugins/transformers/roam.ts
index b3be8f542..b6df67a8f 100644
--- a/quartz/plugins/transformers/roam.ts
+++ b/quartz/plugins/transformers/roam.ts
@@ -1,10 +1,8 @@
import { QuartzTransformerPlugin } from "../types"
import { PluggableList } from "unified"
-import { SKIP, visit } from "unist-util-visit"
+import { visit } from "unist-util-visit"
import { ReplaceFunction, findAndReplace as mdastFindReplace } from "mdast-util-find-and-replace"
import { Root, Html, Paragraph, Text, Link, Parent } from "mdast"
-import { Node } from "unist"
-import { VFile } from "vfile"
import { BuildVisitor } from "unist-util-visit"
export interface Options {
@@ -34,21 +32,10 @@ const defaultOptions: Options = {
const orRegex = new RegExp(/{{or:(.*?)}}/, "g")
const TODORegex = new RegExp(/{{.*?\bTODO\b.*?}}/, "g")
const DONERegex = new RegExp(/{{.*?\bDONE\b.*?}}/, "g")
-const videoRegex = new RegExp(/{{.*?\[\[video\]\].*?\:(.*?)}}/, "g")
-const youtubeRegex = new RegExp(
- /{{.*?\[\[video\]\].*?(https?:\/\/(?:www\.)?youtu(?:be\.com\/watch\?v=|\.be\/)([\w\-\_]*)(&(amp;)?[\w\?=]*)?)}}/,
- "g",
-)
-// const multimediaRegex = new RegExp(/{{.*?\b(video|audio)\b.*?\:(.*?)}}/, "g")
-
-const audioRegex = new RegExp(/{{.*?\[\[audio\]\].*?\:(.*?)}}/, "g")
-const pdfRegex = new RegExp(/{{.*?\[\[pdf\]\].*?\:(.*?)}}/, "g")
const blockquoteRegex = new RegExp(/(\[\[>\]\])\s*(.*)/, "g")
const roamHighlightRegex = new RegExp(/\^\^(.+)\^\^/, "g")
const roamItalicRegex = new RegExp(/__(.+)__/, "g")
-const tableRegex = new RegExp(/- {{.*?\btable\b.*?}}/, "g") /* TODO */
-const attributeRegex = new RegExp(/\b\w+(?:\s+\w+)*::/, "g") /* TODO */
function isSpecialEmbed(node: Paragraph): boolean {
if (node.children.length !== 2) return false
@@ -135,7 +122,7 @@ export const RoamFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options> | un
const plugins: PluggableList = []
plugins.push(() => {
- return (tree: Root, file: VFile) => {
+ return (tree: Root) => {
const replacements: [RegExp, ReplaceFunction][] = []
// Handle special embeds (audio, video, PDF)
diff --git a/quartz/plugins/types.ts b/quartz/plugins/types.ts
index e7cfb479f..2a7c16c5d 100644
--- a/quartz/plugins/types.ts
+++ b/quartz/plugins/types.ts
@@ -4,7 +4,7 @@ import { ProcessedContent } from "./vfile"
import { QuartzComponent } from "../components/types"
import { FilePath } from "../util/path"
import { BuildCtx } from "../util/ctx"
-import DepGraph from "../depgraph"
+import { VFile } from "vfile"
export interface PluginTypes {
transformers: QuartzTransformerPluginInstance[]
@@ -33,22 +33,33 @@ export type QuartzFilterPluginInstance = {
shouldPublish(ctx: BuildCtx, content: ProcessedContent): boolean
}
+export type ChangeEvent = {
+ type: "add" | "change" | "delete"
+ path: FilePath
+ file?: VFile
+}
+
export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (
opts?: Options,
) => QuartzEmitterPluginInstance
export type QuartzEmitterPluginInstance = {
name: string
- emit(ctx: BuildCtx, content: ProcessedContent[], resources: StaticResources): Promise<FilePath[]>
+ emit: (
+ ctx: BuildCtx,
+ content: ProcessedContent[],
+ resources: StaticResources,
+ ) => Promise<FilePath[]> | AsyncGenerator<FilePath>
+ partialEmit?: (
+ ctx: BuildCtx,
+ content: ProcessedContent[],
+ resources: StaticResources,
+ changeEvents: ChangeEvent[],
+ ) => Promise<FilePath[]> | AsyncGenerator<FilePath> | null
/**
* Returns the components (if any) that are used in rendering the page.
* This helps Quartz optimize the page by only including necessary resources
* for components that are actually used.
*/
getQuartzComponents?: (ctx: BuildCtx) => QuartzComponent[]
- getDependencyGraph?(
- ctx: BuildCtx,
- content: ProcessedContent[],
- resources: StaticResources,
- ): Promise<DepGraph<FilePath>>
externalResources?: ExternalResourcesFn
}
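
As an illustration of the emitter contract above (not something this patch adds), here is a minimal sketch of an emitter that streams paths from `emit` as an async generator and uses `partialEmit` to rewrite only the sources the watcher reported as changed. The `RawMarkdownEmitter` name and its copy-the-source behaviour are hypothetical; the sketch only assumes the `ChangeEvent` and `QuartzEmitterPluginInstance` shapes shown above.

```typescript
import { promises as fs } from "fs"
import path from "path"
import { QuartzEmitterPlugin, ChangeEvent } from "./types"
import { FilePath } from "../util/path"

// hypothetical emitter: copies each page's source text into the output folder
export const RawMarkdownEmitter: QuartzEmitterPlugin = () => {
  const emitOne = async (outDir: string, rel: FilePath, text: string): Promise<FilePath> => {
    const dest = path.join(outDir, rel) as FilePath
    await fs.mkdir(path.dirname(dest), { recursive: true })
    await fs.writeFile(dest, text)
    return dest
  }

  return {
    name: "RawMarkdownEmitter",
    // full build: yield each written path so emitContent can update the spinner live
    async *emit(ctx, content) {
      for (const [, file] of content) {
        yield await emitOne(ctx.argv.output, file.data.relativePath!, String(file.value))
      }
    },
    // incremental rebuild: only re-emit files that were added or changed
    async partialEmit(ctx, content, _resources, changeEvents: ChangeEvent[]) {
      const changed = new Set(
        changeEvents.filter((ev) => ev.type !== "delete").map((ev) => ev.path),
      )
      const out: FilePath[] = []
      for (const [, file] of content) {
        if (changed.has(file.data.filePath!)) {
          out.push(await emitOne(ctx.argv.output, file.data.relativePath!, String(file.value)))
        }
      }
      return out
    },
  }
}
```
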
diff --git a/quartz/processors/emit.ts b/quartz/processors/emit.ts
index c68e0edeb..00bc9c82c 100644
--- a/quartz/processors/emit.ts
+++ b/quartz/processors/emit.ts
@@ -4,30 +4,47 @@ import { ProcessedContent } from "../plugins/vfile"
import { QuartzLogger } from "../util/log"
import { trace } from "../util/trace"
import { BuildCtx } from "../util/ctx"
+import chalk from "chalk"
export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
const { argv, cfg } = ctx
const perf = new PerfTimer()
const log = new QuartzLogger(ctx.argv.verbose)
- log.start(`Emitting output files`)
+ log.start(`Emitting files`)
let emittedFiles = 0
const staticResources = getStaticResourcesFromPlugins(ctx)
- for (const emitter of cfg.plugins.emitters) {
- try {
- const emitted = await emitter.emit(ctx, content, staticResources)
- emittedFiles += emitted.length
-
- if (ctx.argv.verbose) {
- for (const file of emitted) {
- console.log(`[emit:${emitter.name}] ${file}`)
+ await Promise.all(
+ cfg.plugins.emitters.map(async (emitter) => {
+ try {
+ const emitted = await emitter.emit(ctx, content, staticResources)
+ if (Symbol.asyncIterator in emitted) {
+ // Async generator case
+ for await (const file of emitted) {
+ emittedFiles++
+ if (ctx.argv.verbose) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ } else {
+ log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
+ }
+ }
+ } else {
+ // Array case
+ emittedFiles += emitted.length
+ for (const file of emitted) {
+ if (ctx.argv.verbose) {
+ console.log(`[emit:${emitter.name}] ${file}`)
+ } else {
+ log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
+ }
+ }
}
+ } catch (err) {
+ trace(`Failed to emit from plugin \`${emitter.name}\``, err as Error)
}
- } catch (err) {
- trace(`Failed to emit from plugin \`${emitter.name}\``, err as Error)
- }
- }
+ }),
+ )
log.end(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince()}`)
}
diff --git a/quartz/processors/parse.ts b/quartz/processors/parse.ts
index 479313f49..04efdbea2 100644
--- a/quartz/processors/parse.ts
+++ b/quartz/processors/parse.ts
@@ -7,12 +7,13 @@ import { Root as HTMLRoot } from "hast"
import { MarkdownContent, ProcessedContent } from "../plugins/vfile"
import { PerfTimer } from "../util/perf"
import { read } from "to-vfile"
-import { FilePath, FullSlug, QUARTZ, slugifyFilePath } from "../util/path"
+import { FilePath, QUARTZ, slugifyFilePath } from "../util/path"
import path from "path"
import workerpool, { Promise as WorkerPromise } from "workerpool"
import { QuartzLogger } from "../util/log"
import { trace } from "../util/trace"
-import { BuildCtx } from "../util/ctx"
+import { BuildCtx, WorkerSerializableBuildCtx } from "../util/ctx"
+import chalk from "chalk"
export type QuartzMdProcessor = Processor<MDRoot, MDRoot, MDRoot>
export type QuartzHtmlProcessor = Processor<undefined, MDRoot, HTMLRoot>
@@ -171,25 +172,46 @@ export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
- console.error(`${err}`.replace(/^error:\s*/i, ""))
+ console.error(err)
process.exit(1)
}
- const mdPromises: WorkerPromise<[MarkdownContent[], FullSlug[]]>[] = []
- for (const chunk of chunks(fps, CHUNK_SIZE)) {
- mdPromises.push(pool.exec("parseMarkdown", [ctx.buildId, argv, chunk]))
+ const serializableCtx: WorkerSerializableBuildCtx = {
+ buildId: ctx.buildId,
+ argv: ctx.argv,
+ allSlugs: ctx.allSlugs,
+ allFiles: ctx.allFiles,
+ incremental: ctx.incremental,
}
- const mdResults: [MarkdownContent[], FullSlug[]][] =
- await WorkerPromise.all(mdPromises).catch(errorHandler)
- const childPromises: WorkerPromise<ProcessedContent[]>[] = []
- for (const [_, extraSlugs] of mdResults) {
- ctx.allSlugs.push(...extraSlugs)
+ const textToMarkdownPromises: WorkerPromise<MarkdownContent[]>[] = []
+ let processedFiles = 0
+ for (const chunk of chunks(fps, CHUNK_SIZE)) {
+ textToMarkdownPromises.push(pool.exec("parseMarkdown", [serializableCtx, chunk]))
}
- for (const [mdChunk, _] of mdResults) {
- childPromises.push(pool.exec("processHtml", [ctx.buildId, argv, mdChunk, ctx.allSlugs]))
+
+ const mdResults: Array<MarkdownContent[]> = await Promise.all(
+ textToMarkdownPromises.map(async (promise) => {
+ const result = await promise
+ processedFiles += result.length
+ log.updateText(`text->markdown ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
+ return result
+ }),
+ ).catch(errorHandler)
+
+ const markdownToHtmlPromises: WorkerPromise<ProcessedContent[]>[] = []
+ processedFiles = 0
+ for (const mdChunk of mdResults) {
+ markdownToHtmlPromises.push(pool.exec("processHtml", [serializableCtx, mdChunk]))
}
- const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch(errorHandler)
+ const results: ProcessedContent[][] = await Promise.all(
+ markdownToHtmlPromises.map(async (promise) => {
+ const result = await promise
+ processedFiles += result.length
+ log.updateText(`markdown->html ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
+ return result
+ }),
+ ).catch(errorHandler)
res = results.flat()
await pool.terminate()
diff --git a/quartz/util/ctx.ts b/quartz/util/ctx.ts
index 044d21f68..b3e7a37f5 100644
--- a/quartz/util/ctx.ts
+++ b/quartz/util/ctx.ts
@@ -1,12 +1,12 @@
import { QuartzConfig } from "../cfg"
-import { FullSlug } from "./path"
+import { FilePath, FullSlug } from "./path"
export interface Argv {
directory: string
verbose: boolean
output: string
serve: boolean
- fastRebuild: boolean
+ watch: boolean
port: number
wsPort: number
remoteDevHost?: string
@@ -18,4 +18,8 @@ export interface BuildCtx {
argv: Argv
cfg: QuartzConfig
allSlugs: FullSlug[]
+ allFiles: FilePath[]
+ incremental: boolean
}
+
+export type WorkerSerializableBuildCtx = Omit<BuildCtx, "cfg">
diff --git a/quartz/util/fileTrie.test.ts b/quartz/util/fileTrie.test.ts
index a4481eda9..d66e14266 100644
--- a/quartz/util/fileTrie.test.ts
+++ b/quartz/util/fileTrie.test.ts
@@ -1,6 +1,7 @@
import test, { describe, beforeEach } from "node:test"
import assert from "node:assert"
import { FileTrieNode } from "./fileTrie"
+import { FullSlug } from "./path"
interface TestData {
title: string
@@ -192,6 +193,94 @@ describe("FileTrie", () => {
})
})
+ describe("fromEntries", () => {
+ test("nested", () => {
+ const trie = FileTrieNode.fromEntries([
+ ["index" as FullSlug, { title: "Root", slug: "index", filePath: "index.md" }],
+ [
+ "folder/file1" as FullSlug,
+ { title: "File 1", slug: "folder/file1", filePath: "folder/file1.md" },
+ ],
+ [
+ "folder/index" as FullSlug,
+ { title: "Folder Index", slug: "folder/index", filePath: "folder/index.md" },
+ ],
+ [
+ "folder/file2" as FullSlug,
+ { title: "File 2", slug: "folder/file2", filePath: "folder/file2.md" },
+ ],
+ [
+ "folder/folder2/index" as FullSlug,
+ {
+ title: "Subfolder Index",
+ slug: "folder/folder2/index",
+ filePath: "folder/folder2/index.md",
+ },
+ ],
+ ])
+
+ assert.strictEqual(trie.children.length, 1)
+ assert.strictEqual(trie.children[0].slug, "folder/index")
+ assert.strictEqual(trie.children[0].children.length, 3)
+ assert.strictEqual(trie.children[0].children[0].slug, "folder/file1")
+ assert.strictEqual(trie.children[0].children[1].slug, "folder/file2")
+ assert.strictEqual(trie.children[0].children[2].slug, "folder/folder2/index")
+ assert.strictEqual(trie.children[0].children[2].children.length, 0)
+ })
+ })
+
+ describe("findNode", () => {
+ test("should find root node with empty path", () => {
+ const data = { title: "Root", slug: "index", filePath: "index.md" }
+ trie.add(data)
+ const found = trie.findNode([])
+ assert.strictEqual(found, trie)
+ })
+
+ test("should find node at first level", () => {
+ const data = { title: "Test", slug: "test", filePath: "test.md" }
+ trie.add(data)
+ const found = trie.findNode(["test"])
+ assert.strictEqual(found?.data, data)
+ })
+
+ test("should find nested node", () => {
+ const data = {
+ title: "Nested",
+ slug: "folder/subfolder/test",
+ filePath: "folder/subfolder/test.md",
+ }
+ trie.add(data)
+ const found = trie.findNode(["folder", "subfolder", "test"])
+ assert.strictEqual(found?.data, data)
+
+ // should find the folder and subfolder indexes too
+ assert.strictEqual(
+ trie.findNode(["folder", "subfolder", "index"]),
+ trie.children[0].children[0],
+ )
+ assert.strictEqual(trie.findNode(["folder", "index"]), trie.children[0])
+ })
+
+ test("should return undefined for non-existent path", () => {
+ const data = { title: "Test", slug: "test", filePath: "test.md" }
+ trie.add(data)
+ const found = trie.findNode(["nonexistent"])
+ assert.strictEqual(found, undefined)
+ })
+
+ test("should return undefined for partial path", () => {
+ const data = {
+ title: "Nested",
+ slug: "folder/subfolder/test",
+ filePath: "folder/subfolder/test.md",
+ }
+ trie.add(data)
+ const found = trie.findNode(["folder"])
+ assert.strictEqual(found?.data, null)
+ })
+ })
+
describe("getFolderPaths", () => {
test("should return all folder paths", () => {
const data1 = {
diff --git a/quartz/util/fileTrie.ts b/quartz/util/fileTrie.ts
index b39833cf7..e3dc2e7aa 100644
--- a/quartz/util/fileTrie.ts
+++ b/quartz/util/fileTrie.ts
@@ -89,6 +89,14 @@ export class FileTrieNode {
this.insert(file.slug.split("/"), file)
}
+ findNode(path: string[]): FileTrieNode | undefined {
+ if (path.length === 0 || (path.length === 1 && path[0] === "index")) {
+ return this
+ }
+
+ return this.children.find((c) => c.slugSegment === path[0])?.findNode(path.slice(1))
+ }
+
/**
* Filter trie nodes. Behaves similar to `Array.prototype.filter()`, but modifies tree in place
*/
diff --git a/quartz/util/log.ts b/quartz/util/log.ts
index 773945c97..cfd8c3f89 100644
--- a/quartz/util/log.ts
+++ b/quartz/util/log.ts
@@ -1,26 +1,56 @@
-import { Spinner } from "cli-spinner"
+import truncate from "ansi-truncate"
+import readline from "readline"
export class QuartzLogger {
verbose: boolean
- spinner: Spinner | undefined
+ private spinnerInterval: NodeJS.Timeout | undefined
+ private spinnerText: string = ""
+ private updateSuffix: string = ""
+ private spinnerIndex: number = 0
+ private readonly spinnerChars = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
+
constructor(verbose: boolean) {
- this.verbose = verbose
+ const isInteractiveTerminal =
+ process.stdout.isTTY && process.env.TERM !== "dumb" && !process.env.CI
+ this.verbose = verbose || !isInteractiveTerminal
}
start(text: string) {
+ this.spinnerText = text
+
if (this.verbose) {
console.log(text)
} else {
- this.spinner = new Spinner(`%s ${text}`)
- this.spinner.setSpinnerString(18)
- this.spinner.start()
+ this.spinnerIndex = 0
+ this.spinnerInterval = setInterval(() => {
+ readline.clearLine(process.stdout, 0)
+ readline.cursorTo(process.stdout, 0)
+
+ const columns = process.stdout.columns || 80
+ let output = `${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`
+ if (this.updateSuffix) {
+ output += `: ${this.updateSuffix}`
+ }
+
+ const truncated = truncate(output, columns)
+ process.stdout.write(truncated)
+ this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length
+ }, 50)
}
}
+ updateText(text: string) {
+ this.updateSuffix = text
+ }
+
end(text?: string) {
- if (!this.verbose) {
- this.spinner!.stop(true)
+ if (!this.verbose && this.spinnerInterval) {
+ clearInterval(this.spinnerInterval)
+ this.spinnerInterval = undefined
+ readline.clearLine(process.stdout, 0)
+ readline.cursorTo(process.stdout, 0)
}
+
if (text) {
console.log(text)
}
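
Purely for illustration (not part of the patch), typical use of the rewritten logger: `start` begins the spinner, `updateText` swaps the suffix shown after the spinner text, and `end` clears the line and prints a summary. The file names here are made up.

```typescript
import { QuartzLogger } from "./log"

const log = new QuartzLogger(false) // non-verbose; falls back to plain logging on non-TTY/CI
log.start("Emitting files")
for (const file of ["index.html", "tags/index.html"]) {
  // ... write the file to disk here ...
  log.updateText(file)
}
log.end("Emitted 2 files")
```
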
diff --git a/quartz/util/og.tsx b/quartz/util/og.tsx
index 4d675cbd4..41f885b3b 100644
--- a/quartz/util/og.tsx
+++ b/quartz/util/og.tsx
@@ -1,28 +1,67 @@
+import { promises as fs } from "fs"
import { FontWeight, SatoriOptions } from "satori/wasm"
import { GlobalConfiguration } from "../cfg"
import { QuartzPluginData } from "../plugins/vfile"
import { JSXInternal } from "preact/src/jsx"
-import { ThemeKey } from "./theme"
+import { FontSpecification, getFontSpecificationName, ThemeKey } from "./theme"
+import path from "path"
+import { QUARTZ } from "./path"
+import { formatDate, getDate } from "../components/Date"
+import readingTime from "reading-time"
+import { i18n } from "../i18n"
+import chalk from "chalk"
-/**
- * Get an array of `FontOptions` (for satori) given google font names
- * @param headerFontName name of google font used for header
- * @param bodyFontName name of google font used for body
- * @returns FontOptions for header and body
- */
-export async function getSatoriFont(headerFontName: string, bodyFontName: string) {
- const headerWeight = 700 as FontWeight
- const bodyWeight = 400 as FontWeight
+const defaultHeaderWeight = [700]
+const defaultBodyWeight = [400]
- // Fetch fonts
- const headerFont = await fetchTtf(headerFontName, headerWeight)
- const bodyFont = await fetchTtf(bodyFontName, bodyWeight)
+export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: FontSpecification) {
+ // Get all weights for header and body fonts
+ const headerWeights: FontWeight[] = (
+ typeof headerFont === "string"
+ ? defaultHeaderWeight
+ : (headerFont.weights ?? defaultHeaderWeight)
+ ) as FontWeight[]
+ const bodyWeights: FontWeight[] = (
+ typeof bodyFont === "string" ? defaultBodyWeight : (bodyFont.weights ?? defaultBodyWeight)
+ ) as FontWeight[]
- // Convert fonts to satori font format and return
+ const headerFontName = typeof headerFont === "string" ? headerFont : headerFont.name
+ const bodyFontName = typeof bodyFont === "string" ? bodyFont : bodyFont.name
+
+ // Fetch fonts for all weights and convert to satori format in one go
+ const headerFontPromises = headerWeights.map(async (weight) => {
+ const data = await fetchTtf(headerFontName, weight)
+ if (!data) return null
+ return {
+ name: headerFontName,
+ data,
+ weight,
+ style: "normal" as const,
+ }
+ })
+
+ const bodyFontPromises = bodyWeights.map(async (weight) => {
+ const data = await fetchTtf(bodyFontName, weight)
+ if (!data) return null
+ return {
+ name: bodyFontName,
+ data,
+ weight,
+ style: "normal" as const,
+ }
+ })
+
+ const [headerFonts, bodyFonts] = await Promise.all([
+ Promise.all(headerFontPromises),
+ Promise.all(bodyFontPromises),
+ ])
+
+ // Filter out any failed fetches and combine header and body fonts
const fonts: SatoriOptions["fonts"] = [
- { name: headerFontName, data: headerFont, weight: headerWeight, style: "normal" },
- { name: bodyFontName, data: bodyFont, weight: bodyWeight, style: "normal" },
+ ...headerFonts.filter((font): font is NonNullable<typeof font> => font !== null),
+ ...bodyFonts.filter((font): font is NonNullable<typeof font> => font !== null),
]
+
return fonts
}
@@ -32,32 +71,49 @@ export async function getSatoriFont(headerFontName: string, bodyFontName: string
* @param weight what font weight to fetch font
* @returns `.ttf` file of google font
*/
-async function fetchTtf(fontName: string, weight: FontWeight): Promise<ArrayBuffer> {
+export async function fetchTtf(
+ rawFontName: string,
+ weight: FontWeight,
+): Promise<Buffer<ArrayBufferLike> | undefined> {
+ const fontName = rawFontName.replaceAll(" ", "+")
+ const cacheKey = `${fontName}-${weight}`
+ const cacheDir = path.join(QUARTZ, ".quartz-cache", "fonts")
+ const cachePath = path.join(cacheDir, cacheKey)
+
+ // Check if font exists in cache
try {
- // Get css file from google fonts
- const cssResponse = await fetch(
- `https://fonts.googleapis.com/css2?family=${fontName}:wght@${weight}`,
- )
- const css = await cssResponse.text()
-
- // Extract .ttf url from css file
- const urlRegex = /url\((https:\/\/fonts.gstatic.com\/s\/.*?.ttf)\)/g
- const match = urlRegex.exec(css)
-
- if (!match) {
- throw new Error("Could not fetch font")
- }
-
- // Retrieve font data as ArrayBuffer
- const fontResponse = await fetch(match[1])
-
- // fontData is an ArrayBuffer containing the .ttf file data (get match[1] due to google fonts response format, always contains link twice, but second entry is the "raw" link)
- const fontData = await fontResponse.arrayBuffer()
-
- return fontData
+ await fs.access(cachePath)
+ return fs.readFile(cachePath)
} catch (error) {
- throw new Error(`Error fetching font: ${error}`)
+ // ignore errors and fetch font
}
+
+ // Get css file from google fonts
+ const cssResponse = await fetch(
+ `https://fonts.googleapis.com/css2?family=${fontName}:wght@${weight}`,
+ )
+ const css = await cssResponse.text()
+
+ // Extract .ttf url from css file
+ const urlRegex = /url\((https:\/\/fonts.gstatic.com\/s\/.*?.ttf)\)/g
+ const match = urlRegex.exec(css)
+
+ if (!match) {
+ console.log(
+ chalk.yellow(
+ `\nWarning: Failed to fetch font ${rawFontName} with weight ${weight}, got ${cssResponse.statusText}`,
+ ),
+ )
+ return
+ }
+
+ // fontData is an ArrayBuffer containing the .ttf file data
+ const fontResponse = await fetch(match[1])
+ const fontData = Buffer.from(await fontResponse.arrayBuffer())
+ await fs.mkdir(cacheDir, { recursive: true })
+ await fs.writeFile(cachePath, fontData)
+
+ return fontData
}
export type SocialImageOptions = {
@@ -79,21 +135,12 @@ export type SocialImageOptions = {
excludeRoot: boolean
/**
* JSX to use for generating image. See satori docs for more info (https://github.com/vercel/satori)
- * @param cfg global quartz config
- * @param userOpts options that can be set by user
- * @param title title of current page
- * @param description description of current page
- * @param fonts global font that can be used for styling
- * @param fileData full fileData of current page
- * @returns prepared jsx to be used for generating image
*/
imageStructure: (
- cfg: GlobalConfiguration,
- userOpts: UserOpts,
- title: string,
- description: string,
- fonts: SatoriOptions["fonts"],
- fileData: QuartzPluginData,
+ options: ImageOptions & {
+ userOpts: UserOpts
+ iconBase64?: string
+ },
) => JSXInternal.Element
}
@@ -108,22 +155,10 @@ export type ImageOptions = {
* what description to use as body in image
*/
description: string
- /**
- * what fileName to use when writing to disk
- */
- fileName: string
- /**
- * what directory to store image in
- */
- fileDir: string
- /**
- * what file extension to use (should be `webp` unless you also change sharp conversion)
- */
- fileExt: string
/**
* header + body font to be used when generating satori image (as promise to work around sync in component)
*/
- fontsPromise: Promise<SatoriOptions["fonts"]>
+ fonts: SatoriOptions["fonts"]
/**
* `GlobalConfiguration` of quartz (used for theme/typography)
*/
@@ -135,74 +170,111 @@ export type ImageOptions = {
}
// This is the default template for generated social image.
-export const defaultImage: SocialImageOptions["imageStructure"] = (
- cfg: GlobalConfiguration,
- { colorScheme }: UserOpts,
- title: string,
- description: string,
- fonts: SatoriOptions["fonts"],
- _fileData: QuartzPluginData,
-) => {
- const fontBreakPoint = 22
+export const defaultImage: SocialImageOptions["imageStructure"] = ({
+ cfg,
+ userOpts,
+ title,
+ description,
+ fileData,
+ iconBase64,
+}) => {
+ const { colorScheme } = userOpts
+ const fontBreakPoint = 32
const useSmallerFont = title.length > fontBreakPoint
- const iconPath = `https://${cfg.baseUrl}/static/icon.png`
+
+ // Format date if available
+ const rawDate = getDate(cfg, fileData)
+ const date = rawDate ? formatDate(rawDate, cfg.locale) : null
+
+ // Calculate reading time
+ const { minutes } = readingTime(fileData.text ?? "")
+ const readingTimeText = i18n(cfg.locale).components.contentMeta.readingTime({
+ minutes: Math.ceil(minutes),
+ })
+
+ // Get tags if available
+ const tags = fileData.frontmatter?.tags ?? []
+ const bodyFont = getFontSpecificationName(cfg.theme.typography.body)
+ const headerFont = getFontSpecificationName(cfg.theme.typography.header)
return (
+ {/* Header Section */}
-
+ {iconBase64 && (
+
+ )}
+ {cfg.baseUrl}
+
+
+
+ {/* Title Section */}
+
+ {title}
+
+
+ {/* Description Section */}
+
+ {/* Footer with Metadata */}
+
+ {/* Left side - Date and Reading Time */}
+
+ {date && (
+
+
+
+
+
+
+
+ {date}
+
+ )}
+
+
+
+
+
+ {readingTimeText}
+
+
+
+ {/* Right side - Tags */}
+
+ {tags.slice(0, 3).map((tag: string) => (
+
+ #{tag}
+
+ ))}
+
+
)
}
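
For context only (also not part of the patch), a rough sketch of how these utilities could fit together when rendering a card: `getSatoriFonts` resolves every requested weight of the theme's typography, `defaultImage` builds the JSX tree, and satori rasterizes it to SVG. The `renderOgSvg` helper and the 1200×630 dimensions are illustrative, and `UserOpts` is assumed to be exported alongside `SocialImageOptions`.

```typescript
import satori from "satori"
import { SatoriOptions } from "satori/wasm"
import { GlobalConfiguration } from "../cfg"
import { QuartzPluginData } from "../plugins/vfile"
import { getSatoriFonts, defaultImage, UserOpts } from "./og"

async function renderOgSvg(
  cfg: GlobalConfiguration,
  userOpts: UserOpts,
  fileData: QuartzPluginData,
  title: string,
  description: string,
): Promise<string> {
  // resolve (or read from the on-disk cache) every weight of the header/body fonts
  const fonts: SatoriOptions["fonts"] = await getSatoriFonts(
    cfg.theme.typography.header,
    cfg.theme.typography.body,
  )
  // build the card as JSX and let satori turn it into an SVG string
  return satori(defaultImage({ cfg, userOpts, title, description, fileData, fonts }), {
    width: 1200, // illustrative dimensions
    height: 630,
    fonts,
  })
}
```
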
diff --git a/quartz/util/path.ts b/quartz/util/path.ts
index 8f8502979..0681fae72 100644
--- a/quartz/util/path.ts
+++ b/quartz/util/path.ts
@@ -36,7 +36,7 @@ export type RelativeURL = SlugLike<"relative">
export function isRelativeURL(s: string): s is RelativeURL {
const validStart = /^\.{1,2}/.test(s)
const validEnding = !endsWith(s, "index")
- return validStart && validEnding && ![".md", ".html"].includes(_getFileExtension(s) ?? "")
+ return validStart && validEnding && ![".md", ".html"].includes(getFileExtension(s) ?? "")
}
export function getFullSlug(window: Window): FullSlug {
@@ -61,7 +61,7 @@ function sluggify(s: string): string {
export function slugifyFilePath(fp: FilePath, excludeExt?: boolean): FullSlug {
fp = stripSlashes(fp) as FilePath
- let ext = _getFileExtension(fp)
+ let ext = getFileExtension(fp)
const withoutFileExt = fp.replace(new RegExp(ext + "$"), "")
if (excludeExt || [".md", ".html", undefined].includes(ext)) {
ext = ""
@@ -247,7 +247,7 @@ export function transformLink(src: FullSlug, target: string, opts: TransformOpti
}
// path helpers
-function isFolderPath(fplike: string): boolean {
+export function isFolderPath(fplike: string): boolean {
return (
fplike.endsWith("/") ||
endsWith(fplike, "index") ||
@@ -260,7 +260,7 @@ export function endsWith(s: string, suffix: string): boolean {
return s === suffix || s.endsWith("/" + suffix)
}
-function trimSuffix(s: string, suffix: string): string {
+export function trimSuffix(s: string, suffix: string): string {
if (endsWith(s, suffix)) {
s = s.slice(0, -suffix.length)
}
@@ -272,10 +272,10 @@ function containsForbiddenCharacters(s: string): boolean {
}
function _hasFileExtension(s: string): boolean {
- return _getFileExtension(s) !== undefined
+ return getFileExtension(s) !== undefined
}
-function _getFileExtension(s: string): string | undefined {
+export function getFileExtension(s: string): string | undefined {
return s.match(/\.[A-Za-z0-9]+$/)?.[0]
}
diff --git a/quartz/util/theme.ts b/quartz/util/theme.ts
index 8381cc720..56261e35b 100644
--- a/quartz/util/theme.ts
+++ b/quartz/util/theme.ts
@@ -15,7 +15,7 @@ interface Colors {
darkMode: ColorScheme
}
-type FontSpecification =
+export type FontSpecification =
| string
| {
name: string
@@ -90,6 +90,36 @@ export function googleFontHref(theme: Theme) {
return `https://fonts.googleapis.com/css2?family=${bodyFont}&family=${headerFont}&family=${codeFont}&display=swap`
}
+export interface GoogleFontFile {
+ url: string
+ filename: string
+ extension: string
+}
+
+export async function processGoogleFonts(
+ stylesheet: string,
+ baseUrl: string,
+): Promise<{
+ processedStylesheet: string
+ fontFiles: GoogleFontFile[]
+}> {
+ const fontSourceRegex = /url\((https:\/\/fonts.gstatic.com\/s\/[^)]+\.(woff2|ttf))\)/g
+ const fontFiles: GoogleFontFile[] = []
+ let processedStylesheet = stylesheet
+
+ let match
+ while ((match = fontSourceRegex.exec(stylesheet)) !== null) {
+ const url = match[1]
+ const [filename, extension] = url.split("/").pop()!.split(".")
+ const staticUrl = `https://${baseUrl}/static/fonts/${filename}.${extension}`
+
+ processedStylesheet = processedStylesheet.replace(url, staticUrl)
+ fontFiles.push({ url, filename, extension })
+ }
+
+ return { processedStylesheet, fontFiles }
+}
+
export function joinStyles(theme: Theme, ...stylesheet: string[]) {
return `
${stylesheet.join("\n\n")}
diff --git a/quartz/worker.ts b/quartz/worker.ts
index c9cd98055..f4cf4c600 100644
--- a/quartz/worker.ts
+++ b/quartz/worker.ts
@@ -1,8 +1,8 @@
import sourceMapSupport from "source-map-support"
sourceMapSupport.install(options)
import cfg from "../quartz.config"
-import { Argv, BuildCtx } from "./util/ctx"
-import { FilePath, FullSlug } from "./util/path"
+import { BuildCtx, WorkerSerializableBuildCtx } from "./util/ctx"
+import { FilePath } from "./util/path"
import {
createFileParser,
createHtmlProcessor,
@@ -14,35 +14,24 @@ import { MarkdownContent, ProcessedContent } from "./plugins/vfile"
// only called from worker thread
export async function parseMarkdown(
- buildId: string,
- argv: Argv,
+ partialCtx: WorkerSerializableBuildCtx,
fps: FilePath[],
-): Promise<[MarkdownContent[], FullSlug[]]> {
- // this is a hack
- // we assume markdown parsers can add to `allSlugs`,
- // but don't actually use them
- const allSlugs: FullSlug[] = []
+): Promise<MarkdownContent[]> {