forked from jackyzha0/quartz

Compare commits: 7 commits (v4 ... jackyzha0/)

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 3173d185ed |  |
|  | de727b4686 |  |
|  | 07ffc8681e |  |
|  | f301eca9a7 |  |
|  | 1fb7756c49 |  |
|  | c5a8b199ae |  |
|  | 5d50282124 |  |
.github/workflows/docker-build-push.yaml (vendored, 2 changes)

@@ -25,7 +25,7 @@ jobs:
        with:
          fetch-depth: 1
      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v5.1.0
        uses: rlespinasse/github-slug-action@v5.0.0
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
action.sh (160 lines removed)

@@ -1,160 +0,0 @@
#!/bin/bash

# To fetch and use this script in a GitHub action:
#
# curl -s -S https://raw.githubusercontent.com/saberzero1/quartz-themes/master/action.sh | bash -s -- <THEME_NAME>

RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
BLUE='\033[1;34m'
NC='\033[0m'

echo_err() { echo -e "${RED}$1${NC}"; }
echo_warn() { echo -e "${YELLOW}$1${NC}"; }
echo_ok() { echo -e "${GREEN}$1${NC}"; }
echo_info() { echo -e "${BLUE}$1${NC}"; }

THEME_DIR="themes"
QUARTZ_STYLES_DIR="quartz/styles"

if test -f ${QUARTZ_STYLES_DIR}/custom.scss; then
  echo_ok "Quartz root successfully detected..."
  THEME_DIR="${QUARTZ_STYLES_DIR}/${THEME_DIR}"
else
  echo_warn "Quartz root not detected, checking if we are in the styles directory..."
  if test -f custom.scss; then
    echo_ok "Styles directory detected..."
  else
    echo_err "Cannot detect Quartz repository. Are you in the correct working directory?" 1>&2
    exit 1
  fi
fi

echo -e "Input theme: ${BLUE}$*${NC}"

echo "Parsing input theme..."

# Concat parameters
result=""

for param in "$@"; do
  if [ -n "$result" ]; then
    result="$result-"
  fi

  result="$result$param"
done

if [ "$result" = "" ]; then
  echo_warn "No theme provided, defaulting to Tokyo Night..."
  result="tokyo-night"
fi

# Convert to lowercase
THEME=$(echo "$result" | tr '[:upper:]' '[:lower:]')

echo -e "Theme ${BLUE}$*${NC} parsed to $(echo_info ${THEME})"

echo "Validating theme..."

GITHUB_URL_BASE="https://raw.githubusercontent.com/saberzero1/quartz-themes/master/__CONVERTER/"
GITHUB_OUTPUT_DIR="__OUTPUT/"
GITHUB_OVERRIDE_DIR="__OVERRIDES/"
GITHUB_THEME_DIR="${THEME}/"
CSS_INDEX_URL="${GITHUB_URL_BASE}${GITHUB_OUTPUT_DIR}${GITHUB_THEME_DIR}_index.scss"
CSS_FONT_URL="${GITHUB_URL_BASE}${GITHUB_OUTPUT_DIR}${GITHUB_THEME_DIR}_fonts.scss"
CSS_DARK_URL="${GITHUB_URL_BASE}${GITHUB_OUTPUT_DIR}${GITHUB_THEME_DIR}_dark.scss"
CSS_LIGHT_URL="${GITHUB_URL_BASE}${GITHUB_OUTPUT_DIR}${GITHUB_THEME_DIR}_light.scss"
CSS_OVERRIDE_URL="${GITHUB_URL_BASE}${GITHUB_OVERRIDE_DIR}${GITHUB_THEME_DIR}_index.scss"
README_URL="${GITHUB_URL_BASE}${GITHUB_OVERRIDE_DIR}${GITHUB_THEME_DIR}README.md"

PULSE=$(curl -o /dev/null --silent -lw '%{http_code}' "${CSS_INDEX_URL}")

if [ "${PULSE}" = "200" ]; then
  echo_ok "Theme '${THEME}' found. Preparing to fetch files..."
else
  if [ "${PULSE}" = "404" ]; then
    echo_err "Theme '${THEME}' not found. Please check the compatibility list." 1>&2
    exit 1
  else
    echo_err "Something weird happened. If this issue persists, please open an Issue on GitHub." 1>&2
    exit 1
  fi
fi

echo "Cleaning theme directory..."

rm -rf ${THEME_DIR}

echo "Creating theme directory..."

mkdir -p ${THEME_DIR}/overrides

echo "Fetching theme files..."

curl -s -S -o ${THEME_DIR}/_index.scss "${CSS_INDEX_URL}"
curl -s -S -o ${THEME_DIR}/_fonts.scss "${CSS_FONT_URL}"
curl -s -S -o ${THEME_DIR}/_dark.scss "${CSS_DARK_URL}"
curl -s -S -o ${THEME_DIR}/_light.scss "${CSS_LIGHT_URL}"
curl -s -S -o ${THEME_DIR}/overrides/_index.scss "${CSS_OVERRIDE_URL}"

echo "Fetching README file..."

curl -s -S -o ${THEME_DIR}/README.md "${README_URL}"

echo "Checking theme files..."

if test -f ${THEME_DIR}/_index.scss; then
  echo_ok "_index.scss exists"
else
  echo_err "_index.scss missing" 1>&2
  exit 1
fi

if test -f ${THEME_DIR}/_fonts.scss; then
  echo_ok "_fonts.scss exists"
else
  echo_err "_fonts.scss missing" 1>&2
  exit 1
fi

if test -f ${THEME_DIR}/_dark.scss; then
  echo_ok "_dark.scss exists"
else
  echo_err "_dark.scss missing" 1>&2
  exit 1
fi

if test -f ${THEME_DIR}/_light.scss; then
  echo_ok "_light.scss exists"
else
  echo_err "_light.scss missing" 1>&2
  exit 1
fi

if test -f ${THEME_DIR}/overrides/_index.scss; then
  echo_ok "overrides/_index.scss exists"
else
  echo_err "overrides/_index.scss missing" 1>&2
  exit 1
fi

if test -f ${THEME_DIR}/README.md; then
  echo_ok "README file exists"
else
  echo_warn "README file missing"
fi

echo "Verifying setup..."

if grep -q '^@use "./themes";' ${THEME_DIR}/../custom.scss; then
  # Import already present in custom.scss
  echo_warn "Theme import line already present in custom.scss. Skipping..."
else
  # Add `@use "./themes";` import to custom.scss
  sed -ir 's#@use "./base.scss";#@use "./base.scss";\n@use "./themes";#' ${THEME_DIR}/../custom.scss
  echo_info "Added import line to custom.scss..."
fi

echo_ok "Finished fetching and applying theme '${THEME}'."
@@ -221,26 +221,12 @@ export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (

export type QuartzEmitterPluginInstance = {
  name: string
  emit(
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
  ): Promise<FilePath[]> | AsyncGenerator<FilePath>
  partialEmit?(
    ctx: BuildCtx,
    content: ProcessedContent[],
    resources: StaticResources,
    changeEvents: ChangeEvent[],
  ): Promise<FilePath[]> | AsyncGenerator<FilePath> | null
  emit(ctx: BuildCtx, content: ProcessedContent[], resources: StaticResources): Promise<FilePath[]>
  getQuartzComponents(ctx: BuildCtx): QuartzComponent[]
}
```

An emitter plugin must define a `name` field, an `emit` function, and a `getQuartzComponents` function. It can optionally implement a `partialEmit` function for incremental builds.

- `emit` is responsible for looking at all the parsed and filtered content and then appropriately creating files and returning a list of paths to files the plugin created.
- `partialEmit` is an optional function that enables incremental builds. It receives information about which files have changed (`changeEvents`) and can selectively rebuild only the necessary files. This is useful for optimizing build times in development mode. If `partialEmit` is undefined, it will default to the `emit` function.
- `getQuartzComponents` declares which Quartz components the emitter uses to construct its pages.

An emitter plugin must define a `name` field, an `emit` function, and a `getQuartzComponents` function. `emit` is responsible for looking at all the parsed and filtered content and then appropriately creating files and returning a list of paths to files the plugin created.
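For illustration only, a minimal sketch of an emitter plugin with this shape that writes one text file per page. The import paths, the `SlugDump` name, and the output layout are assumptions made for the example, not part of the documentation being diffed:

```ts
import fs from "node:fs/promises"
import path from "node:path"
// assumed import locations; adjust to wherever this file lives in the repo
import { QuartzEmitterPlugin } from "../types"
import { FilePath } from "../../util/path"

export const SlugDump: QuartzEmitterPlugin = () => ({
  name: "SlugDump",
  async emit(ctx, content, _resources) {
    const emitted: FilePath[] = []
    // content is the parsed + filtered ProcessedContent ([tree, vfile] pairs)
    for (const [_tree, file] of content) {
      const dest = path.join(ctx.argv.output, `${file.data.slug}.txt`) as FilePath
      await fs.mkdir(path.dirname(dest), { recursive: true })
      await fs.writeFile(dest, `slug: ${file.data.slug}`)
      emitted.push(dest)
    }
    return emitted
  },
  getQuartzComponents() {
    // this emitter does not render any Quartz components
    return []
  },
})
```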
Creating new files can be done via the regular Node [fs module](https://nodejs.org/api/fs.html) (e.g. `fs.cp` or `fs.writeFile`) or via the `write` function in `quartz/plugins/emitters/helpers.ts` if you are creating files that contain text. `write` has the following signature:
@@ -41,12 +41,11 @@ This part of the configuration concerns anything that can affect the whole site.

- `ignorePatterns`: a list of [glob](<https://en.wikipedia.org/wiki/Glob_(programming)>) patterns that Quartz should ignore and not search through when looking for files inside the `content` folder. See [[private pages]] for more details.
- `defaultDateType`: whether to use created, modified, or published as the default date to display on pages and page listings.
- `theme`: configure how the site looks.
  - `cdnCaching`: if `true` (default), use Google CDN to cache the fonts. This will generally be faster. Disable (`false`) this if you want Quartz to download the fonts to be self-contained.
  - `cdnCaching`: If `true` (default), use Google CDN to cache the fonts. This will generally will be faster. Disable (`false`) this if you want Quartz to download the fonts to be self-contained.
  - `typography`: what fonts to use. Any font available on [Google Fonts](https://fonts.google.com/) works here.
    - `title`: font for the title of the site (optional, same as `header` by default)
    - `header`: font to use for headers
    - `code`: font for inline and block quotes
    - `body`: font for everything
    - `header`: Font to use for headers
    - `code`: Font for inline and block quotes.
    - `body`: Font for everything
  - `colors`: controls the theming of the site.
    - `light`: page background
    - `lightgray`: borders
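To make these options concrete, a hedged sketch of the corresponding `theme` block in `quartz.config.ts`. The font names match values shown in the `quartz.config.ts` diff further down this page; the color values are placeholders, not Quartz's real defaults:

```ts
// fragment of quartz.config.ts (configuration.theme only)
theme: {
  fontOrigin: "googleFonts",
  cdnCaching: true, // set to false to have Quartz download the fonts itself
  typography: {
    header: "Schibsted Grotesk",
    body: "Source Sans Pro",
    code: "IBM Plex Mono",
  },
  colors: {
    lightMode: {
      light: "#ffffff",     // page background
      lightgray: "#e8e8e8", // borders
      // ...the remaining color slots continue in the list above
    },
    // darkMode: { ... }
  },
},
```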
@@ -109,25 +108,3 @@ Some plugins are included by default in the [`quartz.config.ts`](https://github.

You can see a list of all plugins and their configuration options [[tags/plugin|here]].

If you'd like to make your own plugins, see the [[making plugins|making custom plugins]] guide.

## Fonts

Fonts can be specified as a `string` or a `FontSpecification`:

```ts
// string
typography: {
  header: "Schibsted Grotesk",
  ...
}

// FontSpecification
typography: {
  header: {
    name: "Schibsted Grotesk",
    weights: [400, 700],
    includeItalic: true,
  },
  ...
}
```
@@ -32,7 +32,7 @@ If you prefer instructions in a video format you can try following Nicole van de

## 🔧 Features

- [[Obsidian compatibility]], [[full-text search]], [[graph view]], note transclusion, [[wikilinks]], [[backlinks]], [[features/Latex|Latex]], [[syntax highlighting]], [[popover previews]], [[Docker Support]], [[i18n|internationalization]], [[comments]] and [many more](./features/) right out of the box
- Hot-reload on configuration edits and incremental rebuilds for content edits
- Hot-reload for both configuration and content
- Simple JSX layouts and [[creating components|page components]]
- [[SPA Routing|Ridiculously fast page loads]] and tiny bundle sizes
- Fully-customizable parsing, filtering, and page generation through [[making plugins|plugins]]
package-lock.json (generated, 22 changes)
@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@jackyzha0/quartz",
|
||||
"version": "4.5.0",
|
||||
"version": "4.4.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@jackyzha0/quartz",
|
||||
"version": "4.5.0",
|
||||
"version": "4.4.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@clack/prompts": "^0.10.0",
|
||||
@ -14,7 +14,6 @@
|
||||
"@myriaddreamin/rehype-typst": "^0.5.4",
|
||||
"@napi-rs/simple-git": "0.1.19",
|
||||
"@tweenjs/tween.js": "^25.0.0",
|
||||
"ansi-truncate": "^1.2.0",
|
||||
"async-mutex": "^0.5.0",
|
||||
"chalk": "^5.4.1",
|
||||
"chokidar": "^4.0.3",
|
||||
@ -35,7 +34,6 @@
|
||||
"mdast-util-to-hast": "^13.2.0",
|
||||
"mdast-util-to-string": "^4.0.0",
|
||||
"micromorph": "^0.4.5",
|
||||
"minimatch": "^10.0.1",
|
||||
"pixi.js": "^8.8.1",
|
||||
"preact": "^10.26.4",
|
||||
"preact-render-to-string": "^6.5.13",
|
||||
@ -2034,15 +2032,6 @@
|
||||
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/ansi-truncate": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/ansi-truncate/-/ansi-truncate-1.2.0.tgz",
|
||||
"integrity": "sha512-/SLVrxNIP8o8iRHjdK3K9s2hDqdvb86NEjZOAB6ecWFsOo+9obaby97prnvAPn6j7ExXCpbvtlJFYPkkspg4BQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-string-truncated-width": "^1.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/argparse": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
|
||||
@ -3069,12 +3058,6 @@
|
||||
"node": ">=8.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-string-truncated-width": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/fast-string-truncated-width/-/fast-string-truncated-width-1.2.1.tgz",
|
||||
"integrity": "sha512-Q9acT/+Uu3GwGj+5w/zsGuQjh9O1TyywhIwAxHudtWrgF09nHOPrvTLhQevPbttcxjr/SNN7mJmfOw/B1bXgow==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/fastq": {
|
||||
"version": "1.19.0",
|
||||
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz",
|
||||
@ -5255,7 +5238,6 @@
|
||||
"version": "10.0.1",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz",
|
||||
"integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"brace-expansion": "^2.0.1"
|
||||
},
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
"name": "@jackyzha0/quartz",
|
||||
"description": "🌱 publish your digital garden and notes as a website",
|
||||
"private": true,
|
||||
"version": "4.5.0",
|
||||
"version": "4.4.0",
|
||||
"type": "module",
|
||||
"author": "jackyzha0 <j.zhao2k19@gmail.com>",
|
||||
"license": "MIT",
|
||||
@ -40,7 +40,6 @@
|
||||
"@myriaddreamin/rehype-typst": "^0.5.4",
|
||||
"@napi-rs/simple-git": "0.1.19",
|
||||
"@tweenjs/tween.js": "^25.0.0",
|
||||
"ansi-truncate": "^1.2.0",
|
||||
"async-mutex": "^0.5.0",
|
||||
"chalk": "^5.4.1",
|
||||
"chokidar": "^4.0.3",
|
||||
@ -61,7 +60,6 @@
|
||||
"mdast-util-to-hast": "^13.2.0",
|
||||
"mdast-util-to-string": "^4.0.0",
|
||||
"micromorph": "^0.4.5",
|
||||
"minimatch": "^10.0.1",
|
||||
"pixi.js": "^8.8.1",
|
||||
"preact": "^10.26.4",
|
||||
"preact-render-to-string": "^6.5.13",
|
||||
|
||||
@@ -8,24 +8,24 @@ import * as Plugin from "./quartz/plugins"
 */
const config: QuartzConfig = {
  configuration: {
    pageTitle: "isuckatcode.lol",
    pageTitleSuffix: " | isuckatcode.lol",
    pageTitle: "Quartz 4",
    pageTitleSuffix: "",
    enableSPA: true,
    enablePopovers: true,
    analytics: {
      provider: "plausible",
    },
    locale: "en-US",
    baseUrl: "isuckatcode.lol",
    baseUrl: "quartz.jzhao.xyz",
    ignorePatterns: ["private", "templates", ".obsidian"],
    defaultDateType: "created",
    theme: {
      fontOrigin: "googleFonts",
      cdnCaching: true,
      typography: {
        header: "Courier Prime",
        body: "Roboto",
        code: "Courier Prime",
        header: "Schibsted Grotesk",
        body: "Source Sans Pro",
        code: "IBM Plex Mono",
      },
      colors: {
        lightMode: {
@@ -57,7 +57,7 @@ const config: QuartzConfig = {
    transformers: [
      Plugin.FrontMatter(),
      Plugin.CreatedModifiedDate({
        priority: ["frontmatter", "git", "filesystem"],
        priority: ["frontmatter", "filesystem"],
      }),
      Plugin.SyntaxHighlighting({
        theme: {
@@ -87,7 +87,6 @@ const config: QuartzConfig = {
      Plugin.Assets(),
      Plugin.Static(),
      Plugin.NotFoundPage(),
      // Comment out CustomOgImages to speed up build time
      Plugin.CustomOgImages(),
    ],
  },
@@ -49,15 +49,8 @@ export const defaultListPageLayout: PageLayout = {
  left: [
    Component.PageTitle(),
    Component.MobileOnly(Component.Spacer()),
    Component.Flex({
      components: [
        {
          Component: Component.Search(),
          grow: true,
        },
        { Component: Component.Darkmode() },
      ],
    }),
    Component.Search(),
    Component.Darkmode(),
    Component.Explorer(),
  ],
  right: [],
quartz/build.ts (432 changes)
@ -9,7 +9,7 @@ import { parseMarkdown } from "./processors/parse"
|
||||
import { filterContent } from "./processors/filter"
|
||||
import { emitContent } from "./processors/emit"
|
||||
import cfg from "../quartz.config"
|
||||
import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
|
||||
import { FilePath, FullSlug, joinSegments, slugifyFilePath } from "./util/path"
|
||||
import chokidar from "chokidar"
|
||||
import { ProcessedContent } from "./plugins/vfile"
|
||||
import { Argv, BuildCtx } from "./util/ctx"
|
||||
@ -17,39 +17,34 @@ import { glob, toPosixPath } from "./util/glob"
|
||||
import { trace } from "./util/trace"
|
||||
import { options } from "./util/sourcemap"
|
||||
import { Mutex } from "async-mutex"
|
||||
import DepGraph from "./depgraph"
|
||||
import { getStaticResourcesFromPlugins } from "./plugins"
|
||||
import { randomIdNonSecure } from "./util/random"
|
||||
import { ChangeEvent } from "./plugins/types"
|
||||
import { minimatch } from "minimatch"
|
||||
|
||||
type ContentMap = Map<
|
||||
FilePath,
|
||||
| {
|
||||
type: "markdown"
|
||||
content: ProcessedContent
|
||||
}
|
||||
| {
|
||||
type: "other"
|
||||
}
|
||||
>
|
||||
type Dependencies = Record<string, DepGraph<FilePath> | null>
|
||||
|
||||
type BuildData = {
|
||||
ctx: BuildCtx
|
||||
ignored: GlobbyFilterFunction
|
||||
mut: Mutex
|
||||
contentMap: ContentMap
|
||||
changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
|
||||
initialSlugs: FullSlug[]
|
||||
// TODO merge contentMap and trackedAssets
|
||||
contentMap: Map<FilePath, ProcessedContent>
|
||||
trackedAssets: Set<FilePath>
|
||||
toRebuild: Set<FilePath>
|
||||
toRemove: Set<FilePath>
|
||||
lastBuildMs: number
|
||||
dependencies: Dependencies
|
||||
}
|
||||
|
||||
type FileEvent = "add" | "change" | "delete"
|
||||
|
||||
async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
|
||||
const ctx: BuildCtx = {
|
||||
buildId: randomIdNonSecure(),
|
||||
argv,
|
||||
cfg,
|
||||
allSlugs: [],
|
||||
allFiles: [],
|
||||
incremental: false,
|
||||
}
|
||||
|
||||
const perf = new PerfTimer()
|
||||
@ -72,70 +67,64 @@ async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
|
||||
|
||||
perf.addEvent("glob")
|
||||
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
|
||||
const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
|
||||
const fps = allFiles.filter((fp) => fp.endsWith(".md")).sort()
|
||||
console.log(
|
||||
`Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
|
||||
`Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
|
||||
)
|
||||
|
||||
const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
|
||||
ctx.allFiles = allFiles
|
||||
const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
|
||||
ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
|
||||
|
||||
const parsedFiles = await parseMarkdown(ctx, filePaths)
|
||||
const filteredContent = filterContent(ctx, parsedFiles)
|
||||
|
||||
const dependencies: Record<string, DepGraph<FilePath> | null> = {}
|
||||
|
||||
// Only build dependency graphs if we're doing a fast rebuild
|
||||
if (argv.fastRebuild) {
|
||||
const staticResources = getStaticResourcesFromPlugins(ctx)
|
||||
for (const emitter of cfg.plugins.emitters) {
|
||||
dependencies[emitter.name] =
|
||||
(await emitter.getDependencyGraph?.(ctx, filteredContent, staticResources)) ?? null
|
||||
}
|
||||
}
|
||||
|
||||
await emitContent(ctx, filteredContent)
|
||||
console.log(chalk.green(`Done processing ${markdownPaths.length} files in ${perf.timeSince()}`))
|
||||
console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
|
||||
release()
|
||||
|
||||
if (argv.watch) {
|
||||
ctx.incremental = true
|
||||
return startWatching(ctx, mut, parsedFiles, clientRefresh)
|
||||
if (argv.serve) {
|
||||
return startServing(ctx, mut, parsedFiles, clientRefresh, dependencies)
|
||||
}
|
||||
}
|
||||
|
||||
// setup watcher for rebuilds
|
||||
async function startWatching(
|
||||
async function startServing(
|
||||
ctx: BuildCtx,
|
||||
mut: Mutex,
|
||||
initialContent: ProcessedContent[],
|
||||
clientRefresh: () => void,
|
||||
dependencies: Dependencies, // emitter name: dep graph
|
||||
) {
|
||||
const { argv, allFiles } = ctx
|
||||
|
||||
const contentMap: ContentMap = new Map()
|
||||
for (const filePath of allFiles) {
|
||||
contentMap.set(filePath, {
|
||||
type: "other",
|
||||
})
|
||||
}
|
||||
const { argv } = ctx
|
||||
|
||||
// cache file parse results
|
||||
const contentMap = new Map<FilePath, ProcessedContent>()
|
||||
for (const content of initialContent) {
|
||||
const [_tree, vfile] = content
|
||||
contentMap.set(vfile.data.relativePath!, {
|
||||
type: "markdown",
|
||||
content,
|
||||
})
|
||||
contentMap.set(vfile.data.filePath!, content)
|
||||
}
|
||||
|
||||
const gitIgnoredMatcher = await isGitIgnored()
|
||||
const buildData: BuildData = {
|
||||
ctx,
|
||||
mut,
|
||||
dependencies,
|
||||
contentMap,
|
||||
ignored: (path) => {
|
||||
if (gitIgnoredMatcher(path)) return true
|
||||
const pathStr = path.toString()
|
||||
for (const pattern of cfg.configuration.ignorePatterns) {
|
||||
if (minimatch(pathStr, pattern)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
},
|
||||
|
||||
changesSinceLastBuild: {},
|
||||
ignored: await isGitIgnored(),
|
||||
initialSlugs: ctx.allSlugs,
|
||||
toRebuild: new Set<FilePath>(),
|
||||
toRemove: new Set<FilePath>(),
|
||||
trackedAssets: new Set<FilePath>(),
|
||||
lastBuildMs: 0,
|
||||
}
|
||||
|
||||
@ -145,37 +134,34 @@ async function startWatching(
|
||||
ignoreInitial: true,
|
||||
})
|
||||
|
||||
const changes: ChangeEvent[] = []
|
||||
const buildFromEntry = argv.fastRebuild ? partialRebuildFromEntrypoint : rebuildFromEntrypoint
|
||||
watcher
|
||||
.on("add", (fp) => {
|
||||
if (buildData.ignored(fp)) return
|
||||
changes.push({ path: fp as FilePath, type: "add" })
|
||||
void rebuild(changes, clientRefresh, buildData)
|
||||
})
|
||||
.on("change", (fp) => {
|
||||
if (buildData.ignored(fp)) return
|
||||
changes.push({ path: fp as FilePath, type: "change" })
|
||||
void rebuild(changes, clientRefresh, buildData)
|
||||
})
|
||||
.on("unlink", (fp) => {
|
||||
if (buildData.ignored(fp)) return
|
||||
changes.push({ path: fp as FilePath, type: "delete" })
|
||||
void rebuild(changes, clientRefresh, buildData)
|
||||
})
|
||||
.on("add", (fp) => buildFromEntry(fp as string, "add", clientRefresh, buildData))
|
||||
.on("change", (fp) => buildFromEntry(fp as string, "change", clientRefresh, buildData))
|
||||
.on("unlink", (fp) => buildFromEntry(fp as string, "delete", clientRefresh, buildData))
|
||||
|
||||
return async () => {
|
||||
await watcher.close()
|
||||
}
|
||||
}
|
||||
|
||||
async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
|
||||
const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
|
||||
async function partialRebuildFromEntrypoint(
|
||||
filepath: string,
|
||||
action: FileEvent,
|
||||
clientRefresh: () => void,
|
||||
buildData: BuildData, // note: this function mutates buildData
|
||||
) {
|
||||
const { ctx, ignored, dependencies, contentMap, mut, toRemove } = buildData
|
||||
const { argv, cfg } = ctx
|
||||
|
||||
// don't do anything for gitignored files
|
||||
if (ignored(filepath)) {
|
||||
return
|
||||
}
|
||||
|
||||
const buildId = randomIdNonSecure()
|
||||
ctx.buildId = buildId
|
||||
buildData.lastBuildMs = new Date().getTime()
|
||||
const numChangesInBuild = changes.length
|
||||
const release = await mut.acquire()
|
||||
|
||||
// if there's another build after us, release and let them do it
|
||||
@ -185,105 +171,261 @@ async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildD
|
||||
}
|
||||
|
||||
const perf = new PerfTimer()
|
||||
perf.addEvent("rebuild")
|
||||
console.log(chalk.yellow("Detected change, rebuilding..."))
|
||||
|
||||
// update changesSinceLastBuild
|
||||
for (const change of changes) {
|
||||
changesSinceLastBuild[change.path] = change.type
|
||||
}
|
||||
// UPDATE DEP GRAPH
|
||||
const fp = joinSegments(argv.directory, toPosixPath(filepath)) as FilePath
|
||||
|
||||
const staticResources = getStaticResourcesFromPlugins(ctx)
|
||||
const pathsToParse: FilePath[] = []
|
||||
for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
|
||||
if (type === "delete" || path.extname(fp) !== ".md") continue
|
||||
const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
|
||||
pathsToParse.push(fullPath)
|
||||
}
|
||||
let processedFiles: ProcessedContent[] = []
|
||||
|
||||
const parsed = await parseMarkdown(ctx, pathsToParse)
|
||||
for (const content of parsed) {
|
||||
contentMap.set(content[1].data.relativePath!, {
|
||||
type: "markdown",
|
||||
content,
|
||||
})
|
||||
}
|
||||
switch (action) {
|
||||
case "add":
|
||||
// add to cache when new file is added
|
||||
processedFiles = await parseMarkdown(ctx, [fp])
|
||||
processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile]))
|
||||
|
||||
// update state using changesSinceLastBuild
|
||||
// we do this weird play of add => compute change events => remove
|
||||
// so that partialEmitters can do appropriate cleanup based on the content of deleted files
|
||||
for (const [file, change] of Object.entries(changesSinceLastBuild)) {
|
||||
if (change === "delete") {
|
||||
// universal delete case
|
||||
contentMap.delete(file as FilePath)
|
||||
}
|
||||
// update the dep graph by asking all emitters whether they depend on this file
|
||||
for (const emitter of cfg.plugins.emitters) {
|
||||
const emitterGraph =
|
||||
(await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
|
||||
|
||||
// manually track non-markdown files as processed files only
|
||||
// contains markdown files
|
||||
if (change === "add" && path.extname(file) !== ".md") {
|
||||
contentMap.set(file as FilePath, {
|
||||
type: "other",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
|
||||
const path = fp as FilePath
|
||||
const processedContent = contentMap.get(path)
|
||||
if (processedContent?.type === "markdown") {
|
||||
const [_tree, file] = processedContent.content
|
||||
return {
|
||||
type,
|
||||
path,
|
||||
file,
|
||||
if (emitterGraph) {
|
||||
const existingGraph = dependencies[emitter.name]
|
||||
if (existingGraph !== null) {
|
||||
existingGraph.mergeGraph(emitterGraph)
|
||||
} else {
|
||||
// might be the first time we're adding a mardown file
|
||||
dependencies[emitter.name] = emitterGraph
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
break
|
||||
case "change":
|
||||
// invalidate cache when file is changed
|
||||
processedFiles = await parseMarkdown(ctx, [fp])
|
||||
processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile]))
|
||||
|
||||
return {
|
||||
type,
|
||||
path,
|
||||
}
|
||||
})
|
||||
// only content files can have added/removed dependencies because of transclusions
|
||||
if (path.extname(fp) === ".md") {
|
||||
for (const emitter of cfg.plugins.emitters) {
|
||||
// get new dependencies from all emitters for this file
|
||||
const emitterGraph =
|
||||
(await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
|
||||
|
||||
// update allFiles and then allSlugs with the consistent view of content map
|
||||
ctx.allFiles = Array.from(contentMap.keys())
|
||||
ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))
|
||||
const processedFiles = Array.from(contentMap.values())
|
||||
.filter((file) => file.type === "markdown")
|
||||
.map((file) => file.content)
|
||||
// only update the graph if the emitter plugin uses the changed file
|
||||
// eg. Assets plugin ignores md files, so we skip updating the graph
|
||||
if (emitterGraph?.hasNode(fp)) {
|
||||
// merge the new dependencies into the dep graph
|
||||
dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
|
||||
}
|
||||
}
|
||||
}
|
||||
break
|
||||
case "delete":
|
||||
toRemove.add(fp)
|
||||
break
|
||||
}
|
||||
|
||||
if (argv.verbose) {
|
||||
console.log(`Updated dependency graphs in ${perf.timeSince()}`)
|
||||
}
|
||||
|
||||
// EMIT
|
||||
perf.addEvent("rebuild")
|
||||
let emittedFiles = 0
|
||||
|
||||
for (const emitter of cfg.plugins.emitters) {
|
||||
// Try to use partialEmit if available, otherwise assume the output is static
|
||||
const emitFn = emitter.partialEmit ?? emitter.emit
|
||||
const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
|
||||
if (emitted === null) {
|
||||
const depGraph = dependencies[emitter.name]
|
||||
|
||||
// emitter hasn't defined a dependency graph. call it with all processed files
|
||||
if (depGraph === null) {
|
||||
if (argv.verbose) {
|
||||
console.log(
|
||||
`Emitter ${emitter.name} doesn't define a dependency graph. Calling it with all files...`,
|
||||
)
|
||||
}
|
||||
|
||||
const files = [...contentMap.values()].filter(
|
||||
([_node, vfile]) => !toRemove.has(vfile.data.filePath!),
|
||||
)
|
||||
|
||||
const emitted = await emitter.emit(ctx, files, staticResources)
|
||||
if (Symbol.asyncIterator in emitted) {
|
||||
// Async generator case
|
||||
for await (const file of emitted) {
|
||||
emittedFiles++
|
||||
if (ctx.argv.verbose) {
|
||||
console.log(`[emit:${emitter.name}] ${file}`)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Array case
|
||||
emittedFiles += emitted.length
|
||||
if (ctx.argv.verbose) {
|
||||
for (const file of emitted) {
|
||||
console.log(`[emit:${emitter.name}] ${file}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
if (Symbol.asyncIterator in emitted) {
|
||||
// Async generator case
|
||||
for await (const file of emitted) {
|
||||
emittedFiles++
|
||||
if (ctx.argv.verbose) {
|
||||
console.log(`[emit:${emitter.name}] ${file}`)
|
||||
// only call the emitter if it uses this file
|
||||
if (depGraph.hasNode(fp)) {
|
||||
// re-emit using all files that are needed for the downstream of this file
|
||||
// eg. for ContentIndex, the dep graph could be:
|
||||
// a.md --> contentIndex.json
|
||||
// b.md ------^
|
||||
//
|
||||
// if a.md changes, we need to re-emit contentIndex.json,
|
||||
// and supply [a.md, b.md] to the emitter
|
||||
const upstreams = [...depGraph.getLeafNodeAncestors(fp)] as FilePath[]
|
||||
|
||||
const upstreamContent = upstreams
|
||||
// filter out non-markdown files
|
||||
.filter((file) => contentMap.has(file))
|
||||
// if file was deleted, don't give it to the emitter
|
||||
.filter((file) => !toRemove.has(file))
|
||||
.map((file) => contentMap.get(file)!)
|
||||
|
||||
const emitted = await emitter.emit(ctx, upstreamContent, staticResources)
|
||||
if (Symbol.asyncIterator in emitted) {
|
||||
// Async generator case
|
||||
for await (const file of emitted) {
|
||||
emittedFiles++
|
||||
if (ctx.argv.verbose) {
|
||||
console.log(`[emit:${emitter.name}] ${file}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Array case
|
||||
emittedFiles += emitted.length
|
||||
if (ctx.argv.verbose) {
|
||||
for (const file of emitted) {
|
||||
console.log(`[emit:${emitter.name}] ${file}`)
|
||||
} else {
|
||||
// Array case
|
||||
emittedFiles += emitted.length
|
||||
if (ctx.argv.verbose) {
|
||||
for (const file of emitted) {
|
||||
console.log(`[emit:${emitter.name}] ${file}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
|
||||
|
||||
// CLEANUP
|
||||
const destinationsToDelete = new Set<FilePath>()
|
||||
for (const file of toRemove) {
|
||||
// remove from cache
|
||||
contentMap.delete(file)
|
||||
Object.values(dependencies).forEach((depGraph) => {
|
||||
// remove the node from dependency graphs
|
||||
depGraph?.removeNode(file)
|
||||
// remove any orphan nodes. eg if a.md is deleted, a.html is orphaned and should be removed
|
||||
const orphanNodes = depGraph?.removeOrphanNodes()
|
||||
orphanNodes?.forEach((node) => {
|
||||
// only delete files that are in the output directory
|
||||
if (node.startsWith(argv.output)) {
|
||||
destinationsToDelete.add(node)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
await rimraf([...destinationsToDelete])
|
||||
|
||||
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
|
||||
changes.splice(0, numChangesInBuild)
|
||||
|
||||
toRemove.clear()
|
||||
release()
|
||||
clientRefresh()
|
||||
}
|
||||
|
||||
async function rebuildFromEntrypoint(
|
||||
fp: string,
|
||||
action: FileEvent,
|
||||
clientRefresh: () => void,
|
||||
buildData: BuildData, // note: this function mutates buildData
|
||||
) {
|
||||
const { ctx, ignored, mut, initialSlugs, contentMap, toRebuild, toRemove, trackedAssets } =
|
||||
buildData
|
||||
|
||||
const { argv } = ctx
|
||||
|
||||
// don't do anything for gitignored files
|
||||
if (ignored(fp)) {
|
||||
return
|
||||
}
|
||||
|
||||
// dont bother rebuilding for non-content files, just track and refresh
|
||||
fp = toPosixPath(fp)
|
||||
const filePath = joinSegments(argv.directory, fp) as FilePath
|
||||
if (path.extname(fp) !== ".md") {
|
||||
if (action === "add" || action === "change") {
|
||||
trackedAssets.add(filePath)
|
||||
} else if (action === "delete") {
|
||||
trackedAssets.delete(filePath)
|
||||
}
|
||||
clientRefresh()
|
||||
return
|
||||
}
|
||||
|
||||
if (action === "add" || action === "change") {
|
||||
toRebuild.add(filePath)
|
||||
} else if (action === "delete") {
|
||||
toRemove.add(filePath)
|
||||
}
|
||||
|
||||
const buildId = randomIdNonSecure()
|
||||
ctx.buildId = buildId
|
||||
buildData.lastBuildMs = new Date().getTime()
|
||||
const release = await mut.acquire()
|
||||
|
||||
// there's another build after us, release and let them do it
|
||||
if (ctx.buildId !== buildId) {
|
||||
release()
|
||||
return
|
||||
}
|
||||
|
||||
const perf = new PerfTimer()
|
||||
console.log(chalk.yellow("Detected change, rebuilding..."))
|
||||
|
||||
try {
|
||||
const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
|
||||
const parsedContent = await parseMarkdown(ctx, filesToRebuild)
|
||||
for (const content of parsedContent) {
|
||||
const [_tree, vfile] = content
|
||||
contentMap.set(vfile.data.filePath!, content)
|
||||
}
|
||||
|
||||
for (const fp of toRemove) {
|
||||
contentMap.delete(fp)
|
||||
}
|
||||
|
||||
const parsedFiles = [...contentMap.values()]
|
||||
const filteredContent = filterContent(ctx, parsedFiles)
|
||||
|
||||
// re-update slugs
|
||||
const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
|
||||
.filter((fp) => !toRemove.has(fp))
|
||||
.map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
|
||||
|
||||
ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
|
||||
|
||||
// TODO: we can probably traverse the link graph to figure out what's safe to delete here
|
||||
// instead of just deleting everything
|
||||
await rimraf(path.join(argv.output, ".*"), { glob: true })
|
||||
await emitContent(ctx, filteredContent)
|
||||
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
|
||||
} catch (err) {
|
||||
console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
|
||||
if (argv.verbose) {
|
||||
console.log(chalk.red(err))
|
||||
}
|
||||
}
|
||||
|
||||
clientRefresh()
|
||||
toRebuild.clear()
|
||||
toRemove.clear()
|
||||
release()
|
||||
}
|
||||
|
||||
|
||||
@ -2,6 +2,7 @@ import { ValidDateType } from "./components/Date"
|
||||
import { QuartzComponent } from "./components/types"
|
||||
import { ValidLocale } from "./i18n"
|
||||
import { PluginTypes } from "./plugins/types"
|
||||
import { SocialImageOptions } from "./util/og"
|
||||
import { Theme } from "./util/theme"
|
||||
|
||||
export type Analytics =
|
||||
|
||||
@ -71,10 +71,10 @@ export const BuildArgv = {
|
||||
default: false,
|
||||
describe: "run a local server to live-preview your Quartz",
|
||||
},
|
||||
watch: {
|
||||
fastRebuild: {
|
||||
boolean: true,
|
||||
default: false,
|
||||
describe: "watch for changes and rebuild automatically",
|
||||
describe: "[experimental] rebuild only the changed files",
|
||||
},
|
||||
baseDir: {
|
||||
string: true,
|
||||
|
||||
@ -225,10 +225,6 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
|
||||
* @param {*} argv arguments for `build`
|
||||
*/
|
||||
export async function handleBuild(argv) {
|
||||
if (argv.serve) {
|
||||
argv.watch = true
|
||||
}
|
||||
|
||||
console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
|
||||
const ctx = await esbuild.context({
|
||||
entryPoints: [fp],
|
||||
@ -335,10 +331,9 @@ export async function handleBuild(argv) {
|
||||
clientRefresh()
|
||||
}
|
||||
|
||||
let clientRefresh = () => {}
|
||||
if (argv.serve) {
|
||||
const connections = []
|
||||
clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))
|
||||
const clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))
|
||||
|
||||
if (argv.baseDir !== "" && !argv.baseDir.startsWith("/")) {
|
||||
argv.baseDir = "/" + argv.baseDir
|
||||
@ -438,7 +433,6 @@ export async function handleBuild(argv) {
|
||||
|
||||
return serve()
|
||||
})
|
||||
|
||||
server.listen(argv.port)
|
||||
const wss = new WebSocketServer({ port: argv.wsPort })
|
||||
wss.on("connection", (ws) => connections.push(ws))
|
||||
@ -447,27 +441,16 @@ export async function handleBuild(argv) {
|
||||
`Started a Quartz server listening at http://localhost:${argv.port}${argv.baseDir}`,
|
||||
),
|
||||
)
|
||||
} else {
|
||||
await build(clientRefresh)
|
||||
ctx.dispose()
|
||||
}
|
||||
|
||||
if (argv.watch) {
|
||||
const paths = await globby([
|
||||
"**/*.ts",
|
||||
"quartz/cli/*.js",
|
||||
"quartz/static/**/*",
|
||||
"**/*.tsx",
|
||||
"**/*.scss",
|
||||
"package.json",
|
||||
])
|
||||
console.log("hint: exit with ctrl+c")
|
||||
const paths = await globby(["**/*.ts", "**/*.tsx", "**/*.scss", "package.json"])
|
||||
chokidar
|
||||
.watch(paths, { ignoreInitial: true })
|
||||
.on("add", () => build(clientRefresh))
|
||||
.on("change", () => build(clientRefresh))
|
||||
.on("unlink", () => build(clientRefresh))
|
||||
|
||||
console.log(chalk.grey("hint: exit with ctrl+c"))
|
||||
} else {
|
||||
await build(() => {})
|
||||
ctx.dispose()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import { i18n } from "../i18n"
|
||||
import { FullSlug, getFileExtension, joinSegments, pathToRoot } from "../util/path"
|
||||
import { CSSResourceToStyleElement, JSResourceToScriptElement } from "../util/resources"
|
||||
import { googleFontHref, googleFontSubsetHref } from "../util/theme"
|
||||
import { googleFontHref } from "../util/theme"
|
||||
import { QuartzComponent, QuartzComponentConstructor, QuartzComponentProps } from "./types"
|
||||
import { unescapeHTML } from "../util/escape"
|
||||
import { CustomOgImagesEmitterName } from "../plugins/emitters/ogImage"
|
||||
@ -45,9 +45,6 @@ export default (() => {
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" />
|
||||
<link rel="stylesheet" href={googleFontHref(cfg.theme)} />
|
||||
{cfg.theme.typography.title && (
|
||||
<link rel="stylesheet" href={googleFontSubsetHref(cfg.theme, cfg.pageTitle)} />
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
<link rel="preconnect" href="https://cdnjs.cloudflare.com" crossOrigin="anonymous" />
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import { FullSlug, isFolderPath, resolveRelative } from "../util/path"
|
||||
import { FullSlug, resolveRelative } from "../util/path"
|
||||
import { QuartzPluginData } from "../plugins/vfile"
|
||||
import { Date, getDate } from "./Date"
|
||||
import { QuartzComponent, QuartzComponentProps } from "./types"
|
||||
@ -8,13 +8,6 @@ export type SortFn = (f1: QuartzPluginData, f2: QuartzPluginData) => number
|
||||
|
||||
export function byDateAndAlphabetical(cfg: GlobalConfiguration): SortFn {
|
||||
return (f1, f2) => {
|
||||
// Sort folders first
|
||||
const f1IsFolder = isFolderPath(f1.slug ?? "")
|
||||
const f2IsFolder = isFolderPath(f2.slug ?? "")
|
||||
if (f1IsFolder && !f2IsFolder) return -1
|
||||
if (!f1IsFolder && f2IsFolder) return 1
|
||||
|
||||
// If both are folders or both are files, sort by date/alphabetical
|
||||
if (f1.dates && f2.dates) {
|
||||
// sort descending
|
||||
return getDate(cfg, f2)!.getTime() - getDate(cfg, f1)!.getTime()
|
||||
|
||||
@ -17,7 +17,6 @@ PageTitle.css = `
|
||||
.page-title {
|
||||
font-size: 1.75rem;
|
||||
margin: 0;
|
||||
font-family: var(--titleFont);
|
||||
}
|
||||
`
|
||||
|
||||
|
||||
@ -1,14 +1,16 @@
|
||||
import { QuartzComponent, QuartzComponentConstructor, QuartzComponentProps } from "../types"
|
||||
import path from "path"
|
||||
|
||||
import style from "../styles/listPage.scss"
|
||||
import { PageList, SortFn } from "../PageList"
|
||||
import { byDateAndAlphabetical, PageList, SortFn } from "../PageList"
|
||||
import { stripSlashes, simplifySlug, joinSegments, FullSlug } from "../../util/path"
|
||||
import { Root } from "hast"
|
||||
import { htmlToJsx } from "../../util/jsx"
|
||||
import { i18n } from "../../i18n"
|
||||
import { QuartzPluginData } from "../../plugins/vfile"
|
||||
import { ComponentChildren } from "preact"
|
||||
import { concatenateResources } from "../../util/resources"
|
||||
import { FileTrieNode } from "../../util/fileTrie"
|
||||
|
||||
interface FolderContentOptions {
|
||||
/**
|
||||
* Whether to display number of folders
|
||||
@ -25,88 +27,51 @@ const defaultOptions: FolderContentOptions = {
|
||||
|
||||
export default ((opts?: Partial<FolderContentOptions>) => {
|
||||
const options: FolderContentOptions = { ...defaultOptions, ...opts }
|
||||
let trie: FileTrieNode<
|
||||
QuartzPluginData & {
|
||||
slug: string
|
||||
title: string
|
||||
filePath: string
|
||||
}
|
||||
>
|
||||
|
||||
const FolderContent: QuartzComponent = (props: QuartzComponentProps) => {
|
||||
const { tree, fileData, allFiles, cfg } = props
|
||||
const folderSlug = stripSlashes(simplifySlug(fileData.slug!))
|
||||
const folderParts = folderSlug.split(path.posix.sep)
|
||||
|
||||
if (!trie) {
|
||||
trie = new FileTrieNode([])
|
||||
allFiles.forEach((file) => {
|
||||
if (file.frontmatter) {
|
||||
trie.add({
|
||||
...file,
|
||||
slug: file.slug!,
|
||||
title: file.frontmatter.title,
|
||||
filePath: file.filePath!,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
const allPagesInFolder: QuartzPluginData[] = []
|
||||
const allPagesInSubfolders: Map<FullSlug, QuartzPluginData[]> = new Map()
|
||||
|
||||
const folder = trie.findNode(fileData.slug!.split("/"))
|
||||
if (!folder) {
|
||||
return null
|
||||
}
|
||||
allFiles.forEach((file) => {
|
||||
const fileSlug = stripSlashes(simplifySlug(file.slug!))
|
||||
const prefixed = fileSlug.startsWith(folderSlug) && fileSlug !== folderSlug
|
||||
const fileParts = fileSlug.split(path.posix.sep)
|
||||
const isDirectChild = fileParts.length === folderParts.length + 1
|
||||
|
||||
const allPagesInFolder: QuartzPluginData[] =
|
||||
folder.children
|
||||
.map((node) => {
|
||||
// regular file, proceed
|
||||
if (node.data) {
|
||||
return node.data
|
||||
}
|
||||
if (!prefixed) {
|
||||
return
|
||||
}
|
||||
|
||||
if (node.isFolder && options.showSubfolders) {
|
||||
// folders that dont have data need synthetic files
|
||||
const getMostRecentDates = (): QuartzPluginData["dates"] => {
|
||||
let maybeDates: QuartzPluginData["dates"] | undefined = undefined
|
||||
for (const child of node.children) {
|
||||
if (child.data?.dates) {
|
||||
// compare all dates and assign to maybeDates if its more recent or its not set
|
||||
if (!maybeDates) {
|
||||
maybeDates = { ...child.data.dates }
|
||||
} else {
|
||||
if (child.data.dates.created > maybeDates.created) {
|
||||
maybeDates.created = child.data.dates.created
|
||||
}
|
||||
if (isDirectChild) {
|
||||
allPagesInFolder.push(file)
|
||||
} else if (options.showSubfolders) {
|
||||
const subfolderSlug = joinSegments(
|
||||
...fileParts.slice(0, folderParts.length + 1),
|
||||
) as FullSlug
|
||||
const pagesInFolder = allPagesInSubfolders.get(subfolderSlug) || []
|
||||
allPagesInSubfolders.set(subfolderSlug, [...pagesInFolder, file])
|
||||
}
|
||||
})
|
||||
|
||||
if (child.data.dates.modified > maybeDates.modified) {
|
||||
maybeDates.modified = child.data.dates.modified
|
||||
}
|
||||
|
||||
if (child.data.dates.published > maybeDates.published) {
|
||||
maybeDates.published = child.data.dates.published
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return (
|
||||
maybeDates ?? {
|
||||
created: new Date(),
|
||||
modified: new Date(),
|
||||
published: new Date(),
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
slug: node.slug,
|
||||
dates: getMostRecentDates(),
|
||||
frontmatter: {
|
||||
title: node.displayName,
|
||||
tags: [],
|
||||
},
|
||||
}
|
||||
}
|
||||
allPagesInSubfolders.forEach((files, subfolderSlug) => {
|
||||
const hasIndex = allPagesInFolder.some(
|
||||
(file) => subfolderSlug === stripSlashes(simplifySlug(file.slug!)),
|
||||
)
|
||||
if (!hasIndex) {
|
||||
const subfolderDates = files.sort(byDateAndAlphabetical(cfg))[0].dates
|
||||
const subfolderTitle = subfolderSlug.split(path.posix.sep).at(-1)!
|
||||
allPagesInFolder.push({
|
||||
slug: subfolderSlug,
|
||||
dates: subfolderDates,
|
||||
frontmatter: { title: subfolderTitle, tags: ["folder"] },
|
||||
})
|
||||
.filter((page) => page !== undefined) ?? []
|
||||
}
|
||||
})
|
||||
|
||||
const cssClasses: string[] = fileData.frontmatter?.cssclasses ?? []
|
||||
const classes = cssClasses.join(" ")
|
||||
const listProps = {
|
||||
|
||||
@ -9,6 +9,7 @@ import { visit } from "unist-util-visit"
|
||||
import { Root, Element, ElementContent } from "hast"
|
||||
import { GlobalConfiguration } from "../cfg"
|
||||
import { i18n } from "../i18n"
|
||||
import { QuartzPluginData } from "../plugins/vfile"
|
||||
|
||||
interface RenderComponents {
|
||||
head: QuartzComponent
|
||||
@ -24,6 +25,7 @@ interface RenderComponents {
|
||||
const headerRegex = new RegExp(/h[1-6]/)
|
||||
export function pageResources(
|
||||
baseDir: FullSlug | RelativeURL,
|
||||
fileData: QuartzPluginData,
|
||||
staticResources: StaticResources,
|
||||
): StaticResources {
|
||||
const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json")
|
||||
@ -63,12 +65,17 @@ export function pageResources(
|
||||
return resources
|
||||
}
|
||||
|
||||
function renderTranscludes(
|
||||
root: Root,
|
||||
export function renderPage(
|
||||
cfg: GlobalConfiguration,
|
||||
slug: FullSlug,
|
||||
componentData: QuartzComponentProps,
|
||||
) {
|
||||
components: RenderComponents,
|
||||
pageResources: StaticResources,
|
||||
): string {
|
||||
// make a deep copy of the tree so we don't remove the transclusion references
|
||||
// for the file cached in contentMap in build.ts
|
||||
const root = clone(componentData.tree) as Root
|
||||
|
||||
// process transcludes in componentData
|
||||
visit(root, "element", (node, _index, _parent) => {
|
||||
if (node.tagName === "blockquote") {
|
||||
@ -184,19 +191,6 @@ function renderTranscludes(
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export function renderPage(
|
||||
cfg: GlobalConfiguration,
|
||||
slug: FullSlug,
|
||||
componentData: QuartzComponentProps,
|
||||
components: RenderComponents,
|
||||
pageResources: StaticResources,
|
||||
): string {
|
||||
// make a deep copy of the tree so we don't remove the transclusion references
|
||||
// for the file cached in contentMap in build.ts
|
||||
const root = clone(componentData.tree) as Root
|
||||
renderTranscludes(root, cfg, slug, componentData)
|
||||
|
||||
// set componentData.tree to the edited html that has transclusions rendered
|
||||
componentData.tree = root
|
||||
|
||||
@ -10,7 +10,7 @@ const emitThemeChangeEvent = (theme: "light" | "dark") => {
|
||||
}
|
||||
|
||||
document.addEventListener("nav", () => {
|
||||
const switchTheme = () => {
|
||||
const switchTheme = (e: Event) => {
|
||||
const newTheme =
|
||||
document.documentElement.getAttribute("saved-theme") === "dark" ? "light" : "dark"
|
||||
document.documentElement.setAttribute("saved-theme", newTheme)
|
||||
|
||||
@ -134,9 +134,9 @@ function createFolderNode(
|
||||
}
|
||||
|
||||
for (const child of node.children) {
|
||||
const childNode = child.isFolder
|
||||
? createFolderNode(currentSlug, child, opts)
|
||||
: createFileNode(currentSlug, child)
|
||||
const childNode = child.data
|
||||
? createFileNode(currentSlug, child)
|
||||
: createFolderNode(currentSlug, child, opts)
|
||||
ul.appendChild(childNode)
|
||||
}
|
||||
|
||||
@ -161,7 +161,7 @@ async function setupExplorer(currentSlug: FullSlug) {
|
||||
// Get folder state from local storage
|
||||
const storageTree = localStorage.getItem("fileTree")
|
||||
const serializedExplorerState = storageTree && opts.useSavedState ? JSON.parse(storageTree) : []
|
||||
const oldIndex = new Map<string, boolean>(
|
||||
const oldIndex = new Map(
|
||||
serializedExplorerState.map((entry: FolderState) => [entry.path, entry.collapsed]),
|
||||
)
|
||||
|
||||
@ -186,14 +186,10 @@ async function setupExplorer(currentSlug: FullSlug) {
|
||||
|
||||
// Get folder paths for state management
|
||||
const folderPaths = trie.getFolderPaths()
|
||||
currentExplorerState = folderPaths.map((path) => {
|
||||
const previousState = oldIndex.get(path)
|
||||
return {
|
||||
path,
|
||||
collapsed:
|
||||
previousState === undefined ? opts.folderDefaultState === "collapsed" : previousState,
|
||||
}
|
||||
})
|
||||
currentExplorerState = folderPaths.map((path) => ({
|
||||
path,
|
||||
collapsed: oldIndex.get(path) === true,
|
||||
}))
|
||||
|
||||
const explorerUl = explorer.querySelector(".explorer-ul")
|
||||
if (!explorerUl) continue
|
||||
@ -263,17 +259,15 @@ document.addEventListener("nav", async (e: CustomEventMap["nav"]) => {
|
||||
await setupExplorer(currentSlug)
|
||||
|
||||
// if mobile hamburger is visible, collapse by default
|
||||
for (const explorer of document.getElementsByClassName("explorer")) {
|
||||
const mobileExplorer = explorer.querySelector(".mobile-explorer")
|
||||
if (!mobileExplorer) return
|
||||
|
||||
if (mobileExplorer.checkVisibility()) {
|
||||
for (const explorer of document.getElementsByClassName("mobile-explorer")) {
|
||||
if (explorer.checkVisibility()) {
|
||||
explorer.classList.add("collapsed")
|
||||
explorer.setAttribute("aria-expanded", "false")
|
||||
}
|
||||
|
||||
mobileExplorer.classList.remove("hide-until-loaded")
|
||||
}
|
||||
|
||||
const hiddenUntilDoneLoading = document.querySelector("#mobile-explorer")
|
||||
hiddenUntilDoneLoading?.classList.remove("hide-until-loaded")
|
||||
})
|
||||
|
||||
function setFolderState(folderElement: HTMLElement, collapsed: boolean) {
|
||||
|
||||
@ -400,6 +400,7 @@ async function renderGraph(graph: HTMLElement, fullSlug: FullSlug) {
|
||||
})
|
||||
.circle(0, 0, nodeRadius(n))
|
||||
.fill({ color: isTagNode ? computedStyleMap["--light"] : color(n) })
|
||||
.stroke({ width: isTagNode ? 2 : 0, color: color(n) })
|
||||
.on("pointerover", (e) => {
|
||||
updateHoverInfo(e.target.label)
|
||||
oldLabelOpacity = label.alpha
|
||||
@ -415,10 +416,6 @@ async function renderGraph(graph: HTMLElement, fullSlug: FullSlug) {
|
||||
}
|
||||
})
|
||||
|
||||
if (isTagNode) {
|
||||
gfx.stroke({ width: 2, color: computedStyleMap["--tertiary"] })
|
||||
}
|
||||
|
||||
nodesContainer.addChild(gfx)
|
||||
labelsContainer.addChild(label)
|
||||
|
||||
|
||||
@ -52,8 +52,6 @@
|
||||
overflow: hidden;
|
||||
flex-shrink: 0;
|
||||
align-self: flex-start;
|
||||
margin-top: auto;
|
||||
margin-bottom: auto;
|
||||
}
|
||||
|
||||
button.mobile-explorer {
|
||||
|
||||
quartz/depgraph.test.ts (118 lines)
@ -0,0 +1,118 @@
|
||||
import test, { describe } from "node:test"
|
||||
import DepGraph from "./depgraph"
|
||||
import assert from "node:assert"
|
||||
|
||||
describe("DepGraph", () => {
|
||||
test("getLeafNodes", () => {
|
||||
const graph = new DepGraph<string>()
|
||||
graph.addEdge("A", "B")
|
||||
graph.addEdge("B", "C")
|
||||
graph.addEdge("D", "C")
|
||||
assert.deepStrictEqual(graph.getLeafNodes("A"), new Set(["C"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodes("B"), new Set(["C"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodes("C"), new Set(["C"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodes("D"), new Set(["C"]))
|
||||
})
|
||||
|
||||
describe("getLeafNodeAncestors", () => {
|
||||
test("gets correct ancestors in a graph without cycles", () => {
|
||||
const graph = new DepGraph<string>()
|
||||
graph.addEdge("A", "B")
|
||||
graph.addEdge("B", "C")
|
||||
graph.addEdge("D", "B")
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "D"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "D"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "D"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "D"]))
|
||||
})
|
||||
|
||||
test("gets correct ancestors in a graph with cycles", () => {
|
||||
const graph = new DepGraph<string>()
|
||||
graph.addEdge("A", "B")
|
||||
graph.addEdge("B", "C")
|
||||
graph.addEdge("C", "A")
|
||||
graph.addEdge("C", "D")
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "C"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "C"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "C"]))
|
||||
assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "C"]))
|
||||
})
|
||||
})
|
||||
|
||||
describe("mergeGraph", () => {
|
||||
test("merges two graphs", () => {
|
||||
const graph = new DepGraph<string>()
|
||||
graph.addEdge("A.md", "A.html")
|
||||
|
||||
const other = new DepGraph<string>()
|
||||
other.addEdge("B.md", "B.html")
|
||||
|
||||
graph.mergeGraph(other)
|
||||
|
||||
const expected = {
|
||||
nodes: ["A.md", "A.html", "B.md", "B.html"],
|
||||
edges: [
|
||||
["A.md", "A.html"],
|
||||
["B.md", "B.html"],
|
||||
],
|
||||
}
|
||||
|
||||
assert.deepStrictEqual(graph.export(), expected)
|
||||
})
|
||||
})
|
||||
|
||||
describe("updateIncomingEdgesForNode", () => {
|
||||
test("merges when node exists", () => {
|
||||
// A.md -> B.md -> B.html
|
||||
const graph = new DepGraph<string>()
|
||||
graph.addEdge("A.md", "B.md")
|
||||
graph.addEdge("B.md", "B.html")
|
||||
|
||||
// B.md is edited so it removes the A.md transclusion
|
||||
// and adds C.md transclusion
|
||||
// C.md -> B.md
|
||||
const other = new DepGraph<string>()
|
||||
other.addEdge("C.md", "B.md")
|
||||
other.addEdge("B.md", "B.html")
|
||||
|
||||
// A.md -> B.md removed, C.md -> B.md added
|
||||
// C.md -> B.md -> B.html
|
||||
graph.updateIncomingEdgesForNode(other, "B.md")
|
||||
|
||||
const expected = {
|
||||
nodes: ["A.md", "B.md", "B.html", "C.md"],
|
||||
edges: [
|
||||
["B.md", "B.html"],
|
||||
["C.md", "B.md"],
|
||||
],
|
||||
}
|
||||
|
||||
assert.deepStrictEqual(graph.export(), expected)
|
||||
})
|
||||
|
||||
test("adds node if it does not exist", () => {
|
||||
// A.md -> B.md
|
||||
const graph = new DepGraph<string>()
|
||||
graph.addEdge("A.md", "B.md")
|
||||
|
||||
// Add a new file C.md that transcludes B.md
|
||||
// B.md -> C.md
|
||||
const other = new DepGraph<string>()
|
||||
other.addEdge("B.md", "C.md")
|
||||
|
||||
// B.md -> C.md added
|
||||
// A.md -> B.md -> C.md
|
||||
graph.updateIncomingEdgesForNode(other, "C.md")
|
||||
|
||||
const expected = {
|
||||
nodes: ["A.md", "B.md", "C.md"],
|
||||
edges: [
|
||||
["A.md", "B.md"],
|
||||
["B.md", "C.md"],
|
||||
],
|
||||
}
|
||||
|
||||
assert.deepStrictEqual(graph.export(), expected)
|
||||
})
|
||||
})
|
||||
})
|
||||
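
For reference, the assertions above compare against DepGraph.export(), which serializes the graph into plain, insertion-ordered nodes/edges arrays (see the implementation in the new quartz/depgraph.ts below). A minimal standalone sketch of that round trip; the import path and the use of Node's built-in assert are assumptions, since the test file's imports are not shown here:

import assert from "node:assert"
import DepGraph from "./depgraph" // assumed relative path

const graph = new DepGraph<string>()
graph.addEdge("A.md", "A.html")
graph.addEdge("B.md", "A.html")

// export() returns insertion-ordered arrays, so deepStrictEqual is order-sensitive
assert.deepStrictEqual(graph.export(), {
  nodes: ["A.md", "A.html", "B.md"],
  edges: [
    ["A.md", "A.html"],
    ["B.md", "A.html"],
  ],
})
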
quartz/depgraph.ts (new file, 228 lines)
@@ -0,0 +1,228 @@
export default class DepGraph<T> {
  // node: incoming and outgoing edges
  _graph = new Map<T, { incoming: Set<T>; outgoing: Set<T> }>()

  constructor() {
    this._graph = new Map()
  }

  export(): Object {
    return {
      nodes: this.nodes,
      edges: this.edges,
    }
  }

  toString(): string {
    return JSON.stringify(this.export(), null, 2)
  }

  // BASIC GRAPH OPERATIONS

  get nodes(): T[] {
    return Array.from(this._graph.keys())
  }

  get edges(): [T, T][] {
    let edges: [T, T][] = []
    this.forEachEdge((edge) => edges.push(edge))
    return edges
  }

  hasNode(node: T): boolean {
    return this._graph.has(node)
  }

  addNode(node: T): void {
    if (!this._graph.has(node)) {
      this._graph.set(node, { incoming: new Set(), outgoing: new Set() })
    }
  }

  // Remove node and all edges connected to it
  removeNode(node: T): void {
    if (this._graph.has(node)) {
      // first remove all edges so other nodes don't have references to this node
      for (const target of this._graph.get(node)!.outgoing) {
        this.removeEdge(node, target)
      }
      for (const source of this._graph.get(node)!.incoming) {
        this.removeEdge(source, node)
      }
      this._graph.delete(node)
    }
  }

  forEachNode(callback: (node: T) => void): void {
    for (const node of this._graph.keys()) {
      callback(node)
    }
  }

  hasEdge(from: T, to: T): boolean {
    return Boolean(this._graph.get(from)?.outgoing.has(to))
  }

  addEdge(from: T, to: T): void {
    this.addNode(from)
    this.addNode(to)

    this._graph.get(from)!.outgoing.add(to)
    this._graph.get(to)!.incoming.add(from)
  }

  removeEdge(from: T, to: T): void {
    if (this._graph.has(from) && this._graph.has(to)) {
      this._graph.get(from)!.outgoing.delete(to)
      this._graph.get(to)!.incoming.delete(from)
    }
  }

  // returns -1 if node does not exist
  outDegree(node: T): number {
    return this.hasNode(node) ? this._graph.get(node)!.outgoing.size : -1
  }

  // returns -1 if node does not exist
  inDegree(node: T): number {
    return this.hasNode(node) ? this._graph.get(node)!.incoming.size : -1
  }

  forEachOutNeighbor(node: T, callback: (neighbor: T) => void): void {
    this._graph.get(node)?.outgoing.forEach(callback)
  }

  forEachInNeighbor(node: T, callback: (neighbor: T) => void): void {
    this._graph.get(node)?.incoming.forEach(callback)
  }

  forEachEdge(callback: (edge: [T, T]) => void): void {
    for (const [source, { outgoing }] of this._graph.entries()) {
      for (const target of outgoing) {
        callback([source, target])
      }
    }
  }

  // DEPENDENCY ALGORITHMS

  // Add all nodes and edges from other graph to this graph
  mergeGraph(other: DepGraph<T>): void {
    other.forEachEdge(([source, target]) => {
      this.addNode(source)
      this.addNode(target)
      this.addEdge(source, target)
    })
  }

  // For the node provided:
  // If node does not exist, add it
  // If an incoming edge was added in other, it is added in this graph
  // If an incoming edge was deleted in other, it is deleted in this graph
  updateIncomingEdgesForNode(other: DepGraph<T>, node: T): void {
    this.addNode(node)

    // Add edge if it is present in other
    other.forEachInNeighbor(node, (neighbor) => {
      this.addEdge(neighbor, node)
    })

    // For node provided, remove incoming edge if it is absent in other
    this.forEachEdge(([source, target]) => {
      if (target === node && !other.hasEdge(source, target)) {
        this.removeEdge(source, target)
      }
    })
  }

  // Remove all nodes that do not have any incoming or outgoing edges
  // A node may be orphaned if the only node pointing to it was removed
  removeOrphanNodes(): Set<T> {
    let orphanNodes = new Set<T>()

    this.forEachNode((node) => {
      if (this.inDegree(node) === 0 && this.outDegree(node) === 0) {
        orphanNodes.add(node)
      }
    })

    orphanNodes.forEach((node) => {
      this.removeNode(node)
    })

    return orphanNodes
  }

  // Get all leaf nodes (i.e. destination paths) reachable from the node provided
  // Eg. if the graph is A -> B -> C
  //                     D ---^
  // and the node is B, this function returns [C]
  getLeafNodes(node: T): Set<T> {
    let stack: T[] = [node]
    let visited = new Set<T>()
    let leafNodes = new Set<T>()

    // DFS
    while (stack.length > 0) {
      let node = stack.pop()!

      // If the node is already visited, skip it
      if (visited.has(node)) {
        continue
      }
      visited.add(node)

      // Check if the node is a leaf node (i.e. destination path)
      if (this.outDegree(node) === 0) {
        leafNodes.add(node)
      }

      // Add all unvisited neighbors to the stack
      this.forEachOutNeighbor(node, (neighbor) => {
        if (!visited.has(neighbor)) {
          stack.push(neighbor)
        }
      })
    }

    return leafNodes
  }

  // Get all ancestors of the leaf nodes reachable from the node provided
  // Eg. if the graph is A -> B -> C
  //                     D ---^
  // and the node is B, this function returns [A, B, D]
  getLeafNodeAncestors(node: T): Set<T> {
    const leafNodes = this.getLeafNodes(node)
    let visited = new Set<T>()
    let upstreamNodes = new Set<T>()

    // Backwards DFS for each leaf node
    leafNodes.forEach((leafNode) => {
      let stack: T[] = [leafNode]

      while (stack.length > 0) {
        let node = stack.pop()!

        if (visited.has(node)) {
          continue
        }
        visited.add(node)
        // Add node if it's not a leaf node (i.e. destination path)
        // Assumes destination file cannot depend on another destination file
        if (this.outDegree(node) !== 0) {
          upstreamNodes.add(node)
        }

        // Add all unvisited parents to the stack
        this.forEachInNeighbor(node, (parentNode) => {
          if (!visited.has(parentNode)) {
            stack.push(parentNode)
          }
        })
      }
    })

    return upstreamNodes
  }
}
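
Taken together with the emitter changes below, the intent is that each emitter's getDependencyGraph maps source files to the outputs it emits, the per-emitter graphs are presumably merged into one build graph, and a change to a single file is resolved through getLeafNodes (outputs to rewrite) and getLeafNodeAncestors (sources to re-emit). A rough usage sketch, not the actual build loop; the file paths, the buildGraph variable, and the import paths are illustrative assumptions:

import DepGraph from "./depgraph" // assumed relative path
import { FilePath } from "./util/path" // assumed relative path

// edges of the kind the ContentPage and TagPage getDependencyGraph implementations register
const buildGraph = new DepGraph<FilePath>()
buildGraph.addEdge("content/a.md" as FilePath, "public/a.html" as FilePath)
buildGraph.addEdge("content/a.md" as FilePath, "public/tags/index.html" as FilePath)
buildGraph.addEdge("content/b.md" as FilePath, "public/tags/index.html" as FilePath)

// when content/a.md changes:
const changed = "content/a.md" as FilePath
buildGraph.getLeafNodes(changed) // -> outputs to rewrite: public/a.html, public/tags/index.html
buildGraph.getLeafNodeAncestors(changed) // -> sources to re-emit: content/a.md, content/b.md
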
@ -3,12 +3,13 @@ import { QuartzComponentProps } from "../../components/types"
|
||||
import BodyConstructor from "../../components/Body"
|
||||
import { pageResources, renderPage } from "../../components/renderPage"
|
||||
import { FullPageLayout } from "../../cfg"
|
||||
import { FullSlug } from "../../util/path"
|
||||
import { FilePath, FullSlug } from "../../util/path"
|
||||
import { sharedPageComponents } from "../../../quartz.layout"
|
||||
import { NotFound } from "../../components"
|
||||
import { defaultProcessedContent } from "../vfile"
|
||||
import { write } from "./helpers"
|
||||
import { i18n } from "../../i18n"
|
||||
import DepGraph from "../../depgraph"
|
||||
|
||||
export const NotFoundPage: QuartzEmitterPlugin = () => {
|
||||
const opts: FullPageLayout = {
|
||||
@ -27,6 +28,9 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
|
||||
getQuartzComponents() {
|
||||
return [Head, Body, pageBody, Footer]
|
||||
},
|
||||
async getDependencyGraph(_ctx, _content, _resources) {
|
||||
return new DepGraph<FilePath>()
|
||||
},
|
||||
async *emit(ctx, _content, resources) {
|
||||
const cfg = ctx.cfg.configuration
|
||||
const slug = "404" as FullSlug
|
||||
@ -40,7 +44,7 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
|
||||
description: notFound,
|
||||
frontmatter: { title: notFound, tags: [] },
|
||||
})
|
||||
const externalResources = pageResources(path, resources)
|
||||
const externalResources = pageResources(path, vfile.data, resources)
|
||||
const componentData: QuartzComponentProps = {
|
||||
ctx,
|
||||
fileData: vfile.data,
|
||||
@ -58,6 +62,5 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
|
||||
ext: ".html",
|
||||
})
|
||||
},
|
||||
async *partialEmit() {},
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,47 +1,46 @@
|
||||
import { resolveRelative, simplifySlug } from "../../util/path"
|
||||
import { FilePath, joinSegments, resolveRelative, simplifySlug } from "../../util/path"
|
||||
import { QuartzEmitterPlugin } from "../types"
|
||||
import { write } from "./helpers"
|
||||
import { BuildCtx } from "../../util/ctx"
|
||||
import { VFile } from "vfile"
|
||||
|
||||
async function* processFile(ctx: BuildCtx, file: VFile) {
|
||||
const ogSlug = simplifySlug(file.data.slug!)
|
||||
|
||||
for (const slug of file.data.aliases ?? []) {
|
||||
const redirUrl = resolveRelative(slug, file.data.slug!)
|
||||
yield write({
|
||||
ctx,
|
||||
content: `
|
||||
<!DOCTYPE html>
|
||||
<html lang="en-us">
|
||||
<head>
|
||||
<title>${ogSlug}</title>
|
||||
<link rel="canonical" href="${redirUrl}">
|
||||
<meta name="robots" content="noindex">
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="refresh" content="0; url=${redirUrl}">
|
||||
</head>
|
||||
</html>
|
||||
`,
|
||||
slug,
|
||||
ext: ".html",
|
||||
})
|
||||
}
|
||||
}
|
||||
import DepGraph from "../../depgraph"
|
||||
import { getAliasSlugs } from "../transformers/frontmatter"
|
||||
|
||||
export const AliasRedirects: QuartzEmitterPlugin = () => ({
|
||||
name: "AliasRedirects",
|
||||
async *emit(ctx, content) {
|
||||
async getDependencyGraph(ctx, content, _resources) {
|
||||
const graph = new DepGraph<FilePath>()
|
||||
|
||||
const { argv } = ctx
|
||||
for (const [_tree, file] of content) {
|
||||
yield* processFile(ctx, file)
|
||||
for (const slug of getAliasSlugs(file.data.frontmatter?.aliases ?? [], argv, file)) {
|
||||
graph.addEdge(file.data.filePath!, joinSegments(argv.output, slug + ".html") as FilePath)
|
||||
}
|
||||
}
|
||||
|
||||
return graph
|
||||
},
|
||||
async *partialEmit(ctx, _content, _resources, changeEvents) {
|
||||
for (const changeEvent of changeEvents) {
|
||||
if (!changeEvent.file) continue
|
||||
if (changeEvent.type === "add" || changeEvent.type === "change") {
|
||||
// add new ones if this file still exists
|
||||
yield* processFile(ctx, changeEvent.file)
|
||||
async *emit(ctx, content, _resources) {
|
||||
for (const [_tree, file] of content) {
|
||||
const ogSlug = simplifySlug(file.data.slug!)
|
||||
|
||||
for (const slug of file.data.aliases ?? []) {
|
||||
const redirUrl = resolveRelative(slug, file.data.slug!)
|
||||
yield write({
|
||||
ctx,
|
||||
content: `
|
||||
<!DOCTYPE html>
|
||||
<html lang="en-us">
|
||||
<head>
|
||||
<title>${ogSlug}</title>
|
||||
<link rel="canonical" href="${redirUrl}">
|
||||
<meta name="robots" content="noindex">
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="refresh" content="0; url=${redirUrl}">
|
||||
</head>
|
||||
</html>
|
||||
`,
|
||||
slug,
|
||||
ext: ".html",
|
||||
})
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@ -3,6 +3,7 @@ import { QuartzEmitterPlugin } from "../types"
|
||||
import path from "path"
|
||||
import fs from "fs"
|
||||
import { glob } from "../../util/glob"
|
||||
import DepGraph from "../../depgraph"
|
||||
import { Argv } from "../../util/ctx"
|
||||
import { QuartzConfig } from "../../cfg"
|
||||
|
||||
@ -11,41 +12,40 @@ const filesToCopy = async (argv: Argv, cfg: QuartzConfig) => {
|
||||
return await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns])
|
||||
}
|
||||
|
||||
const copyFile = async (argv: Argv, fp: FilePath) => {
|
||||
const src = joinSegments(argv.directory, fp) as FilePath
|
||||
|
||||
const name = slugifyFilePath(fp)
|
||||
const dest = joinSegments(argv.output, name) as FilePath
|
||||
|
||||
// ensure dir exists
|
||||
const dir = path.dirname(dest) as FilePath
|
||||
await fs.promises.mkdir(dir, { recursive: true })
|
||||
|
||||
await fs.promises.copyFile(src, dest)
|
||||
return dest
|
||||
}
|
||||
|
||||
export const Assets: QuartzEmitterPlugin = () => {
|
||||
return {
|
||||
name: "Assets",
|
||||
async *emit({ argv, cfg }) {
|
||||
async getDependencyGraph(ctx, _content, _resources) {
|
||||
const { argv, cfg } = ctx
|
||||
const graph = new DepGraph<FilePath>()
|
||||
|
||||
const fps = await filesToCopy(argv, cfg)
|
||||
|
||||
for (const fp of fps) {
|
||||
const ext = path.extname(fp)
|
||||
const src = joinSegments(argv.directory, fp) as FilePath
|
||||
const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
|
||||
|
||||
const dest = joinSegments(argv.output, name) as FilePath
|
||||
|
||||
graph.addEdge(src, dest)
|
||||
}
|
||||
|
||||
return graph
|
||||
},
|
||||
async *emit({ argv, cfg }, _content, _resources) {
|
||||
const assetsPath = argv.output
|
||||
const fps = await filesToCopy(argv, cfg)
|
||||
for (const fp of fps) {
|
||||
yield copyFile(argv, fp)
|
||||
}
|
||||
},
|
||||
async *partialEmit(ctx, _content, _resources, changeEvents) {
|
||||
for (const changeEvent of changeEvents) {
|
||||
const ext = path.extname(changeEvent.path)
|
||||
if (ext === ".md") continue
|
||||
const ext = path.extname(fp)
|
||||
const src = joinSegments(argv.directory, fp) as FilePath
|
||||
const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
|
||||
|
||||
if (changeEvent.type === "add" || changeEvent.type === "change") {
|
||||
yield copyFile(ctx.argv, changeEvent.path)
|
||||
} else if (changeEvent.type === "delete") {
|
||||
const name = slugifyFilePath(changeEvent.path)
|
||||
const dest = joinSegments(ctx.argv.output, name) as FilePath
|
||||
await fs.promises.unlink(dest)
|
||||
}
|
||||
const dest = joinSegments(assetsPath, name) as FilePath
|
||||
const dir = path.dirname(dest) as FilePath
|
||||
await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
|
||||
await fs.promises.copyFile(src, dest)
|
||||
yield dest
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ import { FilePath, joinSegments } from "../../util/path"
|
||||
import { QuartzEmitterPlugin } from "../types"
|
||||
import fs from "fs"
|
||||
import chalk from "chalk"
|
||||
import DepGraph from "../../depgraph"
|
||||
|
||||
export function extractDomainFromBaseUrl(baseUrl: string) {
|
||||
const url = new URL(`https://${baseUrl}`)
|
||||
@ -10,7 +11,10 @@ export function extractDomainFromBaseUrl(baseUrl: string) {
|
||||
|
||||
export const CNAME: QuartzEmitterPlugin = () => ({
|
||||
name: "CNAME",
|
||||
async emit({ argv, cfg }) {
|
||||
async getDependencyGraph(_ctx, _content, _resources) {
|
||||
return new DepGraph<FilePath>()
|
||||
},
|
||||
async emit({ argv, cfg }, _content, _resources) {
|
||||
if (!cfg.configuration.baseUrl) {
|
||||
console.warn(chalk.yellow("CNAME emitter requires `baseUrl` to be set in your configuration"))
|
||||
return []
|
||||
@ -23,5 +27,4 @@ export const CNAME: QuartzEmitterPlugin = () => ({
|
||||
await fs.promises.writeFile(path, content)
|
||||
return [path] as FilePath[]
|
||||
},
|
||||
async *partialEmit() {},
|
||||
})
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import { FullSlug, joinSegments } from "../../util/path"
|
||||
import { FilePath, FullSlug, joinSegments } from "../../util/path"
|
||||
import { QuartzEmitterPlugin } from "../types"
|
||||
|
||||
// @ts-ignore
|
||||
@ -9,15 +9,11 @@ import styles from "../../styles/custom.scss"
|
||||
import popoverStyle from "../../components/styles/popover.scss"
|
||||
import { BuildCtx } from "../../util/ctx"
|
||||
import { QuartzComponent } from "../../components/types"
|
||||
import {
|
||||
googleFontHref,
|
||||
googleFontSubsetHref,
|
||||
joinStyles,
|
||||
processGoogleFonts,
|
||||
} from "../../util/theme"
|
||||
import { googleFontHref, joinStyles, processGoogleFonts } from "../../util/theme"
|
||||
import { Features, transform } from "lightningcss"
|
||||
import { transform as transpile } from "esbuild"
|
||||
import { write } from "./helpers"
|
||||
import DepGraph from "../../depgraph"
|
||||
|
||||
type ComponentResources = {
|
||||
css: string[]
|
||||
@ -90,7 +86,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
|
||||
componentResources.afterDOMLoaded.push(`
|
||||
const gtagScript = document.createElement("script")
|
||||
gtagScript.src = "https://www.googletagmanager.com/gtag/js?id=${tagId}"
|
||||
gtagScript.defer = true
|
||||
gtagScript.async = true
|
||||
document.head.appendChild(gtagScript)
|
||||
|
||||
window.dataLayer = window.dataLayer || [];
|
||||
@ -125,7 +121,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
|
||||
umamiScript.src = "${cfg.analytics.host ?? "https://analytics.umami.is"}/script.js"
|
||||
umamiScript.setAttribute("data-website-id", "${cfg.analytics.websiteId}")
|
||||
umamiScript.setAttribute("data-auto-track", "false")
|
||||
umamiScript.defer = true
|
||||
umamiScript.async = true
|
||||
document.head.appendChild(umamiScript)
|
||||
|
||||
document.addEventListener("nav", () => {
|
||||
@ -136,7 +132,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
|
||||
componentResources.afterDOMLoaded.push(`
|
||||
const goatcounterScript = document.createElement("script")
|
||||
goatcounterScript.src = "${cfg.analytics.scriptSrc ?? "https://gc.zgo.at/count.js"}"
|
||||
goatcounterScript.defer = true
|
||||
goatcounterScript.async = true
|
||||
goatcounterScript.setAttribute("data-goatcounter",
|
||||
"https://${cfg.analytics.websiteId}.${cfg.analytics.host ?? "goatcounter.com"}/count")
|
||||
document.head.appendChild(goatcounterScript)
|
||||
@ -177,13 +173,14 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
|
||||
const cabinScript = document.createElement("script")
|
||||
cabinScript.src = "${cfg.analytics.host ?? "https://scripts.withcabin.com"}/hello.js"
|
||||
cabinScript.defer = true
|
||||
cabinScript.async = true
|
||||
document.head.appendChild(cabinScript)
|
||||
`)
|
||||
} else if (cfg.analytics?.provider === "clarity") {
|
||||
componentResources.afterDOMLoaded.push(`
|
||||
const clarityScript = document.createElement("script")
|
||||
clarityScript.innerHTML= \`(function(c,l,a,r,i,t,y){c[a]=c[a]||function(){(c[a].q=c[a].q||[]).push(arguments)};
|
||||
t=l.createElement(r);t.defer=1;t.src="https://www.clarity.ms/tag/"+i;
|
||||
t=l.createElement(r);t.async=1;t.src="https://www.clarity.ms/tag/"+i;
|
||||
y=l.getElementsByTagName(r)[0];y.parentNode.insertBefore(t,y);
|
||||
})(window, document, "clarity", "script", "${cfg.analytics.projectId}");\`
|
||||
document.head.appendChild(clarityScript)
|
||||
@ -207,6 +204,9 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
|
||||
export const ComponentResources: QuartzEmitterPlugin = () => {
|
||||
return {
|
||||
name: "ComponentResources",
|
||||
async getDependencyGraph(_ctx, _content, _resources) {
|
||||
return new DepGraph<FilePath>()
|
||||
},
|
||||
async *emit(ctx, _content, _resources) {
|
||||
const cfg = ctx.cfg.configuration
|
||||
// component specific scripts and styles
|
||||
@ -216,16 +216,9 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
|
||||
// let the user do it themselves in css
|
||||
} else if (cfg.theme.fontOrigin === "googleFonts" && !cfg.theme.cdnCaching) {
|
||||
// when cdnCaching is true, we link to google fonts in Head.tsx
|
||||
const theme = ctx.cfg.configuration.theme
|
||||
const response = await fetch(googleFontHref(theme))
|
||||
const response = await fetch(googleFontHref(ctx.cfg.configuration.theme))
|
||||
googleFontsStyleSheet = await response.text()
|
||||
|
||||
if (theme.typography.title) {
|
||||
const title = ctx.cfg.configuration.pageTitle
|
||||
const response = await fetch(googleFontSubsetHref(theme, title))
|
||||
googleFontsStyleSheet += `\n${await response.text()}`
|
||||
}
|
||||
|
||||
if (!cfg.baseUrl) {
|
||||
throw new Error(
|
||||
"baseUrl must be defined when using Google Fonts without cfg.theme.cdnCaching",
|
||||
@ -242,7 +235,7 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
|
||||
for (const fontFile of fontFiles) {
|
||||
const res = await fetch(fontFile.url)
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch font ${fontFile.filename}`)
|
||||
throw new Error(`failed to fetch font ${fontFile.filename}`)
|
||||
}
|
||||
|
||||
const buf = await res.arrayBuffer()
|
||||
@ -289,22 +282,19 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
|
||||
},
|
||||
include: Features.MediaQueries,
|
||||
}).code.toString(),
|
||||
})
|
||||
|
||||
yield write({
|
||||
ctx,
|
||||
slug: "prescript" as FullSlug,
|
||||
ext: ".js",
|
||||
content: prescript,
|
||||
})
|
||||
|
||||
yield write({
|
||||
ctx,
|
||||
slug: "postscript" as FullSlug,
|
||||
ext: ".js",
|
||||
content: postscript,
|
||||
})
|
||||
}),
|
||||
yield write({
|
||||
ctx,
|
||||
slug: "prescript" as FullSlug,
|
||||
ext: ".js",
|
||||
content: prescript,
|
||||
}),
|
||||
yield write({
|
||||
ctx,
|
||||
slug: "postscript" as FullSlug,
|
||||
ext: ".js",
|
||||
content: postscript,
|
||||
})
|
||||
},
|
||||
async *partialEmit() {},
|
||||
}
|
||||
}
|
||||
|
||||
@ -7,11 +7,11 @@ import { QuartzEmitterPlugin } from "../types"
|
||||
import { toHtml } from "hast-util-to-html"
|
||||
import { write } from "./helpers"
|
||||
import { i18n } from "../../i18n"
|
||||
import DepGraph from "../../depgraph"
|
||||
|
||||
export type ContentIndexMap = Map<FullSlug, ContentDetails>
|
||||
export type ContentDetails = {
|
||||
slug: FullSlug
|
||||
filePath: FilePath
|
||||
title: string
|
||||
links: SimpleSlug[]
|
||||
tags: string[]
|
||||
@ -96,7 +96,27 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
|
||||
opts = { ...defaultOptions, ...opts }
|
||||
return {
|
||||
name: "ContentIndex",
|
||||
async *emit(ctx, content) {
|
||||
async getDependencyGraph(ctx, content, _resources) {
|
||||
const graph = new DepGraph<FilePath>()
|
||||
|
||||
for (const [_tree, file] of content) {
|
||||
const sourcePath = file.data.filePath!
|
||||
|
||||
graph.addEdge(
|
||||
sourcePath,
|
||||
joinSegments(ctx.argv.output, "static/contentIndex.json") as FilePath,
|
||||
)
|
||||
if (opts?.enableSiteMap) {
|
||||
graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "sitemap.xml") as FilePath)
|
||||
}
|
||||
if (opts?.enableRSS) {
|
||||
graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "index.xml") as FilePath)
|
||||
}
|
||||
}
|
||||
|
||||
return graph
|
||||
},
|
||||
async *emit(ctx, content, _resources) {
|
||||
const cfg = ctx.cfg.configuration
|
||||
const linkIndex: ContentIndexMap = new Map()
|
||||
for (const [tree, file] of content) {
|
||||
@ -105,7 +125,6 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
|
||||
if (opts?.includeEmptyFiles || (file.data.text && file.data.text !== "")) {
|
||||
linkIndex.set(slug, {
|
||||
slug,
|
||||
filePath: file.data.relativePath!,
|
||||
title: file.data.frontmatter?.title!,
|
||||
links: file.data.links ?? [],
|
||||
tags: file.data.frontmatter?.tags ?? [],
|
||||
|
||||
@ -1,48 +1,54 @@
|
||||
import path from "path"
|
||||
import { visit } from "unist-util-visit"
|
||||
import { Root } from "hast"
|
||||
import { VFile } from "vfile"
|
||||
import { QuartzEmitterPlugin } from "../types"
|
||||
import { QuartzComponentProps } from "../../components/types"
|
||||
import HeaderConstructor from "../../components/Header"
|
||||
import BodyConstructor from "../../components/Body"
|
||||
import { pageResources, renderPage } from "../../components/renderPage"
|
||||
import { FullPageLayout } from "../../cfg"
|
||||
import { pathToRoot } from "../../util/path"
|
||||
import { Argv } from "../../util/ctx"
|
||||
import { FilePath, isRelativeURL, joinSegments, pathToRoot } from "../../util/path"
|
||||
import { defaultContentPageLayout, sharedPageComponents } from "../../../quartz.layout"
|
||||
import { Content } from "../../components"
|
||||
import chalk from "chalk"
|
||||
import { write } from "./helpers"
|
||||
import { BuildCtx } from "../../util/ctx"
|
||||
import { Node } from "unist"
|
||||
import { StaticResources } from "../../util/resources"
|
||||
import { QuartzPluginData } from "../vfile"
|
||||
import DepGraph from "../../depgraph"
|
||||
|
||||
async function processContent(
|
||||
ctx: BuildCtx,
|
||||
tree: Node,
|
||||
fileData: QuartzPluginData,
|
||||
allFiles: QuartzPluginData[],
|
||||
opts: FullPageLayout,
|
||||
resources: StaticResources,
|
||||
) {
|
||||
const slug = fileData.slug!
|
||||
const cfg = ctx.cfg.configuration
|
||||
const externalResources = pageResources(pathToRoot(slug), resources)
|
||||
const componentData: QuartzComponentProps = {
|
||||
ctx,
|
||||
fileData,
|
||||
externalResources,
|
||||
cfg,
|
||||
children: [],
|
||||
tree,
|
||||
allFiles,
|
||||
}
|
||||
// get all the dependencies for the markdown file
|
||||
// eg. images, scripts, stylesheets, transclusions
|
||||
const parseDependencies = (argv: Argv, hast: Root, file: VFile): string[] => {
|
||||
const dependencies: string[] = []
|
||||
|
||||
const content = renderPage(cfg, slug, componentData, opts, externalResources)
|
||||
return write({
|
||||
ctx,
|
||||
content,
|
||||
slug,
|
||||
ext: ".html",
|
||||
visit(hast, "element", (elem): void => {
|
||||
let ref: string | null = null
|
||||
|
||||
if (
|
||||
["script", "img", "audio", "video", "source", "iframe"].includes(elem.tagName) &&
|
||||
elem?.properties?.src
|
||||
) {
|
||||
ref = elem.properties.src.toString()
|
||||
} else if (["a", "link"].includes(elem.tagName) && elem?.properties?.href) {
|
||||
// transclusions will create a tags with relative hrefs
|
||||
ref = elem.properties.href.toString()
|
||||
}
|
||||
|
||||
// if it is a relative url, its a local file and we need to add
|
||||
// it to the dependency graph. otherwise, ignore
|
||||
if (ref === null || !isRelativeURL(ref)) {
|
||||
return
|
||||
}
|
||||
|
||||
let fp = path.join(file.data.filePath!, path.relative(argv.directory, ref)).replace(/\\/g, "/")
|
||||
// markdown files have the .md extension stripped in hrefs, add it back here
|
||||
if (!fp.split("/").pop()?.includes(".")) {
|
||||
fp += ".md"
|
||||
}
|
||||
dependencies.push(fp)
|
||||
})
|
||||
|
||||
return dependencies
|
||||
}
|
||||
|
||||
export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOpts) => {
|
||||
@ -73,22 +79,53 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
|
||||
Footer,
|
||||
]
|
||||
},
|
||||
async *emit(ctx, content, resources) {
|
||||
const allFiles = content.map((c) => c[1].data)
|
||||
let containsIndex = false
|
||||
async getDependencyGraph(ctx, content, _resources) {
|
||||
const graph = new DepGraph<FilePath>()
|
||||
|
||||
for (const [tree, file] of content) {
|
||||
const sourcePath = file.data.filePath!
|
||||
const slug = file.data.slug!
|
||||
graph.addEdge(sourcePath, joinSegments(ctx.argv.output, slug + ".html") as FilePath)
|
||||
|
||||
parseDependencies(ctx.argv, tree as Root, file).forEach((dep) => {
|
||||
graph.addEdge(dep as FilePath, sourcePath)
|
||||
})
|
||||
}
|
||||
|
||||
return graph
|
||||
},
|
||||
async *emit(ctx, content, resources) {
|
||||
const cfg = ctx.cfg.configuration
|
||||
const allFiles = content.map((c) => c[1].data)
|
||||
|
||||
let containsIndex = false
|
||||
for (const [tree, file] of content) {
|
||||
const slug = file.data.slug!
|
||||
if (slug === "index") {
|
||||
containsIndex = true
|
||||
}
|
||||
|
||||
// only process home page, non-tag pages, and non-index pages
|
||||
if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
|
||||
yield processContent(ctx, tree, file.data, allFiles, opts, resources)
|
||||
const externalResources = pageResources(pathToRoot(slug), file.data, resources)
|
||||
const componentData: QuartzComponentProps = {
|
||||
ctx,
|
||||
fileData: file.data,
|
||||
externalResources,
|
||||
cfg,
|
||||
children: [],
|
||||
tree,
|
||||
allFiles,
|
||||
}
|
||||
|
||||
const content = renderPage(cfg, slug, componentData, opts, externalResources)
|
||||
yield write({
|
||||
ctx,
|
||||
content,
|
||||
slug,
|
||||
ext: ".html",
|
||||
})
|
||||
}
|
||||
|
||||
if (!containsIndex) {
|
||||
if (!containsIndex && !ctx.argv.fastRebuild) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
`\nWarning: you seem to be missing an \`index.md\` home page file at the root of your \`${ctx.argv.directory}\` folder (\`${path.join(ctx.argv.directory, "index.md")} does not exist\`). This may cause errors when deploying.`,
|
||||
@ -96,25 +133,5 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
|
||||
)
|
||||
}
|
||||
},
|
||||
async *partialEmit(ctx, content, resources, changeEvents) {
|
||||
const allFiles = content.map((c) => c[1].data)
|
||||
|
||||
// find all slugs that changed or were added
|
||||
const changedSlugs = new Set<string>()
|
||||
for (const changeEvent of changeEvents) {
|
||||
if (!changeEvent.file) continue
|
||||
if (changeEvent.type === "add" || changeEvent.type === "change") {
|
||||
changedSlugs.add(changeEvent.file.data.slug!)
|
||||
}
|
||||
}
|
||||
|
||||
for (const [tree, file] of content) {
|
||||
const slug = file.data.slug!
|
||||
if (!changedSlugs.has(slug)) continue
|
||||
if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
|
||||
|
||||
yield processContent(ctx, tree, file.data, allFiles, opts, resources)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -7,6 +7,7 @@ import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../
|
||||
import { FullPageLayout } from "../../cfg"
|
||||
import path from "path"
|
||||
import {
|
||||
FilePath,
|
||||
FullSlug,
|
||||
SimpleSlug,
|
||||
stripSlashes,
|
||||
@ -17,89 +18,13 @@ import {
|
||||
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
|
||||
import { FolderContent } from "../../components"
|
||||
import { write } from "./helpers"
|
||||
import { i18n, TRANSLATIONS } from "../../i18n"
|
||||
import { BuildCtx } from "../../util/ctx"
|
||||
import { StaticResources } from "../../util/resources"
|
||||
import { i18n } from "../../i18n"
|
||||
import DepGraph from "../../depgraph"
|
||||
|
||||
interface FolderPageOptions extends FullPageLayout {
|
||||
sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
|
||||
}
|
||||
|
||||
async function* processFolderInfo(
|
||||
ctx: BuildCtx,
|
||||
folderInfo: Record<SimpleSlug, ProcessedContent>,
|
||||
allFiles: QuartzPluginData[],
|
||||
opts: FullPageLayout,
|
||||
resources: StaticResources,
|
||||
) {
|
||||
for (const [folder, folderContent] of Object.entries(folderInfo) as [
|
||||
SimpleSlug,
|
||||
ProcessedContent,
|
||||
][]) {
|
||||
const slug = joinSegments(folder, "index") as FullSlug
|
||||
const [tree, file] = folderContent
|
||||
const cfg = ctx.cfg.configuration
|
||||
const externalResources = pageResources(pathToRoot(slug), resources)
|
||||
const componentData: QuartzComponentProps = {
|
||||
ctx,
|
||||
fileData: file.data,
|
||||
externalResources,
|
||||
cfg,
|
||||
children: [],
|
||||
tree,
|
||||
allFiles,
|
||||
}
|
||||
|
||||
const content = renderPage(cfg, slug, componentData, opts, externalResources)
|
||||
yield write({
|
||||
ctx,
|
||||
content,
|
||||
slug,
|
||||
ext: ".html",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function computeFolderInfo(
|
||||
folders: Set<SimpleSlug>,
|
||||
content: ProcessedContent[],
|
||||
locale: keyof typeof TRANSLATIONS,
|
||||
): Record<SimpleSlug, ProcessedContent> {
|
||||
// Create default folder descriptions
|
||||
const folderInfo: Record<SimpleSlug, ProcessedContent> = Object.fromEntries(
|
||||
[...folders].map((folder) => [
|
||||
folder,
|
||||
defaultProcessedContent({
|
||||
slug: joinSegments(folder, "index") as FullSlug,
|
||||
frontmatter: {
|
||||
title: `${i18n(locale).pages.folderContent.folder}: ${folder}`,
|
||||
tags: [],
|
||||
},
|
||||
}),
|
||||
]),
|
||||
)
|
||||
|
||||
// Update with actual content if available
|
||||
for (const [tree, file] of content) {
|
||||
const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug
|
||||
if (folders.has(slug)) {
|
||||
folderInfo[slug] = [tree, file]
|
||||
}
|
||||
}
|
||||
|
||||
return folderInfo
|
||||
}
|
||||
|
||||
function _getFolders(slug: FullSlug): SimpleSlug[] {
|
||||
var folderName = path.dirname(slug ?? "") as SimpleSlug
|
||||
const parentFolderNames = [folderName]
|
||||
|
||||
while (folderName !== ".") {
|
||||
folderName = path.dirname(folderName ?? "") as SimpleSlug
|
||||
parentFolderNames.push(folderName)
|
||||
}
|
||||
return parentFolderNames
|
||||
}
|
||||
|
||||
export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (userOpts) => {
|
||||
const opts: FullPageLayout = {
|
||||
...sharedPageComponents,
|
||||
@ -128,6 +53,22 @@ export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (user
|
||||
Footer,
|
||||
]
|
||||
},
|
||||
async getDependencyGraph(_ctx, content, _resources) {
|
||||
// Example graph:
|
||||
// nested/file.md --> nested/index.html
|
||||
// nested/file2.md ------^
|
||||
const graph = new DepGraph<FilePath>()
|
||||
|
||||
content.map(([_tree, vfile]) => {
|
||||
const slug = vfile.data.slug
|
||||
const folderName = path.dirname(slug ?? "") as SimpleSlug
|
||||
if (slug && folderName !== "." && folderName !== "tags") {
|
||||
graph.addEdge(vfile.data.filePath!, joinSegments(folderName, "index.html") as FilePath)
|
||||
}
|
||||
})
|
||||
|
||||
return graph
|
||||
},
|
||||
async *emit(ctx, content, resources) {
|
||||
const allFiles = content.map((c) => c[1].data)
|
||||
const cfg = ctx.cfg.configuration
|
||||
@ -142,29 +83,59 @@ export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (user
|
||||
}),
|
||||
)
|
||||
|
||||
const folderInfo = computeFolderInfo(folders, content, cfg.locale)
|
||||
yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
|
||||
},
|
||||
async *partialEmit(ctx, content, resources, changeEvents) {
|
||||
const allFiles = content.map((c) => c[1].data)
|
||||
const cfg = ctx.cfg.configuration
|
||||
const folderDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
|
||||
[...folders].map((folder) => [
|
||||
folder,
|
||||
defaultProcessedContent({
|
||||
slug: joinSegments(folder, "index") as FullSlug,
|
||||
frontmatter: {
|
||||
title: `${i18n(cfg.locale).pages.folderContent.folder}: ${folder}`,
|
||||
tags: [],
|
||||
},
|
||||
}),
|
||||
]),
|
||||
)
|
||||
|
||||
// Find all folders that need to be updated based on changed files
|
||||
const affectedFolders: Set<SimpleSlug> = new Set()
|
||||
for (const changeEvent of changeEvents) {
|
||||
if (!changeEvent.file) continue
|
||||
const slug = changeEvent.file.data.slug!
|
||||
const folders = _getFolders(slug).filter(
|
||||
(folderName) => folderName !== "." && folderName !== "tags",
|
||||
)
|
||||
folders.forEach((folder) => affectedFolders.add(folder))
|
||||
for (const [tree, file] of content) {
|
||||
const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug
|
||||
if (folders.has(slug)) {
|
||||
folderDescriptions[slug] = [tree, file]
|
||||
}
|
||||
}
|
||||
|
||||
// If there are affected folders, rebuild their pages
|
||||
if (affectedFolders.size > 0) {
|
||||
const folderInfo = computeFolderInfo(affectedFolders, content, cfg.locale)
|
||||
yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
|
||||
for (const folder of folders) {
|
||||
const slug = joinSegments(folder, "index") as FullSlug
|
||||
const [tree, file] = folderDescriptions[folder]
|
||||
const externalResources = pageResources(pathToRoot(slug), file.data, resources)
|
||||
const componentData: QuartzComponentProps = {
|
||||
ctx,
|
||||
fileData: file.data,
|
||||
externalResources,
|
||||
cfg,
|
||||
children: [],
|
||||
tree,
|
||||
allFiles,
|
||||
}
|
||||
|
||||
const content = renderPage(cfg, slug, componentData, opts, externalResources)
|
||||
yield write({
|
||||
ctx,
|
||||
content,
|
||||
slug,
|
||||
ext: ".html",
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
function _getFolders(slug: FullSlug): SimpleSlug[] {
|
||||
var folderName = path.dirname(slug ?? "") as SimpleSlug
|
||||
const parentFolderNames = [folderName]
|
||||
|
||||
while (folderName !== ".") {
|
||||
folderName = path.dirname(folderName ?? "") as SimpleSlug
|
||||
parentFolderNames.push(folderName)
|
||||
}
|
||||
return parentFolderNames
|
||||
}
|
||||
|
||||
@ -1,17 +1,13 @@
|
||||
import { QuartzEmitterPlugin } from "../types"
|
||||
import { i18n } from "../../i18n"
|
||||
import { unescapeHTML } from "../../util/escape"
|
||||
import { FullSlug, getFileExtension, joinSegments, QUARTZ } from "../../util/path"
|
||||
import { FullSlug, getFileExtension } from "../../util/path"
|
||||
import { ImageOptions, SocialImageOptions, defaultImage, getSatoriFonts } from "../../util/og"
|
||||
import sharp from "sharp"
|
||||
import satori, { SatoriOptions } from "satori"
|
||||
import satori from "satori"
|
||||
import { loadEmoji, getIconCode } from "../../util/emoji"
|
||||
import { Readable } from "stream"
|
||||
import { write } from "./helpers"
|
||||
import { BuildCtx } from "../../util/ctx"
|
||||
import { QuartzPluginData } from "../vfile"
|
||||
import fs from "node:fs/promises"
|
||||
import chalk from "chalk"
|
||||
|
||||
const defaultOptions: SocialImageOptions = {
|
||||
colorScheme: "lightMode",
|
||||
@ -30,25 +26,7 @@ async function generateSocialImage(
|
||||
userOpts: SocialImageOptions,
|
||||
): Promise<Readable> {
|
||||
const { width, height } = userOpts
|
||||
const iconPath = joinSegments(QUARTZ, "static", "icon.png")
|
||||
let iconBase64: string | undefined = undefined
|
||||
try {
|
||||
const iconData = await fs.readFile(iconPath)
|
||||
iconBase64 = `data:image/png;base64,${iconData.toString("base64")}`
|
||||
} catch (err) {
|
||||
console.warn(chalk.yellow(`Warning: Could not find icon at ${iconPath}`))
|
||||
}
|
||||
|
||||
const imageComponent = userOpts.imageStructure({
|
||||
cfg,
|
||||
userOpts,
|
||||
title,
|
||||
description,
|
||||
fonts,
|
||||
fileData,
|
||||
iconBase64,
|
||||
})
|
||||
|
||||
const imageComponent = userOpts.imageStructure(cfg, userOpts, title, description, fonts, fileData)
|
||||
const svg = await satori(imageComponent, {
|
||||
width,
|
||||
height,
|
||||
@ -64,41 +42,6 @@ async function generateSocialImage(
|
||||
return sharp(Buffer.from(svg)).webp({ quality: 40 })
|
||||
}
|
||||
|
||||
async function processOgImage(
|
||||
ctx: BuildCtx,
|
||||
fileData: QuartzPluginData,
|
||||
fonts: SatoriOptions["fonts"],
|
||||
fullOptions: SocialImageOptions,
|
||||
) {
|
||||
const cfg = ctx.cfg.configuration
|
||||
const slug = fileData.slug!
|
||||
const titleSuffix = cfg.pageTitleSuffix ?? ""
|
||||
const title =
|
||||
(fileData.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
|
||||
const description =
|
||||
fileData.frontmatter?.socialDescription ??
|
||||
fileData.frontmatter?.description ??
|
||||
unescapeHTML(fileData.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description)
|
||||
|
||||
const stream = await generateSocialImage(
|
||||
{
|
||||
title,
|
||||
description,
|
||||
fonts,
|
||||
cfg,
|
||||
fileData,
|
||||
},
|
||||
fullOptions,
|
||||
)
|
||||
|
||||
return write({
|
||||
ctx,
|
||||
content: stream,
|
||||
slug: `${slug}-og-image` as FullSlug,
|
||||
ext: ".webp",
|
||||
})
|
||||
}
|
||||
|
||||
export const CustomOgImagesEmitterName = "CustomOgImages"
|
||||
export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> = (userOpts) => {
|
||||
const fullOptions = { ...defaultOptions, ...userOpts }
|
||||
@ -115,23 +58,39 @@ export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> =
|
||||
const fonts = await getSatoriFonts(headerFont, bodyFont)
|
||||
|
||||
for (const [_tree, vfile] of content) {
|
||||
if (vfile.data.frontmatter?.socialImage !== undefined) continue
|
||||
yield processOgImage(ctx, vfile.data, fonts, fullOptions)
|
||||
}
|
||||
},
|
||||
async *partialEmit(ctx, _content, _resources, changeEvents) {
|
||||
const cfg = ctx.cfg.configuration
|
||||
const headerFont = cfg.theme.typography.header
|
||||
const bodyFont = cfg.theme.typography.body
|
||||
const fonts = await getSatoriFonts(headerFont, bodyFont)
|
||||
|
||||
// find all slugs that changed or were added
|
||||
for (const changeEvent of changeEvents) {
|
||||
if (!changeEvent.file) continue
|
||||
if (changeEvent.file.data.frontmatter?.socialImage !== undefined) continue
|
||||
if (changeEvent.type === "add" || changeEvent.type === "change") {
|
||||
yield processOgImage(ctx, changeEvent.file.data, fonts, fullOptions)
|
||||
// if this file defines socialImage, we can skip
|
||||
if (vfile.data.frontmatter?.socialImage !== undefined) {
|
||||
continue
|
||||
}
|
||||
|
||||
const slug = vfile.data.slug!
|
||||
const titleSuffix = cfg.pageTitleSuffix ?? ""
|
||||
const title =
|
||||
(vfile.data.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
|
||||
const description =
|
||||
vfile.data.frontmatter?.socialDescription ??
|
||||
vfile.data.frontmatter?.description ??
|
||||
unescapeHTML(
|
||||
vfile.data.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description,
|
||||
)
|
||||
|
||||
const stream = await generateSocialImage(
|
||||
{
|
||||
title,
|
||||
description,
|
||||
fonts,
|
||||
cfg,
|
||||
fileData: vfile.data,
|
||||
},
|
||||
fullOptions,
|
||||
)
|
||||
|
||||
yield write({
|
||||
ctx,
|
||||
content: stream,
|
||||
slug: `${slug}-og-image` as FullSlug,
|
||||
ext: ".webp",
|
||||
})
|
||||
}
|
||||
},
|
||||
externalResources: (ctx) => {
|
||||
|
||||
@ -2,11 +2,26 @@ import { FilePath, QUARTZ, joinSegments } from "../../util/path"
|
||||
import { QuartzEmitterPlugin } from "../types"
|
||||
import fs from "fs"
|
||||
import { glob } from "../../util/glob"
|
||||
import DepGraph from "../../depgraph"
|
||||
import { dirname } from "path"
|
||||
|
||||
export const Static: QuartzEmitterPlugin = () => ({
|
||||
name: "Static",
|
||||
async *emit({ argv, cfg }) {
|
||||
async getDependencyGraph({ argv, cfg }, _content, _resources) {
|
||||
const graph = new DepGraph<FilePath>()
|
||||
|
||||
const staticPath = joinSegments(QUARTZ, "static")
|
||||
const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
|
||||
for (const fp of fps) {
|
||||
graph.addEdge(
|
||||
joinSegments("static", fp) as FilePath,
|
||||
joinSegments(argv.output, "static", fp) as FilePath,
|
||||
)
|
||||
}
|
||||
|
||||
return graph
|
||||
},
|
||||
async *emit({ argv, cfg }, _content) {
|
||||
const staticPath = joinSegments(QUARTZ, "static")
|
||||
const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
|
||||
const outputStaticPath = joinSegments(argv.output, "static")
|
||||
@ -19,5 +34,4 @@ export const Static: QuartzEmitterPlugin = () => ({
|
||||
yield dest
|
||||
}
|
||||
},
|
||||
async *partialEmit() {},
|
||||
})
|
||||
|
||||
@ -5,94 +5,23 @@ import BodyConstructor from "../../components/Body"
|
||||
import { pageResources, renderPage } from "../../components/renderPage"
|
||||
import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../vfile"
|
||||
import { FullPageLayout } from "../../cfg"
|
||||
import { FullSlug, getAllSegmentPrefixes, joinSegments, pathToRoot } from "../../util/path"
|
||||
import {
|
||||
FilePath,
|
||||
FullSlug,
|
||||
getAllSegmentPrefixes,
|
||||
joinSegments,
|
||||
pathToRoot,
|
||||
} from "../../util/path"
|
||||
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
|
||||
import { TagContent } from "../../components"
|
||||
import { write } from "./helpers"
|
||||
import { i18n, TRANSLATIONS } from "../../i18n"
|
||||
import { BuildCtx } from "../../util/ctx"
|
||||
import { StaticResources } from "../../util/resources"
|
||||
import { i18n } from "../../i18n"
|
||||
import DepGraph from "../../depgraph"
|
||||
|
||||
interface TagPageOptions extends FullPageLayout {
|
||||
sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
|
||||
}
|
||||
|
||||
function computeTagInfo(
|
||||
allFiles: QuartzPluginData[],
|
||||
content: ProcessedContent[],
|
||||
locale: keyof typeof TRANSLATIONS,
|
||||
): [Set<string>, Record<string, ProcessedContent>] {
|
||||
const tags: Set<string> = new Set(
|
||||
allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
|
||||
)
|
||||
|
||||
// add base tag
|
||||
tags.add("index")
|
||||
|
||||
const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
|
||||
[...tags].map((tag) => {
|
||||
const title =
|
||||
tag === "index"
|
||||
? i18n(locale).pages.tagContent.tagIndex
|
||||
: `${i18n(locale).pages.tagContent.tag}: ${tag}`
|
||||
return [
|
||||
tag,
|
||||
defaultProcessedContent({
|
||||
slug: joinSegments("tags", tag) as FullSlug,
|
||||
frontmatter: { title, tags: [] },
|
||||
}),
|
||||
]
|
||||
}),
|
||||
)
|
||||
|
||||
// Update with actual content if available
|
||||
for (const [tree, file] of content) {
|
||||
const slug = file.data.slug!
|
||||
if (slug.startsWith("tags/")) {
|
||||
const tag = slug.slice("tags/".length)
|
||||
if (tags.has(tag)) {
|
||||
tagDescriptions[tag] = [tree, file]
|
||||
if (file.data.frontmatter?.title === tag) {
|
||||
file.data.frontmatter.title = `${i18n(locale).pages.tagContent.tag}: ${tag}`
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return [tags, tagDescriptions]
|
||||
}
|
||||
|
||||
async function processTagPage(
|
||||
ctx: BuildCtx,
|
||||
tag: string,
|
||||
tagContent: ProcessedContent,
|
||||
allFiles: QuartzPluginData[],
|
||||
opts: FullPageLayout,
|
||||
resources: StaticResources,
|
||||
) {
|
||||
const slug = joinSegments("tags", tag) as FullSlug
|
||||
const [tree, file] = tagContent
|
||||
const cfg = ctx.cfg.configuration
|
||||
const externalResources = pageResources(pathToRoot(slug), resources)
|
||||
const componentData: QuartzComponentProps = {
|
||||
ctx,
|
||||
fileData: file.data,
|
||||
externalResources,
|
||||
cfg,
|
||||
children: [],
|
||||
tree,
|
||||
allFiles,
|
||||
}
|
||||
|
||||
const content = renderPage(cfg, slug, componentData, opts, externalResources)
|
||||
return write({
|
||||
ctx,
|
||||
content,
|
||||
slug: file.data.slug!,
|
||||
ext: ".html",
|
||||
})
|
||||
}
|
||||
|
||||
export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts) => {
|
||||
const opts: FullPageLayout = {
|
||||
...sharedPageComponents,
|
||||
@ -121,50 +50,89 @@ export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts)
|
||||
Footer,
|
||||
]
|
||||
},
|
||||
async getDependencyGraph(ctx, content, _resources) {
|
||||
const graph = new DepGraph<FilePath>()
|
||||
|
||||
for (const [_tree, file] of content) {
|
||||
const sourcePath = file.data.filePath!
|
||||
const tags = (file.data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes)
|
||||
// if the file has at least one tag, it is used in the tag index page
|
||||
if (tags.length > 0) {
|
||||
tags.push("index")
|
||||
}
|
||||
|
||||
for (const tag of tags) {
|
||||
graph.addEdge(
|
||||
sourcePath,
|
||||
joinSegments(ctx.argv.output, "tags", tag + ".html") as FilePath,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return graph
|
||||
},
|
||||
async *emit(ctx, content, resources) {
|
||||
const allFiles = content.map((c) => c[1].data)
|
||||
const cfg = ctx.cfg.configuration
|
||||
const [tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
|
||||
|
||||
for (const tag of tags) {
|
||||
yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
|
||||
}
|
||||
},
|
||||
async *partialEmit(ctx, content, resources, changeEvents) {
|
||||
const allFiles = content.map((c) => c[1].data)
|
||||
const cfg = ctx.cfg.configuration
|
||||
const tags: Set<string> = new Set(
|
||||
allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
|
||||
)
|
||||
|
||||
// Find all tags that need to be updated based on changed files
|
||||
const affectedTags: Set<string> = new Set()
|
||||
for (const changeEvent of changeEvents) {
|
||||
if (!changeEvent.file) continue
|
||||
const slug = changeEvent.file.data.slug!
|
||||
// add base tag
|
||||
tags.add("index")
|
||||
|
||||
// If it's a tag page itself that changed
|
||||
const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
|
||||
[...tags].map((tag) => {
|
||||
const title =
|
||||
tag === "index"
|
||||
? i18n(cfg.locale).pages.tagContent.tagIndex
|
||||
: `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
|
||||
return [
|
||||
tag,
|
||||
defaultProcessedContent({
|
||||
slug: joinSegments("tags", tag) as FullSlug,
|
||||
frontmatter: { title, tags: [] },
|
||||
}),
|
||||
]
|
||||
}),
|
||||
)
|
||||
|
||||
for (const [tree, file] of content) {
|
||||
const slug = file.data.slug!
|
||||
if (slug.startsWith("tags/")) {
|
||||
const tag = slug.slice("tags/".length)
|
||||
affectedTags.add(tag)
|
||||
}
|
||||
|
||||
// If a file with tags changed, we need to update those tag pages
|
||||
const fileTags = changeEvent.file.data.frontmatter?.tags ?? []
|
||||
fileTags.flatMap(getAllSegmentPrefixes).forEach((tag) => affectedTags.add(tag))
|
||||
|
||||
// Always update the index tag page if any file changes
|
||||
affectedTags.add("index")
|
||||
}
|
||||
|
||||
// If there are affected tags, rebuild their pages
|
||||
if (affectedTags.size > 0) {
|
||||
// We still need to compute all tags because tag pages show all tags
|
||||
const [_tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
|
||||
|
||||
for (const tag of affectedTags) {
|
||||
if (tagDescriptions[tag]) {
|
||||
yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
|
||||
if (tags.has(tag)) {
|
||||
tagDescriptions[tag] = [tree, file]
|
||||
if (file.data.frontmatter?.title === tag) {
|
||||
file.data.frontmatter.title = `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const tag of tags) {
|
||||
const slug = joinSegments("tags", tag) as FullSlug
|
||||
const [tree, file] = tagDescriptions[tag]
|
||||
const externalResources = pageResources(pathToRoot(slug), file.data, resources)
|
||||
const componentData: QuartzComponentProps = {
|
||||
ctx,
|
||||
fileData: file.data,
|
||||
externalResources,
|
||||
cfg,
|
||||
children: [],
|
||||
tree,
|
||||
allFiles,
|
||||
}
|
||||
|
||||
const content = renderPage(cfg, slug, componentData, opts, externalResources)
|
||||
yield write({
|
||||
ctx,
|
||||
content,
|
||||
slug: file.data.slug!,
|
||||
ext: ".html",
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -5,13 +5,11 @@ import { escapeHTML } from "../../util/escape"
|
||||
|
||||
export interface Options {
|
||||
descriptionLength: number
|
||||
maxDescriptionLength: number
|
||||
replaceExternalLinks: boolean
|
||||
}
|
||||
|
||||
const defaultOptions: Options = {
|
||||
descriptionLength: 150,
|
||||
maxDescriptionLength: 300,
|
||||
replaceExternalLinks: true,
|
||||
}
|
||||
|
||||
@ -39,41 +37,35 @@ export const Description: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
|
||||
text = text.replace(urlRegex, "$<domain>" + "$<path>")
|
||||
}
|
||||
|
||||
if (frontMatterDescription) {
|
||||
file.data.description = frontMatterDescription
|
||||
file.data.text = text
|
||||
return
|
||||
}
|
||||
|
||||
// otherwise, use the text content
|
||||
const desc = text
|
||||
const desc = frontMatterDescription ?? text
|
||||
const sentences = desc.replace(/\s+/g, " ").split(/\.\s/)
|
||||
let finalDesc = ""
|
||||
const finalDesc: string[] = []
|
||||
const len = opts.descriptionLength
|
||||
let sentenceIdx = 0
|
||||
let currentDescriptionLength = 0
|
||||
|
||||
// Add full sentences until we exceed the guideline length
|
||||
while (sentenceIdx < sentences.length) {
|
||||
const sentence = sentences[sentenceIdx]
|
||||
if (!sentence) break
|
||||
|
||||
const currentSentence = sentence.endsWith(".") ? sentence : sentence + "."
|
||||
const nextLength = finalDesc.length + currentSentence.length + (finalDesc ? 1 : 0)
|
||||
|
||||
// Add the sentence if we're under the guideline length
|
||||
// or if this is the first sentence (always include at least one)
|
||||
if (nextLength <= opts.descriptionLength || sentenceIdx === 0) {
|
||||
finalDesc += (finalDesc ? " " : "") + currentSentence
|
||||
if (sentences[0] !== undefined && sentences[0].length >= len) {
|
||||
const firstSentence = sentences[0].split(" ")
|
||||
while (currentDescriptionLength < len) {
|
||||
const sentence = firstSentence[sentenceIdx]
|
||||
if (!sentence) break
|
||||
finalDesc.push(sentence)
|
||||
currentDescriptionLength += sentence.length
|
||||
sentenceIdx++
|
||||
}
|
||||
finalDesc.push("...")
|
||||
} else {
|
||||
while (currentDescriptionLength < len) {
|
||||
const sentence = sentences[sentenceIdx]
|
||||
if (!sentence) break
|
||||
const currentSentence = sentence.endsWith(".") ? sentence : sentence + "."
|
||||
finalDesc.push(currentSentence)
|
||||
currentDescriptionLength += currentSentence.length
|
||||
sentenceIdx++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// truncate to max length if necessary
|
||||
file.data.description =
|
||||
finalDesc.length > opts.maxDescriptionLength
|
||||
? finalDesc.slice(0, opts.maxDescriptionLength) + "..."
|
||||
: finalDesc
|
||||
file.data.description = finalDesc.join(" ")
|
||||
file.data.text = text
|
||||
}
|
||||
},
|
||||
|
||||
@ -3,9 +3,12 @@ import remarkFrontmatter from "remark-frontmatter"
import { QuartzTransformerPlugin } from "../types"
import yaml from "js-yaml"
import toml from "toml"
import { FilePath, FullSlug, getFileExtension, slugifyFilePath, slugTag } from "../../util/path"
import { FilePath, FullSlug, joinSegments, slugifyFilePath, slugTag } from "../../util/path"
import { QuartzPluginData } from "../vfile"
import { i18n } from "../../i18n"
import { Argv } from "../../util/ctx"
import { VFile } from "vfile"
import path from "path"

export interface Options {
delimiters: string | [string, string]

@ -40,24 +43,26 @@ function coerceToArray(input: string | string[]): string[] | undefined {
.map((tag: string | number) => tag.toString())
}

function getAliasSlugs(aliases: string[]): FullSlug[] {
const res: FullSlug[] = []
for (const alias of aliases) {
const isMd = getFileExtension(alias) === "md"
const mockFp = isMd ? alias : alias + ".md"
const slug = slugifyFilePath(mockFp as FilePath)
res.push(slug)
export function getAliasSlugs(aliases: string[], argv: Argv, file: VFile): FullSlug[] {
const dir = path.posix.relative(argv.directory, path.dirname(file.data.filePath!))
const slugs: FullSlug[] = aliases.map(
(alias) => path.posix.join(dir, slugifyFilePath(alias as FilePath)) as FullSlug,
)
const permalink = file.data.frontmatter?.permalink
if (typeof permalink === "string") {
slugs.push(permalink as FullSlug)
}

return res
// fix any slugs that have trailing slash
return slugs.map((slug) =>
slug.endsWith("/") ? (joinSegments(slug, "index") as FullSlug) : slug,
)
}

export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts) => {
const opts = { ...defaultOptions, ...userOpts }
return {
name: "FrontMatter",
markdownPlugins(ctx) {
const { cfg, allSlugs } = ctx
markdownPlugins({ cfg, allSlugs, argv }) {
return [
[remarkFrontmatter, ["yaml", "toml"]],
() => {

@ -83,18 +88,9 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
const aliases = coerceToArray(coalesceAliases(data, ["aliases", "alias"]))
if (aliases) {
data.aliases = aliases // frontmatter
file.data.aliases = getAliasSlugs(aliases)
allSlugs.push(...file.data.aliases)
const slugs = (file.data.aliases = getAliasSlugs(aliases, argv, file))
allSlugs.push(...slugs)
}

if (data.permalink != null && data.permalink.toString() !== "") {
data.permalink = data.permalink.toString() as FullSlug
const aliases = file.data.aliases ?? []
aliases.push(data.permalink)
file.data.aliases = aliases
allSlugs.push(data.permalink)
}

const cssclasses = coerceToArray(coalesceAliases(data, ["cssclasses", "cssclass"]))
if (cssclasses) data.cssclasses = cssclasses

@ -1,8 +1,8 @@
import fs from "fs"
import path from "path"
import { Repository } from "@napi-rs/simple-git"
import { QuartzTransformerPlugin } from "../types"
import chalk from "chalk"
import path from "path"

export interface Options {
priority: ("frontmatter" | "git" | "filesystem")[]

@ -31,29 +31,17 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
const opts = { ...defaultOptions, ...userOpts }
return {
name: "CreatedModifiedDate",
markdownPlugins(ctx) {
markdownPlugins() {
return [
() => {
let repo: Repository | undefined = undefined
let repositoryWorkdir: string
if (opts.priority.includes("git")) {
try {
repo = Repository.discover(ctx.argv.directory)
repositoryWorkdir = repo.workdir() ?? ctx.argv.directory
} catch (e) {
console.log(
chalk.yellow(`\nWarning: couldn't find git repository for ${ctx.argv.directory}`),
)
}
}

return async (_tree, file) => {
let created: MaybeDate = undefined
let modified: MaybeDate = undefined
let published: MaybeDate = undefined

const fp = file.data.relativePath!
const fullFp = file.data.filePath!
const fp = file.data.filePath!
const fullFp = path.isAbsolute(fp) ? fp : path.posix.join(file.cwd, fp)
for (const source of opts.priority) {
if (source === "filesystem") {
const st = await fs.promises.stat(fullFp)

@ -63,14 +51,21 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
created ||= file.data.frontmatter.created as MaybeDate
modified ||= file.data.frontmatter.modified as MaybeDate
published ||= file.data.frontmatter.published as MaybeDate
} else if (source === "git" && repo) {
} else if (source === "git") {
if (!repo) {
// Get a reference to the main git repo.
// It's either the same as the workdir,
// or 1+ level higher in case of a submodule/subtree setup
repo = Repository.discover(file.cwd)
}

try {
const relativePath = path.relative(repositoryWorkdir, fullFp)
modified ||= await repo.getFileLatestModifiedDateAsync(relativePath)
modified ||= await repo.getFileLatestModifiedDateAsync(file.data.filePath!)
} catch {
console.log(
chalk.yellow(
`\nWarning: ${file.data.filePath!} isn't yet tracked by git, dates will be inaccurate`,
`\nWarning: ${file.data
.filePath!} isn't yet tracked by git, last modification date is not available for this file`,
),
)
}

@ -54,7 +54,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
textTransform(_ctx, src) {
if (opts.wikilinks) {
src = src.toString()
src = src.replaceAll(relrefRegex, (_value, ...capture) => {
src = src.replaceAll(relrefRegex, (value, ...capture) => {
const [text, link] = capture
return `[${text}](${link})`
})

@ -62,7 +62,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

if (opts.removePredefinedAnchor) {
src = src.toString()
src = src.replaceAll(predefinedHeadingIdRegex, (_value, ...capture) => {
src = src.replaceAll(predefinedHeadingIdRegex, (value, ...capture) => {
const [headingText] = capture
return headingText
})

@ -70,7 +70,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

if (opts.removeHugoShortcode) {
src = src.toString()
src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => {
src = src.replaceAll(hugoShortcodeRegex, (value, ...capture) => {
const [scContent] = capture
return scContent
})

@ -78,7 +78,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

if (opts.replaceFigureWithMdImg) {
src = src.toString()
src = src.replaceAll(figureTagRegex, (_value, ...capture) => {
src = src.replaceAll(figureTagRegex, (value, ...capture) => {
const [src] = capture
return ``
})

@ -86,11 +86,11 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>

if (opts.replaceOrgLatex) {
src = src.toString()
src = src.replaceAll(inlineLatexRegex, (_value, ...capture) => {
src = src.replaceAll(inlineLatexRegex, (value, ...capture) => {
const [eqn] = capture
return `$${eqn}$`
})
src = src.replaceAll(blockLatexRegex, (_value, ...capture) => {
src = src.replaceAll(blockLatexRegex, (value, ...capture) => {
const [eqn] = capture
return `$$${eqn}$$`
})

@ -1,8 +1,10 @@
import { QuartzTransformerPlugin } from "../types"
import { PluggableList } from "unified"
import { visit } from "unist-util-visit"
import { SKIP, visit } from "unist-util-visit"
import { ReplaceFunction, findAndReplace as mdastFindReplace } from "mdast-util-find-and-replace"
import { Root, Html, Paragraph, Text, Link, Parent } from "mdast"
import { Node } from "unist"
import { VFile } from "vfile"
import { BuildVisitor } from "unist-util-visit"

export interface Options {

@ -32,10 +34,21 @@ const defaultOptions: Options = {
const orRegex = new RegExp(/{{or:(.*?)}}/, "g")
const TODORegex = new RegExp(/{{.*?\bTODO\b.*?}}/, "g")
const DONERegex = new RegExp(/{{.*?\bDONE\b.*?}}/, "g")
const videoRegex = new RegExp(/{{.*?\[\[video\]\].*?\:(.*?)}}/, "g")
const youtubeRegex = new RegExp(
/{{.*?\[\[video\]\].*?(https?:\/\/(?:www\.)?youtu(?:be\.com\/watch\?v=|\.be\/)([\w\-\_]*)(&(amp;)?[\w\?=]*)?)}}/,
"g",
)

// const multimediaRegex = new RegExp(/{{.*?\b(video|audio)\b.*?\:(.*?)}}/, "g")

const audioRegex = new RegExp(/{{.*?\[\[audio\]\].*?\:(.*?)}}/, "g")
const pdfRegex = new RegExp(/{{.*?\[\[pdf\]\].*?\:(.*?)}}/, "g")
const blockquoteRegex = new RegExp(/(\[\[>\]\])\s*(.*)/, "g")
const roamHighlightRegex = new RegExp(/\^\^(.+)\^\^/, "g")
const roamItalicRegex = new RegExp(/__(.+)__/, "g")
const tableRegex = new RegExp(/- {{.*?\btable\b.*?}}/, "g") /* TODO */
const attributeRegex = new RegExp(/\b\w+(?:\s+\w+)*::/, "g") /* TODO */

function isSpecialEmbed(node: Paragraph): boolean {
if (node.children.length !== 2) return false

@ -122,7 +135,7 @@ export const RoamFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options> | un
const plugins: PluggableList = []

plugins.push(() => {
return (tree: Root) => {
return (tree: Root, file: VFile) => {
const replacements: [RegExp, ReplaceFunction][] = []

// Handle special embeds (audio, video, PDF)

@ -4,7 +4,7 @@ import { ProcessedContent } from "./vfile"
import { QuartzComponent } from "../components/types"
import { FilePath } from "../util/path"
import { BuildCtx } from "../util/ctx"
import { VFile } from "vfile"
import DepGraph from "../depgraph"

export interface PluginTypes {
transformers: QuartzTransformerPluginInstance[]

@ -33,33 +33,26 @@ export type QuartzFilterPluginInstance = {
shouldPublish(ctx: BuildCtx, content: ProcessedContent): boolean
}

export type ChangeEvent = {
type: "add" | "change" | "delete"
path: FilePath
file?: VFile
}

export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (
opts?: Options,
) => QuartzEmitterPluginInstance
export type QuartzEmitterPluginInstance = {
name: string
emit: (
emit(
ctx: BuildCtx,
content: ProcessedContent[],
resources: StaticResources,
) => Promise<FilePath[]> | AsyncGenerator<FilePath>
partialEmit?: (
ctx: BuildCtx,
content: ProcessedContent[],
resources: StaticResources,
changeEvents: ChangeEvent[],
) => Promise<FilePath[]> | AsyncGenerator<FilePath> | null
): Promise<FilePath[]> | AsyncGenerator<FilePath>
/**
* Returns the components (if any) that are used in rendering the page.
* This helps Quartz optimize the page by only including necessary resources
* for components that are actually used.
*/
getQuartzComponents?: (ctx: BuildCtx) => QuartzComponent[]
getDependencyGraph?(
ctx: BuildCtx,
content: ProcessedContent[],
resources: StaticResources,
): Promise<DepGraph<FilePath>>
externalResources?: ExternalResourcesFn
}

@ -11,7 +11,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
const perf = new PerfTimer()
const log = new QuartzLogger(ctx.argv.verbose)

log.start(`Emitting files`)
log.start(`Emitting output files`)

let emittedFiles = 0
const staticResources = getStaticResourcesFromPlugins(ctx)

@ -26,7 +26,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
if (ctx.argv.verbose) {
console.log(`[emit:${emitter.name}] ${file}`)
} else {
log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
log.updateText(`Emitting output files: ${chalk.gray(file)}`)
}
}
} else {

@ -36,7 +36,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
if (ctx.argv.verbose) {
console.log(`[emit:${emitter.name}] ${file}`)
} else {
log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
log.updateText(`Emitting output files: ${chalk.gray(file)}`)
}
}
}

@ -7,13 +7,12 @@ import { Root as HTMLRoot } from "hast"
import { MarkdownContent, ProcessedContent } from "../plugins/vfile"
import { PerfTimer } from "../util/perf"
import { read } from "to-vfile"
import { FilePath, QUARTZ, slugifyFilePath } from "../util/path"
import { FilePath, FullSlug, QUARTZ, slugifyFilePath } from "../util/path"
import path from "path"
import workerpool, { Promise as WorkerPromise } from "workerpool"
import { QuartzLogger } from "../util/log"
import { trace } from "../util/trace"
import { BuildCtx, WorkerSerializableBuildCtx } from "../util/ctx"
import chalk from "chalk"
import { BuildCtx } from "../util/ctx"

export type QuartzMdProcessor = Processor<MDRoot, MDRoot, MDRoot>
export type QuartzHtmlProcessor = Processor<undefined, MDRoot, HTMLRoot>

@ -172,46 +171,25 @@ export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<Pro
workerType: "thread",
})
const errorHandler = (err: any) => {
console.error(err)
console.error(`${err}`.replace(/^error:\s*/i, ""))
process.exit(1)
}

const serializableCtx: WorkerSerializableBuildCtx = {
buildId: ctx.buildId,
argv: ctx.argv,
allSlugs: ctx.allSlugs,
allFiles: ctx.allFiles,
incremental: ctx.incremental,
}

const textToMarkdownPromises: WorkerPromise<MarkdownContent[]>[] = []
let processedFiles = 0
const mdPromises: WorkerPromise<[MarkdownContent[], FullSlug[]]>[] = []
for (const chunk of chunks(fps, CHUNK_SIZE)) {
textToMarkdownPromises.push(pool.exec("parseMarkdown", [serializableCtx, chunk]))
mdPromises.push(pool.exec("parseMarkdown", [ctx.buildId, argv, chunk]))
}
const mdResults: [MarkdownContent[], FullSlug[]][] =
await WorkerPromise.all(mdPromises).catch(errorHandler)

const mdResults: Array<MarkdownContent[]> = await Promise.all(
textToMarkdownPromises.map(async (promise) => {
const result = await promise
processedFiles += result.length
log.updateText(`text->markdown ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
return result
}),
).catch(errorHandler)

const markdownToHtmlPromises: WorkerPromise<ProcessedContent[]>[] = []
processedFiles = 0
for (const mdChunk of mdResults) {
markdownToHtmlPromises.push(pool.exec("processHtml", [serializableCtx, mdChunk]))
const childPromises: WorkerPromise<ProcessedContent[]>[] = []
for (const [_, extraSlugs] of mdResults) {
ctx.allSlugs.push(...extraSlugs)
}
const results: ProcessedContent[][] = await Promise.all(
markdownToHtmlPromises.map(async (promise) => {
const result = await promise
processedFiles += result.length
log.updateText(`markdown->html ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
return result
}),
).catch(errorHandler)
for (const [mdChunk, _] of mdResults) {
childPromises.push(pool.exec("processHtml", [ctx.buildId, argv, mdChunk, ctx.allSlugs]))
}
const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch(errorHandler)

res = results.flat()
await pool.terminate()

@ -1,4 +1,3 @@
@use "./base.scss";
@use "./themes";

// put your custom CSS here!

@ -1,3 +0,0 @@
@use "./base.scss";

// put your custom CSS here!

@ -1 +0,0 @@
404: Not Found

@ -1,170 +0,0 @@
|
||||
:root[saved-theme="dark"] {
|
||||
--accent-h: 202;
|
||||
--accent-s: 100%;
|
||||
--accent-l: 75%;
|
||||
--bg_dark2_x: 18, 18, 24;
|
||||
--bg_dark2: rgb(var(--bg_dark2_x));
|
||||
--bg_dark_x: 22, 22, 30;
|
||||
--bg_dark: rgb(var(--bg_dark_x));
|
||||
--bg_x: 26, 27, 38;
|
||||
--bg: rgb(var(--bg_x));
|
||||
--bg_highlight_x: 41, 46, 66;
|
||||
--bg_highlight: rgb(var(--bg_highlight_x));
|
||||
--bg_highlight_dark_x: 36, 40, 59;
|
||||
--bg_highlight_dark: rgb(var(--bg_highlight_dark_x));
|
||||
--terminal_black_x: 65, 72, 104;
|
||||
--terminal_black: rgb(var(--terminal_black_x));
|
||||
--fg_x: 192, 202, 245;
|
||||
--fg: rgb(var(--fg_x));
|
||||
--fg_dark_x: 169, 177, 214;
|
||||
--fg_dark: rgb(var(--fg_dark_x));
|
||||
--comment_x: 86, 95, 137;
|
||||
--comment: rgb(var(--comment_x));
|
||||
--blue0_x: 61, 89, 161;
|
||||
--blue0: rgb(var(--blue0_x));
|
||||
--blue_x: 122, 162, 247;
|
||||
--blue: rgb(var(--blue_x));
|
||||
--cyan_hsl: 202 100% 75%;
|
||||
--cyan_x: 125, 207, 255;
|
||||
--cyan: rgb(var(--cyan_x));
|
||||
--magent_hsl: 261 85% 79%;
|
||||
--magenta_x: 187, 154, 247;
|
||||
--magenta: rgb(var(--magenta_x));
|
||||
--orange_x: 255, 158, 100;
|
||||
--orange: rgb(var(--orange_x));
|
||||
--yellow_x: 224, 175, 104;
|
||||
--yellow: rgb(var(--yellow_x));
|
||||
--green_x: 158, 206, 106;
|
||||
--green: rgb(var(--green_x));
|
||||
--teal_x: 26, 188, 156;
|
||||
--teal: rgb(var(--teal_x));
|
||||
--red_x: 255, 117, 127;
|
||||
--red: rgb(var(--red_x));
|
||||
--red1_x: 219, 75, 75;
|
||||
--red1: rgb(var(--red1_x));
|
||||
--unknown: #ffffff;
|
||||
--color_red_rgb: var(--red_x);
|
||||
--color-red: var(--red);
|
||||
--color_purple_rgb: var(--magenta_x);
|
||||
--color-purple: var(--magenta);
|
||||
--color_green_rgb: var(--green_x);
|
||||
--color-green: var(--green);
|
||||
--color_cyan_rgb: var(--cyan_x);
|
||||
--color-cyan: var(--cyan);
|
||||
--color_blue_rgb: var(--blue_x);
|
||||
--color-blue: var(--blue);
|
||||
--color_yellow_rgb: var(--yellow_x);
|
||||
--color-yellow: var(--yellow);
|
||||
--color_orange_rgb: var(--orange_x);
|
||||
--color-orange: var(--orange);
|
||||
--color_pink_rgb: var(--magenta_x);
|
||||
--color-pink: var(--magenta);
|
||||
--background-primary: var(--bg);
|
||||
--background-primary-alt: var(--bg);
|
||||
--background-secondary: var(--bg_dark);
|
||||
--background-secondary-alt: var(--bg_dark);
|
||||
--background-modifier-border: var(--bg_highlight);
|
||||
--background-modifier-border-focus: var(--bg_highlight);
|
||||
--background-modifier-border-hover: var(--bg_highlight);
|
||||
--background-modifier-form-field: var(--bg_dark);
|
||||
--background-modifier-form-field-highlighted: var(--bg_dark);
|
||||
--background-modifier-box-shadow: rgba(0, 0, 0, 0.3);
|
||||
--background-modifier-success: var(--green);
|
||||
--background-modifier-error: var(--red1);
|
||||
--background-modifier-error-hover: var(--red);
|
||||
--background-modifier-cover: rgba(var(--bg_dark_x), 0.8);
|
||||
--background-modifier-hover: var(--bg_highlight);
|
||||
--background-modifier-message: rgba(var(--bg_highlight_x), 0.9);
|
||||
--background-modifier-active-hover: var(--bg_highlight);
|
||||
--text-normal: var(--fg);
|
||||
--text-faint: var(--comment);
|
||||
--text-muted: var(--fg_dark);
|
||||
--text-error: var(--red);
|
||||
--text-accent: var(--magenta);
|
||||
--text-accent-hover: var(--cyan);
|
||||
--text-error: var(--red1);
|
||||
--text-error-hover: var(--red);
|
||||
--text-selection: var(--unknown);
|
||||
--text-on-accent: var(--bg);
|
||||
--text-highlight-bg: rgba(var(--orange_x), 0.4);
|
||||
--text-selection: rgba(var(--blue0_x), 0.6);
|
||||
--bold-color: var(--cyan);
|
||||
--italic-color: var(--cyan);
|
||||
--interactive-normal: var(--bg_dark);
|
||||
--interactive-hover: var(--bg);
|
||||
--interactive-success: var(--green);
|
||||
--interactive-accent: hsl(var(--accent-h), var(--accent-s), var(--accent-l));
|
||||
--interactive-accent-hover: var(--blue);
|
||||
--scrollbar-bg: var(--bg_dark2);
|
||||
--scrollbar-thumb-bg: var(--comment);
|
||||
--scrollbar-active-thumb-bg: var(--bg_dark);
|
||||
--scrollbar-width: 0px;
|
||||
--h1-color: var(--red);
|
||||
--h2-color: var(--yellow);
|
||||
--h3-color: var(--green);
|
||||
--h4-color: var(--cyan);
|
||||
--h5-color: var(--blue);
|
||||
--h6-color: var(--magenta);
|
||||
--border-width: 2px;
|
||||
--tag-color: var(--magenta);
|
||||
--tag-background: rgba(var(--magenta_x), 0.15);
|
||||
--tag-color-hover: var(--cyan);
|
||||
--tag-background-hover: rgba(var(--cyan_x), 0.15);
|
||||
--link-color: var(--magenta);
|
||||
--link-color-hover: var(--cyan);
|
||||
--link-external-color: var(--magenta);
|
||||
--link-external-color-hover: var(--cyan);
|
||||
--checkbox-radius: var(--radius-l);
|
||||
--checkbox-color: var(--green);
|
||||
--checkbox-color-hover: var(--green);
|
||||
--checkbox-marker-color: var(--bg);
|
||||
--checkbox-border-color: var(--comment);
|
||||
--checkbox-border-color-hover: var(--comment);
|
||||
--table-header-background: var(--bg_dark2);
|
||||
--table-header-background-hover: var(--bg_dark2);
|
||||
--flashing-background: rgba(var(--blue0_x), 0.3);
|
||||
--code-normal: var(--fg);
|
||||
--code-background: var(--bg_highlight_dark);
|
||||
--mermaid-note: var(--blue0);
|
||||
--mermaid-actor: var(--fg_dark);
|
||||
--mermaid-loopline: var(--blue);
|
||||
--blockquote-background-color: var(--bg_dark);
|
||||
--callout-default: var(--blue_x);
|
||||
--callout-info: var(--blue_x);
|
||||
--callout-summary: var(--cyan_x);
|
||||
--callout-tip: var(--cyan_x);
|
||||
--callout-todo: var(--cyan_x);
|
||||
--callout-bug: var(--red_x);
|
||||
--callout-error: var(--red1_x);
|
||||
--callout-fail: var(--red1_x);
|
||||
--callout-example: var(--magenta_x);
|
||||
--callout-important: var(--green_x);
|
||||
--callout-success: var(--teal_x);
|
||||
--callout-question: var(--yellow_x);
|
||||
--callout-warning: var(--orange_x);
|
||||
--callout-quote: var(--fg_dark_x);
|
||||
--icon-color-hover: var(--blue);
|
||||
--icon-color-focused: var(--magenta);
|
||||
--icon-color-active: var(--magenta);
|
||||
--nav-item-color-hover: var(--fg);
|
||||
--nav-item-background-hover: var(--bg_highlight);
|
||||
--nav-item-color-active: var(--red);
|
||||
--nav-item-background-active: var(--bg_highlight);
|
||||
--nav-file-tag: rgba(var(--yellow_x), 0.9);
|
||||
--nav-indentation-guide-color: var(--bg_highlight);
|
||||
--indentation-guide-color: var(--comment);
|
||||
--indentation-guide-color-active: var(--comment);
|
||||
--graph-line: var(--comment);
|
||||
--graph-node: var(--fg);
|
||||
--graph-node-tag: var(--orange);
|
||||
--graph-node-attachment: var(--blue);
|
||||
--tab-text-color-focused-active: rgba(var(--red_x), 0.8);
|
||||
--tab-text-color-focused-active-current: var(--red);
|
||||
--modal-border-color: var(--bg_highlight);
|
||||
--prompt-border-color: var(--bg_highlight);
|
||||
--slider-track-background: var(--bg_highlight);
|
||||
--embed-background: var(--bg_dark);
|
||||
--embed-padding: 1.5rem 1.5rem 0.5rem;
|
||||
--canvas-color: var(--bg_highlight_x);
|
||||
--toggle-thumb-color: var(--bg);
|
||||
}
|
||||
@ -1,169 +0,0 @@
|
||||
:root[saved-theme="light"] {
|
||||
--accent-h: 202;
|
||||
--accent-s: 86%;
|
||||
--accent-l: 43%;
|
||||
--bg_dark2_x: 188, 189, 194;
|
||||
--bg_dark2: rgb(var(--bg_dark2_x));
|
||||
--bg_dark_x: 203, 204, 209;
|
||||
--bg_dark: rgb(var(--bg_dark_x));
|
||||
--bg_x: 213, 214, 219;
|
||||
--bg: rgb(var(--bg_x));
|
||||
--bg_highlight_x: 220, 222, 226;
|
||||
--bg_highlight: rgb(var(--bg_highlight_x));
|
||||
--bg_highlight_dark_x: 195, 197, 201;
|
||||
--bg_highlight_dark: rgb(var(--bg_highlight_dark_x));
|
||||
--terminal_black_x: 15, 15, 20;
|
||||
--terminal_black: rgb(var(--terminal_black_x));
|
||||
--fg_x: 52, 59, 88;
|
||||
--fg: rgb(var(--fg_x));
|
||||
--fg_dark_x: 39, 46, 75;
|
||||
--fg_dark: rgb(var(--fg_dark_x));
|
||||
--comment_x: 150, 153, 163;
|
||||
--comment: rgb(var(--comment_x));
|
||||
--blue0_x: 39, 71, 125;
|
||||
--blue0: rgb(var(--blue0_x));
|
||||
--blue_x: 52, 84, 138;
|
||||
--blue: rgb(var(--blue_x));
|
||||
--cyan_x: 15, 75, 110;
|
||||
--cyan: rgb(var(--cyan_x));
|
||||
--magent_hsl: 261 24% 38%;
|
||||
--magenta_x: 90, 74, 120;
|
||||
--magenta: rgb(var(--magenta_x));
|
||||
--orange_x: 150, 80, 39;
|
||||
--orange: rgb(var(--orange_x));
|
||||
--yellow_x: 143, 94, 21;
|
||||
--yellow: rgb(var(--yellow_x));
|
||||
--green_x: 51, 99, 92;
|
||||
--green: rgb(var(--green_x));
|
||||
--teal_x: 22, 103, 117;
|
||||
--teal: rgb(var(--teal_x));
|
||||
--red_x: 140, 67, 81;
|
||||
--red: rgb(var(--red_x));
|
||||
--red1_x: 115, 42, 56;
|
||||
--red1: rgb(var(--red1_x));
|
||||
--unknown: #000000;
|
||||
--color_red_rgb: var(--red_x);
|
||||
--color-red: var(--red);
|
||||
--color_purple_rgb: var(--magenta_x);
|
||||
--color-purple: var(--magenta);
|
||||
--color_green_rgb: var(--green_x);
|
||||
--color-green: var(--green);
|
||||
--color_cyan_rgb: var(--cyan_x);
|
||||
--color-cyan: var(--cyan);
|
||||
--color_blue_rgb: var(--blue_x);
|
||||
--color-blue: var(--blue);
|
||||
--color_yellow_rgb: var(--yellow_x);
|
||||
--color-yellow: var(--yellow);
|
||||
--color_orange_rgb: var(--orange_x);
|
||||
--color-orange: var(--orange);
|
||||
--color_pink_rgb: var(--magenta_x);
|
||||
--color-pink: var(--magenta);
|
||||
--background-primary: var(--bg);
|
||||
--background-primary-alt: var(--bg);
|
||||
--background-secondary: var(--bg_dark);
|
||||
--background-secondary-alt: var(--bg_dark);
|
||||
--background-modifier-border: var(--bg_highlight);
|
||||
--background-modifier-border-focus: var(--bg_highlight);
|
||||
--background-modifier-border-hover: var(--bg_highlight);
|
||||
--background-modifier-form-field: var(--bg_dark);
|
||||
--background-modifier-form-field-highlighted: var(--bg_dark);
|
||||
--background-modifier-box-shadow: rgba(0, 0, 0, 0.3);
|
||||
--background-modifier-success: var(--green);
|
||||
--background-modifier-error: var(--red1);
|
||||
--background-modifier-error-hover: var(--red);
|
||||
--background-modifier-cover: rgba(var(--bg_dark_x), 0.8);
|
||||
--background-modifier-hover: var(--bg_highlight);
|
||||
--background-modifier-message: rgba(var(--bg_highlight_x), 0.9);
|
||||
--background-modifier-active-hover: var(--bg_highlight);
|
||||
--text-normal: var(--fg);
|
||||
--text-faint: var(--comment);
|
||||
--text-muted: var(--fg_dark);
|
||||
--text-error: var(--red);
|
||||
--text-accent: var(--magenta);
|
||||
--text-accent-hover: var(--cyan);
|
||||
--text-error: var(--red1);
|
||||
--text-error-hover: var(--red);
|
||||
--text-selection: var(--unknown);
|
||||
--text-on-accent: var(--bg);
|
||||
--text-highlight-bg: rgba(var(--orange_x), 0.4);
|
||||
--text-selection: rgba(var(--blue0_x), 0.6);
|
||||
--bold-color: var(--cyan);
|
||||
--italic-color: var(--cyan);
|
||||
--interactive-normal: var(--bg_dark);
|
||||
--interactive-hover: var(--bg);
|
||||
--interactive-success: var(--green);
|
||||
--interactive-accent: hsl(var(--accent-h), var(--accent-s), var(--accent-l));
|
||||
--interactive-accent-hover: var(--blue);
|
||||
--scrollbar-bg: var(--bg_dark2);
|
||||
--scrollbar-thumb-bg: var(--comment);
|
||||
--scrollbar-active-thumb-bg: var(--bg_dark);
|
||||
--scrollbar-width: 0px;
|
||||
--h1-color: var(--red);
|
||||
--h2-color: var(--yellow);
|
||||
--h3-color: var(--green);
|
||||
--h4-color: var(--cyan);
|
||||
--h5-color: var(--blue);
|
||||
--h6-color: var(--magenta);
|
||||
--border-width: 2px;
|
||||
--tag-color: var(--magenta);
|
||||
--tag-background: rgba(var(--magenta_x), 0.15);
|
||||
--tag-color-hover: var(--cyan);
|
||||
--tag-background-hover: rgba(var(--cyan_x), 0.15);
|
||||
--link-color: var(--magenta);
|
||||
--link-color-hover: var(--cyan);
|
||||
--link-external-color: var(--magenta);
|
||||
--link-external-color-hover: var(--cyan);
|
||||
--checkbox-radius: var(--radius-l);
|
||||
--checkbox-color: var(--green);
|
||||
--checkbox-color-hover: var(--green);
|
||||
--checkbox-marker-color: var(--bg);
|
||||
--checkbox-border-color: var(--comment);
|
||||
--checkbox-border-color-hover: var(--comment);
|
||||
--table-header-background: var(--bg_dark2);
|
||||
--table-header-background-hover: var(--bg_dark2);
|
||||
--flashing-background: rgba(var(--blue0_x), 0.3);
|
||||
--code-normal: var(--fg);
|
||||
--code-background: var(--bg_highlight_dark);
|
||||
--mermaid-note: var(--blue0);
|
||||
--mermaid-actor: var(--fg_dark);
|
||||
--mermaid-loopline: var(--blue);
|
||||
--blockquote-background-color: var(--bg_dark);
|
||||
--callout-default: var(--blue_x);
|
||||
--callout-info: var(--blue_x);
|
||||
--callout-summary: var(--cyan_x);
|
||||
--callout-tip: var(--cyan_x);
|
||||
--callout-todo: var(--cyan_x);
|
||||
--callout-bug: var(--red_x);
|
||||
--callout-error: var(--red1_x);
|
||||
--callout-fail: var(--red1_x);
|
||||
--callout-example: var(--magenta_x);
|
||||
--callout-important: var(--green_x);
|
||||
--callout-success: var(--teal_x);
|
||||
--callout-question: var(--yellow_x);
|
||||
--callout-warning: var(--orange_x);
|
||||
--callout-quote: var(--fg_dark_x);
|
||||
--icon-color-hover: var(--blue);
|
||||
--icon-color-focused: var(--magenta);
|
||||
--icon-color-active: var(--magenta);
|
||||
--nav-item-color-hover: var(--fg);
|
||||
--nav-item-background-hover: var(--bg_highlight);
|
||||
--nav-item-color-active: var(--red);
|
||||
--nav-item-background-active: var(--bg_highlight);
|
||||
--nav-file-tag: rgba(var(--yellow_x), 0.9);
|
||||
--nav-indentation-guide-color: var(--bg_highlight);
|
||||
--indentation-guide-color: var(--comment);
|
||||
--indentation-guide-color-active: var(--comment);
|
||||
--graph-line: var(--comment);
|
||||
--graph-node: var(--fg);
|
||||
--graph-node-tag: var(--orange);
|
||||
--graph-node-attachment: var(--blue);
|
||||
--tab-text-color-focused-active: rgba(var(--red_x), 0.8);
|
||||
--tab-text-color-focused-active-current: var(--red);
|
||||
--modal-border-color: var(--bg_highlight);
|
||||
--prompt-border-color: var(--bg_highlight);
|
||||
--slider-track-background: var(--bg_highlight);
|
||||
--embed-background: var(--bg_dark);
|
||||
--embed-padding: 1.5rem 1.5rem 0.5rem;
|
||||
--canvas-color: var(--bg_highlight_x);
|
||||
--toggle-thumb-color: var(--bg);
|
||||
}
|
||||
@ -1,12 +1,12 @@
import { QuartzConfig } from "../cfg"
import { FilePath, FullSlug } from "./path"
import { FullSlug } from "./path"

export interface Argv {
directory: string
verbose: boolean
output: string
serve: boolean
watch: boolean
fastRebuild: boolean
port: number
wsPort: number
remoteDevHost?: string

@ -18,8 +18,4 @@ export interface BuildCtx {
argv: Argv
cfg: QuartzConfig
allSlugs: FullSlug[]
allFiles: FilePath[]
incremental: boolean
}

export type WorkerSerializableBuildCtx = Omit<BuildCtx, "cfg">

@ -1,12 +1,10 @@
import test, { describe, beforeEach } from "node:test"
import assert from "node:assert"
import { FileTrieNode } from "./fileTrie"
import { FullSlug } from "./path"

interface TestData {
title: string
slug: string
filePath: string
}

describe("FileTrie", () => {

@ -28,24 +26,11 @@ describe("FileTrie", () => {
const data = {
title: "Test Title",
slug: "test",
filePath: "test.md",
}

trie.add(data)
assert.strictEqual(trie.children[0].displayName, "Test Title")
})

test("should be able to set displayName", () => {
const data = {
title: "Test Title",
slug: "test",
filePath: "test.md",
}

trie.add(data)
trie.children[0].displayName = "Modified"
assert.strictEqual(trie.children[0].displayName, "Modified")
})
})

describe("add", () => {

@ -53,7 +38,6 @@ describe("FileTrie", () => {
const data = {
title: "Test",
slug: "test",
filePath: "test.md",
}

trie.add(data)

@ -66,7 +50,6 @@ describe("FileTrie", () => {
const data = {
title: "Index",
slug: "index",
filePath: "index.md",
}

trie.add(data)

@ -78,13 +61,11 @@ describe("FileTrie", () => {
const data1 = {
title: "Nested",
slug: "folder/test",
filePath: "folder/test.md",
}

const data2 = {
title: "Really nested index",
slug: "a/b/c/index",
filePath: "a/b/c/index.md",
}

trie.add(data1)

@ -111,8 +92,8 @@ describe("FileTrie", () => {

describe("filter", () => {
test("should filter nodes based on condition", () => {
const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = { title: "Test2", slug: "test2", filePath: "test2.md" }
const data1 = { title: "Test1", slug: "test1" }
const data2 = { title: "Test2", slug: "test2" }

trie.add(data1)
trie.add(data2)

@ -125,8 +106,8 @@ describe("FileTrie", () => {

describe("map", () => {
test("should apply function to all nodes", () => {
const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = { title: "Test2", slug: "test2", filePath: "test2.md" }
const data1 = { title: "Test1", slug: "test1" }
const data2 = { title: "Test2", slug: "test2" }

trie.add(data1)
trie.add(data2)

@ -140,41 +121,12 @@ describe("FileTrie", () => {
assert.strictEqual(trie.children[0].displayName, "Modified")
assert.strictEqual(trie.children[1].displayName, "Modified")
})

test("map over folders should work", () => {
const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = {
title: "Test2",
slug: "a/b-with-space/test2",
filePath: "a/b with space/test2.md",
}

trie.add(data1)
trie.add(data2)

trie.map((node) => {
if (node.isFolder) {
node.displayName = `Folder: ${node.displayName}`
} else {
node.displayName = `File: ${node.displayName}`
}
})

assert.strictEqual(trie.children[0].displayName, "File: Test1")
assert.strictEqual(trie.children[1].displayName, "Folder: a")
assert.strictEqual(trie.children[1].children[0].displayName, "Folder: b with space")
assert.strictEqual(trie.children[1].children[0].children[0].displayName, "File: Test2")
})
})

describe("entries", () => {
test("should return all entries", () => {
const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = {
title: "Test2",
slug: "a/b-with-space/test2",
filePath: "a/b with space/test2.md",
}
const data1 = { title: "Test1", slug: "test1" }
const data2 = { title: "Test2", slug: "a/b/test2" }

trie.add(data1)
trie.add(data2)

@ -186,117 +138,26 @@ describe("FileTrie", () => {
["index", trie.data],
["test1", data1],
["a/index", null],
["a/b-with-space/index", null],
["a/b-with-space/test2", data2],
["a/b/index", null],
["a/b/test2", data2],
],
)
})
})

describe("fromEntries", () => {
test("nested", () => {
const trie = FileTrieNode.fromEntries([
["index" as FullSlug, { title: "Root", slug: "index", filePath: "index.md" }],
[
"folder/file1" as FullSlug,
{ title: "File 1", slug: "folder/file1", filePath: "folder/file1.md" },
],
[
"folder/index" as FullSlug,
{ title: "Folder Index", slug: "folder/index", filePath: "folder/index.md" },
],
[
"folder/file2" as FullSlug,
{ title: "File 2", slug: "folder/file2", filePath: "folder/file2.md" },
],
[
"folder/folder2/index" as FullSlug,
{
title: "Subfolder Index",
slug: "folder/folder2/index",
filePath: "folder/folder2/index.md",
},
],
])

assert.strictEqual(trie.children.length, 1)
assert.strictEqual(trie.children[0].slug, "folder/index")
assert.strictEqual(trie.children[0].children.length, 3)
assert.strictEqual(trie.children[0].children[0].slug, "folder/file1")
assert.strictEqual(trie.children[0].children[1].slug, "folder/file2")
assert.strictEqual(trie.children[0].children[2].slug, "folder/folder2/index")
assert.strictEqual(trie.children[0].children[2].children.length, 0)
})
})

describe("findNode", () => {
test("should find root node with empty path", () => {
const data = { title: "Root", slug: "index", filePath: "index.md" }
trie.add(data)
const found = trie.findNode([])
assert.strictEqual(found, trie)
})

test("should find node at first level", () => {
const data = { title: "Test", slug: "test", filePath: "test.md" }
trie.add(data)
const found = trie.findNode(["test"])
assert.strictEqual(found?.data, data)
})

test("should find nested node", () => {
const data = {
title: "Nested",
slug: "folder/subfolder/test",
filePath: "folder/subfolder/test.md",
}
trie.add(data)
const found = trie.findNode(["folder", "subfolder", "test"])
assert.strictEqual(found?.data, data)

// should find the folder and subfolder indexes too
assert.strictEqual(
trie.findNode(["folder", "subfolder", "index"]),
trie.children[0].children[0],
)
assert.strictEqual(trie.findNode(["folder", "index"]), trie.children[0])
})

test("should return undefined for non-existent path", () => {
const data = { title: "Test", slug: "test", filePath: "test.md" }
trie.add(data)
const found = trie.findNode(["nonexistent"])
assert.strictEqual(found, undefined)
})

test("should return undefined for partial path", () => {
const data = {
title: "Nested",
slug: "folder/subfolder/test",
filePath: "folder/subfolder/test.md",
}
trie.add(data)
const found = trie.findNode(["folder"])
assert.strictEqual(found?.data, null)
})
})

describe("getFolderPaths", () => {
test("should return all folder paths", () => {
const data1 = {
title: "Root",
slug: "index",
filePath: "index.md",
}
const data2 = {
title: "Test",
slug: "folder/subfolder/test",
filePath: "folder/subfolder/test.md",
}
const data3 = {
title: "Folder Index",
slug: "abc/index",
filePath: "abc/index.md",
}

trie.add(data1)

@ -315,9 +176,9 @@ describe("FileTrie", () => {

describe("sort", () => {
test("should sort nodes according to sort function", () => {
const data1 = { title: "A", slug: "a", filePath: "a.md" }
const data2 = { title: "B", slug: "b", filePath: "b.md" }
const data3 = { title: "C", slug: "c", filePath: "c.md" }
const data1 = { title: "A", slug: "a" }
const data2 = { title: "B", slug: "b" }
const data3 = { title: "C", slug: "c" }

trie.add(data3)
trie.add(data1)

@ -4,7 +4,6 @@ import { FullSlug, joinSegments } from "./path"
interface FileTrieData {
slug: string
title: string
filePath: string
}

export class FileTrieNode<T extends FileTrieData = ContentDetails> {

@ -12,11 +11,6 @@ export class FileTrieNode<T extends FileTrieData = ContentDetails> {
children: Array<FileTrieNode<T>>

private slugSegments: string[]
// prefer showing the file path segment over the slug segment
// so that folders that dont have index files can be shown as is
// without dashes in the slug
private fileSegmentHint?: string
private displayNameOverride?: string
data: T | null

constructor(segments: string[], data?: T) {

@ -24,18 +18,10 @@ export class FileTrieNode<T extends FileTrieData = ContentDetails> {
this.slugSegments = segments
this.data = data ?? null
this.isFolder = false
this.displayNameOverride = undefined
}

get displayName(): string {
const nonIndexTitle = this.data?.title === "index" ? undefined : this.data?.title
return (
this.displayNameOverride ?? nonIndexTitle ?? this.fileSegmentHint ?? this.slugSegment ?? ""
)
}

set displayName(name: string) {
this.displayNameOverride = name
return this.data?.title ?? this.slugSegment ?? ""
}

get slug(): FullSlug {

@ -77,9 +63,6 @@ export class FileTrieNode<T extends FileTrieData = ContentDetails> {
// recursive case, we are not at the end of the path
const child =
this.children.find((c) => c.slugSegment === segment) ?? this.makeChild(path, undefined)

const fileParts = file.filePath.split("/")
child.fileSegmentHint = fileParts.at(-path.length)
child.insert(path.slice(1), file)
}
}

@ -89,14 +72,6 @@ export class FileTrieNode<T extends FileTrieData = ContentDetails> {
this.insert(file.slug.split("/"), file)
}

findNode(path: string[]): FileTrieNode<T> | undefined {
if (path.length === 0 || (path.length === 1 && path[0] === "index")) {
return this
}

return this.children.find((c) => c.slugSegment === path[0])?.findNode(path.slice(1))
}

/**
* Filter trie nodes. Behaves similar to `Array.prototype.filter()`, but modifies tree in place
*/

@ -1,23 +1,18 @@
import truncate from "ansi-truncate"
import readline from "readline"

export class QuartzLogger {
verbose: boolean
private spinnerInterval: NodeJS.Timeout | undefined
private spinnerText: string = ""
private updateSuffix: string = ""
private spinnerIndex: number = 0
private readonly spinnerChars = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]

constructor(verbose: boolean) {
const isInteractiveTerminal =
process.stdout.isTTY && process.env.TERM !== "dumb" && !process.env.CI
this.verbose = verbose || !isInteractiveTerminal
this.verbose = verbose
}

start(text: string) {
this.spinnerText = text

if (this.verbose) {
console.log(text)
} else {

@ -25,22 +20,14 @@ export class QuartzLogger {
this.spinnerInterval = setInterval(() => {
readline.clearLine(process.stdout, 0)
readline.cursorTo(process.stdout, 0)

const columns = process.stdout.columns || 80
let output = `${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`
if (this.updateSuffix) {
output += `: ${this.updateSuffix}`
}

const truncated = truncate(output, columns)
process.stdout.write(truncated)
process.stdout.write(`${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`)
this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length
}, 50)
}, 100)
}
}

updateText(text: string) {
this.updateSuffix = text
this.spinnerText = text
}

end(text?: string) {

@ -3,17 +3,14 @@ import { FontWeight, SatoriOptions } from "satori/wasm"
import { GlobalConfiguration } from "../cfg"
import { QuartzPluginData } from "../plugins/vfile"
import { JSXInternal } from "preact/src/jsx"
import { FontSpecification, getFontSpecificationName, ThemeKey } from "./theme"
import { FontSpecification, ThemeKey } from "./theme"
import path from "path"
import { QUARTZ } from "./path"
import { formatDate, getDate } from "../components/Date"
import readingTime from "reading-time"
import { i18n } from "../i18n"
import chalk from "chalk"
import { formatDate } from "../components/Date"
import { getDate } from "../components/Date"

const defaultHeaderWeight = [700]
const defaultBodyWeight = [400]

export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: FontSpecification) {
// Get all weights for header and body fonts
const headerWeights: FontWeight[] = (

@ -28,38 +25,29 @@ export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: Fo
const headerFontName = typeof headerFont === "string" ? headerFont : headerFont.name
const bodyFontName = typeof bodyFont === "string" ? bodyFont : bodyFont.name

// Fetch fonts for all weights and convert to satori format in one go
const headerFontPromises = headerWeights.map(async (weight) => {
const data = await fetchTtf(headerFontName, weight)
if (!data) return null
return {
name: headerFontName,
data,
weight,
style: "normal" as const,
}
})
// Fetch fonts for all weights
const headerFontPromises = headerWeights.map((weight) => fetchTtf(headerFontName, weight))
const bodyFontPromises = bodyWeights.map((weight) => fetchTtf(bodyFontName, weight))

const bodyFontPromises = bodyWeights.map(async (weight) => {
const data = await fetchTtf(bodyFontName, weight)
if (!data) return null
return {
name: bodyFontName,
data,
weight,
style: "normal" as const,
}
})

const [headerFonts, bodyFonts] = await Promise.all([
const [headerFontData, bodyFontData] = await Promise.all([
Promise.all(headerFontPromises),
Promise.all(bodyFontPromises),
])

// Filter out any failed fetches and combine header and body fonts
// Convert fonts to satori font format and return
const fonts: SatoriOptions["fonts"] = [
...headerFonts.filter((font): font is NonNullable<typeof font> => font !== null),
...bodyFonts.filter((font): font is NonNullable<typeof font> => font !== null),
...headerFontData.map((data, idx) => ({
name: headerFontName,
data,
weight: headerWeights[idx],
style: "normal" as const,
})),
...bodyFontData.map((data, idx) => ({
name: bodyFontName,
data,
weight: bodyWeights[idx],
style: "normal" as const,
})),
]

return fonts

@ -72,11 +60,10 @@ export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: Fo
* @returns `.ttf` file of google font
*/
export async function fetchTtf(
rawFontName: string,
fontName: string,
weight: FontWeight,
): Promise<Buffer<ArrayBufferLike> | undefined> {
const fontName = rawFontName.replaceAll(" ", "+")
const cacheKey = `${fontName}-${weight}`
): Promise<Buffer<ArrayBufferLike>> {
const cacheKey = `${fontName.replaceAll(" ", "-")}-${weight}`
const cacheDir = path.join(QUARTZ, ".quartz-cache", "fonts")
const cachePath = path.join(cacheDir, cacheKey)

@ -99,19 +86,20 @@ export async function fetchTtf(
const match = urlRegex.exec(css)

if (!match) {
console.log(
chalk.yellow(
`\nWarning: Failed to fetch font ${rawFontName} with weight ${weight}, got ${cssResponse.statusText}`,
),
)
return
throw new Error("Could not fetch font")
}

// fontData is an ArrayBuffer containing the .ttf file data
const fontResponse = await fetch(match[1])
const fontData = Buffer.from(await fontResponse.arrayBuffer())
await fs.mkdir(cacheDir, { recursive: true })
await fs.writeFile(cachePath, fontData)

try {
await fs.mkdir(cacheDir, { recursive: true })
await fs.writeFile(cachePath, fontData)
} catch (error) {
console.warn(`Failed to cache font: ${error}`)
// Continue even if caching fails
}

return fontData
}

@ -135,12 +123,21 @@ export type SocialImageOptions = {
excludeRoot: boolean
/**
* JSX to use for generating image. See satori docs for more info (https://github.com/vercel/satori)
* @param cfg global quartz config
* @param userOpts options that can be set by user
* @param title title of current page
* @param description description of current page
* @param fonts global font that can be used for styling
* @param fileData full fileData of current page
* @returns prepared jsx to be used for generating image
*/
imageStructure: (
options: ImageOptions & {
userOpts: UserOpts
iconBase64?: string
},
cfg: GlobalConfiguration,
userOpts: UserOpts,
title: string,
description: string,
fonts: SatoriOptions["fonts"],
fileData: QuartzPluginData,
) => JSXInternal.Element
}

@ -170,32 +167,24 @@ export type ImageOptions = {
}

// This is the default template for generated social image.
export const defaultImage: SocialImageOptions["imageStructure"] = ({
cfg,
userOpts,
title,
description,
fileData,
iconBase64,
}) => {
const { colorScheme } = userOpts
export const defaultImage: SocialImageOptions["imageStructure"] = (
cfg: GlobalConfiguration,
{ colorScheme }: UserOpts,
title: string,
description: string,
fonts: SatoriOptions["fonts"],
fileData: QuartzPluginData,
) => {
const fontBreakPoint = 32
const useSmallerFont = title.length > fontBreakPoint
const iconPath = `https://${cfg.baseUrl}/static/icon.png`

// Format date if available
const rawDate = getDate(cfg, fileData)
const date = rawDate ? formatDate(rawDate, cfg.locale) : null

// Calculate reading time
const { minutes } = readingTime(fileData.text ?? "")
const readingTimeText = i18n(cfg.locale).components.contentMeta.readingTime({
minutes: Math.ceil(minutes),
})

// Get tags if available
const tags = fileData.frontmatter?.tags ?? []
const bodyFont = getFontSpecificationName(cfg.theme.typography.body)
const headerFont = getFontSpecificationName(cfg.theme.typography.header)

return (
<div

@ -206,7 +195,7 @@ export const defaultImage: SocialImageOptions["imageStructure"] = ({
width: "100%",
backgroundColor: cfg.theme.colors[colorScheme].light,
padding: "2.5rem",
fontFamily: bodyFont,
fontFamily: fonts[1].name,
}}
>
{/* Header Section */}

@ -218,22 +207,20 @@ export const defaultImage: SocialImageOptions["imageStructure"] = ({
marginBottom: "0.5rem",
}}
>
{iconBase64 && (
<img
src={iconBase64}
width={56}
height={56}
style={{
borderRadius: "50%",
}}
/>
)}
<img
src={iconPath}
width={56}
height={56}
style={{
borderRadius: "50%",
}}
/>
<div
style={{
display: "flex",
fontSize: 32,
color: cfg.theme.colors[colorScheme].gray,
fontFamily: bodyFont,
fontFamily: fonts[1].name,
}}
>
{cfg.baseUrl}

@ -252,7 +239,7 @@ export const defaultImage: SocialImageOptions["imageStructure"] = ({
style={{
margin: 0,
fontSize: useSmallerFont ? 64 : 72,
fontFamily: headerFont,
fontFamily: fonts[0].name,
fontWeight: 700,
color: cfg.theme.colors[colorScheme].dark,
lineHeight: 1.2,

@ -260,7 +247,6 @@ export const defaultImage: SocialImageOptions["imageStructure"] = ({
WebkitBoxOrient: "vertical",
WebkitLineClamp: 2,
overflow: "hidden",
textOverflow: "ellipsis",
}}
>
{title}

@ -282,9 +268,8 @@ export const defaultImage: SocialImageOptions["imageStructure"] = ({
margin: 0,
display: "-webkit-box",
WebkitBoxOrient: "vertical",
WebkitLineClamp: 5,
WebkitLineClamp: 4,
overflow: "hidden",
textOverflow: "ellipsis",
}}
>
{description}

@ -302,12 +287,11 @@ export const defaultImage: SocialImageOptions["imageStructure"] = ({
borderTop: `1px solid ${cfg.theme.colors[colorScheme].lightgray}`,
}}
>
{/* Left side - Date and Reading Time */}
{/* Left side - Date */}
<div
style={{
display: "flex",
alignItems: "center",
gap: "2rem",
color: cfg.theme.colors[colorScheme].gray,
fontSize: 28,
}}

@ -330,20 +314,6 @@ export const defaultImage: SocialImageOptions["imageStructure"] = ({
{date}
</div>
)}
<div style={{ display: "flex", alignItems: "center" }}>
<svg
style={{ marginRight: "0.5rem" }}
width="28"
height="28"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
>
<circle cx="12" cy="12" r="10"></circle>
<polyline points="12 6 12 12 16 14"></polyline>
</svg>
{readingTimeText}
</div>
</div>

{/* Right side - Tags */}

@ -247,7 +247,7 @@ export function transformLink(src: FullSlug, target: string, opts: TransformOpti
}

// path helpers
export function isFolderPath(fplike: string): boolean {
function isFolderPath(fplike: string): boolean {
return (
fplike.endsWith("/") ||
endsWith(fplike, "index") ||

@ -260,7 +260,7 @@ export function endsWith(s: string, suffix: string): boolean {
return s === suffix || s.endsWith("/" + suffix)
}

export function trimSuffix(s: string, suffix: string): string {
function trimSuffix(s: string, suffix: string): string {
if (endsWith(s, suffix)) {
s = s.slice(0, -suffix.length)
}

@ -25,7 +25,6 @@ export type FontSpecification =

export interface Theme {
typography: {
title?: FontSpecification
header: FontSpecification
body: FontSpecification
code: FontSpecification

@ -49,10 +48,7 @@ export function getFontSpecificationName(spec: FontSpecification): string {
return spec.name
}

function formatFontSpecification(
type: "title" | "header" | "body" | "code",
spec: FontSpecification,
) {
function formatFontSpecification(type: "header" | "body" | "code", spec: FontSpecification) {
if (typeof spec === "string") {
spec = { name: spec }
}

@ -86,19 +82,12 @@ function formatFontSpecification(
}

export function googleFontHref(theme: Theme) {
const { header, body, code } = theme.typography
const { code, header, body } = theme.typography
const headerFont = formatFontSpecification("header", header)
const bodyFont = formatFontSpecification("body", body)
const codeFont = formatFontSpecification("code", code)

return `https://fonts.googleapis.com/css2?family=${headerFont}&family=${bodyFont}&family=${codeFont}&display=swap`
}

export function googleFontSubsetHref(theme: Theme, text: string) {
const title = theme.typography.title || theme.typography.header
const titleFont = formatFontSpecification("title", title)

return `https://fonts.googleapis.com/css2?family=${titleFont}&text=${encodeURIComponent(text)}&display=swap`
return `https://fonts.googleapis.com/css2?family=${bodyFont}&family=${headerFont}&family=${codeFont}&display=swap`
}

export interface GoogleFontFile {

@ -146,10 +135,9 @@ ${stylesheet.join("\n\n")}
--highlight: ${theme.colors.lightMode.highlight};
--textHighlight: ${theme.colors.lightMode.textHighlight};

--titleFont: "${getFontSpecificationName(theme.typography.title || theme.typography.header)}", ${DEFAULT_SANS_SERIF};
--headerFont: "${getFontSpecificationName(theme.typography.header)}", ${DEFAULT_SANS_SERIF};
--bodyFont: "${getFontSpecificationName(theme.typography.body)}", ${DEFAULT_SANS_SERIF};
--codeFont: "${getFontSpecificationName(theme.typography.code)}", ${DEFAULT_MONO};
--headerFont: "${theme.typography.header}", ${DEFAULT_SANS_SERIF};
--bodyFont: "${theme.typography.body}", ${DEFAULT_SANS_SERIF};
--codeFont: "${theme.typography.code}", ${DEFAULT_MONO};
}

:root[saved-theme="dark"] {

@ -1,8 +1,8 @@
import sourceMapSupport from "source-map-support"
sourceMapSupport.install(options)
import cfg from "../quartz.config"
import { BuildCtx, WorkerSerializableBuildCtx } from "./util/ctx"
import { FilePath } from "./util/path"
import { Argv, BuildCtx } from "./util/ctx"
import { FilePath, FullSlug } from "./util/path"
import {
createFileParser,
createHtmlProcessor,

@ -14,24 +14,35 @@ import { MarkdownContent, ProcessedContent } from "./plugins/vfile"

// only called from worker thread
export async function parseMarkdown(
partialCtx: WorkerSerializableBuildCtx,
buildId: string,
argv: Argv,
fps: FilePath[],
): Promise<MarkdownContent[]> {
): Promise<[MarkdownContent[], FullSlug[]]> {
// this is a hack
// we assume markdown parsers can add to `allSlugs`,
// but don't actually use them
const allSlugs: FullSlug[] = []
const ctx: BuildCtx = {
...partialCtx,
buildId,
cfg,
argv,
allSlugs,
}
return await createFileParser(ctx, fps)(createMdProcessor(ctx))
return [await createFileParser(ctx, fps)(createMdProcessor(ctx)), allSlugs]
}

// only called from worker thread
export function processHtml(
partialCtx: WorkerSerializableBuildCtx,
buildId: string,
argv: Argv,
mds: MarkdownContent[],
allSlugs: FullSlug[],
): Promise<ProcessedContent[]> {
const ctx: BuildCtx = {
...partialCtx,
buildId,
cfg,
argv,
allSlugs,
}
return createMarkdownParser(ctx, mds)(createHtmlProcessor(ctx))
}

@ -11,8 +11,6 @@
"skipLibCheck": true,
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"esModuleInterop": true,
"jsx": "react-jsx",
"jsxImportSource": "preact"
